diff --git a/app/api/nextcloud/files/content/route.ts b/app/api/nextcloud/files/content/route.ts index 2455155b..e1c98da9 100644 --- a/app/api/nextcloud/files/content/route.ts +++ b/app/api/nextcloud/files/content/route.ts @@ -1,55 +1,43 @@ import { NextResponse } from 'next/server'; import { getServerSession } from 'next-auth'; import { authOptions } from '@/app/api/auth/[...nextauth]/route'; -import { prisma } from '@/lib/prisma'; -import { createClient } from 'webdav'; + +// This file serves as an adapter to redirect requests from the old NextCloud +// content endpoint to the new MinIO S3 content endpoint export async function GET(request: Request) { try { - const { searchParams } = new URL(request.url); - const path = searchParams.get('path'); - - if (!path) { - return NextResponse.json({ error: 'Path parameter is required' }, { status: 400 }); - } - + // Get session const session = await getServerSession(authOptions); if (!session?.user) { return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }); } - // Get credentials without logging - const credentials = await prisma.webDAVCredentials.findUnique({ - where: { userId: session.user.id } - }); - - if (!credentials) { - return NextResponse.json({ error: 'Nextcloud credentials not found' }, { status: 404 }); + // Get query parameters + const { searchParams } = new URL(request.url); + const path = searchParams.get('path'); + const id = searchParams.get('id'); + + // Create a new URL for the storage API with the same parameters + const newUrl = new URL('/api/storage/files/content', request.url); + if (path) { + newUrl.searchParams.set('path', path); } - - // Create WebDAV client - const client = createClient(process.env.NEXTCLOUD_URL!, { - username: credentials.username, - password: credentials.password, - }); - - try { - const content = await client.getFileContents(path, { format: 'text' }); - - // For VCF files, don't log the content - if (path.endsWith('.vcf')) { - return NextResponse.json({ content }); + if (id) { + newUrl.searchParams.set('id', id); + } + + // Forward the request to the new endpoint + const response = await fetch(newUrl, { + headers: { + 'Cookie': request.headers.get('cookie') || '' } - - return NextResponse.json({ content }); - } catch (error) { - // Log error without sensitive information - console.error('Error fetching file content:', error instanceof Error ? error.message : 'Unknown error'); - return NextResponse.json({ error: 'Failed to fetch file content' }, { status: 500 }); - } + }); + + // Return the response from the new endpoint + return NextResponse.json(await response.json(), { status: response.status }); } catch (error) { - // Log error without sensitive information - console.error('Error in GET request:', error instanceof Error ? 
error.message : 'Unknown error'); + console.error('Error in NextCloud content adapter:', error); return NextResponse.json({ error: 'Internal server error' }, { status: 500 }); } } \ No newline at end of file diff --git a/app/api/nextcloud/files/route.ts b/app/api/nextcloud/files/route.ts index 4677aa5b..8e356e8a 100644 --- a/app/api/nextcloud/files/route.ts +++ b/app/api/nextcloud/files/route.ts @@ -1,274 +1,131 @@ import { NextResponse } from 'next/server'; import { getServerSession } from 'next-auth'; -import { PrismaClient } from '@prisma/client'; import { authOptions } from '@/app/api/auth/[...nextauth]/route'; -import { createClient } from 'webdav'; -import { prisma } from '@/lib/prisma'; -import { Buffer } from 'buffer'; -// Use a single PrismaClient instance declare global { - var prisma: PrismaClient | undefined; -} - -const prismaClient = global.prisma || new PrismaClient(); -if (process.env.NODE_ENV !== 'production') global.prisma = prismaClient; - -// Helper function to create WebDAV client -const createWebDAVClient = async (userId: string) => { - const credentials = await prismaClient.webDAVCredentials.findUnique({ - where: { userId }, - }); - - if (!credentials) { - throw new Error('No WebDAV credentials found'); - } - - const baseURL = process.env.NEXTCLOUD_URL; - if (!baseURL) { - throw new Error('NEXTCLOUD_URL environment variable is not set'); - } - - const normalizedBaseURL = baseURL.endsWith('/') ? baseURL.slice(0, -1) : baseURL; - const webdavURL = `${normalizedBaseURL}/remote.php/dav`; - - return { - client: createClient(webdavURL, { - username: credentials.username, - password: credentials.password, - authType: 'password', - }), - username: credentials.username - }; -}; - -// Helper function to extract text content from XML -function extractTextContent(xml: string, tag: string): string | null { - const regex = new RegExp(`<${tag}[^>]*>(.*?)</${tag}>`, 's'); - const match = xml.match(regex); - return match ? 
match[1].trim() : null; -} - -// Helper function to check if a response is a collection -function isCollection(xml: string): boolean { - return xml.includes('<d:collection/>'); -} +// This file serves as an adapter to redirect requests from the old NextCloud +// endpoints to the new MinIO S3 endpoints export async function GET(request: Request) { try { - const { searchParams } = new URL(request.url); - const folder = searchParams.get('folder'); - - if (!folder) { - return NextResponse.json({ error: 'Folder parameter is required' }, { status: 400 }); - } - + // Get session const session = await getServerSession(authOptions); if (!session?.user) { return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }); } - // Get credentials without logging - const credentials = await prisma.webDAVCredentials.findUnique({ - where: { userId: session.user.id } - }); - - if (!credentials) { - return NextResponse.json({ error: 'Nextcloud credentials not found' }, { status: 404 }); + // Get query parameters + const { searchParams } = new URL(request.url); + const folder = searchParams.get('folder'); + + // Create a new URL for the storage API with the same parameters + const newUrl = new URL('/api/storage/files', request.url); + if (folder) { + newUrl.searchParams.set('folder', folder); } - - const nextcloudUrl = process.env.NEXTCLOUD_URL; - if (!nextcloudUrl) { - return NextResponse.json({ error: 'Nextcloud URL not configured' }, { status: 500 }); - } - - const path = `/files/${credentials.username}/Private/${folder}`; - const url = `${nextcloudUrl}/remote.php/dav${path}`; - - // Make PROPFIND request to get directory contents - const response = await fetch(url, { - method: 'PROPFIND', + + // Forward the request to the new endpoint + const response = await fetch(newUrl, { headers: { - 'Authorization': `Basic ${Buffer.from(`${credentials.username}:${credentials.password}`).toString('base64')}`, - 'Depth': '1', - 'Content-Type': 'application/xml', - }, - body: '<?xml version="1.0"?><d:propfind xmlns:d="DAV:"><d:prop><d:resourcetype/><d:getlastmodified/><d:getcontentlength/><d:getcontenttype/><d:getetag/></d:prop></d:propfind>', - }); - - if (!response.ok) { - console.error('Error fetching directory contents:', response.status, response.statusText); - return NextResponse.json({ error: 'Failed to fetch directory contents' }, { status: response.status }); - } - - const text = await response.text(); - const files: any[] = []; - - // Split the response into individual file entries - const fileEntries = text.split('<d:response>').slice(1); - - for (const entry of fileEntries) { - const href = extractTextContent(entry, 'd:href'); - if (!href) continue; - - // Skip if it's a collection (directory) - if (isCollection(entry)) continue; - - const lastmod = extractTextContent(entry, 'd:getlastmodified'); - const size = extractTextContent(entry, 'd:getcontentlength'); - const mime = extractTextContent(entry, 'd:getcontenttype'); - const etag = extractTextContent(entry, 'd:getetag'); - - const filename = href.split('/').pop() || ''; - - // For Contacts folder, return all files - if (folder === 'Contacts') { - files.push({ - filename: href, - basename: filename, - lastmod, - size, - type: 'file', - etag, - mime - }); - } - // For other folders, only return markdown files - else if (filename.endsWith('.md')) { - files.push({ - id: href, - title: filename.replace('.md', ''), - lastModified: new Date(lastmod || '').toISOString(), - size, - type: 'file', - mime, - etag - }); + 'Cookie': request.headers.get('cookie') || '' } - } - - return NextResponse.json(files); + }); + + // Return the response from the new endpoint + return NextResponse.json(await response.json(), { status: response.status }); } catch (error) { - // Log error without sensitive information - console.error('Error fetching files:', error instanceof Error ? error.message : 'Unknown error'); + console.error('Error in NextCloud adapter (GET):', error); return NextResponse.json({ error: 'Internal server error' }, { status: 500 }); } }
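
The four adapter handlers in this file repeat the same forward-and-relay steps. A shared helper could collapse that duplication; a minimal sketch (the forwardToStorage name is hypothetical, not part of this change — it relies on the NextResponse import already in the file):

```ts
// Hypothetical shared helper for the adapter routes in this file.
async function forwardToStorage(
  request: Request,
  targetPath: string,
  init: { method?: string; headers?: Record<string, string>; body?: string } = {}
) {
  // Server-side fetch rejects relative paths, so resolve against the incoming request URL.
  const url = new URL(targetPath, request.url);
  const response = await fetch(url, {
    ...init,
    headers: { ...init.headers, Cookie: request.headers.get('cookie') || '' },
  });
  // Relay body and status unchanged so old clients keep seeing the same contract.
  return NextResponse.json(await response.json(), { status: response.status });
}
```

Each handler would then reduce to the session check plus one forwardToStorage call.
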
export async function POST(request: Request) { try { + // Get session const session = await getServerSession(authOptions); - if (!session?.user?.id) { + if (!session?.user) { return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }); } - const { title, content, folder } = await request.json(); - if (!title || !content || !folder) { - return NextResponse.json({ error: 'Missing required fields' }, { status: 400 }); - } - - const { client, username } = await createWebDAVClient(session.user.id); + // Get request body + const body = await request.json(); - try { - const path = `/files/${username}/Private/${folder}/${title}.md`; - console.log('Saving note to path:', path); - - await client.putFileContents(path, content); - - // Get the file details after saving - const fileDetails = await client.stat(path); - - return NextResponse.json({ - id: fileDetails.filename, - title: fileDetails.basename.replace('.md', ''), - lastModified: new Date(fileDetails.lastmod).toISOString(), - size: fileDetails.size, - type: 'file', - mime: fileDetails.mime, - etag: fileDetails.etag - }); - } catch (error) { - console.error('Error saving note:', error); - return NextResponse.json({ error: 'Failed to save note' }, { status: 500 }); - } + // Forward the request to the new endpoint (absolute URL; relative paths fail in server-side fetch) + const response = await fetch(new URL('/api/storage/files', request.url), { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Cookie': request.headers.get('cookie') || '' + }, + body: JSON.stringify(body) + }); + + // Return the response from the new endpoint + return NextResponse.json(await response.json(), { status: response.status }); } catch (error) { - console.error('Error in POST request:', error); + console.error('Error in NextCloud adapter (POST):', error); return NextResponse.json({ error: 'Internal server error' }, { status: 500 }); } } export async function PUT(request: Request) { try { + // Get session const session = await getServerSession(authOptions); - if (!session?.user?.id) { + if (!session?.user) { return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }); } - const { id, title, content, folder, mime } = await request.json(); - if (!id || !title || !content || !folder) { - return NextResponse.json({ error: 'Missing required fields' }, { status: 400 }); - } - - const { client, username } = await createWebDAVClient(session.user.id); + // Get request body + const body = await request.json(); - try { - // Use the provided path directly - const path = id; - console.log('Updating file at path:', path); - - // Set the correct content type based on file extension or provided mime type - const contentType = mime || (title.endsWith('.vcf') ? 
'text/vcard' : 'text/markdown'); - await client.putFileContents(path, content, { contentType }); - - // Get the updated file details - const fileDetails = await client.stat(path); - - return NextResponse.json({ - id: fileDetails.filename, - title: fileDetails.basename, - lastModified: new Date(fileDetails.lastmod).toISOString(), - size: fileDetails.size, - type: 'file', - mime: fileDetails.mime, - etag: fileDetails.etag - }); - } catch (error) { - console.error('Error updating file:', error); - return NextResponse.json({ error: 'Failed to update file' }, { status: 500 }); - } + // Forward the request to the new endpoint (absolute URL; relative paths fail in server-side fetch) + const response = await fetch(new URL('/api/storage/files', request.url), { + method: 'PUT', + headers: { + 'Content-Type': 'application/json', + 'Cookie': request.headers.get('cookie') || '' + }, + body: JSON.stringify(body) + }); + + // Return the response from the new endpoint + return NextResponse.json(await response.json(), { status: response.status }); } catch (error) { - console.error('Error in PUT request:', error); + console.error('Error in NextCloud adapter (PUT):', error); return NextResponse.json({ error: 'Internal server error' }, { status: 500 }); } } export async function DELETE(request: Request) { try { + // Get session const session = await getServerSession(authOptions); - if (!session?.user?.id) { + if (!session?.user) { return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }); } - const { id, folder } = await request.json(); - if (!id || !folder) { - return NextResponse.json({ error: 'Missing required fields' }, { status: 400 }); - } - - const { client, username } = await createWebDAVClient(session.user.id); + // Get query parameters + const { searchParams } = new URL(request.url); + const id = searchParams.get('id'); - try { - const path = `/files/${username}/Private/${folder}/${id.split('/').pop()}`; - console.log('Deleting note at path:', path); - - await client.deleteFile(path); - - return NextResponse.json({ success: true }); - } catch (error) { - console.error('Error deleting note:', error); - return NextResponse.json({ error: 'Failed to delete note' }, { status: 500 }); + // Create a new URL for the storage API with the same parameters + const newUrl = new URL('/api/storage/files', request.url); + if (id) { + newUrl.searchParams.set('id', id); } + + // Forward the request to the new endpoint + const response = await fetch(newUrl, { + method: 'DELETE', + headers: { + 'Cookie': request.headers.get('cookie') || '' + } + }); + + // Return the response from the new endpoint + return NextResponse.json(await response.json(), { status: response.status }); } catch (error) { - console.error('Error in DELETE request:', error); + console.error('Error in NextCloud adapter (DELETE):', error); return NextResponse.json({ error: 'Internal server error' }, { status: 500 }); } } \ No newline at end of file
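
One contract change worth flagging: the old DELETE handler read { id, folder } from a JSON body, while the adapter above reads id from the query string. Existing callers likely need the new shape; a sketch (fileId is a placeholder for the stored object id):

```ts
// Before (old NextCloud route): id and folder travelled in the JSON body.
// await fetch('/api/nextcloud/files', {
//   method: 'DELETE',
//   body: JSON.stringify({ id: fileId, folder: 'Notes' }),
// });

// After: the adapter and the new storage route expect ?id=<object key>.
await fetch(`/api/nextcloud/files?id=${encodeURIComponent(fileId)}`, { method: 'DELETE' });
```
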
diff --git a/app/api/nextcloud/init/route.ts b/app/api/nextcloud/init/route.ts index 4cf191a1..43f904e8 100644 --- a/app/api/nextcloud/init/route.ts +++ b/app/api/nextcloud/init/route.ts @@ -1,60 +1,30 @@ import { NextResponse } from 'next/server'; import { getServerSession } from 'next-auth'; import { authOptions } from '@/app/api/auth/[...nextauth]/route'; -import { prisma } from '@/lib/prisma'; -import { Buffer } from 'buffer'; -export async function POST() { +// This file serves as an adapter to redirect requests from the old NextCloud +// init endpoint to the new MinIO S3 init endpoint + +export async function POST(request: Request) { try { + // Get session const session = await getServerSession(authOptions); if (!session?.user) { return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }); } - const nextcloudUrl = process.env.NEXTCLOUD_URL; - const adminUsername = process.env.NEXTCLOUD_ADMIN_USERNAME; - const adminPassword = process.env.NEXTCLOUD_ADMIN_PASSWORD; - - if (!nextcloudUrl || !adminUsername || !adminPassword) { - return NextResponse.json({ error: 'Nextcloud configuration missing' }, { status: 500 }); - } - - const username = `cube-${session.user.id}`; - const userId = session.user.id; - - // Get or create WebDAV credentials - const password = await getWebDAVCredentials(nextcloudUrl, username, adminUsername, adminPassword, userId); - - // Ensure folder structure exists - await ensureFolderStructure(nextcloudUrl, username, password); - - // Store initialization state in session - const updatedSession = { - ...session, - user: { - ...session.user, - nextcloudInitialized: true + // Forward the request to the new endpoint (absolute URL; relative paths fail in server-side fetch) + const response = await fetch(new URL('/api/storage/init', request.url), { + method: 'POST', + headers: { + 'Cookie': request.headers.get('cookie') || '' } - }; - - return NextResponse.json({ - success: true, - message: 'Nextcloud initialized successfully' }); + + // Return the response from the new endpoint + return NextResponse.json(await response.json(), { status: response.status }); } catch (error) { - console.error('Nextcloud initialization failed:', error); - return NextResponse.json({ - error: 'Failed to initialize Nextcloud', - details: error instanceof Error ? error.message : 'Unknown error' - }, { status: 500 }); + console.error('Error in NextCloud init adapter:', error); return NextResponse.json({ error: 'Internal server error' }, { status: 500 }); } -} - -// Helper functions from status/route.ts -async function getWebDAVCredentials(nextcloudUrl: string, username: string, adminUsername: string, adminPassword: string, userId: string) { - // ... existing getWebDAVCredentials implementation ... -} - -async function ensureFolderStructure(nextcloudUrl: string, username: string, password: string) { - // ... existing ensureFolderStructure implementation ... } \ No newline at end of file
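
The init adapter above just relays to /api/storage/init, which provisions the per-user prefix layout via createUserFolderStructure (see lib/s3.ts later in this diff). From the browser the trigger is a bare POST, since the NextAuth session cookie rides along automatically on same-origin requests; a sketch:

```ts
// Sketch: run once after sign-in to provision the user's storage prefixes.
const res = await fetch('/api/storage/init', { method: 'POST' });
if (!res.ok) {
  const body = await res.json();
  console.error('Storage init failed:', body.error);
}
```
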
diff --git a/app/api/nextcloud/status/route.ts b/app/api/nextcloud/status/route.ts index fed51ffa..eadfebb3 100644 --- a/app/api/nextcloud/status/route.ts +++ b/app/api/nextcloud/status/route.ts @@ -1,378 +1,29 @@ import { NextResponse } from 'next/server'; import { getServerSession } from 'next-auth'; import { authOptions } from '@/app/api/auth/[...nextauth]/route'; -import { DOMParser } from '@xmldom/xmldom'; -import { Buffer } from 'buffer'; -import { PrismaClient } from '@prisma/client'; -import { prisma } from '@/lib/prisma'; -// Cache for folder structure and credentials -const folderCache = new Map<string, { folders: string[]; timestamp: number }>(); -const credentialsCache = new Map<string, { password: string; timestamp: number }>(); - -// Cache for Nextcloud connectivity check -let lastConnectivityCheck = 0; -let isNextcloudAccessible = false; - -async function sleep(ms: number) { - return new Promise(resolve => setTimeout(resolve, ms)); -} - -async function checkNextcloudConnectivity(nextcloudUrl: string): Promise<boolean> { - const now = Date.now(); - if (now - lastConnectivityCheck < 5 * 60 * 1000) { // 5 minutes cache - return isNextcloudAccessible; - } - - try { - const testResponse = await fetch(`${nextcloudUrl}/status.php`); - isNextcloudAccessible = testResponse.ok; - lastConnectivityCheck = now; - return isNextcloudAccessible; - } catch (error) { - console.error('Nextcloud connectivity check failed:', error); - isNextcloudAccessible = false; - lastConnectivityCheck = now; - return false; - } -} - -async function parseXMLResponse(response: Response): Promise<any> { - const text = await response.text(); - const parser = new DOMParser(); - const xmlDoc = parser.parseFromString(text, 'text/xml'); - - // Check for parsing errors - const parserError = xmlDoc.getElementsByTagName('parsererror'); - if (parserError.length > 0) { - console.error('XML Parsing Error:', parserError[0].textContent); - throw new Error('Failed to parse XML response'); - } - - const result: any = {}; - const root = xmlDoc.documentElement; - - if (root && root.nodeName === 'ocs') { - const data = root.getElementsByTagName('data')[0]; - if (data) { - const children = data.childNodes; - for (let i = 0; i < children.length; i++) { - const child = children[i]; - if (child.nodeType === 1) { // Element node - result[child.nodeName] = child.textContent; - } - } - } - } - - return result; -} - -async function createFolder(nextcloudUrl: string, username: string, password: string, folderPath: string) { - try { - // First check if folder exists - const checkResponse = await fetch(`${nextcloudUrl}/remote.php/dav/files/${encodeURIComponent(username)}/${folderPath}`, { - method: 'PROPFIND', - headers: { - 'Authorization': `Basic ${Buffer.from(`${username}:${password}`).toString('base64')}`, - 'Depth': '0', - }, - }); - - if (checkResponse.ok) { - console.log(`Folder ${folderPath} already exists`); - return; - } - - // If folder doesn't exist, create it - const response = await fetch(`${nextcloudUrl}/remote.php/dav/files/${encodeURIComponent(username)}/${folderPath}`, { - method: 'MKCOL', - headers: { - 'Authorization': `Basic ${Buffer.from(`${username}:${password}`).toString('base64')}`, - }, - }); - - if (!response.ok && response.status !== 405) { // 405 means folder already exists - const errorText = await response.text(); - console.error(`Failed to create folder ${folderPath}:`, { - status: response.status, - statusText: response.statusText, - error: errorText, - url: `${nextcloudUrl}/remote.php/dav/files/${encodeURIComponent(username)}/${folderPath}` - }); - throw new Error(`Failed to create 
folder ${folderPath}: ${response.status} ${response.statusText}`); - } - } catch (error) { - console.error(`Error creating folder ${folderPath}:`, error); - throw error; - } -} - -async function ensureFolderStructure(nextcloudUrl: string, username: string, password: string) { - try { - // First, ensure the Private folder exists - await createFolder(nextcloudUrl, username, password, 'Private'); - - // Create all required subfolders - const subfolders = [ - 'Private/Diary', - 'Private/Health', - 'Private/Contacts', - 'Private/Notes' - ]; - - for (const folder of subfolders) { - await createFolder(nextcloudUrl, username, password, folder); - } - } catch (error) { - console.error('Error creating folder structure:', error); - // Don't throw the error, just log it - // This way we don't trigger password regeneration - } -} - -async function getWebDAVCredentials(nextcloudUrl: string, username: string, adminUsername: string, adminPassword: string, userId: string) { - try { - // First check if user exists in Nextcloud - const userInfoResponse = await fetch(`${nextcloudUrl}/ocs/v1.php/cloud/users/${encodeURIComponent(username)}`, { - headers: { - 'Authorization': `Basic ${Buffer.from(`${adminUsername}:${adminPassword}`).toString('base64')}`, - 'OCS-APIRequest': 'true', - }, - }); - - if (userInfoResponse.status === 404) { - console.log(`User ${username} does not exist in Nextcloud`); - throw new Error(`User ${username} does not exist in Nextcloud`); - } - - if (!userInfoResponse.ok) { - throw new Error(`Failed to get user info: ${userInfoResponse.status} ${userInfoResponse.statusText}`); - } - - // Check database for existing credentials - const existingCredentials = await prisma.webDAVCredentials.findUnique({ - where: { userId } - }); - - if (existingCredentials) { - // Verify if the existing credentials still work - const verifyResponse = await fetch(`${nextcloudUrl}/remote.php/dav/files/${encodeURIComponent(username)}/`, { - method: 'PROPFIND', - headers: { - 'Authorization': `Basic ${Buffer.from(`${username}:${existingCredentials.password}`).toString('base64')}`, - 'Depth': '1', - 'Content-Type': 'application/xml', - }, - body: '<?xml version="1.0"?><d:propfind xmlns:d="DAV:"><d:prop><d:resourcetype/></d:prop></d:propfind>', - }); - - if (verifyResponse.ok) { - console.log('Using existing credentials from database'); - // Update cache - credentialsCache.set(userId, { - password: existingCredentials.password, - timestamp: Date.now() - }); - return existingCredentials.password; - } - - // If verification failed, delete the invalid credentials - console.log('Existing credentials verification failed, removing from database'); - await prisma.webDAVCredentials.delete({ - where: { userId } - }); - } - - // If we get here, we need to generate a new password - const newPassword = Math.random().toString(36).slice(-12); - console.log('Setting new password for user'); - - // Set the user's password in Nextcloud - const setPasswordResponse = await fetch(`${nextcloudUrl}/ocs/v1.php/cloud/users/${encodeURIComponent(username)}`, { - method: 'PUT', - headers: { - 'Authorization': `Basic ${Buffer.from(`${adminUsername}:${adminPassword}`).toString('base64')}`, - 'OCS-APIRequest': 'true', - 'Content-Type': 'application/x-www-form-urlencoded', - }, - body: new URLSearchParams({ - key: 'password', - value: newPassword, - }).toString(), - }); - - if (!setPasswordResponse.ok) { - throw new Error(`Failed to set password: ${setPasswordResponse.status} ${setPasswordResponse.statusText}`); - } - - // Store the new credentials in the database - await prisma.webDAVCredentials.upsert({ - where: { userId }, - update: { 
- username: username, - password: newPassword - }, - create: { - userId, - username: username, - password: newPassword - } - }); - - // Update cache - credentialsCache.set(userId, { - password: newPassword, - timestamp: Date.now() - }); - - return newPassword; - } catch (error) { - console.error('Error in getWebDAVCredentials:', error); - throw error; - } -} - -async function getFolderStructure(nextcloudUrl: string, username: string, password: string): Promise<string[]> { - try { - const webdavUrl = `${nextcloudUrl}/remote.php/dav/files/${encodeURIComponent(username)}/Private/`; - console.log('Fetching folders from:', webdavUrl); - - const foldersResponse = await fetch(webdavUrl, { - method: 'PROPFIND', - headers: { - 'Authorization': `Basic ${Buffer.from(`${username}:${password}`).toString('base64')}`, - 'Depth': '1', - 'Content-Type': 'application/xml', - }, - body: '<?xml version="1.0"?><d:propfind xmlns:d="DAV:"><d:prop><d:resourcetype/></d:prop></d:propfind>', - }); - - console.log('Folders response status:', foldersResponse.status); - - if (foldersResponse.status === 429) { - // Rate limited, wait and retry - const retryAfter = foldersResponse.headers.get('Retry-After'); - console.log('Rate limited, retrying after:', retryAfter); - await sleep((retryAfter ? parseInt(retryAfter) : 5) * 1000); - return getFolderStructure(nextcloudUrl, username, password); - } - - if (!foldersResponse.ok) { - console.error('Failed to fetch folders:', { - status: foldersResponse.status, - statusText: foldersResponse.statusText, - url: webdavUrl - }); - throw new Error(`Failed to fetch folders: ${foldersResponse.status} ${foldersResponse.statusText}`); - } - - const folderData = await foldersResponse.text(); - console.log('Folder data:', folderData); - - // Parse the XML response to get folder names - const parser = new DOMParser(); - const xmlDoc = parser.parseFromString(folderData, 'text/xml'); - const responses = Array.from(xmlDoc.getElementsByTagName('d:response')); - - const folders: string[] = []; - for (const response of responses) { - const resourceType = response.getElementsByTagName('d:resourcetype')[0]; - const isCollection = resourceType?.getElementsByTagName('d:collection').length > 0; - - if (isCollection) { - const href = response.getElementsByTagName('d:href')[0]?.textContent; - if (href) { - // Extract folder name from href - const parts = href.split('/').filter(Boolean); - const folderName = decodeURIComponent(parts[parts.length - 1]); - if (folderName && folderName !== 'Private') { - folders.push(folderName); - } - } - } - } - - console.log('Parsed folders:', folders); - return folders; - } catch (error) { - console.error('Error getting folder structure:', error); - throw error; - } -} +// This file serves as an adapter to redirect requests from the old NextCloud +// status endpoint to the new MinIO S3 status endpoint export async function GET(request: Request) { try { + // Get session const session = await getServerSession(authOptions); - if (!session?.user?.email || !session?.user?.id || !session?.accessToken) { + if (!session?.user) { return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }); } - const nextcloudUrl = process.env.NEXTCLOUD_URL; - const adminUsername = process.env.NEXTCLOUD_ADMIN_USERNAME; - const adminPassword = process.env.NEXTCLOUD_ADMIN_PASSWORD; - - if (!nextcloudUrl || !adminUsername || !adminPassword) { - console.error('Missing Nextcloud configuration'); - return NextResponse.json({ error: 'Nextcloud configuration is missing' }, { status: 500 }); - } - - // Check Nextcloud connectivity with caching - const isAccessible = await checkNextcloudConnectivity(nextcloudUrl); - 
if (!isAccessible) { - return NextResponse.json({ error: 'Nextcloud is not accessible' }, { status: 503 }); - } - - // Use the Keycloak ID as the Nextcloud username - const nextcloudUsername = `cube-${session.user.id}`; - console.log('Using Nextcloud username:', nextcloudUsername); - - // Check cache first - const cachedData = folderCache.get(nextcloudUsername); - if (cachedData) { - const cacheAge = Date.now() - cachedData.timestamp; - if (cacheAge < 5 * 60 * 1000) { // 5 minutes cache - return NextResponse.json({ - isConnected: true, - folders: cachedData.folders - }); + // Forward the request to the new endpoint (absolute URL; forward the caller's own cookies) + const response = await fetch(new URL('/api/storage/status', request.url), { + headers: { + 'Cookie': request.headers.get('cookie') || '' } - } - - // Get or create WebDAV credentials - const webdavPassword = await getWebDAVCredentials( - nextcloudUrl, - nextcloudUsername, - adminUsername, - adminPassword, - session.user.id - ); - - if (!webdavPassword) { - throw new Error('Failed to get WebDAV credentials'); - } - - // Ensure the folder structure exists - await ensureFolderStructure(nextcloudUrl, nextcloudUsername, webdavPassword); - - // Get folder structure - const folders = await getFolderStructure(nextcloudUrl, nextcloudUsername, webdavPassword); - - // Update cache - folderCache.set(nextcloudUsername, { - folders, - timestamp: Date.now() - }); - - return NextResponse.json({ - isConnected: true, - folders }); + + // Return the response from the new endpoint + return NextResponse.json(await response.json(), { status: response.status }); } catch (error) { - console.error('Error in Nextcloud status endpoint:', error); - return NextResponse.json( - { error: error instanceof Error ? error.message : 'An error occurred' }, - { status: 500 } - ); + console.error('Error in NextCloud status adapter:', error); return NextResponse.json({ error: 'Internal server error' }, { status: 500 }); } } \ No newline at end of file diff --git a/app/api/storage/files/content/route.ts b/app/api/storage/files/content/route.ts new file mode 100644 index 00000000..8f616052 --- /dev/null +++ b/app/api/storage/files/content/route.ts @@ -0,0 +1,72 @@ +import { NextResponse } from 'next/server'; +import { getServerSession } from 'next-auth'; +import { authOptions } from '@/app/api/auth/[...nextauth]/route'; +import { getObjectContent } from '@/lib/s3'; + +export async function GET(request: Request) { + try { + const session = await getServerSession(authOptions); + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }); + } + + const { searchParams } = new URL(request.url); + const path = searchParams.get('path'); + const id = searchParams.get('id'); + + if (!path && !id) { + return NextResponse.json({ error: 'Path or ID parameter is required' }, { status: 400 }); + } + + // Determine the key to use + let key: string; + + if (id) { + // If id is provided directly, use it as the key + key = id; + + // Ensure the user can only access their own files + if (!key.startsWith(`user-${session.user.id}/`)) { + return NextResponse.json({ error: 'Unauthorized access to file' }, { status: 403 }); + } + } else if (path) { + // If a path is provided, ensure it contains the user's ID + if (!path.includes(`/files/cube-${session.user.id}/`) && !path.includes(`user-${session.user.id}/`)) { + // For backward compatibility, convert NextCloud path to S3 path + if (path.startsWith('/files/') || path.includes('/Private/')) { + // Extract folder and filename from path 
+ const parts = path.split('/').filter(Boolean); + const file = parts[parts.length - 1]; + let folder = 'notes'; // Default folder + + // Try to determine folder from path + if (path.includes('/Notes/')) folder = 'notes'; + else if (path.includes('/Diary/')) folder = 'diary'; + else if (path.includes('/Contacts/')) folder = 'contacts'; + else if (path.includes('/Health/')) folder = 'health'; + + key = `user-${session.user.id}/${folder}/${file}`; + } else { + return NextResponse.json({ error: 'Unauthorized access to file' }, { status: 403 }); + } + } else { + // If it already contains user ID, use the path directly + key = path; + } + } else { + return NextResponse.json({ error: 'Invalid parameters' }, { status: 400 }); + } + + // Get the file content + const content = await getObjectContent(key); + + if (!content) { + return NextResponse.json({ error: 'File not found' }, { status: 404 }); + } + + return NextResponse.json({ content }); + } catch (error) { + console.error('Error fetching file content:', error); + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }); + } +} \ No newline at end of file diff --git a/app/api/storage/files/route.ts b/app/api/storage/files/route.ts new file mode 100644 index 00000000..112257b3 --- /dev/null +++ b/app/api/storage/files/route.ts @@ -0,0 +1,129 @@ +import { NextResponse } from 'next/server'; +import { getServerSession } from 'next-auth'; +import { authOptions } from '@/app/api/auth/[...nextauth]/route'; +import { listUserObjects, putObject, deleteObject } from '@/lib/s3'; + +// GET endpoint to list files in a folder +export async function GET(request: Request) { + try { + const session = await getServerSession(authOptions); + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }); + } + + const { searchParams } = new URL(request.url); + const folder = searchParams.get('folder'); + + if (!folder) { + return NextResponse.json({ error: 'Folder parameter is required' }, { status: 400 }); + } + + // Normalize folder name to lowercase to match S3 convention + const normalizedFolder = folder.toLowerCase(); + + // List objects for the user in the specified folder + const files = await listUserObjects(session.user.id, normalizedFolder); + + return NextResponse.json(files); + } catch (error) { + console.error('Error listing files:', error); + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }); + } +} + +// POST endpoint to create a new file +export async function POST(request: Request) { + try { + const session = await getServerSession(authOptions); + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }); + } + + const { title, content, folder } = await request.json(); + if (!title || !content || !folder) { + return NextResponse.json({ error: 'Missing required fields' }, { status: 400 }); + } + + // Normalize folder name + const normalizedFolder = folder.toLowerCase(); + + // Create the full key (path) for the S3 object + const key = `user-${session.user.id}/${normalizedFolder}/${title}${title.endsWith('.md') ? 
'' : '.md'}`; + + // Save the file to S3 + const file = await putObject(key, content); + + return NextResponse.json(file); + } catch (error) { + console.error('Error creating file:', error); + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }); + } +} + +// PUT endpoint to update an existing file +export async function PUT(request: Request) { + try { + const session = await getServerSession(authOptions); + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }); + } + + const { id, title, content, folder, mime } = await request.json(); + + // Reject updates without content (the old route validated this too) + if (content === undefined) { + return NextResponse.json({ error: 'Missing content' }, { status: 400 }); + } + + // Check if this is using the direct id (key) or needs to construct one + let key: string; + + if (id) { + // Ensure the user can only access their own files + if (!id.startsWith(`user-${session.user.id}/`)) { + return NextResponse.json({ error: 'Unauthorized access to file' }, { status: 403 }); + } + key = id; + } else { + // If id is not provided, construct it from folder and title + if (!title || !folder) { + return NextResponse.json({ error: 'Missing required fields' }, { status: 400 }); + } + const normalizedFolder = folder.toLowerCase(); + key = `user-${session.user.id}/${normalizedFolder}/${title}${title.endsWith('.md') ? '' : '.md'}`; + } + + // Update the file + const file = await putObject(key, content, mime); + + return NextResponse.json(file); + } catch (error) { + console.error('Error updating file:', error); + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }); + } +} + +// DELETE endpoint to delete a file +export async function DELETE(request: Request) { + try { + const session = await getServerSession(authOptions); + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }); + } + + const { searchParams } = new URL(request.url); + const id = searchParams.get('id'); + + if (!id) { + return NextResponse.json({ error: 'Missing file id' }, { status: 400 }); + } + + // Ensure the user can only delete their own files + if (!id.startsWith(`user-${session.user.id}/`)) { + return NextResponse.json({ error: 'Unauthorized access to file' }, { status: 403 }); + } + + // Delete the file + await deleteObject(id); + + return NextResponse.json({ success: true }); + } catch (error) { + console.error('Error deleting file:', error); + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }); + } +} \ No newline at end of file
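
Taken together, the handlers above define the request shapes of the new storage API; the sketches below recap them from a client's point of view (the user-123 key is illustrative — keys must start with the caller's own user-<id>/ prefix):

```ts
// List files in a folder (the handler lowercases the folder name).
const files = await (await fetch('/api/storage/files?folder=Notes')).json();

// Create a markdown file; ".md" is appended unless the title already ends with it.
await fetch('/api/storage/files', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ title: 'shopping-list', content: '# Shopping', folder: 'notes' }),
});

// Delete by full object key, passed as a query parameter.
await fetch(`/api/storage/files?id=${encodeURIComponent('user-123/notes/shopping-list.md')}`, {
  method: 'DELETE',
});
```
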
diff --git a/app/api/storage/init/route.ts b/app/api/storage/init/route.ts new file mode 100644 index 00000000..d628bc6f --- /dev/null +++ b/app/api/storage/init/route.ts @@ -0,0 +1,28 @@ +import { NextResponse } from 'next/server'; +import { getServerSession } from 'next-auth'; +import { authOptions } from '@/app/api/auth/[...nextauth]/route'; +import { createUserFolderStructure } from '@/lib/s3'; + +export async function POST() { + try { + const session = await getServerSession(authOptions); + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }); + } + + // Create folder structure for the user + await createUserFolderStructure(session.user.id); + + // Return success response + return NextResponse.json({ + success: true, + message: 'Storage initialized successfully' + }); + } catch (error) { + console.error('Storage initialization failed:', error); + return NextResponse.json({ + error: 'Failed to initialize storage', + details: error instanceof Error ? error.message : 'Unknown error' + }, { status: 500 }); + } +} \ No newline at end of file diff --git a/app/api/storage/status/route.ts b/app/api/storage/status/route.ts new file mode 100644 index 00000000..02ffc3a3 --- /dev/null +++ b/app/api/storage/status/route.ts @@ -0,0 +1,100 @@ +import { NextResponse } from 'next/server'; +import { getServerSession } from 'next-auth'; +import { authOptions } from '@/app/api/auth/[...nextauth]/route'; +import { ListBucketsCommand } from '@aws-sdk/client-s3'; +import { s3Client, createUserFolderStructure, listUserObjects } from '@/lib/s3'; + +// Cache for folder lists +const folderCache = new Map<string, { folders: string[]; timestamp: number }>(); +const CACHE_TTL = 5 * 60 * 1000; // 5 minutes + +export async function GET() { + try { + const session = await getServerSession(authOptions); + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }); + } + + const userId = session.user.id; + + // Check if we have cached folders for this user + const cachedData = folderCache.get(userId); + if (cachedData && (Date.now() - cachedData.timestamp < CACHE_TTL)) { + return NextResponse.json({ + status: 'ready', + folders: cachedData.folders + }); + } + + // Check S3 connectivity using the shared MinIO client from lib/s3 + try { + // Simple check by listing buckets + await s3Client.send(new ListBucketsCommand({})); + } catch (error) { + console.error('S3 connectivity check failed:', error); + return NextResponse.json({ + error: 'S3 storage service is not accessible', + status: 'error' + }, { status: 503 }); + } + + // List the user's base folders + try { + // Standard folder list for the user + const standardFolders = ['notes', 'diary', 'health', 'contacts']; + let userFolders: string[] = []; + + // Try to list existing folders + for (const folder of standardFolders) { + try { + const files = await listUserObjects(userId, folder); + if (files.length > 0 || folder === 'notes') { + userFolders.push(folder); + } + } catch (error) { + console.error(`Error checking folder ${folder}:`, error); + } + } + + // If no folders found, create the standard structure + if (userFolders.length === 0) { + await createUserFolderStructure(userId); + userFolders = standardFolders; + } + + // Convert to Pascal case for backwards compatibility with NextCloud + const formattedFolders = userFolders.map(folder => + folder.charAt(0).toUpperCase() + folder.slice(1) + ); + + // Update cache + folderCache.set(userId, { + folders: formattedFolders, + timestamp: Date.now() + }); + + return NextResponse.json({ + status: 'ready', + folders: formattedFolders + }); + } catch (error) { + console.error('Error fetching user folders:', error); + return NextResponse.json({ + error: 'Failed to fetch folders', + status: 'error' + }, { status: 500 }); + } + } catch (error) { + console.error('Error in storage status check:', error); + return NextResponse.json({ + error: 'Internal server error', + status: 'error' + }, { status: 500 }); + } +} \ No newline at end of file
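
The status route above leans on the helpers defined in lib/s3.ts just below. Their calling convention is a flat key of the form user-<userId>/<folder>/<file>; a small usage sketch (user-123 is illustrative):

```ts
// Write a note, then read it back through the lib/s3 helpers below.
const key = 'user-123/notes/hello.md';
await putObject(key, '# Hello');            // ContentType inferred as text/markdown
const text = await getObjectContent(key);   // "# Hello"; null if the key does not exist
const notes = await listUserObjects('123', 'notes'); // entries shaped for the old NextCloud UI
```
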
diff --git a/lib/s3.ts b/lib/s3.ts new file mode 100644 index 00000000..779431fb --- /dev/null +++ b/lib/s3.ts @@ -0,0 +1,140 @@ +import { S3Client, ListObjectsV2Command, GetObjectCommand, PutObjectCommand, DeleteObjectCommand } from '@aws-sdk/client-s3'; +import { getSignedUrl } from '@aws-sdk/s3-request-presigner'; + +// Environment variables for S3 configuration +const S3_BUCKET_URL = process.env.MINIO_S3_UPLOAD_BUCKET_URL || 'https://dome-api.slm-lab.net/'; +const S3_REGION = process.env.MINIO_AWS_REGION || 'eu-east-1'; +const S3_BUCKET_NAME = process.env.MINIO_AWS_S3_UPLOAD_BUCKET_NAME || 'pages'; + +// Create S3 client with MinIO configuration +export const s3Client = new S3Client({ + region: S3_REGION, + endpoint: S3_BUCKET_URL, + forcePathStyle: true, // Required for MinIO +}); + +// Helper functions for S3 operations + +// List objects in a "folder" for a specific user +export async function listUserObjects(userId: string, folder: string) { + try { + const prefix = `user-${userId}/${folder}/`; + const command = new ListObjectsV2Command({ + Bucket: S3_BUCKET_NAME, + Prefix: prefix, + Delimiter: '/' + }); + + const response = await s3Client.send(command); + + // Transform S3 objects to match the expected format for the frontend + // This ensures compatibility with existing NextCloud based components + return response.Contents?.map(item => ({ + id: item.Key, + title: item.Key?.split('/').pop()?.replace('.md', '') || '', + lastModified: item.LastModified?.toISOString(), + size: item.Size, + type: 'file', + mime: item.Key?.endsWith('.md') ? 'text/markdown' : 'application/octet-stream', + etag: item.ETag + })) || []; + } catch (error) { + console.error('Error listing objects:', error); + throw error; + } +} + +// Get object content +export async function getObjectContent(key: string) { + try { + const command = new GetObjectCommand({ + Bucket: S3_BUCKET_NAME, + Key: key + }); + + const response = await s3Client.send(command); + + // Convert the stream to string + return await response.Body?.transformToString(); + } catch (error) { + // A missing object should read as null so callers can answer 404 instead of 500 + if ((error as any)?.name === 'NoSuchKey') { + return null; + } + console.error('Error getting object content:', error); + throw error; + } +} + +// Put object (create or update a file) +export async function putObject(key: string, content: string, contentType?: string) { + try { + const command = new PutObjectCommand({ + Bucket: S3_BUCKET_NAME, + Key: key, + Body: content, + ContentType: contentType || (key.endsWith('.md') ? 'text/markdown' : 'text/plain') + }); + + const response = await s3Client.send(command); + + return { + id: key, + title: key.split('/').pop()?.replace('.md', '') || '', + lastModified: new Date().toISOString(), + size: content.length, + type: 'file', + mime: contentType || (key.endsWith('.md') ? 
'text/markdown' : 'text/plain'), + etag: response.ETag + }; + } catch (error) { + console.error('Error putting object:', error); + throw error; + } +} + +// Delete object +export async function deleteObject(key: string) { + try { + const command = new DeleteObjectCommand({ + Bucket: S3_BUCKET_NAME, + Key: key + }); + + await s3Client.send(command); + return true; + } catch (error) { + console.error('Error deleting object:', error); + throw error; + } +} + +// Create folder structure (In S3, folders are just prefix notations) +export async function createUserFolderStructure(userId: string) { + try { + // Define the standard folders to create + const folders = ['notes', 'diary', 'health', 'contacts']; + + // For S3, creating a folder means creating an empty object with the folder name as a prefix + for (const folder of folders) { + const key = `user-${userId}/${folder}/`; + await putObject(key, '', 'application/x-directory'); + } + + return true; + } catch (error) { + console.error('Error creating folder structure:', error); + throw error; + } +} + +// Generate pre-signed URL for direct browser upload (optional feature) +export async function generatePresignedUrl(key: string, expiresIn = 3600) { + try { + const command = new PutObjectCommand({ + Bucket: S3_BUCKET_NAME, + Key: key + }); + + return await getSignedUrl(s3Client, command, { expiresIn }); + } catch (error) { + console.error('Error generating presigned URL:', error); + throw error; + } +} \ No newline at end of file diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json index 294c4437..be0f0299 100644 --- a/node_modules/.package-lock.json +++ b/node_modules/.package-lock.json @@ -28,6 +28,912 @@ "lru-cache": "^10.4.3" } }, + "node_modules/@aws-crypto/crc32": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/crc32/-/crc32-5.2.0.tgz", + "integrity": "sha512-nLbCWqQNgUiwwtFsen1AdzAtvuLRsQS8rYgMuxCrdKf9kOssamGLuPwyTY9wyYblNr9+1XM8v6zoDTPPSIeANg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-crypto/crc32c": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/crc32c/-/crc32c-5.2.0.tgz", + "integrity": "sha512-+iWb8qaHLYKrNvGRbiYRHSdKRWhto5XlZUEBwDjYNf+ly5SVYG6zEoYIdxvf5R3zyeP16w4PLBn3rH1xc74Rag==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/sha1-browser": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha1-browser/-/sha1-browser-5.2.0.tgz", + "integrity": "sha512-OH6lveCFfcDjX4dbAvCFSYUjJZjDr/3XJ3xHtjn3Oj5b9RjojQo8npoLeA/bNwkOkrSQ0wgrHzXk4tDRxGKJeg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/supports-web-crypto": "^5.2.0", + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-locate-window": "^3.0.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + 
"node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz", + "integrity": "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-js": "^5.2.0", + "@aws-crypto/supports-web-crypto": "^5.2.0", + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-locate-window": "^3.0.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz", + "integrity": "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-crypto/supports-web-crypto": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz", + "integrity": 
"sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/util": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz", + "integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.222.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/client-s3": { + "version": "3.802.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.802.0.tgz", + "integrity": "sha512-YIwLLiqRZArEmRI94X8MOpWuXlmxI3NnxYv+3kk6HIc2YWPaOAf0YN7vWlnQFWo6Yi1gBRtP0HM8WzK4Bn5ORQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha1-browser": "5.2.0", + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-node": "3.799.0", + "@aws-sdk/middleware-bucket-endpoint": "3.775.0", + "@aws-sdk/middleware-expect-continue": "3.775.0", + "@aws-sdk/middleware-flexible-checksums": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-location-constraint": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-sdk-s3": "3.799.0", + "@aws-sdk/middleware-ssec": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/signature-v4-multi-region": "3.800.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@aws-sdk/xml-builder": "3.775.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/eventstream-serde-browser": "^4.0.2", + "@smithy/eventstream-serde-config-resolver": "^4.1.0", + "@smithy/eventstream-serde-node": "^4.0.2", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-blob-browser": "^4.0.2", + "@smithy/hash-node": "^4.0.2", + 
"@smithy/hash-stream-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/md5-js": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.1", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "@smithy/util-waiter": "^4.0.3", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-sso": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.799.0.tgz", + "integrity": "sha512-/i/LG7AiWPmPxKCA2jnR2zaf7B3HYSTbxaZI21ElIz9wASlNAsKr8CnLY7qb50kOyXiNfQ834S5Q3Gl8dX9o3Q==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.1", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.2", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/core": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.799.0.tgz", + "integrity": "sha512-hkKF3Zpc6+H8GI1rlttYVRh9uEE77cqAzLmLpY3iu7sql8cZgPERRBfaFct8p1SaDyrksLNiboD1vKW58mbsYg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/core": "^3.3.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": 
"^4.0.2", + "fast-xml-parser": "4.4.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/core/node_modules/fast-xml-parser": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.4.1.tgz", + "integrity": "sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + }, + { + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + } + ], + "license": "MIT", + "dependencies": { + "strnum": "^1.0.5" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, + "node_modules/@aws-sdk/credential-provider-env": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.799.0.tgz", + "integrity": "sha512-vT/SSWtbUIOW/U21qgEySmmO44SFWIA7WeQPX1OrI8WJ5n7OEI23JWLHjLvHTkYmuZK6z1rPcv7HzRgmuGRibA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-http": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.799.0.tgz", + "integrity": "sha512-2CjBpOWmhaPAExOgHnIB5nOkS5ef+mfRlJ1JC4nsnjAx0nrK4tk0XRE0LYz11P3+ue+a86cU8WTmBo+qjnGxPQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.799.0.tgz", + "integrity": "sha512-M9ubILFxerqw4QJwk83MnjtZyoA2eNCiea5V+PzZeHlwk2PON/EnawKqy65x9/hMHGoSvvNuby7iMAmPptu7yw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.799.0", + "@aws-sdk/credential-provider-web-identity": "3.799.0", + "@aws-sdk/nested-clients": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-node": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.799.0.tgz", + "integrity": "sha512-nd9fSJc0wUlgKUkIr2ldJhcIIrzJFS29AGZoyY22J3xih63nNDv61eTGVMsDZzHlV21XzMlPEljTR7axiimckg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-ini": "3.799.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.799.0", 
+ "@aws-sdk/credential-provider-web-identity": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-process": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.799.0.tgz", + "integrity": "sha512-g8jmNs2k98WNHMYcea1YKA+7ao2Ma4w0P42Dz4YpcI155pQHxHx25RwbOG+rsAKuo3bKwkW53HVE/ZTKhcWFgw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.799.0.tgz", + "integrity": "sha512-lQv27QkNU9FJFZqEf5DIEN3uXEN409Iaym9WJzhOouGtxvTIAWiD23OYh1u8PvBdrordJGS2YddfQvhcmq9akw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/client-sso": "3.799.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/token-providers": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-web-identity": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.799.0.tgz", + "integrity": "sha512-8k1i9ut+BEg0QZ+I6UQMxGNR1T8paLmAOAZXU+nLQR0lcxS6lr8v+dqofgzQPuHLBkWNCr1Av1IKeL3bJjgU7g==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/nested-clients": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-bucket-endpoint": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.775.0.tgz", + "integrity": "sha512-qogMIpVChDYr4xiUNC19/RDSw/sKoHkAhouS6Skxiy6s27HBhow1L3Z1qVYXuBmOZGSWPU0xiyZCvOyWrv9s+Q==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-arn-parser": "3.723.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-expect-continue": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.775.0.tgz", + "integrity": "sha512-Apd3owkIeUW5dnk3au9np2IdW2N0zc9NjTjHiH+Mx3zqwSrc+m+ANgJVgk9mnQjMzU/vb7VuxJ0eqdEbp5gYsg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-flexible-checksums": { + "version": "3.799.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.799.0.tgz", + "integrity": "sha512-vBIAdDl2neaFiUMxyr7dAtX7m9Iw5c0bz7OirD0JGW0nYn0mBcqKpFZEU75ewA5p2+Cm7RQDdt6099ne3gj0WA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/crc32": "5.2.0", + "@aws-crypto/crc32c": "5.2.0", + "@aws-crypto/util": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/is-array-buffer": "^4.0.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-host-header": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.775.0.tgz", + "integrity": "sha512-tkSegM0Z6WMXpLB8oPys/d+umYIocvO298mGvcMCncpRl77L9XkvSLJIFzaHes+o7djAgIduYw8wKIMStFss2w==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-location-constraint": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.775.0.tgz", + "integrity": "sha512-8TMXEHZXZTFTckQLyBT5aEI8fX11HZcwZseRifvBKKpj0RZDk4F0EEYGxeNSPpUQ7n+PRWyfAEnnZNRdAj/1NQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-logger": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.775.0.tgz", + "integrity": "sha512-FaxO1xom4MAoUJsldmR92nT1G6uZxTdNYOFYtdHfd6N2wcNaTuxgjIvqzg5y7QIH9kn58XX/dzf1iTjgqUStZw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-recursion-detection": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.775.0.tgz", + "integrity": "sha512-GLCzC8D0A0YDG5u3F5U03Vb9j5tcOEFhr8oc6PDk0k0vm5VwtZOE6LvK7hcCSoAB4HXyOUM0sQuXrbaAh9OwXA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-sdk-s3": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.799.0.tgz", + "integrity": "sha512-Zwdge5NArgcJwPuGZwgfXY6XXkWEBmMS9dqu5g3DcfHmZUuSjQUqmOsDdSZlE3RFHrDAEbuGQlrFUE8zuwdKQA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-arn-parser": "3.723.0", + "@smithy/core": "^3.3.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + 
"@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-ssec": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.775.0.tgz", + "integrity": "sha512-Iw1RHD8vfAWWPzBBIKaojO4GAvQkHOYIpKdAfis/EUSUmSa79QsnXnRqsdcE0mCB0Ylj23yi+ah4/0wh9FsekA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-user-agent": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.799.0.tgz", + "integrity": "sha512-TropQZanbOTxa+p+Nl4fWkzlRhgFwDfW+Wb6TR3jZN7IXHNlPpgGFpdrgvBExhW/RBhqr+94OsR8Ou58lp3hhA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@smithy/core": "^3.3.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/nested-clients": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.799.0.tgz", + "integrity": "sha512-zILlWh7asrcQG9JYMYgnvEQBfwmWKfED0yWCf3UNAmQcfS9wkCAWCgicNy/y5KvNvEYnHidsU117STtyuUNG5g==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.1", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.2", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/region-config-resolver": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.775.0.tgz", + "integrity": "sha512-40iH3LJjrQS3LKUJAl7Wj0bln7RFPEvUYKFxtP8a+oKFDO0F65F52xZxIJbPn6sHkxWDAnZlGgdjZXM3p2g5wQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + 
"@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/s3-request-presigner": { + "version": "3.802.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/s3-request-presigner/-/s3-request-presigner-3.802.0.tgz", + "integrity": "sha512-gXavOjkIIbd89ooAKziPWPnvwA3ZuJvpLMWr2xB/RyDZIgnjSwvRGFpRJ5EZPZjkhBFk0QQcEVp2FIrPI9wHlA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/signature-v4-multi-region": "3.800.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-format-url": "3.775.0", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/signature-v4-multi-region": { + "version": "3.800.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.800.0.tgz", + "integrity": "sha512-c71wZuiSUHNFCvcuqOv3jbqP+NquB2YKN4qX90OwYXEqUKn8F8fKJPpjjHjz1eK6qWKtECR4V/NTno2P70Yz/Q==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/middleware-sdk-s3": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/token-providers": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.799.0.tgz", + "integrity": "sha512-/8iDjnsJs/D8AhGbDAmdF5oSHzE4jsDsM2RIIxmBAKTZXkaaclQBNX9CmAqLKQmO3IUMZsDH2KENHLVAk/N/mw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/nested-clients": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/types": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.775.0.tgz", + "integrity": "sha512-ZoGKwa4C9fC9Av6bdfqcW6Ix5ot05F/S4VxWR2nHuMv7hzfmAjTOcUiWT7UR4hM/U0whf84VhDtXN/DWAk52KA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-arn-parser": { + "version": "3.723.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.723.0.tgz", + "integrity": "sha512-ZhEfvUwNliOQROcAk34WJWVYTlTa4694kSVhDSjW6lE1bMataPnIN8A0ycukEzBXmd8ZSoBcQLn6lKGl7XIJ5w==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-endpoints": { + "version": "3.787.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.787.0.tgz", + "integrity": "sha512-fd3zkiOkwnbdbN0Xp9TsP5SWrmv0SpT70YEdbb8wAj2DWQwiCmFszaSs+YCvhoCdmlR3Wl9Spu0pGpSAGKeYvQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "@smithy/util-endpoints": "^3.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-format-url": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-format-url/-/util-format-url-3.775.0.tgz", + "integrity": 
"sha512-Nw4nBeyCbWixoGh8NcVpa/i8McMA6RXJIjQFyloJLaPr7CPquz7ZbSl0MUWMFVwP/VHaJ7B+lNN3Qz1iFCEP/Q==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-locate-window": { + "version": "3.723.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.723.0.tgz", + "integrity": "sha512-Yf2CS10BqK688DRsrKI/EO6B8ff5J86NXe4C+VCysK7UOgN0l1zOTeTukZ3H8Q9tYYX3oaF1961o8vRkFm7Nmw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-user-agent-browser": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.775.0.tgz", + "integrity": "sha512-txw2wkiJmZKVdDbscK7VBK+u+TJnRtlUjRTLei+elZg2ADhpQxfVAQl436FUeIv6AhB/oRHW6/K/EAGXUSWi0A==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-sdk/util-user-agent-node": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.799.0.tgz", + "integrity": "sha512-iXBk38RbIWPF5Nq9O4AnktORAzXovSVqWYClvS1qbE7ILsnTLJbagU9HlU25O2iV5COVh1qZkwuP5NHQ2yTEyw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } + } + }, + "node_modules/@aws-sdk/xml-builder": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.775.0.tgz", + "integrity": "sha512-b9NGO6FKJeLGYnV7Z1yvcP1TNU4dkD5jNsLWOF1/sygZoASaQhNOlaiJ/1OH331YQ1R1oWk38nBb0frsYkDsOQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, "node_modules/@babel/runtime": { "version": "7.26.0", "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.26.0.tgz", @@ -2461,6 +3367,736 @@ "url": "https://ko-fi.com/killymxi" } }, + "node_modules/@smithy/abort-controller": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.0.2.tgz", + "integrity": "sha512-Sl/78VDtgqKxN2+1qduaVE140XF+Xg+TafkncspwM4jFP/LHr76ZHmIY/y3V1M0mMLNk+Je6IGbzxy23RSToMw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/chunked-blob-reader": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@smithy/chunked-blob-reader/-/chunked-blob-reader-5.0.0.tgz", + "integrity": "sha512-+sKqDBQqb036hh4NPaUiEkYFkTUGYzRsn3EuFhyfQfMy6oGHEUJDurLP9Ufb5dasr/XiAmPNMr6wa9afjQB+Gw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/chunked-blob-reader-native": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/chunked-blob-reader-native/-/chunked-blob-reader-native-4.0.0.tgz", + "integrity": 
"sha512-R9wM2yPmfEMsUmlMlIgSzOyICs0x9uu7UTHoccMyt7BWw8shcGM8HqB355+BZCPBcySvbTYMs62EgEQkNxz2ig==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-base64": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/config-resolver": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.1.0.tgz", + "integrity": "sha512-8smPlwhga22pwl23fM5ew4T9vfLUCeFXlcqNOCD5M5h8VmNPNUE9j6bQSuRXpDSV11L/E/SwEBQuW8hr6+nS1A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/core": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.3.0.tgz", + "integrity": "sha512-r6gvs5OfRq/w+9unPm7B3po4rmWaGh0CIL/OwHntGGux7+RhOOZLGuurbeMgWV6W55ZuyMTypJLeH0vn/ZRaWQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/middleware-serde": "^4.0.3", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/credential-provider-imds": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.0.2.tgz", + "integrity": "sha512-32lVig6jCaWBHnY+OEQ6e6Vnt5vDHaLiydGrwYMW9tPqO688hPGTYRamYJ1EptxEC2rAwJrHWmPoKRBl4iTa8w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/eventstream-codec": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-4.0.2.tgz", + "integrity": "sha512-p+f2kLSK7ZrXVfskU/f5dzksKTewZk8pJLPvER3aFHPt76C2MxD9vNatSfLzzQSQB4FNO96RK4PSXfhD1TTeMQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/crc32": "5.2.0", + "@smithy/types": "^4.2.0", + "@smithy/util-hex-encoding": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/eventstream-serde-browser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-4.0.2.tgz", + "integrity": "sha512-CepZCDs2xgVUtH7ZZ7oDdZFH8e6Y2zOv8iiX6RhndH69nlojCALSKK+OXwZUgOtUZEUaZ5e1hULVCHYbCn7pug==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/eventstream-serde-universal": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/eventstream-serde-config-resolver": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-4.1.0.tgz", + "integrity": "sha512-1PI+WPZ5TWXrfj3CIoKyUycYynYJgZjuQo8U+sphneOtjsgrttYybdqESFReQrdWJ+LKt6NEdbYzmmfDBmjX2A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/eventstream-serde-node": { + "version": "4.0.2", + 
"resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-node/-/eventstream-serde-node-4.0.2.tgz", + "integrity": "sha512-C5bJ/C6x9ENPMx2cFOirspnF9ZsBVnBMtP6BdPl/qYSuUawdGQ34Lq0dMcf42QTjUZgWGbUIZnz6+zLxJlb9aw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/eventstream-serde-universal": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/eventstream-serde-universal": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-4.0.2.tgz", + "integrity": "sha512-St8h9JqzvnbB52FtckiHPN4U/cnXcarMniXRXTKn0r4b4XesZOGiAyUdj1aXbqqn1icSqBlzzUsCl6nPB018ng==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/eventstream-codec": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/fetch-http-handler": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.0.2.tgz", + "integrity": "sha512-+9Dz8sakS9pe7f2cBocpJXdeVjMopUDLgZs1yWeu7h++WqSbjUYv/JAJwKwXw1HV6gq1jyWjxuyn24E2GhoEcQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/hash-blob-browser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/hash-blob-browser/-/hash-blob-browser-4.0.2.tgz", + "integrity": "sha512-3g188Z3DyhtzfBRxpZjU8R9PpOQuYsbNnyStc/ZVS+9nVX1f6XeNOa9IrAh35HwwIZg+XWk8bFVtNINVscBP+g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/chunked-blob-reader": "^5.0.0", + "@smithy/chunked-blob-reader-native": "^4.0.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/hash-node": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.0.2.tgz", + "integrity": "sha512-VnTpYPnRUE7yVhWozFdlxcYknv9UN7CeOqSrMH+V877v4oqtVYuoqhIhtSjmGPvYrYnAkaM61sLMKHvxL138yg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/hash-stream-node": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/hash-stream-node/-/hash-stream-node-4.0.2.tgz", + "integrity": "sha512-POWDuTznzbIwlEXEvvXoPMS10y0WKXK790soe57tFRfvf4zBHyzE529HpZMqmDdwG9MfFflnyzndUQ8j78ZdSg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/invalid-dependency": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.0.2.tgz", + "integrity": "sha512-GatB4+2DTpgWPday+mnUkoumP54u/MDM/5u44KF9hIu8jF0uafZtQLcdfIKkIcUNuF/fBojpLEHZS/56JqPeXQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/is-array-buffer": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-4.0.0.tgz", + "integrity": 
"sha512-saYhF8ZZNoJDTvJBEWgeBccCg+yvp1CX+ed12yORU3NilJScfc6gfch2oVb4QgxZrGUx3/ZJlb+c/dJbyupxlw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/md5-js": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-4.0.2.tgz", + "integrity": "sha512-Hc0R8EiuVunUewCse2syVgA2AfSRco3LyAv07B/zCOMa+jpXI9ll+Q21Nc6FAlYPcpNcAXqBzMhNs1CD/pP2bA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-content-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.0.2.tgz", + "integrity": "sha512-hAfEXm1zU+ELvucxqQ7I8SszwQ4znWMbNv6PLMndN83JJN41EPuS93AIyh2N+gJ6x8QFhzSO6b7q2e6oClDI8A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-endpoint": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.1.1.tgz", + "integrity": "sha512-z5RmcHxjvScL+LwEDU2mTNCOhgUs4lu5PGdF1K36IPRmUHhNFxNxgenSB7smyDiYD4vdKQ7CAZtG5cUErqib9w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.0", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-retry": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.1.2.tgz", + "integrity": "sha512-qN/Mmxm8JWtFAjozJ8VSTM83KOX4cIks8UjDqqNkKIegzPrE5ZKPNCQ/DqUSIF90pue5a/NycNXnBod2NwvZZQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/service-error-classification": "^4.0.3", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-retry/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@smithy/middleware-serde": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.0.3.tgz", + "integrity": "sha512-rfgDVrgLEVMmMn0BI8O+8OVr6vXzjV7HZj57l0QxslhzbvVfikZbVfBVthjLHqib4BW44QhcIgJpvebHlRaC9A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-stack": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.0.2.tgz", + "integrity": 
"sha512-eSPVcuJJGVYrFYu2hEq8g8WWdJav3sdrI4o2c6z/rjnYDd3xH9j9E7deZQCzFn4QvGPouLngH3dQ+QVTxv5bOQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/node-config-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.0.2.tgz", + "integrity": "sha512-WgCkILRZfJwJ4Da92a6t3ozN/zcvYyJGUTmfGbgS/FkCcoCjl7G4FJaCDN1ySdvLvemnQeo25FdkyMSTSwulsw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/node-http-handler": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.0.4.tgz", + "integrity": "sha512-/mdqabuAT3o/ihBGjL94PUbTSPSRJ0eeVTdgADzow0wRJ0rN4A27EOrtlK56MYiO1fDvlO3jVTCxQtQmK9dZ1g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/property-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.0.2.tgz", + "integrity": "sha512-wNRoQC1uISOuNc2s4hkOYwYllmiyrvVXWMtq+TysNRVQaHm4yoafYQyjN/goYZS+QbYlPIbb/QRjaUZMuzwQ7A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/protocol-http": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.1.0.tgz", + "integrity": "sha512-KxAOL1nUNw2JTYrtviRRjEnykIDhxc84qMBzxvu1MUfQfHTuBlCG7PA6EdVwqpJjH7glw7FqQoFxUJSyBQgu7g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/querystring-builder": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.0.2.tgz", + "integrity": "sha512-NTOs0FwHw1vimmQM4ebh+wFQvOwkEf/kQL6bSM1Lock+Bv4I89B3hGYoUEPkmvYPkDKyp5UdXJYu+PoTQ3T31Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-uri-escape": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/querystring-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.0.2.tgz", + "integrity": "sha512-v6w8wnmZcVXjfVLjxw8qF7OwESD9wnpjp0Dqry/Pod0/5vcEA3qxCr+BhbOHlxS8O+29eLpT3aagxXGwIoEk7Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/service-error-classification": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.0.3.tgz", + "integrity": "sha512-FTbcajmltovWMjj3tksDQdD23b2w6gH+A0DYA1Yz3iSpjDj8fmkwy62UnXcWMy4d5YoMoSyLFHMfkEVEzbiN8Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/shared-ini-file-loader": 
{ + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.0.2.tgz", + "integrity": "sha512-J9/gTWBGVuFZ01oVA6vdb4DAjf1XbDhK6sLsu3OS9qmLrS6KB5ygpeHiM3miIbj1qgSJ96GYszXFWv6ErJ8QEw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/signature-v4": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.1.0.tgz", + "integrity": "sha512-4t5WX60sL3zGJF/CtZsUQTs3UrZEDO2P7pEaElrekbLqkWPYkgqNW1oeiNYC6xXifBnT9dVBOnNQRvOE9riU9w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-uri-escape": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/smithy-client": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.2.1.tgz", + "integrity": "sha512-fbniZef60QdsBc4ZY0iyI8xbFHIiC/QRtPi66iE4ufjiE/aaz7AfUXzcWMkpO8r+QhLeNRIfmPchIG+3/QDZ6g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.0", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/types": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.2.0.tgz", + "integrity": "sha512-7eMk09zQKCO+E/ivsjQv+fDlOupcFUCSC/L2YUPgwhvowVGWbPQHjEFcmjt7QQ4ra5lyowS92SV53Zc6XD4+fg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/url-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.0.2.tgz", + "integrity": "sha512-Bm8n3j2ScqnT+kJaClSVCMeiSenK6jVAzZCNewsYWuZtnBehEz4r2qP0riZySZVfzB+03XZHJeqfmJDkeeSLiQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/querystring-parser": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-base64": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-4.0.0.tgz", + "integrity": "sha512-CvHfCmO2mchox9kjrtzoHkWHxjHZzaFojLc8quxXY7WAAMAg43nuxwv95tATVgQFNDwd4M9S1qFzj40Ul41Kmg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-body-length-browser": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-browser/-/util-body-length-browser-4.0.0.tgz", + "integrity": "sha512-sNi3DL0/k64/LO3A256M+m3CDdG6V7WKWHdAiBBMUN8S3hK3aMPhwnPik2A/a2ONN+9doY9UxaLfgqsIRg69QA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-body-length-node": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-node/-/util-body-length-node-4.0.0.tgz", + "integrity": 
"sha512-q0iDP3VsZzqJyje8xJWEJCNIu3lktUGVoSy1KB0UWym2CL1siV3artm+u1DFYTLejpsrdGyCSWBdGNjJzfDPjg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-buffer-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-4.0.0.tgz", + "integrity": "sha512-9TOQ7781sZvddgO8nxueKi3+yGvkY35kotA0Y6BWRajAv8jjmigQ1sBwz0UX47pQMYXJPahSKEKYFgt+rXdcug==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-config-provider": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-4.0.0.tgz", + "integrity": "sha512-L1RBVzLyfE8OXH+1hsJ8p+acNUSirQnWQ6/EgpchV88G6zGBTDPdXiiExei6Z1wR2RxYvxY/XLw6AMNCCt8H3w==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-defaults-mode-browser": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.0.9.tgz", + "integrity": "sha512-B8j0XsElvyhv6+5hlFf6vFV/uCSyLKcInpeXOGnOImX2mGXshE01RvPoGipTlRpIk53e6UfYj7WdDdgbVfXDZw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-defaults-mode-node": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.0.9.tgz", + "integrity": "sha512-wTDU8P/zdIf9DOpV5qm64HVgGRXvqjqB/fJZTEQbrz3s79JHM/E7XkMm/876Oq+ZLHJQgnXM9QHDo29dlM62eA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/config-resolver": "^4.1.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-endpoints": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.0.2.tgz", + "integrity": "sha512-6QSutU5ZyrpNbnd51zRTL7goojlcnuOB55+F9VBD+j8JpRY50IGamsjlycrmpn8PQkmJucFW8A0LSfXj7jjtLQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-hex-encoding": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-4.0.0.tgz", + "integrity": "sha512-Yk5mLhHtfIgW2W2WQZWSg5kuMZCVbvhFmC7rV4IO2QqnZdbEFPmQnCcGMAX2z/8Qj3B9hYYNjZOhWym+RwhePw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-middleware": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.0.2.tgz", + "integrity": "sha512-6GDamTGLuBQVAEuQ4yDQ+ti/YINf/MEmIegrEeg7DdB/sld8BX1lqt9RRuIcABOhAGTA50bRbPzErez7SlDtDQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, 
+ "node_modules/@smithy/util-retry": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.0.3.tgz", + "integrity": "sha512-DPuYjZQDXmKr/sNvy9Spu8R/ESa2e22wXZzSAY6NkjOLj6spbIje/Aq8rT97iUMdDj0qHMRIe+bTxvlU74d9Ng==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/service-error-classification": "^4.0.3", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-stream": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.2.0.tgz", + "integrity": "sha512-Vj1TtwWnuWqdgQI6YTUF5hQ/0jmFiOYsc51CSMgj7QfyO+RF4EnT2HNjoviNlOOmgzgvf3f5yno+EiC4vrnaWQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-uri-escape": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-4.0.0.tgz", + "integrity": "sha512-77yfbCbQMtgtTylO9itEAdpPXSog3ZxMe09AEhm0dU0NLTalV70ghDZFR+Nfi1C60jnJoh/Re4090/DuZh2Omg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-utf8": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-4.0.0.tgz", + "integrity": "sha512-b+zebfKCfRdgNJDknHCob3O7FpeYQN6ZG6YLExMcasDHsCXlsXCEuiPZeLnJLpwa5dvPetGlnGCiMHuLwGvFow==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-waiter": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.0.3.tgz", + "integrity": "sha512-JtaY3FxmD+te+KSI2FJuEcfNC9T/DGGVf551babM7fAaXhjJUt7oSYurH1Devxd2+BOSUACCgt3buinx4UnmEA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, "node_modules/@swc/counter": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz", @@ -2882,6 +4518,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/bowser": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz", + "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==", + "license": "MIT" + }, "node_modules/brace-expansion": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", diff --git a/node_modules/@aws-crypto/crc32/CHANGELOG.md b/node_modules/@aws-crypto/crc32/CHANGELOG.md new file mode 100644 index 00000000..7d76500d --- /dev/null +++ b/node_modules/@aws-crypto/crc32/CHANGELOG.md @@ -0,0 +1,100 @@ +# Change Log + +All notable changes to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+ +# [5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.1.0...v5.2.0) (2023-10-16) + +### Features + +- support ESM artifacts in all packages ([#752](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/752)) ([e930ffb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e930ffba5cfef66dd242049e7d514ced232c1e3b)) + +# [5.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.0.0...v5.1.0) (2023-09-22) + +### Bug Fixes + +- Update tsc to 2.x ([#735](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/735)) ([782e0de](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/782e0de9f5fef41f694130580a69d940894b6b8c)) + +### Features + +- Use @smithy/util-utf8 ([#730](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/730)) ([00fb851](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/00fb851ca3559d5a1f370f9256814de1210826b8)), closes [#699](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/699) + +# [5.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v4.0.1...v5.0.0) (2023-07-13) + +**Note:** Version bump only for package @aws-crypto/crc32 + +# [4.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v3.0.0...v4.0.0) (2023-02-20) + +**Note:** Version bump only for package @aws-crypto/crc32 + +# [3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.2...v3.0.0) (2023-01-12) + +- feat!: replace Hash implementations with Checksum interface (#492) ([da43dc0](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/da43dc0fdf669d9ebb5bfb1b1f7c79e46c4aaae1)), closes [#492](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/492) + +### BREAKING CHANGES + +- All classes that implemented `Hash` now implement `Checksum`. + +## [2.0.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.1...v2.0.2) (2022-09-07) + +### Bug Fixes + +- **#337:** update @aws-sdk/types ([#373](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/373)) ([b26a811](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/b26a811a392f5209c7ec7e57251500d4d78f97ff)), closes [#337](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/337) + +## [2.0.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.0...v2.0.1) (2021-12-09) + +**Note:** Version bump only for package @aws-crypto/crc32 + +# [2.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.2...v2.0.0) (2021-10-25) + +**Note:** Version bump only for package @aws-crypto/crc32 + +## [1.2.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.1...v1.2.2) (2021-10-12) + +### Bug Fixes + +- **crc32c:** ie11 does not support Array.from ([#221](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/221)) ([5f49547](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/5f495472ab8988cf203e0f2a70a51f7e1fcd7e60)) + +## [1.2.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.0...v1.2.1) (2021-09-17) + +**Note:** Version bump only for package @aws-crypto/crc32 + +# [1.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.1.1...v1.2.0) (2021-09-17) + +### Features + +- Add AwsCrc32 Hash ([f5d7e81](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/f5d7e815fcbe0f8da1edb855fea3bd33eb1edc15)) + +# [1.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/crc32@1.0.0...@aws-crypto/crc32@1.1.0) (2021-08-11) + +### Features + +- Create CRC-32C implementation ([#201](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/201)) 
([e43c7ec](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e43c7ecd30d6499fa696f5839ecc30502a34b8b6)) + +# [1.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/crc32@1.0.0-alpha.0...@aws-crypto/crc32@1.0.0) (2020-10-22) + +**Note:** Version bump only for package @aws-crypto/crc32 + +# [1.0.0-alpha.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/crc32@0.1.0-preview.4...@aws-crypto/crc32@1.0.0-alpha.0) (2020-02-07) + +**Note:** Version bump only for package @aws-crypto/crc32 + +# [0.1.0-preview.4](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/crc32@0.1.0-preview.2...@aws-crypto/crc32@0.1.0-preview.4) (2020-01-16) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) ([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.3](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/crc32@0.1.0-preview.2...@aws-crypto/crc32@0.1.0-preview.3) (2019-11-15) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) ([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.2](https://github.com/aws/aws-javascript-crypto-helpers/compare/@aws-crypto/crc32@0.1.0-preview.1...@aws-crypto/crc32@0.1.0-preview.2) (2019-10-30) + +### Bug Fixes + +- remove /src/ from .npmignore (for sourcemaps) ([#5](https://github.com/aws/aws-javascript-crypto-helpers/issues/5)) ([ec52056](https://github.com/aws/aws-javascript-crypto-helpers/commit/ec52056)) diff --git a/node_modules/@aws-crypto/crc32/LICENSE b/node_modules/@aws-crypto/crc32/LICENSE new file mode 100644 index 00000000..980a15ac --- /dev/null +++ b/node_modules/@aws-crypto/crc32/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
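The crc32 package vendored below exposes a one-shot `crc32()` helper plus an incremental `Crc32` class whose `update()` returns `this` for chaining (see `build/main/index.d.ts` and `src/index.ts` in the hunks that follow). A minimal usage sketch, assuming the vendored copy resolves from `node_modules` as committed in this diff; `0xCBF43926` is the standard CRC-32 check value for the ASCII string `"123456789"`, so it doubles as a smoke test:

```ts
import { Crc32, crc32 } from "@aws-crypto/crc32";

// Standard CRC-32 test vector: crc32("123456789") === 0xCBF43926
const data = new TextEncoder().encode("123456789");

// One-shot helper
console.log(crc32(data).toString(16)); // "cbf43926"

// Incremental form: update() returns `this`, so chunks of any size
// can be fed in and the calls chain
const chunked = new Crc32()
  .update(data.subarray(0, 4))
  .update(data.subarray(4))
  .digest();
console.log(chunked.toString(16)); // "cbf43926"
```

The implementation is the classic table-driven reflected CRC-32: the checksum starts at `0xFFFFFFFF`, each input byte indexes a 256-entry table derived from the reflected polynomial `0xEDB88320`, and `digest()` finishes by XOR-ing with `0xFFFFFFFF` and forcing an unsigned result with `>>> 0`.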
diff --git a/node_modules/@aws-crypto/crc32/README.md b/node_modules/@aws-crypto/crc32/README.md new file mode 100644 index 00000000..b54737a1 --- /dev/null +++ b/node_modules/@aws-crypto/crc32/README.md @@ -0,0 +1,16 @@ +# @aws-crypto/crc32 + +Pure JS implementation of CRC32 https://en.wikipedia.org/wiki/Cyclic_redundancy_check + +## Usage + +``` +import { Crc32 } from '@aws-crypto/crc32'; + +const crc32Digest = (new Crc32).update(buffer).digest() + +``` + +## Test + +`npm test` diff --git a/node_modules/@aws-crypto/crc32/build/main/aws_crc32.d.ts b/node_modules/@aws-crypto/crc32/build/main/aws_crc32.d.ts new file mode 100644 index 00000000..c91c2a52 --- /dev/null +++ b/node_modules/@aws-crypto/crc32/build/main/aws_crc32.d.ts @@ -0,0 +1,7 @@ +import { SourceData, Checksum } from "@aws-sdk/types"; +export declare class AwsCrc32 implements Checksum { + private crc32; + update(toHash: SourceData): void; + digest(): Promise<Uint8Array>; + reset(): void; +} diff --git a/node_modules/@aws-crypto/crc32/build/main/aws_crc32.js b/node_modules/@aws-crypto/crc32/build/main/aws_crc32.js new file mode 100644 index 00000000..09c304cd --- /dev/null +++ b/node_modules/@aws-crypto/crc32/build/main/aws_crc32.js @@ -0,0 +1,31 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AwsCrc32 = void 0; +var tslib_1 = require("tslib"); +var util_1 = require("@aws-crypto/util"); +var index_1 = require("./index"); +var AwsCrc32 = /** @class */ (function () { + function AwsCrc32() { + this.crc32 = new index_1.Crc32(); + } + AwsCrc32.prototype.update = function (toHash) { + if ((0, util_1.isEmptyData)(toHash)) + return; + this.crc32.update((0, util_1.convertToBuffer)(toHash)); + }; + AwsCrc32.prototype.digest = function () { + return tslib_1.__awaiter(this, void 0, void 0, function () { + return tslib_1.__generator(this, function (_a) { + return [2 /*return*/, (0, util_1.numToUint8)(this.crc32.digest())]; + }); + }); + }; + AwsCrc32.prototype.reset = function () { + this.crc32 = new index_1.Crc32(); + }; + return AwsCrc32; +}()); +exports.AwsCrc32 = AwsCrc32; +//# sourceMappingURL=aws_crc32.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/crc32/build/main/aws_crc32.js.map b/node_modules/@aws-crypto/crc32/build/main/aws_crc32.js.map new file mode 100644 index 00000000..fae264eb --- /dev/null +++ b/node_modules/@aws-crypto/crc32/build/main/aws_crc32.js.map @@ -0,0 +1 @@ +{"version":3,"file":"aws_crc32.js","sourceRoot":"","sources":["../../src/aws_crc32.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;;AAGtC,yCAA4E;AAC5E,iCAAgC;AAEhC;IAAA;QACU,UAAK,GAAG,IAAI,aAAK,EAAE,CAAC;IAe9B,CAAC;IAbC,yBAAM,GAAN,UAAO,MAAkB;QACvB,IAAI,IAAA,kBAAW,EAAC,MAAM,CAAC;YAAE,OAAO;QAEhC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,IAAA,sBAAe,EAAC,MAAM,CAAC,CAAC,CAAC;IAC7C,CAAC;IAEK,yBAAM,GAAZ;;;gBACE,sBAAO,IAAA,iBAAU,EAAC,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAC;;;KACxC;IAED,wBAAK,GAAL;QACE,IAAI,CAAC,KAAK,GAAG,IAAI,aAAK,EAAE,CAAC;IAC3B,CAAC;IACH,eAAC;AAAD,CAAC,AAhBD,IAgBC;AAhBY,4BAAQ"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/crc32/build/main/index.d.ts b/node_modules/@aws-crypto/crc32/build/main/index.d.ts new file mode 100644 index 00000000..75210713 --- /dev/null +++ b/node_modules/@aws-crypto/crc32/build/main/index.d.ts @@ -0,0 +1,7 @@ +export declare function crc32(data: Uint8Array): number; +export declare class Crc32 { + private checksum; + update(data: Uint8Array): 
this; + digest(): number; +} +export { AwsCrc32 } from "./aws_crc32"; diff --git a/node_modules/@aws-crypto/crc32/build/main/index.js b/node_modules/@aws-crypto/crc32/build/main/index.js new file mode 100644 index 00000000..fa789688 --- /dev/null +++ b/node_modules/@aws-crypto/crc32/build/main/index.js @@ -0,0 +1,108 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AwsCrc32 = exports.Crc32 = exports.crc32 = void 0; +var tslib_1 = require("tslib"); +var util_1 = require("@aws-crypto/util"); +function crc32(data) { + return new Crc32().update(data).digest(); +} +exports.crc32 = crc32; +var Crc32 = /** @class */ (function () { + function Crc32() { + this.checksum = 0xffffffff; + } + Crc32.prototype.update = function (data) { + var e_1, _a; + try { + for (var data_1 = tslib_1.__values(data), data_1_1 = data_1.next(); !data_1_1.done; data_1_1 = data_1.next()) { + var byte = data_1_1.value; + this.checksum = + (this.checksum >>> 8) ^ lookupTable[(this.checksum ^ byte) & 0xff]; + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (data_1_1 && !data_1_1.done && (_a = data_1.return)) _a.call(data_1); + } + finally { if (e_1) throw e_1.error; } + } + return this; + }; + Crc32.prototype.digest = function () { + return (this.checksum ^ 0xffffffff) >>> 0; + }; + return Crc32; +}()); +exports.Crc32 = Crc32; +// prettier-ignore +var a_lookUpTable = [ + 0x00000000, 0x77073096, 0xEE0E612C, 0x990951BA, + 0x076DC419, 0x706AF48F, 0xE963A535, 0x9E6495A3, + 0x0EDB8832, 0x79DCB8A4, 0xE0D5E91E, 0x97D2D988, + 0x09B64C2B, 0x7EB17CBD, 0xE7B82D07, 0x90BF1D91, + 0x1DB71064, 0x6AB020F2, 0xF3B97148, 0x84BE41DE, + 0x1ADAD47D, 0x6DDDE4EB, 0xF4D4B551, 0x83D385C7, + 0x136C9856, 0x646BA8C0, 0xFD62F97A, 0x8A65C9EC, + 0x14015C4F, 0x63066CD9, 0xFA0F3D63, 0x8D080DF5, + 0x3B6E20C8, 0x4C69105E, 0xD56041E4, 0xA2677172, + 0x3C03E4D1, 0x4B04D447, 0xD20D85FD, 0xA50AB56B, + 0x35B5A8FA, 0x42B2986C, 0xDBBBC9D6, 0xACBCF940, + 0x32D86CE3, 0x45DF5C75, 0xDCD60DCF, 0xABD13D59, + 0x26D930AC, 0x51DE003A, 0xC8D75180, 0xBFD06116, + 0x21B4F4B5, 0x56B3C423, 0xCFBA9599, 0xB8BDA50F, + 0x2802B89E, 0x5F058808, 0xC60CD9B2, 0xB10BE924, + 0x2F6F7C87, 0x58684C11, 0xC1611DAB, 0xB6662D3D, + 0x76DC4190, 0x01DB7106, 0x98D220BC, 0xEFD5102A, + 0x71B18589, 0x06B6B51F, 0x9FBFE4A5, 0xE8B8D433, + 0x7807C9A2, 0x0F00F934, 0x9609A88E, 0xE10E9818, + 0x7F6A0DBB, 0x086D3D2D, 0x91646C97, 0xE6635C01, + 0x6B6B51F4, 0x1C6C6162, 0x856530D8, 0xF262004E, + 0x6C0695ED, 0x1B01A57B, 0x8208F4C1, 0xF50FC457, + 0x65B0D9C6, 0x12B7E950, 0x8BBEB8EA, 0xFCB9887C, + 0x62DD1DDF, 0x15DA2D49, 0x8CD37CF3, 0xFBD44C65, + 0x4DB26158, 0x3AB551CE, 0xA3BC0074, 0xD4BB30E2, + 0x4ADFA541, 0x3DD895D7, 0xA4D1C46D, 0xD3D6F4FB, + 0x4369E96A, 0x346ED9FC, 0xAD678846, 0xDA60B8D0, + 0x44042D73, 0x33031DE5, 0xAA0A4C5F, 0xDD0D7CC9, + 0x5005713C, 0x270241AA, 0xBE0B1010, 0xC90C2086, + 0x5768B525, 0x206F85B3, 0xB966D409, 0xCE61E49F, + 0x5EDEF90E, 0x29D9C998, 0xB0D09822, 0xC7D7A8B4, + 0x59B33D17, 0x2EB40D81, 0xB7BD5C3B, 0xC0BA6CAD, + 0xEDB88320, 0x9ABFB3B6, 0x03B6E20C, 0x74B1D29A, + 0xEAD54739, 0x9DD277AF, 0x04DB2615, 0x73DC1683, + 0xE3630B12, 0x94643B84, 0x0D6D6A3E, 0x7A6A5AA8, + 0xE40ECF0B, 0x9309FF9D, 0x0A00AE27, 0x7D079EB1, + 0xF00F9344, 0x8708A3D2, 0x1E01F268, 0x6906C2FE, + 0xF762575D, 0x806567CB, 0x196C3671, 0x6E6B06E7, + 0xFED41B76, 0x89D32BE0, 0x10DA7A5A, 0x67DD4ACC, + 0xF9B9DF6F, 0x8EBEEFF9, 0x17B7BE43, 0x60B08ED5, + 0xD6D6A3E8, 0xA1D1937E, 0x38D8C2C4, 0x4FDFF252, + 0xD1BB67F1, 0xA6BC5767, 0x3FB506DD, 0x48B2364B, + 0xD80D2BDA, 0xAF0A1B4C, 
0x36034AF6, 0x41047A60, + 0xDF60EFC3, 0xA867DF55, 0x316E8EEF, 0x4669BE79, + 0xCB61B38C, 0xBC66831A, 0x256FD2A0, 0x5268E236, + 0xCC0C7795, 0xBB0B4703, 0x220216B9, 0x5505262F, + 0xC5BA3BBE, 0xB2BD0B28, 0x2BB45A92, 0x5CB36A04, + 0xC2D7FFA7, 0xB5D0CF31, 0x2CD99E8B, 0x5BDEAE1D, + 0x9B64C2B0, 0xEC63F226, 0x756AA39C, 0x026D930A, + 0x9C0906A9, 0xEB0E363F, 0x72076785, 0x05005713, + 0x95BF4A82, 0xE2B87A14, 0x7BB12BAE, 0x0CB61B38, + 0x92D28E9B, 0xE5D5BE0D, 0x7CDCEFB7, 0x0BDBDF21, + 0x86D3D2D4, 0xF1D4E242, 0x68DDB3F8, 0x1FDA836E, + 0x81BE16CD, 0xF6B9265B, 0x6FB077E1, 0x18B74777, + 0x88085AE6, 0xFF0F6A70, 0x66063BCA, 0x11010B5C, + 0x8F659EFF, 0xF862AE69, 0x616BFFD3, 0x166CCF45, + 0xA00AE278, 0xD70DD2EE, 0x4E048354, 0x3903B3C2, + 0xA7672661, 0xD06016F7, 0x4969474D, 0x3E6E77DB, + 0xAED16A4A, 0xD9D65ADC, 0x40DF0B66, 0x37D83BF0, + 0xA9BCAE53, 0xDEBB9EC5, 0x47B2CF7F, 0x30B5FFE9, + 0xBDBDF21C, 0xCABAC28A, 0x53B39330, 0x24B4A3A6, + 0xBAD03605, 0xCDD70693, 0x54DE5729, 0x23D967BF, + 0xB3667A2E, 0xC4614AB8, 0x5D681B02, 0x2A6F2B94, + 0xB40BBE37, 0xC30C8EA1, 0x5A05DF1B, 0x2D02EF8D, +]; +var lookupTable = (0, util_1.uint32ArrayFrom)(a_lookUpTable); +var aws_crc32_1 = require("./aws_crc32"); +Object.defineProperty(exports, "AwsCrc32", { enumerable: true, get: function () { return aws_crc32_1.AwsCrc32; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/crc32/build/main/index.js.map b/node_modules/@aws-crypto/crc32/build/main/index.js.map new file mode 100644 index 00000000..940f6aaa --- /dev/null +++ b/node_modules/@aws-crypto/crc32/build/main/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;AAAA,yCAAiD;AAEjD,SAAgB,KAAK,CAAC,IAAgB;IACpC,OAAO,IAAI,KAAK,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,MAAM,EAAE,CAAC;AAC3C,CAAC;AAFD,sBAEC;AAED;IAAA;QACU,aAAQ,GAAG,UAAU,CAAC;IAchC,CAAC;IAZC,sBAAM,GAAN,UAAO,IAAgB;;;YACrB,KAAmB,IAAA,SAAA,iBAAA,IAAI,CAAA,0BAAA,4CAAE;gBAApB,IAAM,IAAI,iBAAA;gBACb,IAAI,CAAC,QAAQ;oBACX,CAAC,IAAI,CAAC,QAAQ,KAAK,CAAC,CAAC,GAAG,WAAW,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC;aACtE;;;;;;;;;QAED,OAAO,IAAI,CAAC;IACd,CAAC;IAED,sBAAM,GAAN;QACE,OAAO,CAAC,IAAI,CAAC,QAAQ,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;IAC5C,CAAC;IACH,YAAC;AAAD,CAAC,AAfD,IAeC;AAfY,sBAAK;AAiBlB,kBAAkB;AAClB,IAAM,aAAa,GAAG;IACpB,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EA
AE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;CAC/C,CAAC;AACF,IAAM,WAAW,GAAgB,IAAA,sBAAe,EAAC,aAAa,CAAC,CAAA;AAC/D,yCAAuC;AAA9B,qGAAA,QAAQ,OAAA"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/crc32/build/module/aws_crc32.d.ts b/node_modules/@aws-crypto/crc32/build/module/aws_crc32.d.ts new file mode 100644 index 00000000..c91c2a52 --- /dev/null +++ b/node_modules/@aws-crypto/crc32/build/module/aws_crc32.d.ts @@ -0,0 +1,7 @@ +import { SourceData, Checksum } from "@aws-sdk/types"; +export declare class AwsCrc32 implements Checksum { + private crc32; + update(toHash: SourceData): void; + digest(): Promise; + reset(): void; +} diff --git a/node_modules/@aws-crypto/crc32/build/module/aws_crc32.js b/node_modules/@aws-crypto/crc32/build/module/aws_crc32.js new file mode 100644 index 00000000..4e916e62 --- /dev/null +++ b/node_modules/@aws-crypto/crc32/build/module/aws_crc32.js @@ -0,0 +1,28 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { __awaiter, __generator } from "tslib"; +import { convertToBuffer, isEmptyData, numToUint8 } from "@aws-crypto/util"; +import { Crc32 } from "./index"; +var AwsCrc32 = /** @class */ (function () { + function AwsCrc32() { + this.crc32 = new Crc32(); + } + AwsCrc32.prototype.update = function (toHash) { + if (isEmptyData(toHash)) + return; + this.crc32.update(convertToBuffer(toHash)); + }; + AwsCrc32.prototype.digest = function () { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + return [2 /*return*/, numToUint8(this.crc32.digest())]; + }); + }); + }; + AwsCrc32.prototype.reset = function () { + this.crc32 = new Crc32(); + }; + return AwsCrc32; +}()); +export { AwsCrc32 }; +//# sourceMappingURL=aws_crc32.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/crc32/build/module/aws_crc32.js.map b/node_modules/@aws-crypto/crc32/build/module/aws_crc32.js.map new file mode 100644 index 00000000..1a113cc2 --- /dev/null +++ b/node_modules/@aws-crypto/crc32/build/module/aws_crc32.js.map @@ -0,0 +1 @@ +{"version":3,"file":"aws_crc32.js","sourceRoot":"","sources":["../../src/aws_crc32.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;;AAGtC,OAAO,EAAE,eAAe,EAAE,WAAW,EAAE,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAC5E,OAAO,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAEhC;IAAA;QACU,UAAK,GAAG,IAAI,KAAK,EAAE,CAAC;IAe9B,CAAC;IAbC,yBAAM,GAAN,UAAO,MAAkB;QACvB,IAAI,WAAW,CAAC,MAAM,CAAC;YAAE,OAAO;QAEhC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,CAAC;IAC7C,CAAC;IAEK,yBAAM,GAAZ;;;gBACE,sBAAO,UAAU,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAC;;;KACxC;IAED,wBAAK,GAAL;QACE,IAAI,CAAC,KAAK,GAAG,IAAI,KAAK,EAAE,CAAC;IAC3B,CAAC;IACH,eAAC;AAAD,CAAC,AAhBD,IAgBC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/crc32/build/module/index.d.ts b/node_modules/@aws-crypto/crc32/build/module/index.d.ts new file mode 100644 index 00000000..75210713 --- /dev/null +++ b/node_modules/@aws-crypto/crc32/build/module/index.d.ts @@ -0,0 +1,7 @@ +export declare function crc32(data: Uint8Array): number; +export declare class Crc32 { + private checksum; + update(data: Uint8Array): this; + digest(): number; +} +export { AwsCrc32 } from "./aws_crc32"; diff --git a/node_modules/@aws-crypto/crc32/build/module/index.js b/node_modules/@aws-crypto/crc32/build/module/index.js new file mode 100644 index 00000000..00df32d0 --- /dev/null +++ b/node_modules/@aws-crypto/crc32/build/module/index.js @@ -0,0 +1,103 @@ +import { __values } from "tslib"; +import { uint32ArrayFrom } from "@aws-crypto/util"; +export function crc32(data) { + return new Crc32().update(data).digest(); +} +var Crc32 = /** @class */ (function () { + function Crc32() { + this.checksum = 0xffffffff; + } + Crc32.prototype.update = function (data) { + var e_1, _a; + try { + for (var data_1 = __values(data), data_1_1 = data_1.next(); !data_1_1.done; data_1_1 = data_1.next()) { + var byte = data_1_1.value; + this.checksum = + (this.checksum >>> 8) ^ lookupTable[(this.checksum ^ byte) & 0xff]; + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (data_1_1 && !data_1_1.done && (_a = data_1.return)) _a.call(data_1); + } + finally { if (e_1) throw e_1.error; } + } + return this; + }; + Crc32.prototype.digest = function () { + return (this.checksum ^ 0xffffffff) >>> 0; + }; + return Crc32; +}()); +export { Crc32 }; +// prettier-ignore +var a_lookUpTable = [ + 0x00000000, 0x77073096, 0xEE0E612C, 0x990951BA, + 0x076DC419, 0x706AF48F, 0xE963A535, 
0x9E6495A3, + 0x0EDB8832, 0x79DCB8A4, 0xE0D5E91E, 0x97D2D988, + 0x09B64C2B, 0x7EB17CBD, 0xE7B82D07, 0x90BF1D91, + 0x1DB71064, 0x6AB020F2, 0xF3B97148, 0x84BE41DE, + 0x1ADAD47D, 0x6DDDE4EB, 0xF4D4B551, 0x83D385C7, + 0x136C9856, 0x646BA8C0, 0xFD62F97A, 0x8A65C9EC, + 0x14015C4F, 0x63066CD9, 0xFA0F3D63, 0x8D080DF5, + 0x3B6E20C8, 0x4C69105E, 0xD56041E4, 0xA2677172, + 0x3C03E4D1, 0x4B04D447, 0xD20D85FD, 0xA50AB56B, + 0x35B5A8FA, 0x42B2986C, 0xDBBBC9D6, 0xACBCF940, + 0x32D86CE3, 0x45DF5C75, 0xDCD60DCF, 0xABD13D59, + 0x26D930AC, 0x51DE003A, 0xC8D75180, 0xBFD06116, + 0x21B4F4B5, 0x56B3C423, 0xCFBA9599, 0xB8BDA50F, + 0x2802B89E, 0x5F058808, 0xC60CD9B2, 0xB10BE924, + 0x2F6F7C87, 0x58684C11, 0xC1611DAB, 0xB6662D3D, + 0x76DC4190, 0x01DB7106, 0x98D220BC, 0xEFD5102A, + 0x71B18589, 0x06B6B51F, 0x9FBFE4A5, 0xE8B8D433, + 0x7807C9A2, 0x0F00F934, 0x9609A88E, 0xE10E9818, + 0x7F6A0DBB, 0x086D3D2D, 0x91646C97, 0xE6635C01, + 0x6B6B51F4, 0x1C6C6162, 0x856530D8, 0xF262004E, + 0x6C0695ED, 0x1B01A57B, 0x8208F4C1, 0xF50FC457, + 0x65B0D9C6, 0x12B7E950, 0x8BBEB8EA, 0xFCB9887C, + 0x62DD1DDF, 0x15DA2D49, 0x8CD37CF3, 0xFBD44C65, + 0x4DB26158, 0x3AB551CE, 0xA3BC0074, 0xD4BB30E2, + 0x4ADFA541, 0x3DD895D7, 0xA4D1C46D, 0xD3D6F4FB, + 0x4369E96A, 0x346ED9FC, 0xAD678846, 0xDA60B8D0, + 0x44042D73, 0x33031DE5, 0xAA0A4C5F, 0xDD0D7CC9, + 0x5005713C, 0x270241AA, 0xBE0B1010, 0xC90C2086, + 0x5768B525, 0x206F85B3, 0xB966D409, 0xCE61E49F, + 0x5EDEF90E, 0x29D9C998, 0xB0D09822, 0xC7D7A8B4, + 0x59B33D17, 0x2EB40D81, 0xB7BD5C3B, 0xC0BA6CAD, + 0xEDB88320, 0x9ABFB3B6, 0x03B6E20C, 0x74B1D29A, + 0xEAD54739, 0x9DD277AF, 0x04DB2615, 0x73DC1683, + 0xE3630B12, 0x94643B84, 0x0D6D6A3E, 0x7A6A5AA8, + 0xE40ECF0B, 0x9309FF9D, 0x0A00AE27, 0x7D079EB1, + 0xF00F9344, 0x8708A3D2, 0x1E01F268, 0x6906C2FE, + 0xF762575D, 0x806567CB, 0x196C3671, 0x6E6B06E7, + 0xFED41B76, 0x89D32BE0, 0x10DA7A5A, 0x67DD4ACC, + 0xF9B9DF6F, 0x8EBEEFF9, 0x17B7BE43, 0x60B08ED5, + 0xD6D6A3E8, 0xA1D1937E, 0x38D8C2C4, 0x4FDFF252, + 0xD1BB67F1, 0xA6BC5767, 0x3FB506DD, 0x48B2364B, + 0xD80D2BDA, 0xAF0A1B4C, 0x36034AF6, 0x41047A60, + 0xDF60EFC3, 0xA867DF55, 0x316E8EEF, 0x4669BE79, + 0xCB61B38C, 0xBC66831A, 0x256FD2A0, 0x5268E236, + 0xCC0C7795, 0xBB0B4703, 0x220216B9, 0x5505262F, + 0xC5BA3BBE, 0xB2BD0B28, 0x2BB45A92, 0x5CB36A04, + 0xC2D7FFA7, 0xB5D0CF31, 0x2CD99E8B, 0x5BDEAE1D, + 0x9B64C2B0, 0xEC63F226, 0x756AA39C, 0x026D930A, + 0x9C0906A9, 0xEB0E363F, 0x72076785, 0x05005713, + 0x95BF4A82, 0xE2B87A14, 0x7BB12BAE, 0x0CB61B38, + 0x92D28E9B, 0xE5D5BE0D, 0x7CDCEFB7, 0x0BDBDF21, + 0x86D3D2D4, 0xF1D4E242, 0x68DDB3F8, 0x1FDA836E, + 0x81BE16CD, 0xF6B9265B, 0x6FB077E1, 0x18B74777, + 0x88085AE6, 0xFF0F6A70, 0x66063BCA, 0x11010B5C, + 0x8F659EFF, 0xF862AE69, 0x616BFFD3, 0x166CCF45, + 0xA00AE278, 0xD70DD2EE, 0x4E048354, 0x3903B3C2, + 0xA7672661, 0xD06016F7, 0x4969474D, 0x3E6E77DB, + 0xAED16A4A, 0xD9D65ADC, 0x40DF0B66, 0x37D83BF0, + 0xA9BCAE53, 0xDEBB9EC5, 0x47B2CF7F, 0x30B5FFE9, + 0xBDBDF21C, 0xCABAC28A, 0x53B39330, 0x24B4A3A6, + 0xBAD03605, 0xCDD70693, 0x54DE5729, 0x23D967BF, + 0xB3667A2E, 0xC4614AB8, 0x5D681B02, 0x2A6F2B94, + 0xB40BBE37, 0xC30C8EA1, 0x5A05DF1B, 0x2D02EF8D, +]; +var lookupTable = uint32ArrayFrom(a_lookUpTable); +export { AwsCrc32 } from "./aws_crc32"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/crc32/build/module/index.js.map b/node_modules/@aws-crypto/crc32/build/module/index.js.map new file mode 100644 index 00000000..87b2d5e6 --- /dev/null +++ b/node_modules/@aws-crypto/crc32/build/module/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA,OAAO,EAAC,eAAe,EAAC,MAAM,kBAAkB,CAAC;AAEjD,MAAM,UAAU,KAAK,CAAC,IAAgB;IACpC,OAAO,IAAI,KAAK,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,MAAM,EAAE,CAAC;AAC3C,CAAC;AAED;IAAA;QACU,aAAQ,GAAG,UAAU,CAAC;IAchC,CAAC;IAZC,sBAAM,GAAN,UAAO,IAAgB;;;YACrB,KAAmB,IAAA,SAAA,SAAA,IAAI,CAAA,0BAAA,4CAAE;gBAApB,IAAM,IAAI,iBAAA;gBACb,IAAI,CAAC,QAAQ;oBACX,CAAC,IAAI,CAAC,QAAQ,KAAK,CAAC,CAAC,GAAG,WAAW,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC;aACtE;;;;;;;;;QAED,OAAO,IAAI,CAAC;IACd,CAAC;IAED,sBAAM,GAAN;QACE,OAAO,CAAC,IAAI,CAAC,QAAQ,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;IAC5C,CAAC;IACH,YAAC;AAAD,CAAC,AAfD,IAeC;;AAED,kBAAkB;AAClB,IAAM,aAAa,GAAG;IACpB,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9C,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;CAC/C,CAAC;AACF,IAAM,WAAW,GAAgB,eAAe,CAAC,aAAa,CAAC,CAAA;AAC/D,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/crc32/package.json 
b/node_modules/@aws-crypto/crc32/package.json new file mode 100644 index 00000000..9e83975b --- /dev/null +++ b/node_modules/@aws-crypto/crc32/package.json @@ -0,0 +1,32 @@ +{ + "name": "@aws-crypto/crc32", + "version": "5.2.0", + "scripts": { + "prepublishOnly": "tsc -p tsconfig.json && tsc -p tsconfig.module.json", + "pretest": "tsc -p tsconfig.test.json", + "test": "mocha --require ts-node/register test/**/*test.ts" + }, + "main": "./build/main/index.js", + "module": "./build/module/index.js", + "types": "./build/main/index.d.ts", + "repository": { + "type": "git", + "url": "git@github.com:aws/aws-sdk-js-crypto-helpers.git" + }, + "author": { + "name": "AWS Crypto Tools Team", + "email": "aws-cryptools@amazon.com", + "url": "https://docs.aws.amazon.com/aws-crypto-tools/index.html?id=docs_gateway#lang/en_us" + }, + "homepage": "https://github.com/aws/aws-sdk-js-crypto-helpers/tree/master/packages/crc32", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "gitHead": "c11b171b35ec5c093364f0e0d8dc4ab1af68e748" +} diff --git a/node_modules/@aws-crypto/crc32/src/aws_crc32.ts b/node_modules/@aws-crypto/crc32/src/aws_crc32.ts new file mode 100644 index 00000000..bee48c9b --- /dev/null +++ b/node_modules/@aws-crypto/crc32/src/aws_crc32.ts @@ -0,0 +1,24 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { SourceData, Checksum } from "@aws-sdk/types"; +import { convertToBuffer, isEmptyData, numToUint8 } from "@aws-crypto/util"; +import { Crc32 } from "./index"; + +export class AwsCrc32 implements Checksum { + private crc32 = new Crc32(); + + update(toHash: SourceData) { + if (isEmptyData(toHash)) return; + + this.crc32.update(convertToBuffer(toHash)); + } + + async digest(): Promise<Uint8Array> { + return numToUint8(this.crc32.digest()); + } + + reset(): void { + this.crc32 = new Crc32(); + } +} diff --git a/node_modules/@aws-crypto/crc32/src/index.ts b/node_modules/@aws-crypto/crc32/src/index.ts new file mode 100644 index 00000000..47623867 --- /dev/null +++ b/node_modules/@aws-crypto/crc32/src/index.ts @@ -0,0 +1,92 @@ +import {uint32ArrayFrom} from "@aws-crypto/util"; + +export function crc32(data: Uint8Array): number { + return new Crc32().update(data).digest(); +} + +export class Crc32 { + private checksum = 0xffffffff; + + update(data: Uint8Array): this { + for (const byte of data) { + this.checksum = + (this.checksum >>> 8) ^ lookupTable[(this.checksum ^ byte) & 0xff]; + } + + return this; + } + + digest(): number { + return (this.checksum ^ 0xffffffff) >>> 0; + } +} + +// prettier-ignore +const a_lookUpTable = [ + 0x00000000, 0x77073096, 0xEE0E612C, 0x990951BA, + 0x076DC419, 0x706AF48F, 0xE963A535, 0x9E6495A3, + 0x0EDB8832, 0x79DCB8A4, 0xE0D5E91E, 0x97D2D988, + 0x09B64C2B, 0x7EB17CBD, 0xE7B82D07, 0x90BF1D91, + 0x1DB71064, 0x6AB020F2, 0xF3B97148, 0x84BE41DE, + 0x1ADAD47D, 0x6DDDE4EB, 0xF4D4B551, 0x83D385C7, + 0x136C9856, 0x646BA8C0, 0xFD62F97A, 0x8A65C9EC, + 0x14015C4F, 0x63066CD9, 0xFA0F3D63, 0x8D080DF5, + 0x3B6E20C8, 0x4C69105E, 0xD56041E4, 0xA2677172, + 0x3C03E4D1, 0x4B04D447, 0xD20D85FD, 0xA50AB56B, + 0x35B5A8FA, 0x42B2986C, 0xDBBBC9D6, 0xACBCF940, + 0x32D86CE3, 0x45DF5C75, 0xDCD60DCF, 0xABD13D59, + 0x26D930AC, 0x51DE003A, 0xC8D75180, 0xBFD06116, + 0x21B4F4B5, 0x56B3C423, 0xCFBA9599, 0xB8BDA50F, + 0x2802B89E, 0x5F058808, 0xC60CD9B2, 0xB10BE924, + 0x2F6F7C87, 0x58684C11, 0xC1611DAB, 0xB6662D3D, + 
0x76DC4190, 0x01DB7106, 0x98D220BC, 0xEFD5102A, + 0x71B18589, 0x06B6B51F, 0x9FBFE4A5, 0xE8B8D433, + 0x7807C9A2, 0x0F00F934, 0x9609A88E, 0xE10E9818, + 0x7F6A0DBB, 0x086D3D2D, 0x91646C97, 0xE6635C01, + 0x6B6B51F4, 0x1C6C6162, 0x856530D8, 0xF262004E, + 0x6C0695ED, 0x1B01A57B, 0x8208F4C1, 0xF50FC457, + 0x65B0D9C6, 0x12B7E950, 0x8BBEB8EA, 0xFCB9887C, + 0x62DD1DDF, 0x15DA2D49, 0x8CD37CF3, 0xFBD44C65, + 0x4DB26158, 0x3AB551CE, 0xA3BC0074, 0xD4BB30E2, + 0x4ADFA541, 0x3DD895D7, 0xA4D1C46D, 0xD3D6F4FB, + 0x4369E96A, 0x346ED9FC, 0xAD678846, 0xDA60B8D0, + 0x44042D73, 0x33031DE5, 0xAA0A4C5F, 0xDD0D7CC9, + 0x5005713C, 0x270241AA, 0xBE0B1010, 0xC90C2086, + 0x5768B525, 0x206F85B3, 0xB966D409, 0xCE61E49F, + 0x5EDEF90E, 0x29D9C998, 0xB0D09822, 0xC7D7A8B4, + 0x59B33D17, 0x2EB40D81, 0xB7BD5C3B, 0xC0BA6CAD, + 0xEDB88320, 0x9ABFB3B6, 0x03B6E20C, 0x74B1D29A, + 0xEAD54739, 0x9DD277AF, 0x04DB2615, 0x73DC1683, + 0xE3630B12, 0x94643B84, 0x0D6D6A3E, 0x7A6A5AA8, + 0xE40ECF0B, 0x9309FF9D, 0x0A00AE27, 0x7D079EB1, + 0xF00F9344, 0x8708A3D2, 0x1E01F268, 0x6906C2FE, + 0xF762575D, 0x806567CB, 0x196C3671, 0x6E6B06E7, + 0xFED41B76, 0x89D32BE0, 0x10DA7A5A, 0x67DD4ACC, + 0xF9B9DF6F, 0x8EBEEFF9, 0x17B7BE43, 0x60B08ED5, + 0xD6D6A3E8, 0xA1D1937E, 0x38D8C2C4, 0x4FDFF252, + 0xD1BB67F1, 0xA6BC5767, 0x3FB506DD, 0x48B2364B, + 0xD80D2BDA, 0xAF0A1B4C, 0x36034AF6, 0x41047A60, + 0xDF60EFC3, 0xA867DF55, 0x316E8EEF, 0x4669BE79, + 0xCB61B38C, 0xBC66831A, 0x256FD2A0, 0x5268E236, + 0xCC0C7795, 0xBB0B4703, 0x220216B9, 0x5505262F, + 0xC5BA3BBE, 0xB2BD0B28, 0x2BB45A92, 0x5CB36A04, + 0xC2D7FFA7, 0xB5D0CF31, 0x2CD99E8B, 0x5BDEAE1D, + 0x9B64C2B0, 0xEC63F226, 0x756AA39C, 0x026D930A, + 0x9C0906A9, 0xEB0E363F, 0x72076785, 0x05005713, + 0x95BF4A82, 0xE2B87A14, 0x7BB12BAE, 0x0CB61B38, + 0x92D28E9B, 0xE5D5BE0D, 0x7CDCEFB7, 0x0BDBDF21, + 0x86D3D2D4, 0xF1D4E242, 0x68DDB3F8, 0x1FDA836E, + 0x81BE16CD, 0xF6B9265B, 0x6FB077E1, 0x18B74777, + 0x88085AE6, 0xFF0F6A70, 0x66063BCA, 0x11010B5C, + 0x8F659EFF, 0xF862AE69, 0x616BFFD3, 0x166CCF45, + 0xA00AE278, 0xD70DD2EE, 0x4E048354, 0x3903B3C2, + 0xA7672661, 0xD06016F7, 0x4969474D, 0x3E6E77DB, + 0xAED16A4A, 0xD9D65ADC, 0x40DF0B66, 0x37D83BF0, + 0xA9BCAE53, 0xDEBB9EC5, 0x47B2CF7F, 0x30B5FFE9, + 0xBDBDF21C, 0xCABAC28A, 0x53B39330, 0x24B4A3A6, + 0xBAD03605, 0xCDD70693, 0x54DE5729, 0x23D967BF, + 0xB3667A2E, 0xC4614AB8, 0x5D681B02, 0x2A6F2B94, + 0xB40BBE37, 0xC30C8EA1, 0x5A05DF1B, 0x2D02EF8D, +]; +const lookupTable: Uint32Array = uint32ArrayFrom(a_lookUpTable) +export { AwsCrc32 } from "./aws_crc32"; diff --git a/node_modules/@aws-crypto/crc32/tsconfig.json b/node_modules/@aws-crypto/crc32/tsconfig.json new file mode 100644 index 00000000..2b996d08 --- /dev/null +++ b/node_modules/@aws-crypto/crc32/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "rootDir": "./src", + "outDir": "./build/main", + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules/**"] +} diff --git a/node_modules/@aws-crypto/crc32/tsconfig.module.json b/node_modules/@aws-crypto/crc32/tsconfig.module.json new file mode 100644 index 00000000..7d0cfddc --- /dev/null +++ b/node_modules/@aws-crypto/crc32/tsconfig.module.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "outDir": "build/module", + "module": "esnext", + } +} diff --git a/node_modules/@aws-crypto/crc32c/CHANGELOG.md b/node_modules/@aws-crypto/crc32c/CHANGELOG.md new file mode 100644 index 00000000..f3a9ea68 --- /dev/null +++ b/node_modules/@aws-crypto/crc32c/CHANGELOG.md @@ -0,0 +1,76 @@ +# Change Log + +All notable changes 
to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. + +# [5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.1.0...v5.2.0) (2023-10-16) + +### Features + +- support ESM artifacts in all packages ([#752](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/752)) ([e930ffb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e930ffba5cfef66dd242049e7d514ced232c1e3b)) + +# [5.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.0.0...v5.1.0) (2023-09-22) + +### Bug Fixes + +- Update tsc to 2.x ([#735](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/735)) ([782e0de](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/782e0de9f5fef41f694130580a69d940894b6b8c)) + +### Features + +- Use @smithy/util-utf8 ([#730](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/730)) ([00fb851](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/00fb851ca3559d5a1f370f9256814de1210826b8)), closes [#699](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/699) + +# [5.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v4.0.1...v5.0.0) (2023-07-13) + +**Note:** Version bump only for package @aws-crypto/crc32c + +# [4.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v3.0.0...v4.0.0) (2023-02-20) + +**Note:** Version bump only for package @aws-crypto/crc32c + +# [3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.2...v3.0.0) (2023-01-12) + +- feat!: replace Hash implementations with Checksum interface (#492) ([da43dc0](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/da43dc0fdf669d9ebb5bfb1b1f7c79e46c4aaae1)), closes [#492](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/492) + +### BREAKING CHANGES + +- All classes that implemented `Hash` now implement `Checksum`. 
+ +## [2.0.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.1...v2.0.2) (2022-09-07) + +### Bug Fixes + +- **#337:** update @aws-sdk/types ([#373](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/373)) ([b26a811](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/b26a811a392f5209c7ec7e57251500d4d78f97ff)), closes [#337](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/337) + +## [2.0.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.0...v2.0.1) (2021-12-09) + +**Note:** Version bump only for package @aws-crypto/crc32c + +# [2.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.2...v2.0.0) (2021-10-25) + +**Note:** Version bump only for package @aws-crypto/crc32c + +## [1.2.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.1...v1.2.2) (2021-10-12) + +### Bug Fixes + +- **crc32c:** ie11 does not support Array.from ([#221](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/221)) ([5f49547](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/5f495472ab8988cf203e0f2a70a51f7e1fcd7e60)) + +## [1.2.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.0...v1.2.1) (2021-09-17) + +**Note:** Version bump only for package @aws-crypto/crc32c + +# [1.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.1.1...v1.2.0) (2021-09-17) + +### Features + +- Add AwsCrc32C Hash ([4840c83](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/4840c83bdd7c461dded777ebc45a8f99258ba21c)) + +## [0.2.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/crc32c@0.2.0...@aws-crypto/crc32c@0.2.1) (2021-08-24) + +**Note:** Version bump only for package @aws-crypto/crc32c + +# 0.2.0 (2021-08-11) + +### Features + +- Create CRC-32C implementation ([#201](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/201)) ([e43c7ec](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e43c7ecd30d6499fa696f5839ecc30502a34b8b6)) diff --git a/node_modules/@aws-crypto/crc32c/LICENSE b/node_modules/@aws-crypto/crc32c/LICENSE new file mode 100644 index 00000000..980a15ac --- /dev/null +++ b/node_modules/@aws-crypto/crc32c/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
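`@aws-crypto/crc32c`, vendored below, mirrors the crc32 package API one-for-one; the functional difference is the lookup table, generated from the reflected Castagnoli polynomial `0x82F63B78` rather than `0xEDB88320`. A quick sketch under the same assumption that the vendored copy resolves from `node_modules`; `0xE3069283` is the standard CRC-32C check value for `"123456789"`:

```ts
import { Crc32c, crc32c } from "@aws-crypto/crc32c";

// Standard CRC-32C (Castagnoli) test vector:
// crc32c("123456789") === 0xE3069283
const data = new TextEncoder().encode("123456789");

console.log(crc32c(data).toString(16)); // "e3069283"

// Same chainable incremental interface as Crc32
console.log(new Crc32c().update(data).digest().toString(16)); // "e3069283"
```

Both packages also ship `Checksum` wrappers (`AwsCrc32`, `AwsCrc32c`) for use with the AWS SDK: `update()` accepts any `SourceData`, and the async `digest()` resolves to the checksum as a 4-byte `Uint8Array` (via `numToUint8`) rather than a number.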
diff --git a/node_modules/@aws-crypto/crc32c/README.md b/node_modules/@aws-crypto/crc32c/README.md new file mode 100644 index 00000000..2b8ef80c --- /dev/null +++ b/node_modules/@aws-crypto/crc32c/README.md @@ -0,0 +1,16 @@ +# @aws-crypto/crc32c + +Pure JS implementation of CRC32-C https://en.wikipedia.org/wiki/Cyclic_redundancy_check + +## Usage + +``` +import { Crc32c } from '@aws-crypto/crc32c'; + +const crc32Digest = (new Crc32c).update(buffer).digest() + +``` + +## Test + +`npm test` diff --git a/node_modules/@aws-crypto/crc32c/build/main/aws_crc32c.d.ts b/node_modules/@aws-crypto/crc32c/build/main/aws_crc32c.d.ts new file mode 100644 index 00000000..3e28b90e --- /dev/null +++ b/node_modules/@aws-crypto/crc32c/build/main/aws_crc32c.d.ts @@ -0,0 +1,7 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class AwsCrc32c implements Checksum { + private crc32c; + update(toHash: SourceData): void; + digest(): Promise<Uint8Array>; + reset(): void; +} diff --git a/node_modules/@aws-crypto/crc32c/build/main/aws_crc32c.js b/node_modules/@aws-crypto/crc32c/build/main/aws_crc32c.js new file mode 100644 index 00000000..a1338093 --- /dev/null +++ b/node_modules/@aws-crypto/crc32c/build/main/aws_crc32c.js @@ -0,0 +1,31 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AwsCrc32c = void 0; +var tslib_1 = require("tslib"); +var util_1 = require("@aws-crypto/util"); +var index_1 = require("./index"); +var AwsCrc32c = /** @class */ (function () { + function AwsCrc32c() { + this.crc32c = new index_1.Crc32c(); + } + AwsCrc32c.prototype.update = function (toHash) { + if ((0, util_1.isEmptyData)(toHash)) + return; + this.crc32c.update((0, util_1.convertToBuffer)(toHash)); + }; + AwsCrc32c.prototype.digest = function () { + return tslib_1.__awaiter(this, void 0, void 0, function () { + return tslib_1.__generator(this, function (_a) { + return [2 /*return*/, (0, util_1.numToUint8)(this.crc32c.digest())]; + }); + }); + }; + AwsCrc32c.prototype.reset = function () { + this.crc32c = new index_1.Crc32c(); + }; + return AwsCrc32c; +}()); +exports.AwsCrc32c = AwsCrc32c; +//# sourceMappingURL=aws_crc32c.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/crc32c/build/main/aws_crc32c.js.map b/node_modules/@aws-crypto/crc32c/build/main/aws_crc32c.js.map new file mode 100644 index 00000000..05c15e7f --- /dev/null +++ b/node_modules/@aws-crypto/crc32c/build/main/aws_crc32c.js.map @@ -0,0 +1 @@ +{"version":3,"file":"aws_crc32c.js","sourceRoot":"","sources":["../../src/aws_crc32c.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;;AAGtC,yCAA4E;AAC5E,iCAAiC;AAEjC;IAAA;QACU,WAAM,GAAG,IAAI,cAAM,EAAE,CAAC;IAehC,CAAC;IAbC,0BAAM,GAAN,UAAO,MAAkB;QACvB,IAAI,IAAA,kBAAW,EAAC,MAAM,CAAC;YAAE,OAAO;QAEhC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,IAAA,sBAAe,EAAC,MAAM,CAAC,CAAC,CAAC;IAC9C,CAAC;IAEK,0BAAM,GAAZ;;;gBACE,sBAAO,IAAA,iBAAU,EAAC,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,EAAC;;;KACzC;IAED,yBAAK,GAAL;QACE,IAAI,CAAC,MAAM,GAAG,IAAI,cAAM,EAAE,CAAC;IAC7B,CAAC;IACH,gBAAC;AAAD,CAAC,AAhBD,IAgBC;AAhBY,8BAAS"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/crc32c/build/main/index.d.ts b/node_modules/@aws-crypto/crc32c/build/main/index.d.ts new file mode 100644 index 00000000..eba3d9f6 --- /dev/null +++ b/node_modules/@aws-crypto/crc32c/build/main/index.d.ts @@ -0,0 +1,7 @@ +export declare function crc32c(data: Uint8Array): number; +export declare class 
Crc32c { + private checksum; + update(data: Uint8Array): this; + digest(): number; +} +export { AwsCrc32c } from "./aws_crc32c"; diff --git a/node_modules/@aws-crypto/crc32c/build/main/index.js b/node_modules/@aws-crypto/crc32c/build/main/index.js new file mode 100644 index 00000000..3a1af554 --- /dev/null +++ b/node_modules/@aws-crypto/crc32c/build/main/index.js @@ -0,0 +1,78 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AwsCrc32c = exports.Crc32c = exports.crc32c = void 0; +var tslib_1 = require("tslib"); +var util_1 = require("@aws-crypto/util"); +function crc32c(data) { + return new Crc32c().update(data).digest(); +} +exports.crc32c = crc32c; +var Crc32c = /** @class */ (function () { + function Crc32c() { + this.checksum = 0xffffffff; + } + Crc32c.prototype.update = function (data) { + var e_1, _a; + try { + for (var data_1 = tslib_1.__values(data), data_1_1 = data_1.next(); !data_1_1.done; data_1_1 = data_1.next()) { + var byte = data_1_1.value; + this.checksum = + (this.checksum >>> 8) ^ lookupTable[(this.checksum ^ byte) & 0xff]; + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (data_1_1 && !data_1_1.done && (_a = data_1.return)) _a.call(data_1); + } + finally { if (e_1) throw e_1.error; } + } + return this; + }; + Crc32c.prototype.digest = function () { + return (this.checksum ^ 0xffffffff) >>> 0; + }; + return Crc32c; +}()); +exports.Crc32c = Crc32c; +// prettier-ignore +var a_lookupTable = [ + 0x00000000, 0xF26B8303, 0xE13B70F7, 0x1350F3F4, 0xC79A971F, 0x35F1141C, 0x26A1E7E8, 0xD4CA64EB, + 0x8AD958CF, 0x78B2DBCC, 0x6BE22838, 0x9989AB3B, 0x4D43CFD0, 0xBF284CD3, 0xAC78BF27, 0x5E133C24, + 0x105EC76F, 0xE235446C, 0xF165B798, 0x030E349B, 0xD7C45070, 0x25AFD373, 0x36FF2087, 0xC494A384, + 0x9A879FA0, 0x68EC1CA3, 0x7BBCEF57, 0x89D76C54, 0x5D1D08BF, 0xAF768BBC, 0xBC267848, 0x4E4DFB4B, + 0x20BD8EDE, 0xD2D60DDD, 0xC186FE29, 0x33ED7D2A, 0xE72719C1, 0x154C9AC2, 0x061C6936, 0xF477EA35, + 0xAA64D611, 0x580F5512, 0x4B5FA6E6, 0xB93425E5, 0x6DFE410E, 0x9F95C20D, 0x8CC531F9, 0x7EAEB2FA, + 0x30E349B1, 0xC288CAB2, 0xD1D83946, 0x23B3BA45, 0xF779DEAE, 0x05125DAD, 0x1642AE59, 0xE4292D5A, + 0xBA3A117E, 0x4851927D, 0x5B016189, 0xA96AE28A, 0x7DA08661, 0x8FCB0562, 0x9C9BF696, 0x6EF07595, + 0x417B1DBC, 0xB3109EBF, 0xA0406D4B, 0x522BEE48, 0x86E18AA3, 0x748A09A0, 0x67DAFA54, 0x95B17957, + 0xCBA24573, 0x39C9C670, 0x2A993584, 0xD8F2B687, 0x0C38D26C, 0xFE53516F, 0xED03A29B, 0x1F682198, + 0x5125DAD3, 0xA34E59D0, 0xB01EAA24, 0x42752927, 0x96BF4DCC, 0x64D4CECF, 0x77843D3B, 0x85EFBE38, + 0xDBFC821C, 0x2997011F, 0x3AC7F2EB, 0xC8AC71E8, 0x1C661503, 0xEE0D9600, 0xFD5D65F4, 0x0F36E6F7, + 0x61C69362, 0x93AD1061, 0x80FDE395, 0x72966096, 0xA65C047D, 0x5437877E, 0x4767748A, 0xB50CF789, + 0xEB1FCBAD, 0x197448AE, 0x0A24BB5A, 0xF84F3859, 0x2C855CB2, 0xDEEEDFB1, 0xCDBE2C45, 0x3FD5AF46, + 0x7198540D, 0x83F3D70E, 0x90A324FA, 0x62C8A7F9, 0xB602C312, 0x44694011, 0x5739B3E5, 0xA55230E6, + 0xFB410CC2, 0x092A8FC1, 0x1A7A7C35, 0xE811FF36, 0x3CDB9BDD, 0xCEB018DE, 0xDDE0EB2A, 0x2F8B6829, + 0x82F63B78, 0x709DB87B, 0x63CD4B8F, 0x91A6C88C, 0x456CAC67, 0xB7072F64, 0xA457DC90, 0x563C5F93, + 0x082F63B7, 0xFA44E0B4, 0xE9141340, 0x1B7F9043, 0xCFB5F4A8, 0x3DDE77AB, 0x2E8E845F, 0xDCE5075C, + 0x92A8FC17, 0x60C37F14, 0x73938CE0, 0x81F80FE3, 0x55326B08, 0xA759E80B, 0xB4091BFF, 0x466298FC, + 0x1871A4D8, 0xEA1A27DB, 0xF94AD42F, 0x0B21572C, 0xDFEB33C7, 0x2D80B0C4, 
0x3ED04330, 0xCCBBC033, + 0xA24BB5A6, 0x502036A5, 0x4370C551, 0xB11B4652, 0x65D122B9, 0x97BAA1BA, 0x84EA524E, 0x7681D14D, + 0x2892ED69, 0xDAF96E6A, 0xC9A99D9E, 0x3BC21E9D, 0xEF087A76, 0x1D63F975, 0x0E330A81, 0xFC588982, + 0xB21572C9, 0x407EF1CA, 0x532E023E, 0xA145813D, 0x758FE5D6, 0x87E466D5, 0x94B49521, 0x66DF1622, + 0x38CC2A06, 0xCAA7A905, 0xD9F75AF1, 0x2B9CD9F2, 0xFF56BD19, 0x0D3D3E1A, 0x1E6DCDEE, 0xEC064EED, + 0xC38D26C4, 0x31E6A5C7, 0x22B65633, 0xD0DDD530, 0x0417B1DB, 0xF67C32D8, 0xE52CC12C, 0x1747422F, + 0x49547E0B, 0xBB3FFD08, 0xA86F0EFC, 0x5A048DFF, 0x8ECEE914, 0x7CA56A17, 0x6FF599E3, 0x9D9E1AE0, + 0xD3D3E1AB, 0x21B862A8, 0x32E8915C, 0xC083125F, 0x144976B4, 0xE622F5B7, 0xF5720643, 0x07198540, + 0x590AB964, 0xAB613A67, 0xB831C993, 0x4A5A4A90, 0x9E902E7B, 0x6CFBAD78, 0x7FAB5E8C, 0x8DC0DD8F, + 0xE330A81A, 0x115B2B19, 0x020BD8ED, 0xF0605BEE, 0x24AA3F05, 0xD6C1BC06, 0xC5914FF2, 0x37FACCF1, + 0x69E9F0D5, 0x9B8273D6, 0x88D28022, 0x7AB90321, 0xAE7367CA, 0x5C18E4C9, 0x4F48173D, 0xBD23943E, + 0xF36E6F75, 0x0105EC76, 0x12551F82, 0xE03E9C81, 0x34F4F86A, 0xC69F7B69, 0xD5CF889D, 0x27A40B9E, + 0x79B737BA, 0x8BDCB4B9, 0x988C474D, 0x6AE7C44E, 0xBE2DA0A5, 0x4C4623A6, 0x5F16D052, 0xAD7D5351, +]; +var lookupTable = (0, util_1.uint32ArrayFrom)(a_lookupTable); +var aws_crc32c_1 = require("./aws_crc32c"); +Object.defineProperty(exports, "AwsCrc32c", { enumerable: true, get: function () { return aws_crc32c_1.AwsCrc32c; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/crc32c/build/main/index.js.map b/node_modules/@aws-crypto/crc32c/build/main/index.js.map new file mode 100644 index 00000000..2025d272 --- /dev/null +++ b/node_modules/@aws-crypto/crc32c/build/main/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;;AAEtC,yCAAiD;AAEjD,SAAgB,MAAM,CAAC,IAAgB;IACrC,OAAO,IAAI,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,MAAM,EAAE,CAAC;AAC5C,CAAC;AAFD,wBAEC;AAED;IAAA;QACU,aAAQ,GAAG,UAAU,CAAC;IAchC,CAAC;IAZC,uBAAM,GAAN,UAAO,IAAgB;;;YACrB,KAAmB,IAAA,SAAA,iBAAA,IAAI,CAAA,0BAAA,4CAAE;gBAApB,IAAM,IAAI,iBAAA;gBACb,IAAI,CAAC,QAAQ;oBACX,CAAC,IAAI,CAAC,QAAQ,KAAK,CAAC,CAAC,GAAG,WAAW,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC;aACtE;;;;;;;;;QAED,OAAO,IAAI,CAAC;IACd,CAAC;IAED,uBAAM,GAAN;QACE,OAAO,CAAC,IAAI,CAAC,QAAQ,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;IAC5C,CAAC;IACH,aAAC;AAAD,CAAC,AAfD,IAeC;AAfY,wBAAM;AAiBnB,kBAAkB;AAClB,IAAM,aAAa,GAAG;IACpB,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU
,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;CAC/F,CAAC;AAEF,IAAM,WAAW,GAAgB,IAAA,sBAAe,EAAC,aAAa,CAAC,CAAA;AAC/D,2CAAyC;AAAhC,uGAAA,SAAS,OAAA"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/crc32c/build/module/aws_crc32c.d.ts b/node_modules/@aws-crypto/crc32c/build/module/aws_crc32c.d.ts new file mode 100644 index 00000000..3e28b90e --- /dev/null +++ b/node_modules/@aws-crypto/crc32c/build/module/aws_crc32c.d.ts @@ -0,0 +1,7 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class AwsCrc32c implements Checksum { + private crc32c; + update(toHash: SourceData): void; + digest(): Promise<Uint8Array>; + reset(): void; +} diff --git a/node_modules/@aws-crypto/crc32c/build/module/aws_crc32c.js b/node_modules/@aws-crypto/crc32c/build/module/aws_crc32c.js new file mode 100644 index 00000000..00b97790 --- /dev/null +++ b/node_modules/@aws-crypto/crc32c/build/module/aws_crc32c.js @@ -0,0 +1,28 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0 +import { __awaiter, __generator } from "tslib"; +import { convertToBuffer, isEmptyData, numToUint8 } from "@aws-crypto/util"; +import { Crc32c } from "./index"; +var AwsCrc32c = /** @class */ (function () { + function AwsCrc32c() { + this.crc32c = new Crc32c(); + } + AwsCrc32c.prototype.update = function (toHash) { + if (isEmptyData(toHash)) + return; + this.crc32c.update(convertToBuffer(toHash)); + }; + AwsCrc32c.prototype.digest = function () { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + return [2 /*return*/, numToUint8(this.crc32c.digest())]; + }); + }); + }; + AwsCrc32c.prototype.reset = function () { + this.crc32c = new Crc32c(); + }; + return AwsCrc32c; +}()); +export { AwsCrc32c }; +//# sourceMappingURL=aws_crc32c.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/crc32c/build/module/aws_crc32c.js.map b/node_modules/@aws-crypto/crc32c/build/module/aws_crc32c.js.map new file mode 100644 index 00000000..6959f355 --- /dev/null +++ b/node_modules/@aws-crypto/crc32c/build/module/aws_crc32c.js.map @@ -0,0 +1 @@ +{"version":3,"file":"aws_crc32c.js","sourceRoot":"","sources":["../../src/aws_crc32c.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;;AAGtC,OAAO,EAAE,eAAe,EAAE,WAAW,EAAE,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAC5E,OAAO,EAAE,MAAM,EAAE,MAAM,SAAS,CAAC;AAEjC;IAAA;QACU,WAAM,GAAG,IAAI,MAAM,EAAE,CAAC;IAehC,CAAC;IAbC,0BAAM,GAAN,UAAO,MAAkB;QACvB,IAAI,WAAW,CAAC,MAAM,CAAC;YAAE,OAAO;QAEhC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,CAAC;IAC9C,CAAC;IAEK,0BAAM,GAAZ;;;gBACE,sBAAO,UAAU,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,EAAC;;;KACzC;IAED,yBAAK,GAAL;QACE,IAAI,CAAC,MAAM,GAAG,IAAI,MAAM,EAAE,CAAC;IAC7B,CAAC;IACH,gBAAC;AAAD,CAAC,AAhBD,IAgBC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/crc32c/build/module/index.d.ts b/node_modules/@aws-crypto/crc32c/build/module/index.d.ts new file mode 100644 index 00000000..eba3d9f6 --- /dev/null +++ b/node_modules/@aws-crypto/crc32c/build/module/index.d.ts @@ -0,0 +1,7 @@ +export declare function crc32c(data: Uint8Array): number; +export declare class Crc32c { + private checksum; + update(data: Uint8Array): this; + digest(): number; +} +export { AwsCrc32c } from "./aws_crc32c"; diff --git a/node_modules/@aws-crypto/crc32c/build/module/index.js b/node_modules/@aws-crypto/crc32c/build/module/index.js new file mode 100644 index 00000000..609aa977 --- /dev/null +++ b/node_modules/@aws-crypto/crc32c/build/module/index.js @@ -0,0 +1,73 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { __values } from "tslib"; +import { uint32ArrayFrom } from "@aws-crypto/util"; +export function crc32c(data) { + return new Crc32c().update(data).digest(); +} +var Crc32c = /** @class */ (function () { + function Crc32c() { + this.checksum = 0xffffffff; + } + Crc32c.prototype.update = function (data) { + var e_1, _a; + try { + for (var data_1 = __values(data), data_1_1 = data_1.next(); !data_1_1.done; data_1_1 = data_1.next()) { + var byte = data_1_1.value; + this.checksum = + (this.checksum >>> 8) ^ lookupTable[(this.checksum ^ byte) & 0xff]; + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (data_1_1 && !data_1_1.done && (_a = data_1.return)) _a.call(data_1); + } + finally { if (e_1) throw e_1.error; } + } + return this; + }; + Crc32c.prototype.digest = function () { + return (this.checksum ^ 0xffffffff) >>> 0; + }; + return Crc32c; +}()); +export { Crc32c }; +// prettier-ignore +var a_lookupTable = [ + 0x00000000, 0xF26B8303, 0xE13B70F7, 0x1350F3F4, 0xC79A971F, 0x35F1141C, 0x26A1E7E8, 0xD4CA64EB, + 0x8AD958CF, 0x78B2DBCC, 0x6BE22838, 0x9989AB3B, 0x4D43CFD0, 0xBF284CD3, 0xAC78BF27, 0x5E133C24, + 0x105EC76F, 0xE235446C, 0xF165B798, 0x030E349B, 0xD7C45070, 0x25AFD373, 0x36FF2087, 0xC494A384, + 0x9A879FA0, 0x68EC1CA3, 0x7BBCEF57, 0x89D76C54, 0x5D1D08BF, 0xAF768BBC, 0xBC267848, 0x4E4DFB4B, + 0x20BD8EDE, 0xD2D60DDD, 0xC186FE29, 0x33ED7D2A, 0xE72719C1, 0x154C9AC2, 0x061C6936, 0xF477EA35, + 0xAA64D611, 0x580F5512, 0x4B5FA6E6, 0xB93425E5, 0x6DFE410E, 0x9F95C20D, 0x8CC531F9, 0x7EAEB2FA, + 0x30E349B1, 0xC288CAB2, 0xD1D83946, 0x23B3BA45, 0xF779DEAE, 0x05125DAD, 0x1642AE59, 0xE4292D5A, + 0xBA3A117E, 0x4851927D, 0x5B016189, 0xA96AE28A, 0x7DA08661, 0x8FCB0562, 0x9C9BF696, 0x6EF07595, + 0x417B1DBC, 0xB3109EBF, 0xA0406D4B, 0x522BEE48, 0x86E18AA3, 0x748A09A0, 0x67DAFA54, 0x95B17957, + 0xCBA24573, 0x39C9C670, 0x2A993584, 0xD8F2B687, 0x0C38D26C, 0xFE53516F, 0xED03A29B, 0x1F682198, + 0x5125DAD3, 0xA34E59D0, 0xB01EAA24, 0x42752927, 0x96BF4DCC, 0x64D4CECF, 0x77843D3B, 0x85EFBE38, + 0xDBFC821C, 0x2997011F, 0x3AC7F2EB, 0xC8AC71E8, 0x1C661503, 0xEE0D9600, 0xFD5D65F4, 0x0F36E6F7, + 0x61C69362, 0x93AD1061, 0x80FDE395, 0x72966096, 0xA65C047D, 0x5437877E, 0x4767748A, 0xB50CF789, + 0xEB1FCBAD, 0x197448AE, 0x0A24BB5A, 0xF84F3859, 0x2C855CB2, 0xDEEEDFB1, 0xCDBE2C45, 0x3FD5AF46, + 0x7198540D, 0x83F3D70E, 0x90A324FA, 0x62C8A7F9, 0xB602C312, 0x44694011, 0x5739B3E5, 0xA55230E6, + 0xFB410CC2, 0x092A8FC1, 0x1A7A7C35, 0xE811FF36, 0x3CDB9BDD, 0xCEB018DE, 0xDDE0EB2A, 0x2F8B6829, + 0x82F63B78, 0x709DB87B, 0x63CD4B8F, 0x91A6C88C, 0x456CAC67, 0xB7072F64, 0xA457DC90, 0x563C5F93, + 0x082F63B7, 0xFA44E0B4, 0xE9141340, 0x1B7F9043, 0xCFB5F4A8, 0x3DDE77AB, 0x2E8E845F, 0xDCE5075C, + 0x92A8FC17, 0x60C37F14, 0x73938CE0, 0x81F80FE3, 0x55326B08, 0xA759E80B, 0xB4091BFF, 0x466298FC, + 0x1871A4D8, 0xEA1A27DB, 0xF94AD42F, 0x0B21572C, 0xDFEB33C7, 0x2D80B0C4, 0x3ED04330, 0xCCBBC033, + 0xA24BB5A6, 0x502036A5, 0x4370C551, 0xB11B4652, 0x65D122B9, 0x97BAA1BA, 0x84EA524E, 0x7681D14D, + 0x2892ED69, 0xDAF96E6A, 0xC9A99D9E, 0x3BC21E9D, 0xEF087A76, 0x1D63F975, 0x0E330A81, 0xFC588982, + 0xB21572C9, 0x407EF1CA, 0x532E023E, 0xA145813D, 0x758FE5D6, 0x87E466D5, 0x94B49521, 0x66DF1622, + 0x38CC2A06, 0xCAA7A905, 0xD9F75AF1, 0x2B9CD9F2, 0xFF56BD19, 0x0D3D3E1A, 0x1E6DCDEE, 0xEC064EED, + 0xC38D26C4, 0x31E6A5C7, 0x22B65633, 0xD0DDD530, 0x0417B1DB, 0xF67C32D8, 0xE52CC12C, 0x1747422F, + 0x49547E0B, 0xBB3FFD08, 0xA86F0EFC, 0x5A048DFF, 0x8ECEE914, 0x7CA56A17, 0x6FF599E3, 0x9D9E1AE0, + 0xD3D3E1AB, 
0x21B862A8, 0x32E8915C, 0xC083125F, 0x144976B4, 0xE622F5B7, 0xF5720643, 0x07198540, + 0x590AB964, 0xAB613A67, 0xB831C993, 0x4A5A4A90, 0x9E902E7B, 0x6CFBAD78, 0x7FAB5E8C, 0x8DC0DD8F, + 0xE330A81A, 0x115B2B19, 0x020BD8ED, 0xF0605BEE, 0x24AA3F05, 0xD6C1BC06, 0xC5914FF2, 0x37FACCF1, + 0x69E9F0D5, 0x9B8273D6, 0x88D28022, 0x7AB90321, 0xAE7367CA, 0x5C18E4C9, 0x4F48173D, 0xBD23943E, + 0xF36E6F75, 0x0105EC76, 0x12551F82, 0xE03E9C81, 0x34F4F86A, 0xC69F7B69, 0xD5CF889D, 0x27A40B9E, + 0x79B737BA, 0x8BDCB4B9, 0x988C474D, 0x6AE7C44E, 0xBE2DA0A5, 0x4C4623A6, 0x5F16D052, 0xAD7D5351, +]; +var lookupTable = uint32ArrayFrom(a_lookupTable); +export { AwsCrc32c } from "./aws_crc32c"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/crc32c/build/module/index.js.map b/node_modules/@aws-crypto/crc32c/build/module/index.js.map new file mode 100644 index 00000000..4a69957a --- /dev/null +++ b/node_modules/@aws-crypto/crc32c/build/module/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;;AAEtC,OAAO,EAAC,eAAe,EAAC,MAAM,kBAAkB,CAAC;AAEjD,MAAM,UAAU,MAAM,CAAC,IAAgB;IACrC,OAAO,IAAI,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,MAAM,EAAE,CAAC;AAC5C,CAAC;AAED;IAAA;QACU,aAAQ,GAAG,UAAU,CAAC;IAchC,CAAC;IAZC,uBAAM,GAAN,UAAO,IAAgB;;;YACrB,KAAmB,IAAA,SAAA,SAAA,IAAI,CAAA,0BAAA,4CAAE;gBAApB,IAAM,IAAI,iBAAA;gBACb,IAAI,CAAC,QAAQ;oBACX,CAAC,IAAI,CAAC,QAAQ,KAAK,CAAC,CAAC,GAAG,WAAW,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC;aACtE;;;;;;;;;QAED,OAAO,IAAI,CAAC;IACd,CAAC;IAED,uBAAM,GAAN;QACE,OAAO,CAAC,IAAI,CAAC,QAAQ,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;IAC5C,CAAC;IACH,aAAC;AAAD,CAAC,AAfD,IAeC;;AAED,kBAAkB;AAClB,IAAM,aAAa,GAAG;IACpB,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UA
AU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;IAC9F,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU;CAC/F,CAAC;AAEF,IAAM,WAAW,GAAgB,eAAe,CAAC,aAAa,CAAC,CAAA;AAC/D,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/crc32c/package.json b/node_modules/@aws-crypto/crc32c/package.json new file mode 100644 index 00000000..1862d9ef --- /dev/null +++ b/node_modules/@aws-crypto/crc32c/package.json @@ -0,0 +1,32 @@ +{ + "name": "@aws-crypto/crc32c", + "version": "5.2.0", + "scripts": { + "prepublishOnly": "tsc -p tsconfig.json && tsc -p tsconfig.module.json", + "pretest": "tsc -p tsconfig.test.json", + "test": "mocha --require ts-node/register test/**/*test.ts" + }, + "main": "./build/main/index.js", + "module": "./build/module/index.js", + "types": "./build/main/index.d.ts", + "repository": { + "type": "git", + "url": "git@github.com:aws/aws-sdk-js-crypto-helpers.git" + }, + "author": { + "name": "AWS Crypto Tools Team", + "email": "aws-cryptools@amazon.com", + "url": "https://docs.aws.amazon.com/aws-crypto-tools/index.html?id=docs_gateway#lang/en_us" + }, + "homepage": "https://github.com/aws/aws-sdk-js-crypto-helpers/tree/master/packages/crc32c", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + }, + "publishConfig": { + "access": "public" + }, + "gitHead": "c11b171b35ec5c093364f0e0d8dc4ab1af68e748" +} diff --git a/node_modules/@aws-crypto/crc32c/src/aws_crc32c.ts b/node_modules/@aws-crypto/crc32c/src/aws_crc32c.ts new file mode 100644 index 00000000..0108cb38 --- /dev/null +++ b/node_modules/@aws-crypto/crc32c/src/aws_crc32c.ts @@ -0,0 +1,24 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { Checksum, SourceData } from "@aws-sdk/types"; +import { convertToBuffer, isEmptyData, numToUint8 } from "@aws-crypto/util"; +import { Crc32c } from "./index"; + +export class AwsCrc32c implements Checksum { + private crc32c = new Crc32c(); + + update(toHash: SourceData) { + if (isEmptyData(toHash)) return; + + this.crc32c.update(convertToBuffer(toHash)); + } + + async digest(): Promise<Uint8Array> { + return numToUint8(this.crc32c.digest()); + } + + reset(): void { + this.crc32c = new Crc32c(); + } +} diff --git a/node_modules/@aws-crypto/crc32c/src/index.ts b/node_modules/@aws-crypto/crc32c/src/index.ts new file mode 100644 index 00000000..83a7824e --- /dev/null +++ b/node_modules/@aws-crypto/crc32c/src/index.ts @@ -0,0 +1,64 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0 + +import {uint32ArrayFrom} from "@aws-crypto/util"; + +export function crc32c(data: Uint8Array): number { + return new Crc32c().update(data).digest(); +} + +export class Crc32c { + private checksum = 0xffffffff; + + update(data: Uint8Array): this { + for (const byte of data) { + this.checksum = + (this.checksum >>> 8) ^ lookupTable[(this.checksum ^ byte) & 0xff]; + } + + return this; + } + + digest(): number { + return (this.checksum ^ 0xffffffff) >>> 0; + } +} + +// prettier-ignore +const a_lookupTable = [ + 0x00000000, 0xF26B8303, 0xE13B70F7, 0x1350F3F4, 0xC79A971F, 0x35F1141C, 0x26A1E7E8, 0xD4CA64EB, + 0x8AD958CF, 0x78B2DBCC, 0x6BE22838, 0x9989AB3B, 0x4D43CFD0, 0xBF284CD3, 0xAC78BF27, 0x5E133C24, + 0x105EC76F, 0xE235446C, 0xF165B798, 0x030E349B, 0xD7C45070, 0x25AFD373, 0x36FF2087, 0xC494A384, + 0x9A879FA0, 0x68EC1CA3, 0x7BBCEF57, 0x89D76C54, 0x5D1D08BF, 0xAF768BBC, 0xBC267848, 0x4E4DFB4B, + 0x20BD8EDE, 0xD2D60DDD, 0xC186FE29, 0x33ED7D2A, 0xE72719C1, 0x154C9AC2, 0x061C6936, 0xF477EA35, + 0xAA64D611, 0x580F5512, 0x4B5FA6E6, 0xB93425E5, 0x6DFE410E, 0x9F95C20D, 0x8CC531F9, 0x7EAEB2FA, + 0x30E349B1, 0xC288CAB2, 0xD1D83946, 0x23B3BA45, 0xF779DEAE, 0x05125DAD, 0x1642AE59, 0xE4292D5A, + 0xBA3A117E, 0x4851927D, 0x5B016189, 0xA96AE28A, 0x7DA08661, 0x8FCB0562, 0x9C9BF696, 0x6EF07595, + 0x417B1DBC, 0xB3109EBF, 0xA0406D4B, 0x522BEE48, 0x86E18AA3, 0x748A09A0, 0x67DAFA54, 0x95B17957, + 0xCBA24573, 0x39C9C670, 0x2A993584, 0xD8F2B687, 0x0C38D26C, 0xFE53516F, 0xED03A29B, 0x1F682198, + 0x5125DAD3, 0xA34E59D0, 0xB01EAA24, 0x42752927, 0x96BF4DCC, 0x64D4CECF, 0x77843D3B, 0x85EFBE38, + 0xDBFC821C, 0x2997011F, 0x3AC7F2EB, 0xC8AC71E8, 0x1C661503, 0xEE0D9600, 0xFD5D65F4, 0x0F36E6F7, + 0x61C69362, 0x93AD1061, 0x80FDE395, 0x72966096, 0xA65C047D, 0x5437877E, 0x4767748A, 0xB50CF789, + 0xEB1FCBAD, 0x197448AE, 0x0A24BB5A, 0xF84F3859, 0x2C855CB2, 0xDEEEDFB1, 0xCDBE2C45, 0x3FD5AF46, + 0x7198540D, 0x83F3D70E, 0x90A324FA, 0x62C8A7F9, 0xB602C312, 0x44694011, 0x5739B3E5, 0xA55230E6, + 0xFB410CC2, 0x092A8FC1, 0x1A7A7C35, 0xE811FF36, 0x3CDB9BDD, 0xCEB018DE, 0xDDE0EB2A, 0x2F8B6829, + 0x82F63B78, 0x709DB87B, 0x63CD4B8F, 0x91A6C88C, 0x456CAC67, 0xB7072F64, 0xA457DC90, 0x563C5F93, + 0x082F63B7, 0xFA44E0B4, 0xE9141340, 0x1B7F9043, 0xCFB5F4A8, 0x3DDE77AB, 0x2E8E845F, 0xDCE5075C, + 0x92A8FC17, 0x60C37F14, 0x73938CE0, 0x81F80FE3, 0x55326B08, 0xA759E80B, 0xB4091BFF, 0x466298FC, + 0x1871A4D8, 0xEA1A27DB, 0xF94AD42F, 0x0B21572C, 0xDFEB33C7, 0x2D80B0C4, 0x3ED04330, 0xCCBBC033, + 0xA24BB5A6, 0x502036A5, 0x4370C551, 0xB11B4652, 0x65D122B9, 0x97BAA1BA, 0x84EA524E, 0x7681D14D, + 0x2892ED69, 0xDAF96E6A, 0xC9A99D9E, 0x3BC21E9D, 0xEF087A76, 0x1D63F975, 0x0E330A81, 0xFC588982, + 0xB21572C9, 0x407EF1CA, 0x532E023E, 0xA145813D, 0x758FE5D6, 0x87E466D5, 0x94B49521, 0x66DF1622, + 0x38CC2A06, 0xCAA7A905, 0xD9F75AF1, 0x2B9CD9F2, 0xFF56BD19, 0x0D3D3E1A, 0x1E6DCDEE, 0xEC064EED, + 0xC38D26C4, 0x31E6A5C7, 0x22B65633, 0xD0DDD530, 0x0417B1DB, 0xF67C32D8, 0xE52CC12C, 0x1747422F, + 0x49547E0B, 0xBB3FFD08, 0xA86F0EFC, 0x5A048DFF, 0x8ECEE914, 0x7CA56A17, 0x6FF599E3, 0x9D9E1AE0, + 0xD3D3E1AB, 0x21B862A8, 0x32E8915C, 0xC083125F, 0x144976B4, 0xE622F5B7, 0xF5720643, 0x07198540, + 0x590AB964, 0xAB613A67, 0xB831C993, 0x4A5A4A90, 0x9E902E7B, 0x6CFBAD78, 0x7FAB5E8C, 0x8DC0DD8F, + 0xE330A81A, 0x115B2B19, 0x020BD8ED, 0xF0605BEE, 0x24AA3F05, 0xD6C1BC06, 0xC5914FF2, 0x37FACCF1, + 0x69E9F0D5, 0x9B8273D6, 0x88D28022, 0x7AB90321, 0xAE7367CA, 0x5C18E4C9, 0x4F48173D, 0xBD23943E, + 0xF36E6F75, 0x0105EC76, 0x12551F82, 0xE03E9C81, 
0x34F4F86A, 0xC69F7B69, 0xD5CF889D, 0x27A40B9E, + 0x79B737BA, 0x8BDCB4B9, 0x988C474D, 0x6AE7C44E, 0xBE2DA0A5, 0x4C4623A6, 0x5F16D052, 0xAD7D5351, +]; + +const lookupTable: Uint32Array = uint32ArrayFrom(a_lookupTable) +export { AwsCrc32c } from "./aws_crc32c"; diff --git a/node_modules/@aws-crypto/crc32c/tsconfig.json b/node_modules/@aws-crypto/crc32c/tsconfig.json new file mode 100644 index 00000000..2b996d08 --- /dev/null +++ b/node_modules/@aws-crypto/crc32c/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "rootDir": "./src", + "outDir": "./build/main", + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules/**"] +} diff --git a/node_modules/@aws-crypto/crc32c/tsconfig.module.json b/node_modules/@aws-crypto/crc32c/tsconfig.module.json new file mode 100644 index 00000000..7d0cfddc --- /dev/null +++ b/node_modules/@aws-crypto/crc32c/tsconfig.module.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "outDir": "build/module", + "module": "esnext", + } +} diff --git a/node_modules/@aws-crypto/sha1-browser/CHANGELOG.md b/node_modules/@aws-crypto/sha1-browser/CHANGELOG.md new file mode 100644 index 00000000..fc58d4da --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/CHANGELOG.md @@ -0,0 +1,62 @@ +# Change Log + +All notable changes to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. + +# [5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.1.0...v5.2.0) (2023-10-16) + +### Features + +- support ESM artifacts in all packages ([#752](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/752)) ([e930ffb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e930ffba5cfef66dd242049e7d514ced232c1e3b)) + +# [5.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.0.0...v5.1.0) (2023-09-22) + +### Bug Fixes + +- Update tsc to 2.x ([#735](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/735)) ([782e0de](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/782e0de9f5fef41f694130580a69d940894b6b8c)) + +### Features + +- Use @smithy/util-utf8 ([#730](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/730)) ([00fb851](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/00fb851ca3559d5a1f370f9256814de1210826b8)), closes [#699](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/699) + +# [5.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v4.0.1...v5.0.0) (2023-07-13) + +- feat!: drop support for IE 11 (#629) ([6c49fb6](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6c49fb6c1b1f18bbff02dbd77a37a21bdb40c959)), closes [#629](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/629) + +### BREAKING CHANGES + +- Remove support for IE11 + +Co-authored-by: texastony <5892063+texastony@users.noreply.github.com> + +# [4.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v3.0.0...v4.0.0) (2023-02-20) + +**Note:** Version bump only for package @aws-crypto/sha1-browser + +# [3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.2...v3.0.0) (2023-01-12) + +- feat!: replace Hash implementations with Checksum interface (#492) ([da43dc0](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/da43dc0fdf669d9ebb5bfb1b1f7c79e46c4aaae1)), closes [#492](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/492) + +### BREAKING CHANGES + +- All classes that implemented `Hash` now implement `Checksum`. 
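In practice, the `Checksum` interface standardizes every hasher in these vendored packages on the same `update`/`digest`/`reset` trio, with `digest()` resolving to a `Uint8Array`. A minimal sketch of how the `@aws-crypto/crc32c` package added above can be consumed under that contract (the sample payload and variable names are illustrative, not part of the package):

```
import { AwsCrc32c, crc32c } from "@aws-crypto/crc32c";

async function demo(): Promise<void> {
  const payload = new Uint8Array([0x68, 0x65, 0x6c, 0x6c, 0x6f]); // "hello"

  // One-shot helper: returns the CRC32C as a plain 32-bit number.
  const asNumber = crc32c(payload);

  // Checksum interface: incremental updates, byte-array digest, reusable state.
  const checksum = new AwsCrc32c();
  checksum.update(payload);
  const asBytes = await checksum.digest(); // Uint8Array, per the Checksum contract
  checksum.reset(); // ready for the next payload

  console.log(asNumber, asBytes);
}
```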
+ +## [2.0.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.1...v2.0.2) (2022-09-07) + +### Bug Fixes + +- **#337:** update @aws-sdk/types ([#373](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/373)) ([b26a811](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/b26a811a392f5209c7ec7e57251500d4d78f97ff)), closes [#337](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/337) + +# [2.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.2...v2.0.0) (2021-10-25) + +**Note:** Version bump only for package @aws-crypto/sha1-browser + +# [1.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.1.1...v1.2.0) (2021-09-17) + +### Bug Fixes + +- Adding ie11-detection dependency to sha1-browser ([#213](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/213)) ([138750d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/138750d96385b8cc479b6f54c500ee1b5380648c)) + +### Features + +- Add SHA1 ([#208](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/208)) ([45c50ff](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/45c50ffa3acc9e3bf4039ab59a0102e4d40455ec)) diff --git a/node_modules/@aws-crypto/sha1-browser/LICENSE b/node_modules/@aws-crypto/sha1-browser/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@aws-crypto/sha1-browser/README.md b/node_modules/@aws-crypto/sha1-browser/README.md new file mode 100644 index 00000000..e03244fe --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/README.md @@ -0,0 +1,21 @@ +# @aws-crypto/sha1-browser + +SHA1 wrapper for browsers that prefers `window.crypto.subtle`. + +SHA1 is **NOT** a cryptographically secure algorithm. +It should _only_ be used for non-cryptographic functions like checksums.
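One caveat worth noting before the usage example: as the compiled sources later in this diff show, the exported `Sha1` throws ("SHA1 not supported") at construction time when WebCrypto is unavailable, so callers on locked-down or non-browser runtimes need a guard. A small sketch of such a guard (the `sha1OrUndefined` helper is hypothetical, not part of the package):

```
import {Sha1} from '@aws-crypto/sha1-browser'

// Hypothetical guard: returns the digest, or undefined where WebCrypto is missing.
async function sha1OrUndefined(payload: string): Promise<Uint8Array | undefined> {
  let hash: Sha1;
  try {
    hash = new Sha1(); // throws "SHA1 not supported" without window.crypto.subtle
  } catch {
    return undefined; // caller decides how to degrade
  }
  hash.update(payload);
  return hash.digest();
}
```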
+ +## Usage + +``` +import {Sha1} from '@aws-crypto/sha1-browser' + +const hash = new Sha1(); +hash.update('some data'); +const result = await hash.digest(); + +``` + +## Test + +`npm test` diff --git a/node_modules/@aws-crypto/sha1-browser/build/main/constants.d.ts b/node_modules/@aws-crypto/sha1-browser/build/main/constants.d.ts new file mode 100644 index 00000000..bc52bca4 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/main/constants.d.ts @@ -0,0 +1,10 @@ +export declare const SHA_1_HASH: { + name: "SHA-1"; +}; +export declare const SHA_1_HMAC_ALGO: { + name: "HMAC"; + hash: { + name: "SHA-1"; + }; +}; +export declare const EMPTY_DATA_SHA_1: Uint8Array; diff --git a/node_modules/@aws-crypto/sha1-browser/build/main/constants.js b/node_modules/@aws-crypto/sha1-browser/build/main/constants.js new file mode 100644 index 00000000..cd34041b --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/main/constants.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.EMPTY_DATA_SHA_1 = exports.SHA_1_HMAC_ALGO = exports.SHA_1_HASH = void 0; +exports.SHA_1_HASH = { name: "SHA-1" }; +exports.SHA_1_HMAC_ALGO = { + name: "HMAC", + hash: exports.SHA_1_HASH, +}; +exports.EMPTY_DATA_SHA_1 = new Uint8Array([ + 218, + 57, + 163, + 238, + 94, + 107, + 75, + 13, + 50, + 85, + 191, + 239, + 149, + 96, + 24, + 144, + 175, + 216, + 7, + 9, +]); +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/build/main/constants.js.map b/node_modules/@aws-crypto/sha1-browser/build/main/constants.js.map new file mode 100644 index 00000000..9817c121 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/main/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":";;;AAAa,QAAA,UAAU,GAAsB,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC;AAElD,QAAA,eAAe,GAA8C;IACxE,IAAI,EAAE,MAAM;IACZ,IAAI,EAAE,kBAAU;CACjB,CAAC;AAEW,QAAA,gBAAgB,GAAG,IAAI,UAAU,CAAC;IAC7C,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,EAAE;IACF,EAAE;IACF,EAAE;IACF,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,CAAC;IACD,CAAC;CACF,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/build/main/crossPlatformSha1.d.ts b/node_modules/@aws-crypto/sha1-browser/build/main/crossPlatformSha1.d.ts new file mode 100644 index 00000000..0a16bc88 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/main/crossPlatformSha1.d.ts @@ -0,0 +1,8 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha1 implements Checksum { + private hash; + constructor(secret?: SourceData); + update(data: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + digest(): Promise<Uint8Array>; + reset(): void; +} diff --git a/node_modules/@aws-crypto/sha1-browser/build/main/crossPlatformSha1.js b/node_modules/@aws-crypto/sha1-browser/build/main/crossPlatformSha1.js new file mode 100644 index 00000000..2f37cedc --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/main/crossPlatformSha1.js @@ -0,0 +1,29 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Sha1 = void 0; +var webCryptoSha1_1 = require("./webCryptoSha1"); +var supports_web_crypto_1 = require("@aws-crypto/supports-web-crypto"); +var util_locate_window_1 = require("@aws-sdk/util-locate-window"); +var util_1 = require("@aws-crypto/util"); +var Sha1 = /**
@class */ (function () { + function Sha1(secret) { + if ((0, supports_web_crypto_1.supportsWebCrypto)((0, util_locate_window_1.locateWindow)())) { + this.hash = new webCryptoSha1_1.Sha1(secret); + } + else { + throw new Error("SHA1 not supported"); + } + } + Sha1.prototype.update = function (data, encoding) { + this.hash.update((0, util_1.convertToBuffer)(data)); + }; + Sha1.prototype.digest = function () { + return this.hash.digest(); + }; + Sha1.prototype.reset = function () { + this.hash.reset(); + }; + return Sha1; +}()); +exports.Sha1 = Sha1; +//# sourceMappingURL=crossPlatformSha1.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/build/main/crossPlatformSha1.js.map b/node_modules/@aws-crypto/sha1-browser/build/main/crossPlatformSha1.js.map new file mode 100644 index 00000000..d686f6b0 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/main/crossPlatformSha1.js.map @@ -0,0 +1 @@ +{"version":3,"file":"crossPlatformSha1.js","sourceRoot":"","sources":["../../src/crossPlatformSha1.ts"],"names":[],"mappings":";;;AAAA,iDAAwD;AAExD,uEAAoE;AACpE,kEAA2D;AAC3D,yCAAmD;AAEnD;IAGE,cAAY,MAAmB;QAC7B,IAAI,IAAA,uCAAiB,EAAC,IAAA,iCAAY,GAAE,CAAC,EAAE;YACrC,IAAI,CAAC,IAAI,GAAG,IAAI,oBAAa,CAAC,MAAM,CAAC,CAAC;SACvC;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC,CAAC;SACvC;IACH,CAAC;IAED,qBAAM,GAAN,UAAO,IAAgB,EAAE,QAAsC;QAC7D,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,IAAA,sBAAe,EAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC;IAED,qBAAM,GAAN;QACE,OAAO,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;IAC5B,CAAC;IAED,oBAAK,GAAL;QACE,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC;IACpB,CAAC;IACH,WAAC;AAAD,CAAC,AAtBD,IAsBC;AAtBY,oBAAI"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/build/main/index.d.ts b/node_modules/@aws-crypto/sha1-browser/build/main/index.d.ts new file mode 100644 index 00000000..1b6072d8 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/main/index.d.ts @@ -0,0 +1,2 @@ +export * from "./crossPlatformSha1"; +export { Sha1 as WebCryptoSha1 } from "./webCryptoSha1"; diff --git a/node_modules/@aws-crypto/sha1-browser/build/main/index.js b/node_modules/@aws-crypto/sha1-browser/build/main/index.js new file mode 100644 index 00000000..0d445491 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/main/index.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.WebCryptoSha1 = void 0; +var tslib_1 = require("tslib"); +tslib_1.__exportStar(require("./crossPlatformSha1"), exports); +var webCryptoSha1_1 = require("./webCryptoSha1"); +Object.defineProperty(exports, "WebCryptoSha1", { enumerable: true, get: function () { return webCryptoSha1_1.Sha1; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/build/main/index.js.map b/node_modules/@aws-crypto/sha1-browser/build/main/index.js.map new file mode 100644 index 00000000..bca6f293 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/main/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;AAAA,8DAAoC;AACpC,iDAAwD;AAA/C,8GAAA,IAAI,OAAiB"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/build/main/isEmptyData.d.ts b/node_modules/@aws-crypto/sha1-browser/build/main/isEmptyData.d.ts new file mode 100644 index 00000000..43ae4a7c --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/main/isEmptyData.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from 
"@aws-sdk/types"; +export declare function isEmptyData(data: SourceData): boolean; diff --git a/node_modules/@aws-crypto/sha1-browser/build/main/isEmptyData.js b/node_modules/@aws-crypto/sha1-browser/build/main/isEmptyData.js new file mode 100644 index 00000000..fe91548a --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/main/isEmptyData.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isEmptyData = void 0; +function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +exports.isEmptyData = isEmptyData; +//# sourceMappingURL=isEmptyData.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/build/main/isEmptyData.js.map b/node_modules/@aws-crypto/sha1-browser/build/main/isEmptyData.js.map new file mode 100644 index 00000000..20ccfd6a --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/main/isEmptyData.js.map @@ -0,0 +1 @@ +{"version":3,"file":"isEmptyData.js","sourceRoot":"","sources":["../../src/isEmptyData.ts"],"names":[],"mappings":";;;AAEA,SAAgB,WAAW,CAAC,IAAgB;IAC1C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC;KAC1B;IAED,OAAO,IAAI,CAAC,UAAU,KAAK,CAAC,CAAC;AAC/B,CAAC;AAND,kCAMC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/build/main/webCryptoSha1.d.ts b/node_modules/@aws-crypto/sha1-browser/build/main/webCryptoSha1.d.ts new file mode 100644 index 00000000..a9d1b038 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/main/webCryptoSha1.d.ts @@ -0,0 +1,9 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha1 implements Checksum { + private readonly key; + private toHash; + constructor(secret?: SourceData); + update(data: SourceData): void; + digest(): Promise; + reset(): void; +} diff --git a/node_modules/@aws-crypto/sha1-browser/build/main/webCryptoSha1.js b/node_modules/@aws-crypto/sha1-browser/build/main/webCryptoSha1.js new file mode 100644 index 00000000..2bd961de --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/main/webCryptoSha1.js @@ -0,0 +1,61 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Sha1 = void 0; +var util_utf8_1 = require("@smithy/util-utf8"); +var isEmptyData_1 = require("./isEmptyData"); +var constants_1 = require("./constants"); +var util_locate_window_1 = require("@aws-sdk/util-locate-window"); +var Sha1 = /** @class */ (function () { + function Sha1(secret) { + this.toHash = new Uint8Array(0); + if (secret !== void 0) { + this.key = new Promise(function (resolve, reject) { + (0, util_locate_window_1.locateWindow)() + .crypto.subtle.importKey("raw", convertToBuffer(secret), constants_1.SHA_1_HMAC_ALGO, false, ["sign"]) + .then(resolve, reject); + }); + this.key.catch(function () { }); + } + } + Sha1.prototype.update = function (data) { + if ((0, isEmptyData_1.isEmptyData)(data)) { + return; + } + var update = convertToBuffer(data); + var typedArray = new Uint8Array(this.toHash.byteLength + update.byteLength); + typedArray.set(this.toHash, 0); + typedArray.set(update, this.toHash.byteLength); + this.toHash = typedArray; + }; + Sha1.prototype.digest = function () { + var _this = this; + if (this.key) { + return this.key.then(function (key) { + return (0, util_locate_window_1.locateWindow)() + .crypto.subtle.sign(constants_1.SHA_1_HMAC_ALGO, key, _this.toHash) + .then(function (data) { return new Uint8Array(data); }); + }); + } 
+ if ((0, isEmptyData_1.isEmptyData)(this.toHash)) { + return Promise.resolve(constants_1.EMPTY_DATA_SHA_1); + } + return Promise.resolve() + .then(function () { return (0, util_locate_window_1.locateWindow)().crypto.subtle.digest(constants_1.SHA_1_HASH, _this.toHash); }) + .then(function (data) { return Promise.resolve(new Uint8Array(data)); }); + }; + Sha1.prototype.reset = function () { + this.toHash = new Uint8Array(0); + }; + return Sha1; +}()); +exports.Sha1 = Sha1; +function convertToBuffer(data) { + if (typeof data === "string") { + return (0, util_utf8_1.fromUtf8)(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +} +//# sourceMappingURL=webCryptoSha1.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/build/main/webCryptoSha1.js.map b/node_modules/@aws-crypto/sha1-browser/build/main/webCryptoSha1.js.map new file mode 100644 index 00000000..c150a32a --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/main/webCryptoSha1.js.map @@ -0,0 +1 @@ +{"version":3,"file":"webCryptoSha1.js","sourceRoot":"","sources":["../../src/webCryptoSha1.ts"],"names":[],"mappings":";;;AACA,+CAA6C;AAC7C,6CAA4C;AAC5C,yCAA4E;AAC5E,kEAA2D;AAE3D;IAIE,cAAY,MAAmB;QAFvB,WAAM,GAAe,IAAI,UAAU,CAAC,CAAC,CAAC,CAAC;QAG7C,IAAI,MAAM,KAAK,KAAK,CAAC,EAAE;YACrB,IAAI,CAAC,GAAG,GAAG,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;gBACrC,IAAA,iCAAY,GAAE;qBACX,MAAM,CAAC,MAAM,CAAC,SAAS,CACtB,KAAK,EACL,eAAe,CAAC,MAAM,CAAC,EACvB,2BAAe,EACf,KAAK,EACL,CAAC,MAAM,CAAC,CACT;qBACA,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;YAC3B,CAAC,CAAC,CAAC;YACH,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,cAAO,CAAC,CAAC,CAAC;SAC1B;IACH,CAAC;IAED,qBAAM,GAAN,UAAO,IAAgB;QACrB,IAAI,IAAA,yBAAW,EAAC,IAAI,CAAC,EAAE;YACrB,OAAO;SACR;QAED,IAAM,MAAM,GAAG,eAAe,CAAC,IAAI,CAAC,CAAC;QACrC,IAAM,UAAU,GAAG,IAAI,UAAU,CAC/B,IAAI,CAAC,MAAM,CAAC,UAAU,GAAG,MAAM,CAAC,UAAU,CAC3C,CAAC;QACF,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;QAC/B,UAAU,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC;QAC/C,IAAI,CAAC,MAAM,GAAG,UAAU,CAAC;IAC3B,CAAC;IAED,qBAAM,GAAN;QAAA,iBAgBC;QAfC,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,OAAO,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,UAAC,GAAG;gBACvB,OAAA,IAAA,iCAAY,GAAE;qBACX,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,2BAAe,EAAE,GAAG,EAAE,KAAI,CAAC,MAAM,CAAC;qBACrD,IAAI,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,UAAU,CAAC,IAAI,CAAC,EAApB,CAAoB,CAAC;YAFvC,CAEuC,CACxC,CAAC;SACH;QAED,IAAI,IAAA,yBAAW,EAAC,IAAI,CAAC,MAAM,CAAC,EAAE;YAC5B,OAAO,OAAO,CAAC,OAAO,CAAC,4BAAgB,CAAC,CAAC;SAC1C;QAED,OAAO,OAAO,CAAC,OAAO,EAAE;aACrB,IAAI,CAAC,cAAM,OAAA,IAAA,iCAAY,GAAE,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,sBAAU,EAAE,KAAI,CAAC,MAAM,CAAC,EAA5D,CAA4D,CAAC;aACxE,IAAI,CAAC,UAAC,IAAI,IAAK,OAAA,OAAO,CAAC,OAAO,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC,EAArC,CAAqC,CAAC,CAAC;IAC3D,CAAC;IAED,oBAAK,GAAL;QACE,IAAI,CAAC,MAAM,GAAG,IAAI,UAAU,CAAC,CAAC,CAAC,CAAC;IAClC,CAAC;IACH,WAAC;AAAD,CAAC,AAxDD,IAwDC;AAxDY,oBAAI;AA0DjB,SAAS,eAAe,CAAC,IAAgB;IACvC,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,IAAA,oBAAQ,EAAC,IAAI,CAAC,CAAC;KACvB;IAED,IAAI,WAAW,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;QAC5B,OAAO,IAAI,UAAU,CACnB,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,UAAU,EACf,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC,iBAAiB,CAC/C,CAAC;KACH;IAED,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC;AAC9B,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/build/module/constants.d.ts b/node_modules/@aws-crypto/sha1-browser/build/module/constants.d.ts new file mode 100644 index 00000000..bc52bca4 --- /dev/null +++ 
b/node_modules/@aws-crypto/sha1-browser/build/module/constants.d.ts @@ -0,0 +1,10 @@ +export declare const SHA_1_HASH: { + name: "SHA-1"; +}; +export declare const SHA_1_HMAC_ALGO: { + name: "HMAC"; + hash: { + name: "SHA-1"; + }; +}; +export declare const EMPTY_DATA_SHA_1: Uint8Array; diff --git a/node_modules/@aws-crypto/sha1-browser/build/module/constants.js b/node_modules/@aws-crypto/sha1-browser/build/module/constants.js new file mode 100644 index 00000000..f4f9585b --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/module/constants.js @@ -0,0 +1,28 @@ +export var SHA_1_HASH = { name: "SHA-1" }; +export var SHA_1_HMAC_ALGO = { + name: "HMAC", + hash: SHA_1_HASH, +}; +export var EMPTY_DATA_SHA_1 = new Uint8Array([ + 218, + 57, + 163, + 238, + 94, + 107, + 75, + 13, + 50, + 85, + 191, + 239, + 149, + 96, + 24, + 144, + 175, + 216, + 7, + 9, +]); +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/build/module/constants.js.map b/node_modules/@aws-crypto/sha1-browser/build/module/constants.js.map new file mode 100644 index 00000000..3d4d3e40 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/module/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,IAAM,UAAU,GAAsB,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC;AAE/D,MAAM,CAAC,IAAM,eAAe,GAA8C;IACxE,IAAI,EAAE,MAAM;IACZ,IAAI,EAAE,UAAU;CACjB,CAAC;AAEF,MAAM,CAAC,IAAM,gBAAgB,GAAG,IAAI,UAAU,CAAC;IAC7C,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,EAAE;IACF,EAAE;IACF,EAAE;IACF,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,CAAC;IACD,CAAC;CACF,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/build/module/crossPlatformSha1.d.ts b/node_modules/@aws-crypto/sha1-browser/build/module/crossPlatformSha1.d.ts new file mode 100644 index 00000000..0a16bc88 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/module/crossPlatformSha1.d.ts @@ -0,0 +1,8 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha1 implements Checksum { + private hash; + constructor(secret?: SourceData); + update(data: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + digest(): Promise<Uint8Array>; + reset(): void; +} diff --git a/node_modules/@aws-crypto/sha1-browser/build/module/crossPlatformSha1.js b/node_modules/@aws-crypto/sha1-browser/build/module/crossPlatformSha1.js new file mode 100644 index 00000000..0bf91c1c --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/module/crossPlatformSha1.js @@ -0,0 +1,26 @@ +import { Sha1 as WebCryptoSha1 } from "./webCryptoSha1"; +import { supportsWebCrypto } from "@aws-crypto/supports-web-crypto"; +import { locateWindow } from "@aws-sdk/util-locate-window"; +import { convertToBuffer } from "@aws-crypto/util"; +var Sha1 = /** @class */ (function () { + function Sha1(secret) { + if (supportsWebCrypto(locateWindow())) { + this.hash = new WebCryptoSha1(secret); + } + else { + throw new Error("SHA1 not supported"); + } + } + Sha1.prototype.update = function (data, encoding) { + this.hash.update(convertToBuffer(data)); + }; + Sha1.prototype.digest = function () { + return this.hash.digest(); + }; + Sha1.prototype.reset = function () { + this.hash.reset(); + }; + return Sha1; +}()); +export { Sha1 }; +//# sourceMappingURL=crossPlatformSha1.js.map \ No newline at end of file diff --git
a/node_modules/@aws-crypto/sha1-browser/build/module/crossPlatformSha1.js.map b/node_modules/@aws-crypto/sha1-browser/build/module/crossPlatformSha1.js.map new file mode 100644 index 00000000..63fefec8 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/module/crossPlatformSha1.js.map @@ -0,0 +1 @@ +{"version":3,"file":"crossPlatformSha1.js","sourceRoot":"","sources":["../../src/crossPlatformSha1.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,IAAI,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAExD,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAC;AACpE,OAAO,EAAE,YAAY,EAAE,MAAM,6BAA6B,CAAC;AAC3D,OAAO,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAEnD;IAGE,cAAY,MAAmB;QAC7B,IAAI,iBAAiB,CAAC,YAAY,EAAE,CAAC,EAAE;YACrC,IAAI,CAAC,IAAI,GAAG,IAAI,aAAa,CAAC,MAAM,CAAC,CAAC;SACvC;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC,CAAC;SACvC;IACH,CAAC;IAED,qBAAM,GAAN,UAAO,IAAgB,EAAE,QAAsC;QAC7D,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC;IAED,qBAAM,GAAN;QACE,OAAO,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;IAC5B,CAAC;IAED,oBAAK,GAAL;QACE,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC;IACpB,CAAC;IACH,WAAC;AAAD,CAAC,AAtBD,IAsBC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/build/module/index.d.ts b/node_modules/@aws-crypto/sha1-browser/build/module/index.d.ts new file mode 100644 index 00000000..1b6072d8 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/module/index.d.ts @@ -0,0 +1,2 @@ +export * from "./crossPlatformSha1"; +export { Sha1 as WebCryptoSha1 } from "./webCryptoSha1"; diff --git a/node_modules/@aws-crypto/sha1-browser/build/module/index.js b/node_modules/@aws-crypto/sha1-browser/build/module/index.js new file mode 100644 index 00000000..c6ec26dd --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/module/index.js @@ -0,0 +1,3 @@ +export * from "./crossPlatformSha1"; +export { Sha1 as WebCryptoSha1 } from "./webCryptoSha1"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/build/module/index.js.map b/node_modules/@aws-crypto/sha1-browser/build/module/index.js.map new file mode 100644 index 00000000..b012e259 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/module/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,qBAAqB,CAAC;AACpC,OAAO,EAAE,IAAI,IAAI,aAAa,EAAE,MAAM,iBAAiB,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/build/module/isEmptyData.d.ts b/node_modules/@aws-crypto/sha1-browser/build/module/isEmptyData.d.ts new file mode 100644 index 00000000..43ae4a7c --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/module/isEmptyData.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function isEmptyData(data: SourceData): boolean; diff --git a/node_modules/@aws-crypto/sha1-browser/build/module/isEmptyData.js b/node_modules/@aws-crypto/sha1-browser/build/module/isEmptyData.js new file mode 100644 index 00000000..4f31a618 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/module/isEmptyData.js @@ -0,0 +1,7 @@ +export function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +//# sourceMappingURL=isEmptyData.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/build/module/isEmptyData.js.map b/node_modules/@aws-crypto/sha1-browser/build/module/isEmptyData.js.map new file mode 100644 index 00000000..776ce2b9 
--- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/module/isEmptyData.js.map @@ -0,0 +1 @@ +{"version":3,"file":"isEmptyData.js","sourceRoot":"","sources":["../../src/isEmptyData.ts"],"names":[],"mappings":"AAEA,MAAM,UAAU,WAAW,CAAC,IAAgB;IAC1C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC;KAC1B;IAED,OAAO,IAAI,CAAC,UAAU,KAAK,CAAC,CAAC;AAC/B,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/build/module/webCryptoSha1.d.ts b/node_modules/@aws-crypto/sha1-browser/build/module/webCryptoSha1.d.ts new file mode 100644 index 00000000..a9d1b038 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/module/webCryptoSha1.d.ts @@ -0,0 +1,9 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha1 implements Checksum { + private readonly key; + private toHash; + constructor(secret?: SourceData); + update(data: SourceData): void; + digest(): Promise<Uint8Array>; + reset(): void; +} diff --git a/node_modules/@aws-crypto/sha1-browser/build/module/webCryptoSha1.js b/node_modules/@aws-crypto/sha1-browser/build/module/webCryptoSha1.js new file mode 100644 index 00000000..58b4dbb9 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/module/webCryptoSha1.js @@ -0,0 +1,58 @@ +import { fromUtf8 } from "@smithy/util-utf8"; +import { isEmptyData } from "./isEmptyData"; +import { EMPTY_DATA_SHA_1, SHA_1_HASH, SHA_1_HMAC_ALGO } from "./constants"; +import { locateWindow } from "@aws-sdk/util-locate-window"; +var Sha1 = /** @class */ (function () { + function Sha1(secret) { + this.toHash = new Uint8Array(0); + if (secret !== void 0) { + this.key = new Promise(function (resolve, reject) { + locateWindow() + .crypto.subtle.importKey("raw", convertToBuffer(secret), SHA_1_HMAC_ALGO, false, ["sign"]) + .then(resolve, reject); + }); + this.key.catch(function () { }); + } + } + Sha1.prototype.update = function (data) { + if (isEmptyData(data)) { + return; + } + var update = convertToBuffer(data); + var typedArray = new Uint8Array(this.toHash.byteLength + update.byteLength); + typedArray.set(this.toHash, 0); + typedArray.set(update, this.toHash.byteLength); + this.toHash = typedArray; + }; + Sha1.prototype.digest = function () { + var _this = this; + if (this.key) { + return this.key.then(function (key) { + return locateWindow() + .crypto.subtle.sign(SHA_1_HMAC_ALGO, key, _this.toHash) + .then(function (data) { return new Uint8Array(data); }); + }); + } + if (isEmptyData(this.toHash)) { + return Promise.resolve(EMPTY_DATA_SHA_1); + } + return Promise.resolve() + .then(function () { return locateWindow().crypto.subtle.digest(SHA_1_HASH, _this.toHash); }) + .then(function (data) { return Promise.resolve(new Uint8Array(data)); }); + }; + Sha1.prototype.reset = function () { + this.toHash = new Uint8Array(0); + }; + return Sha1; +}()); +export { Sha1 }; +function convertToBuffer(data) { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +} +//# sourceMappingURL=webCryptoSha1.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/build/module/webCryptoSha1.js.map b/node_modules/@aws-crypto/sha1-browser/build/module/webCryptoSha1.js.map new file mode 100644 index 00000000..fc3861fa --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/build/module/webCryptoSha1.js.map @@ -0,0 +1 @@
+{"version":3,"file":"webCryptoSha1.js","sourceRoot":"","sources":["../../src/webCryptoSha1.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAC7C,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,gBAAgB,EAAE,UAAU,EAAE,eAAe,EAAE,MAAM,aAAa,CAAC;AAC5E,OAAO,EAAE,YAAY,EAAE,MAAM,6BAA6B,CAAC;AAE3D;IAIE,cAAY,MAAmB;QAFvB,WAAM,GAAe,IAAI,UAAU,CAAC,CAAC,CAAC,CAAC;QAG7C,IAAI,MAAM,KAAK,KAAK,CAAC,EAAE;YACrB,IAAI,CAAC,GAAG,GAAG,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;gBACrC,YAAY,EAAE;qBACX,MAAM,CAAC,MAAM,CAAC,SAAS,CACtB,KAAK,EACL,eAAe,CAAC,MAAM,CAAC,EACvB,eAAe,EACf,KAAK,EACL,CAAC,MAAM,CAAC,CACT;qBACA,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;YAC3B,CAAC,CAAC,CAAC;YACH,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,cAAO,CAAC,CAAC,CAAC;SAC1B;IACH,CAAC;IAED,qBAAM,GAAN,UAAO,IAAgB;QACrB,IAAI,WAAW,CAAC,IAAI,CAAC,EAAE;YACrB,OAAO;SACR;QAED,IAAM,MAAM,GAAG,eAAe,CAAC,IAAI,CAAC,CAAC;QACrC,IAAM,UAAU,GAAG,IAAI,UAAU,CAC/B,IAAI,CAAC,MAAM,CAAC,UAAU,GAAG,MAAM,CAAC,UAAU,CAC3C,CAAC;QACF,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;QAC/B,UAAU,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC;QAC/C,IAAI,CAAC,MAAM,GAAG,UAAU,CAAC;IAC3B,CAAC;IAED,qBAAM,GAAN;QAAA,iBAgBC;QAfC,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,OAAO,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,UAAC,GAAG;gBACvB,OAAA,YAAY,EAAE;qBACX,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,eAAe,EAAE,GAAG,EAAE,KAAI,CAAC,MAAM,CAAC;qBACrD,IAAI,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,UAAU,CAAC,IAAI,CAAC,EAApB,CAAoB,CAAC;YAFvC,CAEuC,CACxC,CAAC;SACH;QAED,IAAI,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;YAC5B,OAAO,OAAO,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAC;SAC1C;QAED,OAAO,OAAO,CAAC,OAAO,EAAE;aACrB,IAAI,CAAC,cAAM,OAAA,YAAY,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,UAAU,EAAE,KAAI,CAAC,MAAM,CAAC,EAA5D,CAA4D,CAAC;aACxE,IAAI,CAAC,UAAC,IAAI,IAAK,OAAA,OAAO,CAAC,OAAO,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC,EAArC,CAAqC,CAAC,CAAC;IAC3D,CAAC;IAED,oBAAK,GAAL;QACE,IAAI,CAAC,MAAM,GAAG,IAAI,UAAU,CAAC,CAAC,CAAC,CAAC;IAClC,CAAC;IACH,WAAC;AAAD,CAAC,AAxDD,IAwDC;;AAED,SAAS,eAAe,CAAC,IAAgB;IACvC,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAC;KACvB;IAED,IAAI,WAAW,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;QAC5B,OAAO,IAAI,UAAU,CACnB,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,UAAU,EACf,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC,iBAAiB,CAC/C,CAAC;KACH;IAED,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC;AAC9B,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/LICENSE b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
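For orientation, a minimal usage sketch of the @smithy/is-array-buffer helper vendored by the hunks just below (illustrative only, outside the patch; it assumes the package resolves by its package name):

import { isArrayBuffer } from "@smithy/is-array-buffer";

const raw = new ArrayBuffer(8);
const view = new Uint8Array(raw);

// The guard narrows an unknown value to ArrayBuffer; its
// Object.prototype.toString fallback keeps it working for buffers created in
// another realm (e.g. a worker), where instanceof ArrayBuffer would fail.
console.log(isArrayBuffer(raw));  // true
console.log(isArrayBuffer(view)); // false: a typed-array view is not the buffer itself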
\ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/README.md b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/README.md new file mode 100644 index 00000000..31853f24 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/README.md @@ -0,0 +1,10 @@ +# @smithy/is-array-buffer + +[![NPM version](https://img.shields.io/npm/v/@smithy/is-array-buffer/latest.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/is-array-buffer.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/dist-cjs/index.js b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/dist-cjs/index.js new file mode 100644 index 00000000..5d792e71 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/dist-cjs/index.js @@ -0,0 +1,32 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isArrayBuffer: () => isArrayBuffer +}); +module.exports = __toCommonJS(src_exports); +var isArrayBuffer = /* @__PURE__ */ __name((arg) => typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer || Object.prototype.toString.call(arg) === "[object ArrayBuffer]", "isArrayBuffer"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + isArrayBuffer +}); + diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/dist-es/index.js b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/dist-es/index.js new file mode 100644 index 00000000..8096cca3 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/dist-es/index.js @@ -0,0 +1,2 @@ +export const isArrayBuffer = (arg) => (typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer) || + Object.prototype.toString.call(arg) === "[object ArrayBuffer]"; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts new file mode 100644 index 00000000..64f452e7 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git 
a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..ca8fd6bd --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/package.json b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/package.json new file mode 100644 index 00000000..ed8affc7 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/is-array-buffer", + "version": "2.2.0", + "description": "Provides a function for detecting if an argument is an ArrayBuffer", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline is-array-buffer", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/is-array-buffer", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/is-array-buffer" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/LICENSE b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
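Similarly, a minimal sketch of the @smithy/util-buffer-from helpers added in the following hunks (illustrative only, outside the patch; it assumes a Node.js runtime where the buffer module is available):

import { fromArrayBuffer, fromString } from "@smithy/util-buffer-from";

// Wrap an existing ArrayBuffer in a Node Buffer without copying;
// offset and length are optional and default to the whole buffer.
const ab = new ArrayBuffer(4);
const wrapped = fromArrayBuffer(ab, 0, 4);

// Encode a string; with no encoding argument, Buffer.from defaults to utf8.
const hello = fromString("hello", "utf8");
console.log(wrapped.byteLength, hello.toString("hex")); // 4 68656c6c6f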
\ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/README.md b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/README.md new file mode 100644 index 00000000..c896b04a --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/README.md @@ -0,0 +1,10 @@ +# @smithy/util-buffer-from + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-buffer-from/latest.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-buffer-from.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/dist-cjs/index.js b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/dist-cjs/index.js new file mode 100644 index 00000000..c6738d94 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/dist-cjs/index.js @@ -0,0 +1,47 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromArrayBuffer: () => fromArrayBuffer, + fromString: () => fromString +}); +module.exports = __toCommonJS(src_exports); +var import_is_array_buffer = require("@smithy/is-array-buffer"); +var import_buffer = require("buffer"); +var fromArrayBuffer = /* @__PURE__ */ __name((input, offset = 0, length = input.byteLength - offset) => { + if (!(0, import_is_array_buffer.isArrayBuffer)(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); + } + return import_buffer.Buffer.from(input, offset, length); +}, "fromArrayBuffer"); +var fromString = /* @__PURE__ */ __name((input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? 
import_buffer.Buffer.from(input, encoding) : import_buffer.Buffer.from(input); +}, "fromString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromArrayBuffer, + fromString +}); + diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/dist-es/index.js b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/dist-es/index.js new file mode 100644 index 00000000..718f8315 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/dist-es/index.js @@ -0,0 +1,14 @@ +import { isArrayBuffer } from "@smithy/is-array-buffer"; +import { Buffer } from "buffer"; +export const fromArrayBuffer = (input, offset = 0, length = input.byteLength - offset) => { + if (!isArrayBuffer(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); + } + return Buffer.from(input, offset, length); +}; +export const fromString = (input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? Buffer.from(input, encoding) : Buffer.from(input); +}; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts new file mode 100644 index 00000000..a523134a --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..f9173f74 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/package.json b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/package.json new file mode 100644 index 00000000..a12e51cc --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/util-buffer-from", + "version": "2.2.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-buffer-from", + "build:es": "yarn g:tsc -p tsconfig.es.json", + 
"build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^14.14.31", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-buffer-from", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-buffer-from" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/LICENSE b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
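And a minimal sketch of the @smithy/util-utf8 conversions added in the hunks below (illustrative only, outside the patch):

import { fromUtf8, toUint8Array, toUtf8 } from "@smithy/util-utf8";

// Round-trip a string through a Uint8Array.
const bytes = fromUtf8("héllo");
console.log(toUtf8(bytes)); // "héllo"

// toUint8Array additionally accepts ArrayBuffer and ArrayBufferView inputs.
console.log(toUint8Array(new ArrayBuffer(2)).byteLength); // 2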
\ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/README.md b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/README.md new file mode 100644 index 00000000..fc5db6d8 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/README.md @@ -0,0 +1,4 @@ +# @smithy/util-utf8 + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-utf8/latest.svg)](https://www.npmjs.com/package/@smithy/util-utf8) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-utf8.svg)](https://www.npmjs.com/package/@smithy/util-utf8) diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-cjs/index.js b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-cjs/index.js new file mode 100644 index 00000000..0b22680a --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-cjs/index.js @@ -0,0 +1,65 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromUtf8: () => fromUtf8, + toUint8Array: () => toUint8Array, + toUtf8: () => toUtf8 +}); +module.exports = __toCommonJS(src_exports); + +// src/fromUtf8.ts +var import_util_buffer_from = require("@smithy/util-buffer-from"); +var fromUtf8 = /* @__PURE__ */ __name((input) => { + const buf = (0, import_util_buffer_from.fromString)(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}, "fromUtf8"); + +// src/toUint8Array.ts +var toUint8Array = /* @__PURE__ */ __name((data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / 
Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}, "toUint8Array"); + +// src/toUtf8.ts + +var toUtf8 = /* @__PURE__ */ __name((input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return (0, import_util_buffer_from.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}, "toUtf8"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromUtf8, + toUint8Array, + toUtf8 +}); + diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js new file mode 100644 index 00000000..73441900 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js @@ -0,0 +1 @@ +export const fromUtf8 = (input) => new TextEncoder().encode(input); diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js new file mode 100644 index 00000000..6dc438b3 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js @@ -0,0 +1,5 @@ +import { fromString } from "@smithy/util-buffer-from"; +export const fromUtf8 = (input) => { + const buf = fromString(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-es/index.js b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-es/index.js new file mode 100644 index 00000000..00ba4657 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git 
a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js new file mode 100644 index 00000000..2cd36f75 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js @@ -0,0 +1,10 @@ +import { fromUtf8 } from "./fromUtf8"; +export const toUint8Array = (data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js new file mode 100644 index 00000000..c2921278 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js @@ -0,0 +1,9 @@ +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return new TextDecoder("utf-8").decode(input); +}; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.js b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.js new file mode 100644 index 00000000..7be8745a --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.js @@ -0,0 +1,10 @@ +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return fromArrayBuffer(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts new file mode 100644 index 00000000..dd919817 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts new file mode 100644 index 00000000..dd919817 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/index.d.ts b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/index.d.ts new file mode 100644 index 00000000..00ba4657 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from 
"./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts new file mode 100644 index 00000000..11b6342e --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts new file mode 100644 index 00000000..8494acd8 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts new file mode 100644 index 00000000..8494acd8 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts new file mode 100644 index 00000000..39f3d6dd --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts new file mode 100644 index 00000000..39f3d6dd --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..ef9761d7 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts new file mode 100644 index 00000000..562fe101 --- /dev/null +++ 
b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts new file mode 100644 index 00000000..33511ad7 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts new file mode 100644 index 00000000..33511ad7 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/package.json b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/package.json new file mode 100644 index 00000000..78bfb4df --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-utf8", + "version": "2.3.0", + "description": "A UTF-8 string <-> UInt8Array converter", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-utf8", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/fromUtf8": "./dist-es/fromUtf8.browser", + "./dist-es/toUtf8": "./dist-es/toUtf8.browser" + }, + "react-native": {}, + "homepage": 
"https://github.com/awslabs/smithy-typescript/tree/main/packages/util-utf8", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-utf8" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha1-browser/package.json b/node_modules/@aws-crypto/sha1-browser/package.json new file mode 100644 index 00000000..3517bc75 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/package.json @@ -0,0 +1,35 @@ +{ + "name": "@aws-crypto/sha1-browser", + "version": "5.2.0", + "scripts": { + "prepublishOnly": "tsc -p tsconfig.json && tsc -p tsconfig.module.json", + "pretest": "tsc -p tsconfig.test.json", + "test": "mocha --require ts-node/register test/**/*test.ts" + }, + "repository": { + "type": "git", + "url": "git@github.com:aws/aws-sdk-js-crypto-helpers.git" + }, + "author": { + "name": "AWS Crypto Tools Team", + "email": "aws-cryptools@amazon.com", + "url": "https://docs.aws.amazon.com/aws-crypto-tools/index.html?id=docs_gateway#lang/en_us" + }, + "homepage": "https://github.com/aws/aws-sdk-js-crypto-helpers/tree/master/packages/sha1-browser", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/supports-web-crypto": "^5.2.0", + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-locate-window": "^3.0.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + }, + "main": "./build/main/index.js", + "module": "./build/module/index.js", + "types": "./build/main/index.d.ts", + "publishConfig": { + "access": "public" + }, + "gitHead": "c11b171b35ec5c093364f0e0d8dc4ab1af68e748" +} diff --git a/node_modules/@aws-crypto/sha1-browser/src/constants.ts b/node_modules/@aws-crypto/sha1-browser/src/constants.ts new file mode 100644 index 00000000..6b7dfcd1 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/src/constants.ts @@ -0,0 +1,29 @@ +export const SHA_1_HASH: { name: "SHA-1" } = { name: "SHA-1" }; + +export const SHA_1_HMAC_ALGO: { name: "HMAC"; hash: { name: "SHA-1" } } = { + name: "HMAC", + hash: SHA_1_HASH, +}; + +export const EMPTY_DATA_SHA_1 = new Uint8Array([ + 218, + 57, + 163, + 238, + 94, + 107, + 75, + 13, + 50, + 85, + 191, + 239, + 149, + 96, + 24, + 144, + 175, + 216, + 7, + 9, +]); diff --git a/node_modules/@aws-crypto/sha1-browser/src/crossPlatformSha1.ts b/node_modules/@aws-crypto/sha1-browser/src/crossPlatformSha1.ts new file mode 100644 index 00000000..24ad4eef --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/src/crossPlatformSha1.ts @@ -0,0 +1,29 @@ +import { Sha1 as WebCryptoSha1 } from "./webCryptoSha1"; +import { Checksum, SourceData } from "@aws-sdk/types"; +import { supportsWebCrypto } from "@aws-crypto/supports-web-crypto"; +import { locateWindow } from "@aws-sdk/util-locate-window"; +import { convertToBuffer } from "@aws-crypto/util"; + +export class Sha1 implements Checksum { + private hash: Checksum; + + constructor(secret?: SourceData) { + if (supportsWebCrypto(locateWindow())) { + this.hash = new WebCryptoSha1(secret); + } else { + throw new Error("SHA1 not supported"); + } + } + + update(data: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void { + this.hash.update(convertToBuffer(data)); + } + + digest(): Promise { + return this.hash.digest(); + } + + reset(): void { + this.hash.reset(); + } +} diff --git a/node_modules/@aws-crypto/sha1-browser/src/index.ts b/node_modules/@aws-crypto/sha1-browser/src/index.ts new 
file mode 100644 index 00000000..1b6072d8 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/src/index.ts @@ -0,0 +1,2 @@ +export * from "./crossPlatformSha1"; +export { Sha1 as WebCryptoSha1 } from "./webCryptoSha1"; diff --git a/node_modules/@aws-crypto/sha1-browser/src/isEmptyData.ts b/node_modules/@aws-crypto/sha1-browser/src/isEmptyData.ts new file mode 100644 index 00000000..538971f4 --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/src/isEmptyData.ts @@ -0,0 +1,9 @@ +import { SourceData } from "@aws-sdk/types"; + +export function isEmptyData(data: SourceData): boolean { + if (typeof data === "string") { + return data.length === 0; + } + + return data.byteLength === 0; +} diff --git a/node_modules/@aws-crypto/sha1-browser/src/webCryptoSha1.ts b/node_modules/@aws-crypto/sha1-browser/src/webCryptoSha1.ts new file mode 100644 index 00000000..ffa020bf --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/src/webCryptoSha1.ts @@ -0,0 +1,79 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +import { fromUtf8 } from "@smithy/util-utf8"; +import { isEmptyData } from "./isEmptyData"; +import { EMPTY_DATA_SHA_1, SHA_1_HASH, SHA_1_HMAC_ALGO } from "./constants"; +import { locateWindow } from "@aws-sdk/util-locate-window"; + +export class Sha1 implements Checksum { + private readonly key: Promise<CryptoKey> | undefined; + private toHash: Uint8Array = new Uint8Array(0); + + constructor(secret?: SourceData) { + if (secret !== void 0) { + this.key = new Promise((resolve, reject) => { + locateWindow() + .crypto.subtle.importKey( + "raw", + convertToBuffer(secret), + SHA_1_HMAC_ALGO, + false, + ["sign"] + ) + .then(resolve, reject); + }); + this.key.catch(() => {}); + } + } + + update(data: SourceData): void { + if (isEmptyData(data)) { + return; + } + + const update = convertToBuffer(data); + const typedArray = new Uint8Array( + this.toHash.byteLength + update.byteLength + ); + typedArray.set(this.toHash, 0); + typedArray.set(update, this.toHash.byteLength); + this.toHash = typedArray; + } + + digest(): Promise<Uint8Array> { + if (this.key) { + return this.key.then((key) => + locateWindow() + .crypto.subtle.sign(SHA_1_HMAC_ALGO, key, this.toHash) + .then((data) => new Uint8Array(data)) + ); + } + + if (isEmptyData(this.toHash)) { + return Promise.resolve(EMPTY_DATA_SHA_1); + } + + return Promise.resolve() + .then(() => locateWindow().crypto.subtle.digest(SHA_1_HASH, this.toHash)) + .then((data) => Promise.resolve(new Uint8Array(data))); + } + + reset(): void { + this.toHash = new Uint8Array(0); + } +} + +function convertToBuffer(data: SourceData): Uint8Array { + if (typeof data === "string") { + return fromUtf8(data); + } + + if (ArrayBuffer.isView(data)) { + return new Uint8Array( + data.buffer, + data.byteOffset, + data.byteLength / Uint8Array.BYTES_PER_ELEMENT + ); + } + + return new Uint8Array(data); +} diff --git a/node_modules/@aws-crypto/sha1-browser/tsconfig.json b/node_modules/@aws-crypto/sha1-browser/tsconfig.json new file mode 100644 index 00000000..fb9aa95f --- /dev/null +++ b/node_modules/@aws-crypto/sha1-browser/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "rootDir": "./src", + "outDir": "./build/main", + "lib": ["dom"], + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules/**"] +} diff --git a/node_modules/@aws-crypto/sha1-browser/tsconfig.module.json b/node_modules/@aws-crypto/sha1-browser/tsconfig.module.json new file mode 100644 index 00000000..7d0cfddc --- /dev/null +++
b/node_modules/@aws-crypto/sha1-browser/tsconfig.module.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "outDir": "build/module", + "module": "esnext", + } +} diff --git a/node_modules/@aws-crypto/sha256-browser/CHANGELOG.md b/node_modules/@aws-crypto/sha256-browser/CHANGELOG.md new file mode 100644 index 00000000..e6036f8a --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/CHANGELOG.md @@ -0,0 +1,118 @@ +# Change Log + +All notable changes to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. + +# [5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.1.0...v5.2.0) (2023-10-16) + +### Features + +- support ESM artifacts in all packages ([#752](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/752)) ([e930ffb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e930ffba5cfef66dd242049e7d514ced232c1e3b)) + +# [5.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.0.0...v5.1.0) (2023-09-22) + +### Bug Fixes + +- Update tsc to 2.x ([#735](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/735)) ([782e0de](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/782e0de9f5fef41f694130580a69d940894b6b8c)) + +### Features + +- Use @smithy/util-utf8 ([#730](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/730)) ([00fb851](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/00fb851ca3559d5a1f370f9256814de1210826b8)), closes [#699](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/699) + +# [5.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v4.0.1...v5.0.0) (2023-07-13) + +- feat!: drop support for IE 11 (#629) ([6c49fb6](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6c49fb6c1b1f18bbff02dbd77a37a21bdb40c959)), closes [#629](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/629) + +### BREAKING CHANGES + +- Remove support for IE11 + +Co-authored-by: texastony <5892063+texastony@users.noreply.github.com> + +# [4.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v3.0.0...v4.0.0) (2023-02-20) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +# [3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.2...v3.0.0) (2023-01-12) + +### Bug Fixes + +- **docs:** sha256 packages, clarify hmac support ([#455](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/455)) ([1be5043](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/1be5043325991f3f5ccb52a8dd928f004b4d442e)) + +- feat!: replace Hash implementations with Checksum interface (#492) ([da43dc0](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/da43dc0fdf669d9ebb5bfb1b1f7c79e46c4aaae1)), closes [#492](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/492) + +### BREAKING CHANGES + +- All classes that implemented `Hash` now implement `Checksum`. 
+ +## [2.0.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.1...v2.0.2) (2022-09-07) + +### Bug Fixes + +- **#337:** update @aws-sdk/types ([#373](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/373)) ([b26a811](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/b26a811a392f5209c7ec7e57251500d4d78f97ff)), closes [#337](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/337) + +## [2.0.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.0...v2.0.1) (2021-12-09) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +# [2.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.2...v2.0.0) (2021-10-25) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +## [1.2.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.1...v1.2.2) (2021-10-12) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +## [1.2.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.0...v1.2.1) (2021-09-17) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +# [1.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.1.1...v1.2.0) (2021-09-17) + +### Features + +- add @aws-crypto/util ([8f489cb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/8f489cbe4c0e134f826bac66f1bf5172597048b9)) + +## [1.1.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@1.1.0...@aws-crypto/sha256-browser@1.1.1) (2021-07-13) + +### Bug Fixes + +- **sha256-browser:** throw errors not string ([#194](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/194)) ([7fa7ac4](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/7fa7ac445ef7a04dfb1ff479e7114aba045b2b2c)) + +# [1.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@1.0.0...@aws-crypto/sha256-browser@1.1.0) (2021-01-13) + +### Bug Fixes + +- remove package lock ([6002a5a](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6002a5ab9218dc8798c19dc205d3eebd3bec5b43)) +- **aws-crypto:** export explicit dependencies on [@aws-types](https://github.com/aws-types) ([6a1873a](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6a1873a4dcc2aaa4a1338595703cfa7099f17b8c)) +- **deps-dev:** move @aws-sdk/types to devDependencies ([#188](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/188)) ([08efdf4](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/08efdf46dcc612d88c441e29945d787f253ee77d)) + +# [1.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@1.0.0-alpha.0...@aws-crypto/sha256-browser@1.0.0) (2020-10-22) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +# [1.0.0-alpha.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@0.1.0-preview.4...@aws-crypto/sha256-browser@1.0.0-alpha.0) (2020-02-07) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +# [0.1.0-preview.4](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@0.1.0-preview.2...@aws-crypto/sha256-browser@0.1.0-preview.4) (2020-01-16) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- es2015.iterable required 
([#10](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/10)) ([6e08d83](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6e08d83c33667ad8cbeeaaa7cedf1bbe05f79ed8)) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) ([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.3](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@0.1.0-preview.2...@aws-crypto/sha256-browser@0.1.0-preview.3) (2019-11-15) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- es2015.iterable required ([#10](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/10)) ([6e08d83](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6e08d83c33667ad8cbeeaaa7cedf1bbe05f79ed8)) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) ([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.2](https://github.com/aws/aws-javascript-crypto-helpers/compare/@aws-crypto/sha256-browser@0.1.0-preview.1...@aws-crypto/sha256-browser@0.1.0-preview.2) (2019-10-30) + +### Bug Fixes + +- remove /src/ from .npmignore (for sourcemaps) ([#5](https://github.com/aws/aws-javascript-crypto-helpers/issues/5)) ([ec52056](https://github.com/aws/aws-javascript-crypto-helpers/commit/ec52056)) diff --git a/node_modules/@aws-crypto/sha256-browser/LICENSE b/node_modules/@aws-crypto/sha256-browser/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@aws-crypto/sha256-browser/README.md b/node_modules/@aws-crypto/sha256-browser/README.md new file mode 100644 index 00000000..75bf105a --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/README.md @@ -0,0 +1,31 @@ +# @aws-crypto/sha256-browser + +SHA256 wrapper for browsers that prefers `window.crypto.subtle` but will +fall back to a pure JS implementation in @aws-crypto/sha256-js +to provide a consistent interface for SHA256. 
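(Note on the vendored package above: the WebCrypto-or-JS fallback the README describes is implemented in `crossPlatformSha256` later in this patch via `supportsWebCrypto(locateWindow())`. Below is a minimal sketch of that selection as seen from calling code, using only the entry points vendored here; the `usingWebCrypto` constant is illustrative and not part of the package:)

```
import { supportsWebCrypto } from '@aws-crypto/supports-web-crypto';
import { locateWindow } from '@aws-sdk/util-locate-window';
import { Sha256 } from '@aws-crypto/sha256-browser';

// Same feature check the Sha256 constructor performs internally:
// prefer window.crypto.subtle, otherwise fall back to the pure-JS
// implementation from @aws-crypto/sha256-js.
const usingWebCrypto = supportsWebCrypto(locateWindow());
console.log(usingWebCrypto ? 'WebCrypto backend' : 'pure-JS fallback');

const hash = new Sha256();
hash.update('some data');
const digest = await hash.digest(); // Uint8Array of 32 bytes
```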
+ +## Usage + +- To hash "some data" +``` +import {Sha256} from '@aws-crypto/sha256-browser' + +const hash = new Sha256(); +hash.update('some data'); +const result = await hash.digest(); + +``` + +- To hmac "some data" with "a key" +``` +import {Sha256} from '@aws-crypto/sha256-browser' + +const hash = new Sha256('a key'); +hash.update('some data'); +const result = await hash.digest(); + +``` + +## Test + +`npm test` diff --git a/node_modules/@aws-crypto/sha256-browser/build/main/constants.d.ts b/node_modules/@aws-crypto/sha256-browser/build/main/constants.d.ts new file mode 100644 index 00000000..fe8def75 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/main/constants.d.ts @@ -0,0 +1,10 @@ +export declare const SHA_256_HASH: { + name: "SHA-256"; +}; +export declare const SHA_256_HMAC_ALGO: { + name: "HMAC"; + hash: { + name: "SHA-256"; + }; +}; +export declare const EMPTY_DATA_SHA_256: Uint8Array; diff --git a/node_modules/@aws-crypto/sha256-browser/build/main/constants.js b/node_modules/@aws-crypto/sha256-browser/build/main/constants.js new file mode 100644 index 00000000..acb5c553 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/main/constants.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.EMPTY_DATA_SHA_256 = exports.SHA_256_HMAC_ALGO = exports.SHA_256_HASH = void 0; +exports.SHA_256_HASH = { name: "SHA-256" }; +exports.SHA_256_HMAC_ALGO = { + name: "HMAC", + hash: exports.SHA_256_HASH +}; +exports.EMPTY_DATA_SHA_256 = new Uint8Array([ + 227, + 176, + 196, + 66, + 152, + 252, + 28, + 20, + 154, + 251, + 244, + 200, + 153, + 111, + 185, + 36, + 39, + 174, + 65, + 228, + 100, + 155, + 147, + 76, + 164, + 149, + 153, + 27, + 120, + 82, + 184, + 85 +]); +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/build/main/constants.js.map b/node_modules/@aws-crypto/sha256-browser/build/main/constants.js.map new file mode 100644 index 00000000..217561a5 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/main/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":";;;AAAa,QAAA,YAAY,GAAwB,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC;AAExD,QAAA,iBAAiB,GAAgD;IAC5E,IAAI,EAAE,MAAM;IACZ,IAAI,EAAE,oBAAY;CACnB,CAAC;AAEW,QAAA,kBAAkB,GAAG,IAAI,UAAU,CAAC;IAC/C,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,EAAE;IACF,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,EAAE;IACF,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,EAAE;IACF,GAAG;IACH,EAAE;CACH,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.d.ts b/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.d.ts new file mode 100644 index 00000000..055d3ef7 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.d.ts @@ -0,0 +1,8 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private hash; + constructor(secret?: SourceData); + update(data: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + digest(): Promise<Uint8Array>; + reset(): void; +} diff --git a/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js b/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js new
file mode 100644 index 00000000..cde2a427 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Sha256 = void 0; +var webCryptoSha256_1 = require("./webCryptoSha256"); +var sha256_js_1 = require("@aws-crypto/sha256-js"); +var supports_web_crypto_1 = require("@aws-crypto/supports-web-crypto"); +var util_locate_window_1 = require("@aws-sdk/util-locate-window"); +var util_1 = require("@aws-crypto/util"); +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + if ((0, supports_web_crypto_1.supportsWebCrypto)((0, util_locate_window_1.locateWindow)())) { + this.hash = new webCryptoSha256_1.Sha256(secret); + } + else { + this.hash = new sha256_js_1.Sha256(secret); + } + } + Sha256.prototype.update = function (data, encoding) { + this.hash.update((0, util_1.convertToBuffer)(data)); + }; + Sha256.prototype.digest = function () { + return this.hash.digest(); + }; + Sha256.prototype.reset = function () { + this.hash.reset(); + }; + return Sha256; +}()); +exports.Sha256 = Sha256; +//# sourceMappingURL=crossPlatformSha256.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js.map b/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js.map new file mode 100644 index 00000000..9a177dc5 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js.map @@ -0,0 +1 @@ +{"version":3,"file":"crossPlatformSha256.js","sourceRoot":"","sources":["../../src/crossPlatformSha256.ts"],"names":[],"mappings":";;;AAAA,qDAA8D;AAC9D,mDAA2D;AAE3D,uEAAoE;AACpE,kEAA2D;AAC3D,yCAAmD;AAEnD;IAGE,gBAAY,MAAmB;QAC7B,IAAI,IAAA,uCAAiB,EAAC,IAAA,iCAAY,GAAE,CAAC,EAAE;YACrC,IAAI,CAAC,IAAI,GAAG,IAAI,wBAAe,CAAC,MAAM,CAAC,CAAC;SACzC;aAAM;YACL,IAAI,CAAC,IAAI,GAAG,IAAI,kBAAQ,CAAC,MAAM,CAAC,CAAC;SAClC;IACH,CAAC;IAED,uBAAM,GAAN,UAAO,IAAgB,EAAE,QAAsC;QAC7D,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,IAAA,sBAAe,EAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC;IAED,uBAAM,GAAN;QACE,OAAO,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;IAC5B,CAAC;IAED,sBAAK,GAAL;QACE,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC;IACpB,CAAC;IACH,aAAC;AAAD,CAAC,AAtBD,IAsBC;AAtBY,wBAAM"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/build/main/index.d.ts b/node_modules/@aws-crypto/sha256-browser/build/main/index.d.ts new file mode 100644 index 00000000..60ab3973 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/main/index.d.ts @@ -0,0 +1,2 @@ +export * from "./crossPlatformSha256"; +export { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; diff --git a/node_modules/@aws-crypto/sha256-browser/build/main/index.js b/node_modules/@aws-crypto/sha256-browser/build/main/index.js new file mode 100644 index 00000000..a2703493 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/main/index.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.WebCryptoSha256 = void 0; +var tslib_1 = require("tslib"); +tslib_1.__exportStar(require("./crossPlatformSha256"), exports); +var webCryptoSha256_1 = require("./webCryptoSha256"); +Object.defineProperty(exports, "WebCryptoSha256", { enumerable: true, get: function () { return webCryptoSha256_1.Sha256; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/build/main/index.js.map 
b/node_modules/@aws-crypto/sha256-browser/build/main/index.js.map new file mode 100644 index 00000000..64b19eba --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/main/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;AAAA,gEAAsC;AACtC,qDAA8D;AAArD,kHAAA,MAAM,OAAmB"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.d.ts b/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.d.ts new file mode 100644 index 00000000..43ae4a7c --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function isEmptyData(data: SourceData): boolean; diff --git a/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js b/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js new file mode 100644 index 00000000..fe91548a --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isEmptyData = void 0; +function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +exports.isEmptyData = isEmptyData; +//# sourceMappingURL=isEmptyData.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js.map b/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js.map new file mode 100644 index 00000000..20ccfd6a --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js.map @@ -0,0 +1 @@ +{"version":3,"file":"isEmptyData.js","sourceRoot":"","sources":["../../src/isEmptyData.ts"],"names":[],"mappings":";;;AAEA,SAAgB,WAAW,CAAC,IAAgB;IAC1C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC;KAC1B;IAED,OAAO,IAAI,CAAC,UAAU,KAAK,CAAC,CAAC;AAC/B,CAAC;AAND,kCAMC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.d.ts b/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.d.ts new file mode 100644 index 00000000..ec0e214d --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.d.ts @@ -0,0 +1,10 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private readonly secret?; + private key; + private toHash; + constructor(secret?: SourceData); + update(data: SourceData): void; + digest(): Promise<Uint8Array>; + reset(): void; +} diff --git a/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js b/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js new file mode 100644 index 00000000..778fdd90 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js @@ -0,0 +1,56 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Sha256 = void 0; +var util_1 = require("@aws-crypto/util"); +var constants_1 = require("./constants"); +var util_locate_window_1 = require("@aws-sdk/util-locate-window"); +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + this.toHash = new Uint8Array(0); + this.secret = secret; + this.reset(); + } + Sha256.prototype.update = function (data) { + if ((0, util_1.isEmptyData)(data)) { + return; + } + var update = (0, util_1.convertToBuffer)(data); + var typedArray =
new Uint8Array(this.toHash.byteLength + update.byteLength); + typedArray.set(this.toHash, 0); + typedArray.set(update, this.toHash.byteLength); + this.toHash = typedArray; + }; + Sha256.prototype.digest = function () { + var _this = this; + if (this.key) { + return this.key.then(function (key) { + return (0, util_locate_window_1.locateWindow)() + .crypto.subtle.sign(constants_1.SHA_256_HMAC_ALGO, key, _this.toHash) + .then(function (data) { return new Uint8Array(data); }); + }); + } + if ((0, util_1.isEmptyData)(this.toHash)) { + return Promise.resolve(constants_1.EMPTY_DATA_SHA_256); + } + return Promise.resolve() + .then(function () { + return (0, util_locate_window_1.locateWindow)().crypto.subtle.digest(constants_1.SHA_256_HASH, _this.toHash); + }) + .then(function (data) { return Promise.resolve(new Uint8Array(data)); }); + }; + Sha256.prototype.reset = function () { + var _this = this; + this.toHash = new Uint8Array(0); + if (this.secret && this.secret !== void 0) { + this.key = new Promise(function (resolve, reject) { + (0, util_locate_window_1.locateWindow)() + .crypto.subtle.importKey("raw", (0, util_1.convertToBuffer)(_this.secret), constants_1.SHA_256_HMAC_ALGO, false, ["sign"]) + .then(resolve, reject); + }); + this.key.catch(function () { }); + } + }; + return Sha256; +}()); +exports.Sha256 = Sha256; +//# sourceMappingURL=webCryptoSha256.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js.map b/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js.map new file mode 100644 index 00000000..7b55a073 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js.map @@ -0,0 +1 @@ +{"version":3,"file":"webCryptoSha256.js","sourceRoot":"","sources":["../../src/webCryptoSha256.ts"],"names":[],"mappings":";;;AACA,yCAAgE;AAChE,yCAIqB;AACrB,kEAA2D;AAE3D;IAKE,gBAAY,MAAmB;QAFvB,WAAM,GAAe,IAAI,UAAU,CAAC,CAAC,CAAC,CAAC;QAG7C,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,KAAK,EAAE,CAAC;IACf,CAAC;IAED,uBAAM,GAAN,UAAO,IAAgB;QACrB,IAAI,IAAA,kBAAW,EAAC,IAAI,CAAC,EAAE;YACrB,OAAO;SACR;QAED,IAAM,MAAM,GAAG,IAAA,sBAAe,EAAC,IAAI,CAAC,CAAC;QACrC,IAAM,UAAU,GAAG,IAAI,UAAU,CAC/B,IAAI,CAAC,MAAM,CAAC,UAAU,GAAG,MAAM,CAAC,UAAU,CAC3C,CAAC;QACF,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;QAC/B,UAAU,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC;QAC/C,IAAI,CAAC,MAAM,GAAG,UAAU,CAAC;IAC3B,CAAC;IAED,uBAAM,GAAN;QAAA,iBAkBC;QAjBC,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,OAAO,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,UAAC,GAAG;gBACvB,OAAA,IAAA,iCAAY,GAAE;qBACX,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,6BAAiB,EAAE,GAAG,EAAE,KAAI,CAAC,MAAM,CAAC;qBACvD,IAAI,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,UAAU,CAAC,IAAI,CAAC,EAApB,CAAoB,CAAC;YAFvC,CAEuC,CACxC,CAAC;SACH;QAED,IAAI,IAAA,kBAAW,EAAC,IAAI,CAAC,MAAM,CAAC,EAAE;YAC5B,OAAO,OAAO,CAAC,OAAO,CAAC,8BAAkB,CAAC,CAAC;SAC5C;QAED,OAAO,OAAO,CAAC,OAAO,EAAE;aACrB,IAAI,CAAC;YACJ,OAAA,IAAA,iCAAY,GAAE,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,wBAAY,EAAE,KAAI,CAAC,MAAM,CAAC;QAA9D,CAA8D,CAC/D;aACA,IAAI,CAAC,UAAC,IAAI,IAAK,OAAA,OAAO,CAAC,OAAO,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC,EAArC,CAAqC,CAAC,CAAC;IAC3D,CAAC;IAED,sBAAK,GAAL;QAAA,iBAgBC;QAfC,IAAI,CAAC,MAAM,GAAG,IAAI,UAAU,CAAC,CAAC,CAAC,CAAC;QAChC,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,KAAK,KAAK,CAAC,EAAE;YACzC,IAAI,CAAC,GAAG,GAAG,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;gBACrC,IAAA,iCAAY,GAAE;qBACT,MAAM,CAAC,MAAM,CAAC,SAAS,CACxB,KAAK,EACL,IAAA,sBAAe,EAAC,KAAI,CAAC,MAAoB,CAAC,EAC1C,6BAAiB,EACjB,KAAK,EACL,CAAC,MAAM,CAAC,CACX;qBACI,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;YAC7B
,CAAC,CAAC,CAAC;YACH,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,cAAO,CAAC,CAAC,CAAC;SAC1B;IACH,CAAC;IACH,aAAC;AAAD,CAAC,AA7DD,IA6DC;AA7DY,wBAAM"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/build/module/constants.d.ts b/node_modules/@aws-crypto/sha256-browser/build/module/constants.d.ts new file mode 100644 index 00000000..fe8def75 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/module/constants.d.ts @@ -0,0 +1,10 @@ +export declare const SHA_256_HASH: { + name: "SHA-256"; +}; +export declare const SHA_256_HMAC_ALGO: { + name: "HMAC"; + hash: { + name: "SHA-256"; + }; +}; +export declare const EMPTY_DATA_SHA_256: Uint8Array; diff --git a/node_modules/@aws-crypto/sha256-browser/build/module/constants.js b/node_modules/@aws-crypto/sha256-browser/build/module/constants.js new file mode 100644 index 00000000..7fb1613a --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/module/constants.js @@ -0,0 +1,40 @@ +export var SHA_256_HASH = { name: "SHA-256" }; +export var SHA_256_HMAC_ALGO = { + name: "HMAC", + hash: SHA_256_HASH +}; +export var EMPTY_DATA_SHA_256 = new Uint8Array([ + 227, + 176, + 196, + 66, + 152, + 252, + 28, + 20, + 154, + 251, + 244, + 200, + 153, + 111, + 185, + 36, + 39, + 174, + 65, + 228, + 100, + 155, + 147, + 76, + 164, + 149, + 153, + 27, + 120, + 82, + 184, + 85 +]); +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/build/module/constants.js.map b/node_modules/@aws-crypto/sha256-browser/build/module/constants.js.map new file mode 100644 index 00000000..09ed9a31 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/module/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,IAAM,YAAY,GAAwB,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC;AAErE,MAAM,CAAC,IAAM,iBAAiB,GAAgD;IAC5E,IAAI,EAAE,MAAM;IACZ,IAAI,EAAE,YAAY;CACnB,CAAC;AAEF,MAAM,CAAC,IAAM,kBAAkB,GAAG,IAAI,UAAU,CAAC;IAC/C,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,EAAE;IACF,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,EAAE;IACF,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,EAAE;IACF,GAAG;IACH,EAAE;CACH,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.d.ts b/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.d.ts new file mode 100644 index 00000000..055d3ef7 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.d.ts @@ -0,0 +1,8 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private hash; + constructor(secret?: SourceData); + update(data: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + digest(): Promise<Uint8Array>; + reset(): void; +} diff --git a/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js b/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js new file mode 100644 index 00000000..5ae82ea7 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js @@ -0,0 +1,27 @@ +import { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; +import { Sha256 as JsSha256 } from "@aws-crypto/sha256-js"; +import { supportsWebCrypto } from "@aws-crypto/supports-web-crypto"; +import { locateWindow
} from "@aws-sdk/util-locate-window"; +import { convertToBuffer } from "@aws-crypto/util"; +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + if (supportsWebCrypto(locateWindow())) { + this.hash = new WebCryptoSha256(secret); + } + else { + this.hash = new JsSha256(secret); + } + } + Sha256.prototype.update = function (data, encoding) { + this.hash.update(convertToBuffer(data)); + }; + Sha256.prototype.digest = function () { + return this.hash.digest(); + }; + Sha256.prototype.reset = function () { + this.hash.reset(); + }; + return Sha256; +}()); +export { Sha256 }; +//# sourceMappingURL=crossPlatformSha256.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js.map b/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js.map new file mode 100644 index 00000000..4a83c570 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js.map @@ -0,0 +1 @@ +{"version":3,"file":"crossPlatformSha256.js","sourceRoot":"","sources":["../../src/crossPlatformSha256.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,IAAI,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAC9D,OAAO,EAAE,MAAM,IAAI,QAAQ,EAAE,MAAM,uBAAuB,CAAC;AAE3D,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAC;AACpE,OAAO,EAAE,YAAY,EAAE,MAAM,6BAA6B,CAAC;AAC3D,OAAO,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAEnD;IAGE,gBAAY,MAAmB;QAC7B,IAAI,iBAAiB,CAAC,YAAY,EAAE,CAAC,EAAE;YACrC,IAAI,CAAC,IAAI,GAAG,IAAI,eAAe,CAAC,MAAM,CAAC,CAAC;SACzC;aAAM;YACL,IAAI,CAAC,IAAI,GAAG,IAAI,QAAQ,CAAC,MAAM,CAAC,CAAC;SAClC;IACH,CAAC;IAED,uBAAM,GAAN,UAAO,IAAgB,EAAE,QAAsC;QAC7D,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC;IAED,uBAAM,GAAN;QACE,OAAO,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;IAC5B,CAAC;IAED,sBAAK,GAAL;QACE,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC;IACpB,CAAC;IACH,aAAC;AAAD,CAAC,AAtBD,IAsBC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/build/module/index.d.ts b/node_modules/@aws-crypto/sha256-browser/build/module/index.d.ts new file mode 100644 index 00000000..60ab3973 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/module/index.d.ts @@ -0,0 +1,2 @@ +export * from "./crossPlatformSha256"; +export { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; diff --git a/node_modules/@aws-crypto/sha256-browser/build/module/index.js b/node_modules/@aws-crypto/sha256-browser/build/module/index.js new file mode 100644 index 00000000..94ffb635 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/module/index.js @@ -0,0 +1,3 @@ +export * from "./crossPlatformSha256"; +export { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/build/module/index.js.map b/node_modules/@aws-crypto/sha256-browser/build/module/index.js.map new file mode 100644 index 00000000..01d20bc5 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/module/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,uBAAuB,CAAC;AACtC,OAAO,EAAE,MAAM,IAAI,eAAe,EAAE,MAAM,mBAAmB,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.d.ts b/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.d.ts new file mode 100644 index 00000000..43ae4a7c --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.d.ts @@ -0,0 +1,2 @@ 
+import { SourceData } from "@aws-sdk/types"; +export declare function isEmptyData(data: SourceData): boolean; diff --git a/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js b/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js new file mode 100644 index 00000000..4f31a618 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js @@ -0,0 +1,7 @@ +export function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +//# sourceMappingURL=isEmptyData.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js.map b/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js.map new file mode 100644 index 00000000..776ce2b9 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js.map @@ -0,0 +1 @@ +{"version":3,"file":"isEmptyData.js","sourceRoot":"","sources":["../../src/isEmptyData.ts"],"names":[],"mappings":"AAEA,MAAM,UAAU,WAAW,CAAC,IAAgB;IAC1C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC;KAC1B;IAED,OAAO,IAAI,CAAC,UAAU,KAAK,CAAC,CAAC;AAC/B,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.d.ts b/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.d.ts new file mode 100644 index 00000000..ec0e214d --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.d.ts @@ -0,0 +1,10 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private readonly secret?; + private key; + private toHash; + constructor(secret?: SourceData); + update(data: SourceData): void; + digest(): Promise<Uint8Array>; + reset(): void; +} diff --git a/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js b/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js new file mode 100644 index 00000000..d12acd01 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js @@ -0,0 +1,53 @@ +import { isEmptyData, convertToBuffer } from "@aws-crypto/util"; +import { EMPTY_DATA_SHA_256, SHA_256_HASH, SHA_256_HMAC_ALGO, } from "./constants"; +import { locateWindow } from "@aws-sdk/util-locate-window"; +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + this.toHash = new Uint8Array(0); + this.secret = secret; + this.reset(); + } + Sha256.prototype.update = function (data) { + if (isEmptyData(data)) { + return; + } + var update = convertToBuffer(data); + var typedArray = new Uint8Array(this.toHash.byteLength + update.byteLength); + typedArray.set(this.toHash, 0); + typedArray.set(update, this.toHash.byteLength); + this.toHash = typedArray; + }; + Sha256.prototype.digest = function () { + var _this = this; + if (this.key) { + return this.key.then(function (key) { + return locateWindow() + .crypto.subtle.sign(SHA_256_HMAC_ALGO, key, _this.toHash) + .then(function (data) { return new Uint8Array(data); }); + }); + } + if (isEmptyData(this.toHash)) { + return Promise.resolve(EMPTY_DATA_SHA_256); + } + return Promise.resolve() + .then(function () { + return locateWindow().crypto.subtle.digest(SHA_256_HASH, _this.toHash); + }) + .then(function (data) { return Promise.resolve(new Uint8Array(data)); }); + }; + Sha256.prototype.reset = function () { + var _this = this; + this.toHash = new Uint8Array(0); + if (this.secret && this.secret !== void 0) { +
this.key = new Promise(function (resolve, reject) { + locateWindow() + .crypto.subtle.importKey("raw", convertToBuffer(_this.secret), SHA_256_HMAC_ALGO, false, ["sign"]) + .then(resolve, reject); + }); + this.key.catch(function () { }); + } + }; + return Sha256; +}()); +export { Sha256 }; +//# sourceMappingURL=webCryptoSha256.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js.map b/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js.map new file mode 100644 index 00000000..c7b31c0e --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js.map @@ -0,0 +1 @@ +{"version":3,"file":"webCryptoSha256.js","sourceRoot":"","sources":["../../src/webCryptoSha256.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAChE,OAAO,EACL,kBAAkB,EAClB,YAAY,EACZ,iBAAiB,GAClB,MAAM,aAAa,CAAC;AACrB,OAAO,EAAE,YAAY,EAAE,MAAM,6BAA6B,CAAC;AAE3D;IAKE,gBAAY,MAAmB;QAFvB,WAAM,GAAe,IAAI,UAAU,CAAC,CAAC,CAAC,CAAC;QAG7C,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,KAAK,EAAE,CAAC;IACf,CAAC;IAED,uBAAM,GAAN,UAAO,IAAgB;QACrB,IAAI,WAAW,CAAC,IAAI,CAAC,EAAE;YACrB,OAAO;SACR;QAED,IAAM,MAAM,GAAG,eAAe,CAAC,IAAI,CAAC,CAAC;QACrC,IAAM,UAAU,GAAG,IAAI,UAAU,CAC/B,IAAI,CAAC,MAAM,CAAC,UAAU,GAAG,MAAM,CAAC,UAAU,CAC3C,CAAC;QACF,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;QAC/B,UAAU,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC;QAC/C,IAAI,CAAC,MAAM,GAAG,UAAU,CAAC;IAC3B,CAAC;IAED,uBAAM,GAAN;QAAA,iBAkBC;QAjBC,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,OAAO,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,UAAC,GAAG;gBACvB,OAAA,YAAY,EAAE;qBACX,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,iBAAiB,EAAE,GAAG,EAAE,KAAI,CAAC,MAAM,CAAC;qBACvD,IAAI,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,UAAU,CAAC,IAAI,CAAC,EAApB,CAAoB,CAAC;YAFvC,CAEuC,CACxC,CAAC;SACH;QAED,IAAI,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;YAC5B,OAAO,OAAO,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC;SAC5C;QAED,OAAO,OAAO,CAAC,OAAO,EAAE;aACrB,IAAI,CAAC;YACJ,OAAA,YAAY,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,YAAY,EAAE,KAAI,CAAC,MAAM,CAAC;QAA9D,CAA8D,CAC/D;aACA,IAAI,CAAC,UAAC,IAAI,IAAK,OAAA,OAAO,CAAC,OAAO,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC,EAArC,CAAqC,CAAC,CAAC;IAC3D,CAAC;IAED,sBAAK,GAAL;QAAA,iBAgBC;QAfC,IAAI,CAAC,MAAM,GAAG,IAAI,UAAU,CAAC,CAAC,CAAC,CAAC;QAChC,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,KAAK,KAAK,CAAC,EAAE;YACzC,IAAI,CAAC,GAAG,GAAG,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;gBACrC,YAAY,EAAE;qBACT,MAAM,CAAC,MAAM,CAAC,SAAS,CACxB,KAAK,EACL,eAAe,CAAC,KAAI,CAAC,MAAoB,CAAC,EAC1C,iBAAiB,EACjB,KAAK,EACL,CAAC,MAAM,CAAC,CACX;qBACI,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;YAC7B,CAAC,CAAC,CAAC;YACH,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,cAAO,CAAC,CAAC,CAAC;SAC1B;IACH,CAAC;IACH,aAAC;AAAD,CAAC,AA7DD,IA6DC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/LICENSE b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
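For orientation, the Sha256 class from @aws-crypto/sha256-browser vendored above is the hasher the AWS SDK uses when signing requests in the browser: it picks the WebCrypto implementation when supportsWebCrypto(locateWindow()) reports support and otherwise falls back to the pure-JS one. A minimal sketch of driving it directly, assuming the package resolves to the copy vendored here (the sha256Hex helper name is illustrative, not part of this change):

import { Sha256 } from "@aws-crypto/sha256-browser";

// Hash a payload and render the digest as lowercase hex.
// Constructing Sha256 with no secret computes a plain SHA-256 digest;
// passing a secret switches the class into HMAC-SHA256 mode instead.
async function sha256Hex(payload: string): Promise<string> {
  const hash = new Sha256();
  hash.update(payload); // accepts string | ArrayBuffer | ArrayBufferView
  const digest = await hash.digest(); // resolves to a Uint8Array
  return Array.from(digest)
    .map((byte) => byte.toString(16).padStart(2, "0"))
    .join("");
}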
\ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/README.md b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/README.md new file mode 100644 index 00000000..31853f24 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/README.md @@ -0,0 +1,10 @@ +# @smithy/is-array-buffer + +[![NPM version](https://img.shields.io/npm/v/@smithy/is-array-buffer/latest.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/is-array-buffer.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-cjs/index.js b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-cjs/index.js new file mode 100644 index 00000000..5d792e71 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-cjs/index.js @@ -0,0 +1,32 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isArrayBuffer: () => isArrayBuffer +}); +module.exports = __toCommonJS(src_exports); +var isArrayBuffer = /* @__PURE__ */ __name((arg) => typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer || Object.prototype.toString.call(arg) === "[object ArrayBuffer]", "isArrayBuffer"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + isArrayBuffer +}); + diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-es/index.js b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-es/index.js new file mode 100644 index 00000000..8096cca3 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-es/index.js @@ -0,0 +1,2 @@ +export const isArrayBuffer = (arg) => (typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer) || + Object.prototype.toString.call(arg) === "[object ArrayBuffer]"; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts new file mode 100644 index 00000000..64f452e7 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git 
a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..ca8fd6bd --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/package.json b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/package.json new file mode 100644 index 00000000..ed8affc7 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/is-array-buffer", + "version": "2.2.0", + "description": "Provides a function for detecting if an argument is an ArrayBuffer", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline is-array-buffer", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/is-array-buffer", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/is-array-buffer" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/LICENSE b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
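The isArrayBuffer guard in the vendored @smithy/is-array-buffer above is small but deliberate: an instanceof check alone misses ArrayBuffers created in another realm (an iframe or a worker), so the implementation also accepts anything whose Object.prototype.toString tag is "[object ArrayBuffer]". A short sketch of the resulting behavior, assuming the package resolves to this vendored copy:

import { isArrayBuffer } from "@smithy/is-array-buffer";

isArrayBuffer(new ArrayBuffer(8));       // true
isArrayBuffer(new Uint8Array(8));        // false: a view over a buffer, not a buffer
isArrayBuffer(new Uint8Array(8).buffer); // true: the view's underlying buffer
isArrayBuffer("not a buffer");           // false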
\ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/README.md b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/README.md new file mode 100644 index 00000000..c896b04a --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/README.md @@ -0,0 +1,10 @@ +# @smithy/util-buffer-from + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-buffer-from/latest.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-buffer-from.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-cjs/index.js b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-cjs/index.js new file mode 100644 index 00000000..c6738d94 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-cjs/index.js @@ -0,0 +1,47 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromArrayBuffer: () => fromArrayBuffer, + fromString: () => fromString +}); +module.exports = __toCommonJS(src_exports); +var import_is_array_buffer = require("@smithy/is-array-buffer"); +var import_buffer = require("buffer"); +var fromArrayBuffer = /* @__PURE__ */ __name((input, offset = 0, length = input.byteLength - offset) => { + if (!(0, import_is_array_buffer.isArrayBuffer)(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); + } + return import_buffer.Buffer.from(input, offset, length); +}, "fromArrayBuffer"); +var fromString = /* @__PURE__ */ __name((input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? 
import_buffer.Buffer.from(input, encoding) : import_buffer.Buffer.from(input); +}, "fromString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromArrayBuffer, + fromString +}); + diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-es/index.js b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-es/index.js new file mode 100644 index 00000000..718f8315 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-es/index.js @@ -0,0 +1,14 @@ +import { isArrayBuffer } from "@smithy/is-array-buffer"; +import { Buffer } from "buffer"; +export const fromArrayBuffer = (input, offset = 0, length = input.byteLength - offset) => { + if (!isArrayBuffer(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); + } + return Buffer.from(input, offset, length); +}; +export const fromString = (input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? Buffer.from(input, encoding) : Buffer.from(input); +}; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts new file mode 100644 index 00000000..a523134a --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..f9173f74 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/package.json b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/package.json new file mode 100644 index 00000000..a12e51cc --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/util-buffer-from", + "version": "2.2.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-buffer-from", + "build:es": "yarn g:tsc -p 
tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^14.14.31", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-buffer-from", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-buffer-from" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/LICENSE b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
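The pair of helpers in the vendored @smithy/util-buffer-from above wrap Buffer.from with explicit input validation, so a wrong argument type fails with a TypeError instead of being silently coerced; note that Buffer.from over an ArrayBuffer shares the underlying memory rather than copying it. A minimal sketch, assuming the package resolves to this vendored copy:

import { fromArrayBuffer, fromString } from "@smithy/util-buffer-from";

const bytes = new Uint8Array([104, 105]); // "hi" in UTF-8
fromArrayBuffer(bytes.buffer).toString("utf8"); // "hi" (zero-copy view over the same memory)
fromArrayBuffer(bytes.buffer, 1, 1).toString(); // "i" (offset and length are optional)
fromString("hi", "utf8");                       // <Buffer 68 69>
// fromArrayBuffer(bytes) throws a TypeError: a Uint8Array is a view, not an ArrayBuffer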
\ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/README.md b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/README.md new file mode 100644 index 00000000..fc5db6d8 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/README.md @@ -0,0 +1,4 @@ +# @smithy/util-utf8 + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-utf8/latest.svg)](https://www.npmjs.com/package/@smithy/util-utf8) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-utf8.svg)](https://www.npmjs.com/package/@smithy/util-utf8) diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/index.js b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/index.js new file mode 100644 index 00000000..0b22680a --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/index.js @@ -0,0 +1,65 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromUtf8: () => fromUtf8, + toUint8Array: () => toUint8Array, + toUtf8: () => toUtf8 +}); +module.exports = __toCommonJS(src_exports); + +// src/fromUtf8.ts +var import_util_buffer_from = require("@smithy/util-buffer-from"); +var fromUtf8 = /* @__PURE__ */ __name((input) => { + const buf = (0, import_util_buffer_from.fromString)(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}, "fromUtf8"); + +// src/toUint8Array.ts +var toUint8Array = /* @__PURE__ */ __name((data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, 
data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}, "toUint8Array"); + +// src/toUtf8.ts + +var toUtf8 = /* @__PURE__ */ __name((input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return (0, import_util_buffer_from.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}, "toUtf8"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromUtf8, + toUint8Array, + toUtf8 +}); + diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js new file mode 100644 index 00000000..73441900 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js @@ -0,0 +1 @@ +export const fromUtf8 = (input) => new TextEncoder().encode(input); diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js new file mode 100644 index 00000000..6dc438b3 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js @@ -0,0 +1,5 @@ +import { fromString } from "@smithy/util-buffer-from"; +export const fromUtf8 = (input) => { + const buf = fromString(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/index.js b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/index.js new file mode 100644 index 00000000..00ba4657 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from 
"./toUint8Array"; +export * from "./toUtf8"; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js new file mode 100644 index 00000000..2cd36f75 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js @@ -0,0 +1,10 @@ +import { fromUtf8 } from "./fromUtf8"; +export const toUint8Array = (data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js new file mode 100644 index 00000000..c2921278 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js @@ -0,0 +1,9 @@ +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return new TextDecoder("utf-8").decode(input); +}; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.js b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.js new file mode 100644 index 00000000..7be8745a --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.js @@ -0,0 +1,10 @@ +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return fromArrayBuffer(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts new file mode 100644 index 00000000..dd919817 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts new file mode 100644 index 00000000..dd919817 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/index.d.ts b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/index.d.ts new file mode 100644 index 00000000..00ba4657 --- /dev/null +++ 
b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts new file mode 100644 index 00000000..11b6342e --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts new file mode 100644 index 00000000..8494acd8 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts new file mode 100644 index 00000000..8494acd8 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. 
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts new file mode 100644 index 00000000..39f3d6dd --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts new file mode 100644 index 00000000..39f3d6dd --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..ef9761d7 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts new file mode 100644 index 00000000..562fe101 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts new file mode 100644 index 00000000..33511ad7 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts new file mode 100644 index 00000000..33511ad7 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. 
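+ * Illustrative example (not in upstream source): toUtf8("abc") returns "abc" unchanged, while toUtf8(new Uint8Array([0xe2, 0x9c, 0x93])) decodes those bytes as UTF-8 and returns "✓".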
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/package.json b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/package.json new file mode 100644 index 00000000..78bfb4df --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-utf8", + "version": "2.3.0", + "description": "A UTF-8 string <-> UInt8Array converter", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-utf8", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/fromUtf8": "./dist-es/fromUtf8.browser", + "./dist-es/toUtf8": "./dist-es/toUtf8.browser" + }, + "react-native": {}, + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-utf8", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-utf8" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-browser/package.json b/node_modules/@aws-crypto/sha256-browser/package.json new file mode 100644 index 00000000..2688ecf9 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/package.json @@ -0,0 +1,33 @@ +{ + "name": "@aws-crypto/sha256-browser", + "version": "5.2.0", + "scripts": { + "prepublishOnly": "tsc -p tsconfig.json && tsc -p tsconfig.module.json", + "pretest": "tsc -p tsconfig.test.json", + "test": "mocha --require ts-node/register test/**/*test.ts" + }, + "repository": { + "type": "git", + "url": "git@github.com:aws/aws-sdk-js-crypto-helpers.git" + }, + "author": { + "name": "AWS Crypto Tools Team", + "email": "aws-cryptools@amazon.com", + "url": "https://docs.aws.amazon.com/aws-crypto-tools/index.html?id=docs_gateway#lang/en_us" + }, + "homepage": "https://github.com/aws/aws-sdk-js-crypto-helpers/tree/master/packages/sha256-browser", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-js": "^5.2.0", + "@aws-crypto/supports-web-crypto": "^5.2.0", + "@aws-crypto/util": "^5.2.0", + 
"@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-locate-window": "^3.0.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + }, + "main": "./build/main/index.js", + "module": "./build/module/index.js", + "types": "./build/main/index.d.ts", + "gitHead": "c11b171b35ec5c093364f0e0d8dc4ab1af68e748" +} diff --git a/node_modules/@aws-crypto/sha256-browser/src/constants.ts b/node_modules/@aws-crypto/sha256-browser/src/constants.ts new file mode 100644 index 00000000..7f68e2ac --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/src/constants.ts @@ -0,0 +1,41 @@ +export const SHA_256_HASH: { name: "SHA-256" } = { name: "SHA-256" }; + +export const SHA_256_HMAC_ALGO: { name: "HMAC"; hash: { name: "SHA-256" } } = { + name: "HMAC", + hash: SHA_256_HASH +}; + +export const EMPTY_DATA_SHA_256 = new Uint8Array([ + 227, + 176, + 196, + 66, + 152, + 252, + 28, + 20, + 154, + 251, + 244, + 200, + 153, + 111, + 185, + 36, + 39, + 174, + 65, + 228, + 100, + 155, + 147, + 76, + 164, + 149, + 153, + 27, + 120, + 82, + 184, + 85 +]); diff --git a/node_modules/@aws-crypto/sha256-browser/src/crossPlatformSha256.ts b/node_modules/@aws-crypto/sha256-browser/src/crossPlatformSha256.ts new file mode 100644 index 00000000..8cb9ff06 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/src/crossPlatformSha256.ts @@ -0,0 +1,30 @@ +import { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; +import { Sha256 as JsSha256 } from "@aws-crypto/sha256-js"; +import { Checksum, SourceData } from "@aws-sdk/types"; +import { supportsWebCrypto } from "@aws-crypto/supports-web-crypto"; +import { locateWindow } from "@aws-sdk/util-locate-window"; +import { convertToBuffer } from "@aws-crypto/util"; + +export class Sha256 implements Checksum { + private hash: Checksum; + + constructor(secret?: SourceData) { + if (supportsWebCrypto(locateWindow())) { + this.hash = new WebCryptoSha256(secret); + } else { + this.hash = new JsSha256(secret); + } + } + + update(data: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void { + this.hash.update(convertToBuffer(data)); + } + + digest(): Promise { + return this.hash.digest(); + } + + reset(): void { + this.hash.reset(); + } +} diff --git a/node_modules/@aws-crypto/sha256-browser/src/index.ts b/node_modules/@aws-crypto/sha256-browser/src/index.ts new file mode 100644 index 00000000..60ab3973 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/src/index.ts @@ -0,0 +1,2 @@ +export * from "./crossPlatformSha256"; +export { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; diff --git a/node_modules/@aws-crypto/sha256-browser/src/isEmptyData.ts b/node_modules/@aws-crypto/sha256-browser/src/isEmptyData.ts new file mode 100644 index 00000000..538971f4 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/src/isEmptyData.ts @@ -0,0 +1,9 @@ +import { SourceData } from "@aws-sdk/types"; + +export function isEmptyData(data: SourceData): boolean { + if (typeof data === "string") { + return data.length === 0; + } + + return data.byteLength === 0; +} diff --git a/node_modules/@aws-crypto/sha256-browser/src/webCryptoSha256.ts b/node_modules/@aws-crypto/sha256-browser/src/webCryptoSha256.ts new file mode 100644 index 00000000..fe4db571 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/src/webCryptoSha256.ts @@ -0,0 +1,71 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +import { isEmptyData, convertToBuffer } from "@aws-crypto/util"; +import { + EMPTY_DATA_SHA_256, + SHA_256_HASH, + SHA_256_HMAC_ALGO, +} from "./constants"; +import { locateWindow } 
from "@aws-sdk/util-locate-window"; + +export class Sha256 implements Checksum { + private readonly secret?: SourceData; + private key: Promise | undefined; + private toHash: Uint8Array = new Uint8Array(0); + + constructor(secret?: SourceData) { + this.secret = secret; + this.reset(); + } + + update(data: SourceData): void { + if (isEmptyData(data)) { + return; + } + + const update = convertToBuffer(data); + const typedArray = new Uint8Array( + this.toHash.byteLength + update.byteLength + ); + typedArray.set(this.toHash, 0); + typedArray.set(update, this.toHash.byteLength); + this.toHash = typedArray; + } + + digest(): Promise { + if (this.key) { + return this.key.then((key) => + locateWindow() + .crypto.subtle.sign(SHA_256_HMAC_ALGO, key, this.toHash) + .then((data) => new Uint8Array(data)) + ); + } + + if (isEmptyData(this.toHash)) { + return Promise.resolve(EMPTY_DATA_SHA_256); + } + + return Promise.resolve() + .then(() => + locateWindow().crypto.subtle.digest(SHA_256_HASH, this.toHash) + ) + .then((data) => Promise.resolve(new Uint8Array(data))); + } + + reset(): void { + this.toHash = new Uint8Array(0); + if (this.secret && this.secret !== void 0) { + this.key = new Promise((resolve, reject) => { + locateWindow() + .crypto.subtle.importKey( + "raw", + convertToBuffer(this.secret as SourceData), + SHA_256_HMAC_ALGO, + false, + ["sign"] + ) + .then(resolve, reject); + }); + this.key.catch(() => {}); + } + } +} diff --git a/node_modules/@aws-crypto/sha256-browser/tsconfig.json b/node_modules/@aws-crypto/sha256-browser/tsconfig.json new file mode 100644 index 00000000..fb9aa95f --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "rootDir": "./src", + "outDir": "./build/main", + "lib": ["dom"], + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules/**"] +} diff --git a/node_modules/@aws-crypto/sha256-browser/tsconfig.module.json b/node_modules/@aws-crypto/sha256-browser/tsconfig.module.json new file mode 100644 index 00000000..7d0cfddc --- /dev/null +++ b/node_modules/@aws-crypto/sha256-browser/tsconfig.module.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "outDir": "build/module", + "module": "esnext", + } +} diff --git a/node_modules/@aws-crypto/sha256-js/CHANGELOG.md b/node_modules/@aws-crypto/sha256-js/CHANGELOG.md new file mode 100644 index 00000000..97c1f60a --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/CHANGELOG.md @@ -0,0 +1,106 @@ +# Change Log + +All notable changes to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+ +# [5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.1.0...v5.2.0) (2023-10-16) + +### Features + +- support ESM artifacts in all packages ([#752](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/752)) ([e930ffb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e930ffba5cfef66dd242049e7d514ced232c1e3b)) + +# [5.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.0.0...v5.1.0) (2023-09-22) + +### Bug Fixes + +- Update tsc to 2.x ([#735](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/735)) ([782e0de](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/782e0de9f5fef41f694130580a69d940894b6b8c)) + +# [5.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v4.0.1...v5.0.0) (2023-07-13) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [4.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v3.0.0...v4.0.0) (2023-02-20) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.2...v3.0.0) (2023-01-12) + +### Bug Fixes + +- **docs:** sha256 packages, clarify hmac support ([#455](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/455)) ([1be5043](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/1be5043325991f3f5ccb52a8dd928f004b4d442e)) + +- feat!: replace Hash implementations with Checksum interface (#492) ([da43dc0](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/da43dc0fdf669d9ebb5bfb1b1f7c79e46c4aaae1)), closes [#492](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/492) + +### BREAKING CHANGES + +- All classes that implemented `Hash` now implement `Checksum`. + +## [2.0.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.1...v2.0.2) (2022-09-07) + +### Bug Fixes + +- **#337:** update @aws-sdk/types ([#373](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/373)) ([b26a811](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/b26a811a392f5209c7ec7e57251500d4d78f97ff)), closes [#337](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/337) + +## [2.0.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.0...v2.0.1) (2021-12-09) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [2.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.2...v2.0.0) (2021-10-25) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +## [1.2.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.1...v1.2.2) (2021-10-12) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +## [1.2.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.0...v1.2.1) (2021-09-17) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [1.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.1.1...v1.2.0) (2021-09-17) + +### Features + +- add @aws-crypto/util ([8f489cb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/8f489cbe4c0e134f826bac66f1bf5172597048b9)) + +# [1.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-js@1.0.0...@aws-crypto/sha256-js@1.1.0) (2021-01-13) + +### Bug Fixes + +- remove package lock ([6002a5a](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6002a5ab9218dc8798c19dc205d3eebd3bec5b43)) +- **aws-crypto:** export explicit dependencies on [@aws-types](https://github.com/aws-types) ([6a1873a](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6a1873a4dcc2aaa4a1338595703cfa7099f17b8c)) +- **deps-dev:** move @aws-sdk/types to 
devDependencies ([#188](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/188)) ([08efdf4](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/08efdf46dcc612d88c441e29945d787f253ee77d)) + +# [1.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-js@1.0.0-alpha.0...@aws-crypto/sha256-js@1.0.0) (2020-10-22) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [1.0.0-alpha.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-js@0.1.0-preview.4...@aws-crypto/sha256-js@1.0.0-alpha.0) (2020-02-07) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [0.1.0-preview.4](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-js@0.1.0-preview.2...@aws-crypto/sha256-js@0.1.0-preview.4) (2020-01-16) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- es2015.iterable required ([#10](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/10)) ([6e08d83](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6e08d83c33667ad8cbeeaaa7cedf1bbe05f79ed8)) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) ([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.3](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-js@0.1.0-preview.2...@aws-crypto/sha256-js@0.1.0-preview.3) (2019-11-15) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- es2015.iterable required ([#10](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/10)) ([6e08d83](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6e08d83c33667ad8cbeeaaa7cedf1bbe05f79ed8)) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) ([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.2](https://github.com/aws/aws-javascript-crypto-helpers/compare/@aws-crypto/sha256-js@0.1.0-preview.1...@aws-crypto/sha256-js@0.1.0-preview.2) (2019-10-30) + +### Bug Fixes + +- remove /src/ from .npmignore (for sourcemaps) ([#5](https://github.com/aws/aws-javascript-crypto-helpers/issues/5)) ([ec52056](https://github.com/aws/aws-javascript-crypto-helpers/commit/ec52056)) + +### Features + +- **sha256-js:** expose synchronous digest ([#7](https://github.com/aws/aws-javascript-crypto-helpers/issues/7)) ([9edaef7](https://github.com/aws/aws-javascript-crypto-helpers/commit/9edaef7)), closes [#6](https://github.com/aws/aws-javascript-crypto-helpers/issues/6) diff --git a/node_modules/@aws-crypto/sha256-js/LICENSE b/node_modules/@aws-crypto/sha256-js/LICENSE new file mode 100644 index 00000000..ad410e11 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/LICENSE @@ -0,0 +1,201 @@ 
+Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-js/README.md b/node_modules/@aws-crypto/sha256-js/README.md new file mode 100644 index 00000000..f769f5b0 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/README.md @@ -0,0 +1,29 @@ +# crypto-sha256-js + +A pure JS implementation of SHA-256. + +## Usage + +- To hash "some data" +``` +import {Sha256} from '@aws-crypto/sha256-js'; + +const hash = new Sha256(); +hash.update('some data'); +const result = await hash.digest(); + +``` + +- To hmac "some data" with "a key" +``` +import {Sha256} from '@aws-crypto/sha256-js'; + +const hash = new Sha256('a key'); +hash.update('some data'); +const result = await hash.digest(); + +``` + +## Test + +`npm test` diff --git a/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.d.ts b/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.d.ts new file mode 100644 index 00000000..1f580b25 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.d.ts @@ -0,0 +1,17 @@ +/** + * @internal + */ +export declare class RawSha256 { + private state; + private temp; + private buffer; + private bufferLength; + private bytesHashed; + /** + * @internal + */ + finished: boolean; + update(data: Uint8Array): void; + digest(): Uint8Array; + private hashBuffer; +} diff --git a/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js b/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js new file mode 100644 index 00000000..68ceaccd --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js @@ -0,0 +1,124 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RawSha256 = void 0; +var constants_1 = require("./constants"); +/** + * @internal + */ +var RawSha256 = /** @class */ (function () { + function RawSha256() { + this.state = Int32Array.from(constants_1.INIT); + this.temp = new Int32Array(64); + this.buffer = new Uint8Array(64); + this.bufferLength = 0; + this.bytesHashed = 0; + /** + * @internal + */ + this.finished = false; + } + RawSha256.prototype.update = function (data) { + if (this.finished) { + throw new Error("Attempted to update an already finished hash."); + } + var position = 0; + var byteLength = data.byteLength; + this.bytesHashed += byteLength; + if (this.bytesHashed * 8 > constants_1.MAX_HASHABLE_LENGTH) { + throw new Error("Cannot hash more than 2^53 - 1 bits"); + } + while (byteLength > 0) { + this.buffer[this.bufferLength++] = data[position++]; + byteLength--; + if (this.bufferLength === constants_1.BLOCK_SIZE) { + this.hashBuffer(); + this.bufferLength = 0; + } + } + }; + RawSha256.prototype.digest = function () { + if (!this.finished) { + var bitsHashed = this.bytesHashed * 8; + var bufferView = new DataView(this.buffer.buffer, this.buffer.byteOffset, this.buffer.byteLength); + var undecoratedLength = this.bufferLength; + bufferView.setUint8(this.bufferLength++, 0x80); + // Ensure the final block has enough room for the hashed length + if (undecoratedLength % constants_1.BLOCK_SIZE >= constants_1.BLOCK_SIZE - 8) { + for (var i = this.bufferLength; i < constants_1.BLOCK_SIZE; i++) { +
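// Added comment (not in upstream source): the 0x80 terminator left no room in this block for the 8-byte message length, so zero-fill the rest, compress it, and write the length into a fresh block. +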
bufferView.setUint8(i, 0); + } + this.hashBuffer(); + this.bufferLength = 0; + } + for (var i = this.bufferLength; i < constants_1.BLOCK_SIZE - 8; i++) { + bufferView.setUint8(i, 0); + } + bufferView.setUint32(constants_1.BLOCK_SIZE - 8, Math.floor(bitsHashed / 0x100000000), true); + bufferView.setUint32(constants_1.BLOCK_SIZE - 4, bitsHashed); + this.hashBuffer(); + this.finished = true; + } + // The value in state is little-endian rather than big-endian, so flip + // each word into a new Uint8Array + var out = new Uint8Array(constants_1.DIGEST_LENGTH); + for (var i = 0; i < 8; i++) { + out[i * 4] = (this.state[i] >>> 24) & 0xff; + out[i * 4 + 1] = (this.state[i] >>> 16) & 0xff; + out[i * 4 + 2] = (this.state[i] >>> 8) & 0xff; + out[i * 4 + 3] = (this.state[i] >>> 0) & 0xff; + } + return out; + }; + RawSha256.prototype.hashBuffer = function () { + var _a = this, buffer = _a.buffer, state = _a.state; + var state0 = state[0], state1 = state[1], state2 = state[2], state3 = state[3], state4 = state[4], state5 = state[5], state6 = state[6], state7 = state[7]; + for (var i = 0; i < constants_1.BLOCK_SIZE; i++) { + if (i < 16) { + this.temp[i] = + ((buffer[i * 4] & 0xff) << 24) | + ((buffer[i * 4 + 1] & 0xff) << 16) | + ((buffer[i * 4 + 2] & 0xff) << 8) | + (buffer[i * 4 + 3] & 0xff); + } + else { + var u = this.temp[i - 2]; + var t1_1 = ((u >>> 17) | (u << 15)) ^ ((u >>> 19) | (u << 13)) ^ (u >>> 10); + u = this.temp[i - 15]; + var t2_1 = ((u >>> 7) | (u << 25)) ^ ((u >>> 18) | (u << 14)) ^ (u >>> 3); + this.temp[i] = + ((t1_1 + this.temp[i - 7]) | 0) + ((t2_1 + this.temp[i - 16]) | 0); + } + var t1 = ((((((state4 >>> 6) | (state4 << 26)) ^ + ((state4 >>> 11) | (state4 << 21)) ^ + ((state4 >>> 25) | (state4 << 7))) + + ((state4 & state5) ^ (~state4 & state6))) | + 0) + + ((state7 + ((constants_1.KEY[i] + this.temp[i]) | 0)) | 0)) | + 0; + var t2 = ((((state0 >>> 2) | (state0 << 30)) ^ + ((state0 >>> 13) | (state0 << 19)) ^ + ((state0 >>> 22) | (state0 << 10))) + + ((state0 & state1) ^ (state0 & state2) ^ (state1 & state2))) | + 0; + state7 = state6; + state6 = state5; + state5 = state4; + state4 = (state3 + t1) | 0; + state3 = state2; + state2 = state1; + state1 = state0; + state0 = (t1 + t2) | 0; + } + state[0] += state0; + state[1] += state1; + state[2] += state2; + state[3] += state3; + state[4] += state4; + state[5] += state5; + state[6] += state6; + state[7] += state7; + }; + return RawSha256; +}()); +exports.RawSha256 = RawSha256; +//# sourceMappingURL=RawSha256.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js.map b/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js.map new file mode 100644 index 00000000..81659f51 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"RawSha256.js","sourceRoot":"","sources":["../../src/RawSha256.ts"],"names":[],"mappings":";;;AAAA,yCAMqB;AAErB;;GAEG;AACH;IAAA;QACU,UAAK,GAAe,UAAU,CAAC,IAAI,CAAC,gBAAI,CAAC,CAAC;QAC1C,SAAI,GAAe,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;QACtC,WAAM,GAAe,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;QACxC,iBAAY,GAAW,CAAC,CAAC;QACzB,gBAAW,GAAW,CAAC,CAAC;QAEhC;;WAEG;QACH,aAAQ,GAAY,KAAK,CAAC;IA8I5B,CAAC;IA5IC,0BAAM,GAAN,UAAO,IAAgB;QACrB,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;SAClE;QAED,IAAI,QAAQ,GAAG,CAAC,CAAC;QACX,IAAA,UAAU,GAAK,IAAI,WAAT,CAAU;QAC1B,IAAI,CAAC,WAAW,IAAI,UAAU,CAAC;QAE/B,IAAI,IAAI,CAAC,WAAW,GAAG,CAAC,GAAG,+BAAmB,EAAE;YAC9C,MAAM,IAAI,KAAK,CAAC,qCAAqC,CAAC,CAAC;SACxD;QAED,OAAO,UAAU,GAAG,CAAC,EAAE;YACrB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;YACpD,UAAU,EAAE,CAAC;YAEb,IAAI,IAAI,CAAC,YAAY,KAAK,sBAAU,EAAE;gBACpC,IAAI,CAAC,UAAU,EAAE,CAAC;gBAClB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;aACvB;SACF;IACH,CAAC;IAED,0BAAM,GAAN;QACE,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE;YAClB,IAAM,UAAU,GAAG,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC;YACxC,IAAM,UAAU,GAAG,IAAI,QAAQ,CAC7B,IAAI,CAAC,MAAM,CAAC,MAAM,EAClB,IAAI,CAAC,MAAM,CAAC,UAAU,EACtB,IAAI,CAAC,MAAM,CAAC,UAAU,CACvB,CAAC;YAEF,IAAM,iBAAiB,GAAG,IAAI,CAAC,YAAY,CAAC;YAC5C,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,EAAE,IAAI,CAAC,CAAC;YAE/C,+DAA+D;YAC/D,IAAI,iBAAiB,GAAG,sBAAU,IAAI,sBAAU,GAAG,CAAC,EAAE;gBACpD,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,sBAAU,EAAE,CAAC,EAAE,EAAE;oBACnD,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;iBAC3B;gBACD,IAAI,CAAC,UAAU,EAAE,CAAC;gBAClB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;aACvB;YAED,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,sBAAU,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;gBACvD,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;aAC3B;YACD,UAAU,CAAC,SAAS,CAClB,sBAAU,GAAG,CAAC,EACd,IAAI,CAAC,KAAK,CAAC,UAAU,GAAG,WAAW,CAAC,EACpC,IAAI,CACL,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,sBAAU,GAAG,CAAC,EAAE,UAAU,CAAC,CAAC;YAEjD,IAAI,CAAC,UAAU,EAAE,CAAC;YAElB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC;SACtB;QAED,sEAAsE;QACtE,kCAAkC;QAClC,IAAM,GAAG,GAAG,IAAI,UAAU,CAAC,yBAAa,CAAC,CAAC;QAC1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;YAC1B,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,CAAC;YAC3C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,CAAC;YAC/C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,IAAI,CAAC;YAC9C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,IAAI,CAAC;SAC/C;QAED,OAAO,GAAG,CAAC;IACb,CAAC;IAEO,8BAAU,GAAlB;QACQ,IAAA,KAAoB,IAAI,EAAtB,MAAM,YAAA,EAAE,KAAK,WAAS,CAAC;QAE/B,IAAI,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACnB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QAEpB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,sBAAU,EAAE,CAAC,EAAE,EAAE;YACnC,IAAI,CAAC,GAAG,EAAE,EAAE;gBACV,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;oBACV,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;wBAC9B,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;wBAClC,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC;wBACjC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;aAC9B;iBAAM;gBACL,IAAI,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;gBACzB,IAAM,
IAAE,GACN,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC;gBAEnE,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC;gBACtB,IAAM,IAAE,GACN,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;gBAEjE,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;oBACV,CAAC,CAAC,IAAE,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,IAAE,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;aAClE;YAED,IAAM,EAAE,GACN,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBACnC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC;gBACzC,CAAC,CAAC;gBACF,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,eAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;gBACjD,CAAC,CAAC;YAEJ,IAAM,EAAE,GACN,CAAC,CAAC,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBACjC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC,CAAC;gBACnC,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC;gBAC9D,CAAC,CAAC;YAEJ,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC;YAC3B,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC;SACxB;QAED,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;IACrB,CAAC;IACH,gBAAC;AAAD,CAAC,AAxJD,IAwJC;AAxJY,8BAAS"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-js/build/main/constants.d.ts b/node_modules/@aws-crypto/sha256-js/build/main/constants.d.ts new file mode 100644 index 00000000..63bd764e --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/build/main/constants.d.ts @@ -0,0 +1,20 @@ +/** + * @internal + */ +export declare const BLOCK_SIZE: number; +/** + * @internal + */ +export declare const DIGEST_LENGTH: number; +/** + * @internal + */ +export declare const KEY: Uint32Array; +/** + * @internal + */ +export declare const INIT: number[]; +/** + * @internal + */ +export declare const MAX_HASHABLE_LENGTH: number; diff --git a/node_modules/@aws-crypto/sha256-js/build/main/constants.js b/node_modules/@aws-crypto/sha256-js/build/main/constants.js new file mode 100644 index 00000000..c83aa099 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/build/main/constants.js @@ -0,0 +1,98 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MAX_HASHABLE_LENGTH = exports.INIT = exports.KEY = exports.DIGEST_LENGTH = exports.BLOCK_SIZE = void 0; +/** + * @internal + */ +exports.BLOCK_SIZE = 64; +/** + * @internal + */ +exports.DIGEST_LENGTH = 32; +/** + * @internal + */ +exports.KEY = new Uint32Array([ + 0x428a2f98, + 0x71374491, + 0xb5c0fbcf, + 0xe9b5dba5, + 0x3956c25b, + 0x59f111f1, + 0x923f82a4, 
+ 0xab1c5ed5, + 0xd807aa98, + 0x12835b01, + 0x243185be, + 0x550c7dc3, + 0x72be5d74, + 0x80deb1fe, + 0x9bdc06a7, + 0xc19bf174, + 0xe49b69c1, + 0xefbe4786, + 0x0fc19dc6, + 0x240ca1cc, + 0x2de92c6f, + 0x4a7484aa, + 0x5cb0a9dc, + 0x76f988da, + 0x983e5152, + 0xa831c66d, + 0xb00327c8, + 0xbf597fc7, + 0xc6e00bf3, + 0xd5a79147, + 0x06ca6351, + 0x14292967, + 0x27b70a85, + 0x2e1b2138, + 0x4d2c6dfc, + 0x53380d13, + 0x650a7354, + 0x766a0abb, + 0x81c2c92e, + 0x92722c85, + 0xa2bfe8a1, + 0xa81a664b, + 0xc24b8b70, + 0xc76c51a3, + 0xd192e819, + 0xd6990624, + 0xf40e3585, + 0x106aa070, + 0x19a4c116, + 0x1e376c08, + 0x2748774c, + 0x34b0bcb5, + 0x391c0cb3, + 0x4ed8aa4a, + 0x5b9cca4f, + 0x682e6ff3, + 0x748f82ee, + 0x78a5636f, + 0x84c87814, + 0x8cc70208, + 0x90befffa, + 0xa4506ceb, + 0xbef9a3f7, + 0xc67178f2 +]); +/** + * @internal + */ +exports.INIT = [ + 0x6a09e667, + 0xbb67ae85, + 0x3c6ef372, + 0xa54ff53a, + 0x510e527f, + 0x9b05688c, + 0x1f83d9ab, + 0x5be0cd19 +]; +/** + * @internal + */ +exports.MAX_HASHABLE_LENGTH = Math.pow(2, 53) - 1; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-js/build/main/constants.js.map b/node_modules/@aws-crypto/sha256-js/build/main/constants.js.map new file mode 100644 index 00000000..1132c126 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/build/main/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":";;;AAAA;;GAEG;AACU,QAAA,UAAU,GAAW,EAAE,CAAC;AAErC;;GAEG;AACU,QAAA,aAAa,GAAW,EAAE,CAAC;AAExC;;GAEG;AACU,QAAA,GAAG,GAAG,IAAI,WAAW,CAAC;IACjC,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;CACX,CAAC,CAAC;AAEH;;GAEG;AACU,QAAA,IAAI,GAAG;IAClB,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;CACX,CAAC;AAEF;;GAEG;AACU,QAAA,mBAAmB,GAAG,SAAA,CAAC,EAAI,EAAE,CAAA,GAAG,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-js/build/main/index.d.ts b/node_modules/@aws-crypto/sha256-js/build/main/index.d.ts new file mode 100644 index 00000000..4554d8a3 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/build/main/index.d.ts @@ -0,0 +1 @@ +export * from "./jsSha256"; diff --git a/node_modules/@aws-crypto/sha256-js/build/main/index.js b/node_modules/@aws-crypto/sha256-js/build/main/index.js new file mode 100644 index 00000000..4329f109 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/build/main/index.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var tslib_1 = require("tslib"); +tslib_1.__exportStar(require("./jsSha256"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-js/build/main/index.js.map b/node_modules/@aws-crypto/sha256-js/build/main/index.js.map new file mode 100644 index 00000000..9f97d549 --- /dev/null +++ 
b/node_modules/@aws-crypto/sha256-js/build/main/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;AAAA,qDAA2B"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.d.ts b/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.d.ts new file mode 100644 index 00000000..d813b256 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.d.ts @@ -0,0 +1,12 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private readonly secret?; + private hash; + private outer?; + private error; + constructor(secret?: SourceData); + update(toHash: SourceData): void; + digestSync(): Uint8Array; + digest(): Promise<Uint8Array>; + reset(): void; +} diff --git a/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js b/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js new file mode 100644 index 00000000..2a4f2f19 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js @@ -0,0 +1,85 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Sha256 = void 0; +var tslib_1 = require("tslib"); +var constants_1 = require("./constants"); +var RawSha256_1 = require("./RawSha256"); +var util_1 = require("@aws-crypto/util"); +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + this.secret = secret; + this.hash = new RawSha256_1.RawSha256(); + this.reset(); + } + Sha256.prototype.update = function (toHash) { + if ((0, util_1.isEmptyData)(toHash) || this.error) { + return; + } + try { + this.hash.update((0, util_1.convertToBuffer)(toHash)); + } + catch (e) { + this.error = e; + } + }; + /* This synchronous method keeps compatibility + * with the v2 aws-sdk. + */ + Sha256.prototype.digestSync = function () { + if (this.error) { + throw this.error; + } + if (this.outer) { + if (!this.outer.finished) { + this.outer.update(this.hash.digest()); + } + return this.outer.digest(); + } + return this.hash.digest(); + }; + /* The underlying digest method here is synchronous. + * To keep the same interface with the other hash functions + * the default is to expose this as an async method. + * However, it can sometimes be useful to have a sync method.
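+ * Illustrative example (not in upstream source), for a hypothetical instance sha256: const bytes = sha256.digestSync(); // same result as await sha256.digest()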
+ */ + Sha256.prototype.digest = function () { + return tslib_1.__awaiter(this, void 0, void 0, function () { + return tslib_1.__generator(this, function (_a) { + return [2 /*return*/, this.digestSync()]; + }); + }); + }; + Sha256.prototype.reset = function () { + this.hash = new RawSha256_1.RawSha256(); + if (this.secret) { + this.outer = new RawSha256_1.RawSha256(); + var inner = bufferFromSecret(this.secret); + var outer = new Uint8Array(constants_1.BLOCK_SIZE); + outer.set(inner); + for (var i = 0; i < constants_1.BLOCK_SIZE; i++) { + inner[i] ^= 0x36; + outer[i] ^= 0x5c; + } + this.hash.update(inner); + this.outer.update(outer); + // overwrite the copied key in memory + for (var i = 0; i < inner.byteLength; i++) { + inner[i] = 0; + } + } + }; + return Sha256; +}()); +exports.Sha256 = Sha256; +function bufferFromSecret(secret) { + var input = (0, util_1.convertToBuffer)(secret); + if (input.byteLength > constants_1.BLOCK_SIZE) { + var bufferHash = new RawSha256_1.RawSha256(); + bufferHash.update(input); + input = bufferHash.digest(); + } + var buffer = new Uint8Array(constants_1.BLOCK_SIZE); + buffer.set(input); + return buffer; +} +//# sourceMappingURL=jsSha256.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js.map b/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js.map new file mode 100644 index 00000000..c34eb360 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js.map @@ -0,0 +1 @@ +{"version":3,"file":"jsSha256.js","sourceRoot":"","sources":["../../src/jsSha256.ts"],"names":[],"mappings":";;;;AAAA,yCAAyC;AACzC,yCAAwC;AAExC,yCAAgE;AAEhE;IAME,gBAAY,MAAmB;QAC7B,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,IAAI,GAAG,IAAI,qBAAS,EAAE,CAAC;QAC5B,IAAI,CAAC,KAAK,EAAE,CAAC;IACf,CAAC;IAED,uBAAM,GAAN,UAAO,MAAkB;QACvB,IAAI,IAAA,kBAAW,EAAC,MAAM,CAAC,IAAI,IAAI,CAAC,KAAK,EAAE;YACrC,OAAO;SACR;QAED,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,IAAA,sBAAe,EAAC,MAAM,CAAC,CAAC,CAAC;SAC3C;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC;SAChB;IACH,CAAC;IAED;;OAEG;IACH,2BAAU,GAAV;QACE,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,CAAC,KAAK,CAAC;SAClB;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE;gBACxB,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;aACvC;YAED,OAAO,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC;SAC5B;QAED,OAAO,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;IAC5B,CAAC;IAED;;;;OAIG;IACG,uBAAM,GAAZ;;;gBACE,sBAAO,IAAI,CAAC,UAAU,EAAE,EAAC;;;KAC1B;IAED,sBAAK,GAAL;QACE,IAAI,CAAC,IAAI,GAAG,IAAI,qBAAS,EAAE,CAAC;QAC5B,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,KAAK,GAAG,IAAI,qBAAS,EAAE,CAAC;YAC7B,IAAM,KAAK,GAAG,gBAAgB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC5C,IAAM,KAAK,GAAG,IAAI,UAAU,CAAC,sBAAU,CAAC,CAAC;YACzC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;YAEjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,sBAAU,EAAE,CAAC,EAAE,EAAE;gBACnC,KAAK,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC;gBACjB,KAAK,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC;aAClB;YAED,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YACxB,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAEzB,qCAAqC;YACrC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,UAAU,EAAE,CAAC,EAAE,EAAE;gBACzC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;aACd;SACF;IACH,CAAC;IACH,aAAC;AAAD,CAAC,AA1ED,IA0EC;AA1EY,wBAAM;AA4EnB,SAAS,gBAAgB,CAAC,MAAkB;IAC1C,IAAI,KAAK,GAAG,IAAA,sBAAe,EAAC,MAAM,CAAC,CAAC;IAEpC,IAAI,KAAK,CAAC,UAAU,GAAG,sBAAU,EAAE;QACjC,IAAM,UAAU,GAAG,IAAI,qBAAS,EAAE,CAAC;QACnC,UAAU,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACzB,KAAK,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC;KAC7B;IAED,IAAM,MAAM,GAAG,IAAI,UAAU,CAAC,sBAAU,CAAC,CAAC;
IAC1C,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.d.ts b/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.d.ts new file mode 100644 index 00000000..d8803432 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.d.ts @@ -0,0 +1,5 @@ +export declare const hashTestVectors: Array<[Uint8Array, Uint8Array]>; +/** + * @see https://tools.ietf.org/html/rfc4231 + */ +export declare const hmacTestVectors: Array<[Uint8Array, Uint8Array, Uint8Array]>; diff --git a/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js b/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js new file mode 100644 index 00000000..3f0dd2f7 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js @@ -0,0 +1,322 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.hmacTestVectors = exports.hashTestVectors = void 0; +var util_hex_encoding_1 = require("@aws-sdk/util-hex-encoding"); +var millionChars = new Uint8Array(1000000); +for (var i = 0; i < 1000000; i++) { + millionChars[i] = 97; +} +exports.hashTestVectors = [ + [ + Uint8Array.from([97, 98, 99]), + (0, util_hex_encoding_1.fromHex)("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad") + ], + [ + new Uint8Array(0), + (0, util_hex_encoding_1.fromHex)("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + ], + [ + (0, util_hex_encoding_1.fromHex)("61"), + (0, util_hex_encoding_1.fromHex)("ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161"), + (0, util_hex_encoding_1.fromHex)("961b6dd3ede3cb8ecbaacbd68de040cd78eb2ed5889130cceb4c49268ea4d506") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161"), + (0, util_hex_encoding_1.fromHex)("9834876dcfb05cb167a5c24953eba58c4ac89b1adf57f28f2f9d09af107ee8f0") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161"), + (0, util_hex_encoding_1.fromHex)("61be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b4") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161"), + (0, util_hex_encoding_1.fromHex)("ed968e840d10d2d313a870bc131a4e2c311d7ad09bdf32b3418147221f51a6e2") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161"), + (0, util_hex_encoding_1.fromHex)("ed02457b5c41d964dbd2f2a609d63fe1bb7528dbe55e1abf5b52c249cd735797") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161"), + (0, util_hex_encoding_1.fromHex)("e46240714b5db3a23eee60479a623efba4d633d27fe4f03c904b9e219a7fbe60") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161"), + (0, util_hex_encoding_1.fromHex)("1f3ce40415a2081fa3eee75fc39fff8e56c22270d1a978a7249b592dcebd20b4") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161"), + (0, util_hex_encoding_1.fromHex)("f2aca93b80cae681221f0445fa4e2cae8a1f9f8fa1e1741d9639caad222f537d") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161"), + (0, util_hex_encoding_1.fromHex)("bf2cb58a68f684d95a3b78ef8f661c9a4e5b09e82cc8f9cc88cce90528caeb27") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("28cb017dfc99073aa1b47c1b30f413e3ce774c4991eb4158de50f9dbb36d8043") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("f24abc34b13fade76e805799f71187da6cd90b9cac373ae65ed57f143bd664e5") + ], + [ + (0, 
util_hex_encoding_1.fromHex)("61616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("a689d786e81340e45511dec6c7ab2d978434e5db123362450fe10cfac70d19d0") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("82cab7df0abfb9d95dca4e5937ce2968c798c726fea48c016bf9763221efda13") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("ef2df0b539c6c23de0f4cbe42648c301ae0e22e887340a4599fb4ef4e2678e48") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("0c0beacef8877bbf2416eb00f2b5dc96354e26dd1df5517320459b1236860f8c") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("b860666ee2966dd8f903be44ee605c6e1366f926d9f17a8f49937d11624eb99d") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("c926defaaa3d13eda2fc63a553bb7fb7326bece6e7cb67ca5296e4727d89bab4") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("a0b4aaab8a966e2193ba172d68162c4656860197f256b5f45f0203397ff3f99c") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("42492da06234ad0ac76f5d5debdb6d1ae027cffbe746a1c13b89bb8bc0139137") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("7df8e299c834de198e264c3e374bc58ecd9382252a705c183beb02f275571e3b") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("ec7c494df6d2a7ea36668d656e6b8979e33641bfea378c15038af3964db057a3") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("897d3e95b65f26676081f8b9f3a98b6ee4424566303e8d4e7c7522ebae219eab") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("09f61f8d9cd65e6a0c258087c485b6293541364e42bd97b2d7936580c8aa3c54") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("2f521e2a7d0bd812cbc035f4ed6806eb8d851793b04ba147e8f66b72f5d1f20f") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("9976d549a25115dab4e36d0c1fb8f31cb07da87dd83275977360eb7dc09e88de") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("cc0616e61cbd6e8e5e34e9fb2d320f37de915820206f5696c31f1fbd24aa16de") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("9c547cb8115a44883b9f70ba68f75117cd55359c92611875e386f8af98c172ab") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("6913c9c7fd42fe23df8b6bcd4dbaf1c17748948d97f2980b432319c39eddcf6c") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("3a54fc0cbc0b0ef48b6507b7788096235d10292dd3ae24e22f5aa062d4f9864a") + ], + [ + (0, 
util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("61c60b487d1a921e0bcc9bf853dda0fb159b30bf57b2e2d2c753b00be15b5a09") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("3ba3f5f43b92602683c19aee62a20342b084dd5971ddd33808d81a328879a547") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("852785c805c77e71a22340a54e9d95933ed49121e7d2bf3c2d358854bc1359ea") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("a27c896c4859204843166af66f0e902b9c3b3ed6d2fd13d435abc020065c526f") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("629362afc62c74497caed2272e30f8125ecd0965f8d8d7cfc4e260f7f8dd319d") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("22c1d24bcd03e9aee9832efccd6da613fc702793178e5f12c945c7b67ddda933") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("21ec055b38ce759cd4d0f477e9bdec2c5b8199945db4439bae334a964df6246c") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("365a9c3e2c2af0a56e47a9dac51c2c5381bf8f41273bad3175e0e619126ad087") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("b4d5e56e929ba4cda349e9274e3603d0be246b82016bca20f363963c5f2d6845") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("e33cdf9c7f7120b98e8c78408953e07f2ecd183006b5606df349b4c212acf43e") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("c0f8bd4dbc2b0c03107c1c37913f2a7501f521467f45dd0fef6958e9a4692719") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("7a538607fdaab9296995929f451565bbb8142e1844117322aafd2b3d76b01aff") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("66d34fba71f8f450f7e45598853e53bfc23bbd129027cbb131a2f4ffd7878cd0") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("16849877c6c21ef0bfa68e4f6747300ddb171b170b9f00e189edc4c2fc4db93e") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("52789e3423b72beeb898456a4f49662e46b0cbb960784c5ef4b1399d327e7c27") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, 
util_hex_encoding_1.fromHex)("6643110c5628fff59edf76d82d5bf573bf800f16a4d65dfb1e5d6f1a46296d0b") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("11eaed932c6c6fddfc2efc394e609facf4abe814fc6180d03b14fce13a07d0e5") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("97daac0ee9998dfcad6c9c0970da5ca411c86233a944c25b47566f6a7bc1ddd5") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("8f9bec6a62dd28ebd36d1227745592de6658b36974a3bb98a4c582f683ea6c42") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("160b4e433e384e05e537dc59b467f7cb2403f0214db15c5db58862a3f1156d2e") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("bfc5fe0e360152ca98c50fab4ed7e3078c17debc2917740d5000913b686ca129") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("6c1b3dc7a706b9dc81352a6716b9c666c608d8626272c64b914ab05572fc6e84") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("abe346a7259fc90b4c27185419628e5e6af6466b1ae9b5446cac4bfc26cf05c4") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("a3f01b6939256127582ac8ae9fb47a382a244680806a3f613a118851c1ca1d47") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("9f4390f8d30c2dd92ec9f095b65e2b9ae9b0a925a5258e241c9f1e910f734318") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("b35439a4ac6f0948b6d6f9e3c6af0f5f590ce20f1bde7090ef7970686ec6738a") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("f13b2d724659eb3bf47f2dd6af1accc87b81f09f59f2b75e5c0bed6589dfe8c6") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("d5c039b748aa64665782974ec3dc3025c042edf54dcdc2b5de31385b094cb678") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("111bb261277afd65f0744b247cd3e47d386d71563d0ed995517807d5ebd4fba3") + ], + [ + (0, 
util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("11ee391211c6256460b6ed375957fadd8061cafbb31daf967db875aebd5aaad4") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("35d5fc17cfbbadd00f5e710ada39f194c5ad7c766ad67072245f1fad45f0f530") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("f506898cc7c2e092f9eb9fadae7ba50383f5b46a2a4fe5597dbb553a78981268") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("7d3e74a05d7db15bce4ad9ec0658ea98e3f06eeecf16b4c6fff2da457ddc2f34") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("ffe054fe7ae0cb6dc65c3af9b61d5209f439851db43d0ba5997337df154668eb") + ], + [ + (0, util_hex_encoding_1.fromHex)("de188941a3375d3a8a061e67576e926dc71a7fa3f0cceb97452b4d3227965f9ea8cc75076d9fb9c5417aa5cb30fc22198b34982dbb629e"), + (0, util_hex_encoding_1.fromHex)("038051e9c324393bd1ca1978dd0952c2aa3742ca4f1bd5cd4611cea83892d382") + ], + [ + millionChars, + (0, util_hex_encoding_1.fromHex)("cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0") + ], + [ + (0, util_hex_encoding_1.fromHex)("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + (0, util_hex_encoding_1.fromHex)("45ad4b37c6e2fc0a2cfcc1b5da524132ec707615c2cae1dbbc43c97aa521db81") + ] +]; +/** + * @see https://tools.ietf.org/html/rfc4231 + */ +exports.hmacTestVectors = [ + [ + (0, util_hex_encoding_1.fromHex)("0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b"), + (0, util_hex_encoding_1.fromHex)("4869205468657265"), + (0, util_hex_encoding_1.fromHex)("b0344c61d8db38535ca8afceaf0bf12b881dc200c9833da726e9376c2e32cff7") + ], + [ + (0, util_hex_encoding_1.fromHex)("4a656665"), + (0, util_hex_encoding_1.fromHex)("7768617420646f2079612077616e7420666f72206e6f7468696e673f"), + (0, util_hex_encoding_1.fromHex)("5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843") + ], + [ + (0, util_hex_encoding_1.fromHex)("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + (0, util_hex_encoding_1.fromHex)("dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd"), + (0, util_hex_encoding_1.fromHex)("773ea91e36800e46854db8ebd09181a72959098b3ef8c122d9635514ced565fe") + ], + [ + (0, util_hex_encoding_1.fromHex)("0102030405060708090a0b0c0d0e0f10111213141516171819"), + (0, util_hex_encoding_1.fromHex)("cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd"), + (0, util_hex_encoding_1.fromHex)("82558a389a443c0ea4cc819899f2083a85f0faa3e578f8077a2e3ff46729665b") + ], + [ + (0, 
util_hex_encoding_1.fromHex)("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + (0, util_hex_encoding_1.fromHex)("54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a65204b6579202d2048617368204b6579204669727374"), + (0, util_hex_encoding_1.fromHex)("60e431591ee0b67f0d8a26aacbf5b77f8e0bc6213728c5140546040f0ee37f54") + ], + [ + (0, util_hex_encoding_1.fromHex)("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + (0, util_hex_encoding_1.fromHex)("5468697320697320612074657374207573696e672061206c6172676572207468616e20626c6f636b2d73697a65206b657920616e642061206c6172676572207468616e20626c6f636b2d73697a6520646174612e20546865206b6579206e6565647320746f20626520686173686564206265666f7265206265696e6720757365642062792074686520484d414320616c676f726974686d2e"), + (0, util_hex_encoding_1.fromHex)("9b09ffa71b942fcb27635fbcd5b0e944bfdc63644f0713938a7f51535c3a35e2") + ] +]; +//# sourceMappingURL=knownHashes.fixture.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js.map b/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js.map new file mode 100644 index 00000000..8ffc02e0 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js.map @@ -0,0 +1 @@ +{"version":3,"file":"knownHashes.fixture.js","sourceRoot":"","sources":["../../src/knownHashes.fixture.ts"],"names":[],"mappings":";;;AAAA,gEAAqD;AAErD,IAAM,YAAY,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,CAAC;AAC7C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,EAAE,CAAC,EAAE,EAAE;IAChC,YAAY,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC;CACtB;AAEY,QAAA,eAAe,GAAoC;IAC9D;QACE,UAAU,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC;QAC7B,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAI,UAAU,CAAC,CAAC,CAAC;QACjB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,IAAI,CAAC;QACb,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,MAAM,CAAC;QACf,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,QAAQ,CAAC;QACjB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,UAAU,CAAC;QACnB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,YAAY,CAAC;QACrB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,cAAc,CAAC;QACvB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,gBAAgB,CAAC;QACzB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,kBAAkB,CAAC;QAC3B,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,oBAAoB,CAAC;QAC7B,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,sBAAsB,CAAC;QAC/B,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,wBAAwB,CAAC;QACjC,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,0BAA0B,CAAC;QACnC,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,4BAA4B,CAAC;QACrC,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,8BAA8B,CAAC;QACvC,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,gCAAgC,CAAC;QACzC,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,kCAAkC,CAAC;QAC3C,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,oCAAoC,CAAC;QAC7C,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2
BAAO,EAAC,sCAAsC,CAAC;QAC/C,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,wCAAwC,CAAC;QACjD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,0CAA0C,CAAC;QACnD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,4CAA4C,CAAC;QACrD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,8CAA8C,CAAC;QACvD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,gDAAgD,CAAC;QACzD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,kDAAkD,CAAC;QAC3D,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,oDAAoD,CAAC;QAC7D,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,sDAAsD,CAAC;QAC/D,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,wDAAwD,CAAC;QACjE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,0DAA0D,CAAC;QACnE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,4DAA4D,CAAC;QACrE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,8DAA8D,CAAC;QACvE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,gEAAgE,CAAC;QACzE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;QAC3E,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,oEAAoE,CACrE;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,sEAAsE,CACvE;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wEAAwE,CACzE;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,0EAA0E,CAC3E;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,4EAA4E,CAC7E;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,8EAA8E,CAC/E;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,gFAAgF,CACjF;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,kFAAkF,CACnF;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,oFAAoF,CACrF;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,sFAAsF,CACvF;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wFAAwF,CACzF;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,0FAA0F,CAC3F;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,4FAA4F,CAC7F;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,8FAA8F,CAC/F;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,gGAAgG,CACjG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,kGAAkG,CACnG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,oGAAoG,CACrG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,sGAAsG,CACvG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wGAAwG,CACzG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,0GAA0G,CAC3G;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,4GAA4G,CAC7G;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,8GAA8G,CAC/G;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,gHAAgH,CACjH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,kHAAkH,CACnH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,oHAAoH,CACrH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,sHAAsH,CACvH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wHAAwH,CACzH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,0HAA0H,CAC3H;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,4HAA4H,CAC7H;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,8HAA8H,CAC/H;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,gIAAgI,CACjI;QACD,IAAA,2BAAO,EAAC,k
EAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,kIAAkI,CACnI;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,gHAAgH,CACjH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,YAAY;QACZ,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wQAAwQ,CACzQ;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;CACF,CAAC;AAEF;;GAEG;AACU,QAAA,eAAe,GAAgD;IAC1E;QACE,IAAA,2BAAO,EAAC,0CAA0C,CAAC;QACnD,IAAA,2BAAO,EAAC,kBAAkB,CAAC;QAC3B,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,UAAU,CAAC;QACnB,IAAA,2BAAO,EAAC,0DAA0D,CAAC;QACnE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,0CAA0C,CAAC;QACnD,IAAA,2BAAO,EACL,sGAAsG,CACvG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,oDAAoD,CAAC;QAC7D,IAAA,2BAAO,EACL,sGAAsG,CACvG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wQAAwQ,CACzQ;QACD,IAAA,2BAAO,EACL,8GAA8G,CAC/G;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wQAAwQ,CACzQ;QACD,IAAA,2BAAO,EACL,kTAAkT,CACnT;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;CACF,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.d.ts b/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.d.ts new file mode 100644 index 00000000..1f580b25 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.d.ts @@ -0,0 +1,17 @@ +/** + * @internal + */ +export declare class RawSha256 { + private state; + private temp; + private buffer; + private bufferLength; + private bytesHashed; + /** + * @internal + */ + finished: boolean; + update(data: Uint8Array): void; + digest(): Uint8Array; + private hashBuffer; +} diff --git a/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js b/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js new file mode 100644 index 00000000..f799acd2 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js @@ -0,0 +1,121 @@ +import { BLOCK_SIZE, DIGEST_LENGTH, INIT, KEY, MAX_HASHABLE_LENGTH } from "./constants"; +/** + * @internal + */ +var RawSha256 = /** @class */ (function () { + function RawSha256() { + this.state = Int32Array.from(INIT); + this.temp = new Int32Array(64); + this.buffer = new Uint8Array(64); + this.bufferLength = 0; + this.bytesHashed = 0; + /** + * @internal + */ + this.finished = false; + } + RawSha256.prototype.update = function (data) { + if (this.finished) { + throw new Error("Attempted to update an already finished hash."); + } + var position = 0; + var byteLength = data.byteLength; + this.bytesHashed += byteLength; + if (this.bytesHashed * 8 > MAX_HASHABLE_LENGTH) { + throw new Error("Cannot hash more than 2^53 - 1 bits"); + } + while (byteLength > 0) { + this.buffer[this.bufferLength++] = data[position++]; + byteLength--; + if (this.bufferLength === BLOCK_SIZE) { + this.hashBuffer(); + this.bufferLength = 0; + } + } + }; + RawSha256.prototype.digest = function () { + if (!this.finished) { + var bitsHashed = this.bytesHashed * 8; + var bufferView = new DataView(this.buffer.buffer, this.buffer.byteOffset, this.buffer.byteLength); + var undecoratedLength = this.bufferLength; + bufferView.setUint8(this.bufferLength++, 0x80); + // Ensure the final block has enough room for the hashed length + if (undecoratedLength % BLOCK_SIZE >= BLOCK_SIZE - 8) { + for (var i = this.bufferLength; i < BLOCK_SIZE; i++) { + bufferView.setUint8(i, 0); + } + this.hashBuffer(); + this.bufferLength = 0; + } + for (var i = this.bufferLength; i < BLOCK_SIZE - 8; i++) { + bufferView.setUint8(i, 0); + } + bufferView.setUint32(BLOCK_SIZE - 
8, Math.floor(bitsHashed / 0x100000000), true);
+            bufferView.setUint32(BLOCK_SIZE - 4, bitsHashed);
+            this.hashBuffer();
+            this.finished = true;
+        }
+        // The value in state is little-endian rather than big-endian, so flip
+        // each word into a new Uint8Array
+        var out = new Uint8Array(DIGEST_LENGTH);
+        for (var i = 0; i < 8; i++) {
+            out[i * 4] = (this.state[i] >>> 24) & 0xff;
+            out[i * 4 + 1] = (this.state[i] >>> 16) & 0xff;
+            out[i * 4 + 2] = (this.state[i] >>> 8) & 0xff;
+            out[i * 4 + 3] = (this.state[i] >>> 0) & 0xff;
+        }
+        return out;
+    };
+    RawSha256.prototype.hashBuffer = function () {
+        var _a = this, buffer = _a.buffer, state = _a.state;
+        var state0 = state[0], state1 = state[1], state2 = state[2], state3 = state[3], state4 = state[4], state5 = state[5], state6 = state[6], state7 = state[7];
+        for (var i = 0; i < BLOCK_SIZE; i++) {
+            if (i < 16) {
+                this.temp[i] =
+                    ((buffer[i * 4] & 0xff) << 24) |
+                        ((buffer[i * 4 + 1] & 0xff) << 16) |
+                        ((buffer[i * 4 + 2] & 0xff) << 8) |
+                        (buffer[i * 4 + 3] & 0xff);
+            }
+            else {
+                var u = this.temp[i - 2];
+                var t1_1 = ((u >>> 17) | (u << 15)) ^ ((u >>> 19) | (u << 13)) ^ (u >>> 10);
+                u = this.temp[i - 15];
+                var t2_1 = ((u >>> 7) | (u << 25)) ^ ((u >>> 18) | (u << 14)) ^ (u >>> 3);
+                this.temp[i] =
+                    ((t1_1 + this.temp[i - 7]) | 0) + ((t2_1 + this.temp[i - 16]) | 0);
+            }
+            var t1 = ((((((state4 >>> 6) | (state4 << 26)) ^
+                ((state4 >>> 11) | (state4 << 21)) ^
+                ((state4 >>> 25) | (state4 << 7))) +
+                ((state4 & state5) ^ (~state4 & state6))) |
+                0) +
+                ((state7 + ((KEY[i] + this.temp[i]) | 0)) | 0)) |
+                0;
+            var t2 = ((((state0 >>> 2) | (state0 << 30)) ^
+                ((state0 >>> 13) | (state0 << 19)) ^
+                ((state0 >>> 22) | (state0 << 10))) +
+                ((state0 & state1) ^ (state0 & state2) ^ (state1 & state2))) |
+                0;
+            state7 = state6;
+            state6 = state5;
+            state5 = state4;
+            state4 = (state3 + t1) | 0;
+            state3 = state2;
+            state2 = state1;
+            state1 = state0;
+            state0 = (t1 + t2) | 0;
+        }
+        state[0] += state0;
+        state[1] += state1;
+        state[2] += state2;
+        state[3] += state3;
+        state[4] += state4;
+        state[5] += state5;
+        state[6] += state6;
+        state[7] += state7;
+    };
+    return RawSha256;
+}());
+export { RawSha256 };
+//# sourceMappingURL=RawSha256.js.map
\ No newline at end of file
diff --git a/node_modules/@aws-crypto/sha256-js/build/module/constants.d.ts b/node_modules/@aws-crypto/sha256-js/build/module/constants.d.ts
new file mode 100644
index 00000000..63bd764e
--- /dev/null
+++ b/node_modules/@aws-crypto/sha256-js/build/module/constants.d.ts
@@ -0,0 +1,20 @@
+/**
+ * @internal
+ */
+export declare const BLOCK_SIZE: number;
+/**
+ * @internal
+ */
+export declare const DIGEST_LENGTH: number;
+/**
+ * @internal
+ */
+export declare const KEY: Uint32Array;
+/**
+ * @internal
+ */
+export declare const INIT: number[];
+/**
+ * @internal
+ */
+export declare const MAX_HASHABLE_LENGTH: number;
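The BLOCK_SIZE declared here (64 bytes) is also the HMAC block size the Sha256 wrapper builds on: bufferFromSecret hashes any key longer than one block down to a digest and zero-pads it, and reset() XORs the padded key with the 0x36/0x5c inner/outer pads. A self-contained sketch of that construction, using node:crypto purely as a reference primitive (the helper name is illustrative, not part of this package); the expected output is RFC 4231 test case 2, which also appears in the fixture files in this diff:

```ts
import { createHash, createHmac } from "node:crypto";

const BLOCK_SIZE = 64;

// HMAC(K, m) = H((K' ^ opad) || H((K' ^ ipad) || m)), with K' padded to BLOCK_SIZE.
function hmacSha256(key: Uint8Array, message: Uint8Array): Uint8Array {
  const normalized =
    key.byteLength > BLOCK_SIZE
      ? new Uint8Array(createHash("sha256").update(key).digest()) // long keys are hashed first
      : key;
  const inner = new Uint8Array(BLOCK_SIZE); // K' ^ ipad (0x36)
  const outer = new Uint8Array(BLOCK_SIZE); // K' ^ opad (0x5c)
  inner.set(normalized);
  outer.set(normalized);
  for (let i = 0; i < BLOCK_SIZE; i++) {
    inner[i] ^= 0x36;
    outer[i] ^= 0x5c;
  }
  const innerDigest = createHash("sha256").update(inner).update(message).digest();
  return new Uint8Array(createHash("sha256").update(outer).update(innerDigest).digest());
}

// RFC 4231 test case 2: key "Jefe", data "what do ya want for nothing?".
const key = new TextEncoder().encode("Jefe");
const msg = new TextEncoder().encode("what do ya want for nothing?");
console.log(Buffer.from(hmacSha256(key, msg)).toString("hex"));
// 5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843
console.log(createHmac("sha256", key).update(msg).digest("hex")); // identical
```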
diff --git a/node_modules/@aws-crypto/sha256-js/build/module/constants.js b/node_modules/@aws-crypto/sha256-js/build/module/constants.js
new file mode 100644
index 00000000..68037b38
--- /dev/null
+++ b/node_modules/@aws-crypto/sha256-js/build/module/constants.js
@@ -0,0 +1,95 @@
+/**
+ * @internal
+ */
+export var BLOCK_SIZE = 64;
+/**
+ * @internal
+ */
+export var DIGEST_LENGTH = 32;
+/**
+ * @internal
+ */
+export var KEY = new Uint32Array([
+    0x428a2f98,
+    0x71374491,
+    0xb5c0fbcf,
+    0xe9b5dba5,
+    0x3956c25b,
+    0x59f111f1,
+    0x923f82a4,
+    0xab1c5ed5,
+    0xd807aa98,
+    0x12835b01,
+    0x243185be,
+    0x550c7dc3,
+    0x72be5d74,
+    0x80deb1fe,
+    0x9bdc06a7,
+    0xc19bf174,
+    0xe49b69c1,
+    0xefbe4786,
+    0x0fc19dc6,
+    0x240ca1cc,
+    0x2de92c6f,
+    0x4a7484aa,
+    0x5cb0a9dc,
+    0x76f988da,
+    0x983e5152,
+    0xa831c66d,
+    0xb00327c8,
+    0xbf597fc7,
+    0xc6e00bf3,
+    0xd5a79147,
+    0x06ca6351,
+    0x14292967,
+    0x27b70a85,
+    0x2e1b2138,
+    0x4d2c6dfc,
+    0x53380d13,
+    0x650a7354,
+    0x766a0abb,
+    0x81c2c92e,
+    0x92722c85,
+    0xa2bfe8a1,
+    0xa81a664b,
+    0xc24b8b70,
+    0xc76c51a3,
+    0xd192e819,
+    0xd6990624,
+    0xf40e3585,
+    0x106aa070,
+    0x19a4c116,
+    0x1e376c08,
+    0x2748774c,
+    0x34b0bcb5,
+    0x391c0cb3,
+    0x4ed8aa4a,
+    0x5b9cca4f,
+    0x682e6ff3,
+    0x748f82ee,
+    0x78a5636f,
+    0x84c87814,
+    0x8cc70208,
+    0x90befffa,
+    0xa4506ceb,
+    0xbef9a3f7,
+    0xc67178f2
+]);
+/**
+ * @internal
+ */
+export var INIT = [
+    0x6a09e667,
+    0xbb67ae85,
+    0x3c6ef372,
+    0xa54ff53a,
+    0x510e527f,
+    0x9b05688c,
+    0x1f83d9ab,
+    0x5be0cd19
+];
+/**
+ * @internal
+ */
+export var MAX_HASHABLE_LENGTH = Math.pow(2, 53) - 1;
+//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/node_modules/@aws-crypto/sha256-js/build/module/index.d.ts b/node_modules/@aws-crypto/sha256-js/build/module/index.d.ts
new file mode 100644
index 00000000..4554d8a3
--- /dev/null
+++ b/node_modules/@aws-crypto/sha256-js/build/module/index.d.ts
@@ -0,0 +1 @@
+export * from "./jsSha256";
diff --git a/node_modules/@aws-crypto/sha256-js/build/module/index.js b/node_modules/@aws-crypto/sha256-js/build/module/index.js
new file mode 100644
index 00000000..a8f73a09
--- /dev/null
+++ b/node_modules/@aws-crypto/sha256-js/build/module/index.js
@@ -0,0 +1,2 @@
+export * from "./jsSha256";
+//# sourceMappingURL=index.js.map
\ No newline at end of file
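The tables in constants.js are the standard FIPS 180-4 values: KEY holds the first 32 bits of the fractional parts of the cube roots of the first 64 primes, INIT the same treatment of the square roots of the first 8 primes, and MAX_HASHABLE_LENGTH is Number.MAX_SAFE_INTEGER (2^53 - 1), the largest bit count that `bytesHashed * 8` can track exactly in a JS number. A sketch that recomputes the entries for cross-checking (it assumes double-precision Math.cbrt/Math.sqrt are accurate in the top 32 fractional bits, which holds in practice):

```ts
// Recompute the SHA-256 round constants and initial state (FIPS 180-4).
function firstPrimes(n: number): number[] {
  const primes: number[] = [];
  for (let candidate = 2; primes.length < n; candidate++) {
    if (primes.every((p) => candidate % p !== 0)) primes.push(candidate);
  }
  return primes;
}

// First 32 bits of the fractional part of x, as an unsigned 32-bit integer.
const frac32 = (x: number) => Math.floor((x - Math.floor(x)) * 0x100000000) >>> 0;

const KEY = firstPrimes(64).map((p) => frac32(Math.cbrt(p))); // round constants
const INIT = firstPrimes(8).map((p) => frac32(Math.sqrt(p))); // initial hash state

console.log(KEY[0].toString(16));  // "428a2f98"
console.log(KEY[63].toString(16)); // "c67178f2"
console.log(INIT[0].toString(16)); // "6a09e667"
```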
diff --git a/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.d.ts b/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.d.ts
new file mode 100644
index 00000000..d813b256
--- /dev/null
+++ b/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.d.ts
@@ -0,0 +1,12 @@
+import { Checksum, SourceData } from "@aws-sdk/types";
+export declare class Sha256 implements Checksum {
+    private readonly secret?;
+    private hash;
+    private outer?;
+    private error;
+    constructor(secret?: SourceData);
+    update(toHash: SourceData): void;
+    digestSync(): Uint8Array;
+    digest(): Promise<Uint8Array>;
+    reset(): void;
+}
diff --git a/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js b/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js
new file mode 100644
index 00000000..fa40899f
--- /dev/null
+++ b/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js
@@ -0,0 +1,82 @@
+import { __awaiter, __generator } from "tslib";
+import { BLOCK_SIZE } from "./constants";
+import { RawSha256 } from "./RawSha256";
+import { isEmptyData, convertToBuffer } from "@aws-crypto/util";
+var Sha256 = /** @class */ (function () {
+    function Sha256(secret) {
+        this.secret = secret;
+        this.hash = new RawSha256();
+        this.reset();
+    }
+    Sha256.prototype.update = function (toHash) {
+        if (isEmptyData(toHash) || this.error) {
+            return;
+        }
+        try {
+            this.hash.update(convertToBuffer(toHash));
+        }
+        catch (e) {
+            this.error = e;
+        }
+    };
+    /* This synchronous method keeps compatibility
+     * with the v2 aws-sdk.
+     */
+    Sha256.prototype.digestSync = function () {
+        if (this.error) {
+            throw this.error;
+        }
+        if (this.outer) {
+            if (!this.outer.finished) {
+                this.outer.update(this.hash.digest());
+            }
+            return this.outer.digest();
+        }
+        return this.hash.digest();
+    };
+    /* The underlying digest method here is synchronous.
+     * To keep the same interface with the other hash functions
+     * the default is to expose this as an async method.
+     * However, it can sometimes be useful to have a sync method.
+ */ + Sha256.prototype.digest = function () { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + return [2 /*return*/, this.digestSync()]; + }); + }); + }; + Sha256.prototype.reset = function () { + this.hash = new RawSha256(); + if (this.secret) { + this.outer = new RawSha256(); + var inner = bufferFromSecret(this.secret); + var outer = new Uint8Array(BLOCK_SIZE); + outer.set(inner); + for (var i = 0; i < BLOCK_SIZE; i++) { + inner[i] ^= 0x36; + outer[i] ^= 0x5c; + } + this.hash.update(inner); + this.outer.update(outer); + // overwrite the copied key in memory + for (var i = 0; i < inner.byteLength; i++) { + inner[i] = 0; + } + } + }; + return Sha256; +}()); +export { Sha256 }; +function bufferFromSecret(secret) { + var input = convertToBuffer(secret); + if (input.byteLength > BLOCK_SIZE) { + var bufferHash = new RawSha256(); + bufferHash.update(input); + input = bufferHash.digest(); + } + var buffer = new Uint8Array(BLOCK_SIZE); + buffer.set(input); + return buffer; +} +//# sourceMappingURL=jsSha256.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js.map b/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js.map new file mode 100644 index 00000000..94fa4019 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js.map @@ -0,0 +1 @@ +{"version":3,"file":"jsSha256.js","sourceRoot":"","sources":["../../src/jsSha256.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AACzC,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAExC,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAEhE;IAME,gBAAY,MAAmB;QAC7B,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,IAAI,GAAG,IAAI,SAAS,EAAE,CAAC;QAC5B,IAAI,CAAC,KAAK,EAAE,CAAC;IACf,CAAC;IAED,uBAAM,GAAN,UAAO,MAAkB;QACvB,IAAI,WAAW,CAAC,MAAM,CAAC,IAAI,IAAI,CAAC,KAAK,EAAE;YACrC,OAAO;SACR;QAED,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,CAAC;SAC3C;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC;SAChB;IACH,CAAC;IAED;;OAEG;IACH,2BAAU,GAAV;QACE,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,CAAC,KAAK,CAAC;SAClB;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE;gBACxB,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;aACvC;YAED,OAAO,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC;SAC5B;QAED,OAAO,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;IAC5B,CAAC;IAED;;;;OAIG;IACG,uBAAM,GAAZ;;;gBACE,sBAAO,IAAI,CAAC,UAAU,EAAE,EAAC;;;KAC1B;IAED,sBAAK,GAAL;QACE,IAAI,CAAC,IAAI,GAAG,IAAI,SAAS,EAAE,CAAC;QAC5B,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,KAAK,GAAG,IAAI,SAAS,EAAE,CAAC;YAC7B,IAAM,KAAK,GAAG,gBAAgB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC5C,IAAM,KAAK,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,CAAC;YACzC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;YAEjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE;gBACnC,KAAK,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC;gBACjB,KAAK,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC;aAClB;YAED,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YACxB,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAEzB,qCAAqC;YACrC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,UAAU,EAAE,CAAC,EAAE,EAAE;gBACzC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;aACd;SACF;IACH,CAAC;IACH,aAAC;AAAD,CAAC,AA1ED,IA0EC;;AAED,SAAS,gBAAgB,CAAC,MAAkB;IAC1C,IAAI,KAAK,GAAG,eAAe,CAAC,MAAM,CAAC,CAAC;IAEpC,IAAI,KAAK,CAAC,UAAU,GAAG,UAAU,EAAE;QACjC,IAAM,UAAU,GAAG,IAAI,SAAS,EAAE,CAAC;QACnC,UAAU,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACzB,KAAK,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC;KAC7B;IAED,IAAM,MAAM,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,CAAC;IAC1C,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;IAClB,OAAO,MAAM,C
AAC;AAChB,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.d.ts b/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.d.ts new file mode 100644 index 00000000..d8803432 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.d.ts @@ -0,0 +1,5 @@ +export declare const hashTestVectors: Array<[Uint8Array, Uint8Array]>; +/** + * @see https://tools.ietf.org/html/rfc4231 + */ +export declare const hmacTestVectors: Array<[Uint8Array, Uint8Array, Uint8Array]>; diff --git a/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js b/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js new file mode 100644 index 00000000..c2d26637 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js @@ -0,0 +1,319 @@ +import { fromHex } from "@aws-sdk/util-hex-encoding"; +var millionChars = new Uint8Array(1000000); +for (var i = 0; i < 1000000; i++) { + millionChars[i] = 97; +} +export var hashTestVectors = [ + [ + Uint8Array.from([97, 98, 99]), + fromHex("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad") + ], + [ + new Uint8Array(0), + fromHex("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + ], + [ + fromHex("61"), + fromHex("ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb") + ], + [ + fromHex("6161"), + fromHex("961b6dd3ede3cb8ecbaacbd68de040cd78eb2ed5889130cceb4c49268ea4d506") + ], + [ + fromHex("616161"), + fromHex("9834876dcfb05cb167a5c24953eba58c4ac89b1adf57f28f2f9d09af107ee8f0") + ], + [ + fromHex("61616161"), + fromHex("61be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b4") + ], + [ + fromHex("6161616161"), + fromHex("ed968e840d10d2d313a870bc131a4e2c311d7ad09bdf32b3418147221f51a6e2") + ], + [ + fromHex("616161616161"), + fromHex("ed02457b5c41d964dbd2f2a609d63fe1bb7528dbe55e1abf5b52c249cd735797") + ], + [ + fromHex("61616161616161"), + fromHex("e46240714b5db3a23eee60479a623efba4d633d27fe4f03c904b9e219a7fbe60") + ], + [ + fromHex("6161616161616161"), + fromHex("1f3ce40415a2081fa3eee75fc39fff8e56c22270d1a978a7249b592dcebd20b4") + ], + [ + fromHex("616161616161616161"), + fromHex("f2aca93b80cae681221f0445fa4e2cae8a1f9f8fa1e1741d9639caad222f537d") + ], + [ + fromHex("61616161616161616161"), + fromHex("bf2cb58a68f684d95a3b78ef8f661c9a4e5b09e82cc8f9cc88cce90528caeb27") + ], + [ + fromHex("6161616161616161616161"), + fromHex("28cb017dfc99073aa1b47c1b30f413e3ce774c4991eb4158de50f9dbb36d8043") + ], + [ + fromHex("616161616161616161616161"), + fromHex("f24abc34b13fade76e805799f71187da6cd90b9cac373ae65ed57f143bd664e5") + ], + [ + fromHex("61616161616161616161616161"), + fromHex("a689d786e81340e45511dec6c7ab2d978434e5db123362450fe10cfac70d19d0") + ], + [ + fromHex("6161616161616161616161616161"), + fromHex("82cab7df0abfb9d95dca4e5937ce2968c798c726fea48c016bf9763221efda13") + ], + [ + fromHex("616161616161616161616161616161"), + fromHex("ef2df0b539c6c23de0f4cbe42648c301ae0e22e887340a4599fb4ef4e2678e48") + ], + [ + fromHex("61616161616161616161616161616161"), + fromHex("0c0beacef8877bbf2416eb00f2b5dc96354e26dd1df5517320459b1236860f8c") + ], + [ + fromHex("6161616161616161616161616161616161"), + fromHex("b860666ee2966dd8f903be44ee605c6e1366f926d9f17a8f49937d11624eb99d") + ], + [ + fromHex("616161616161616161616161616161616161"), + fromHex("c926defaaa3d13eda2fc63a553bb7fb7326bece6e7cb67ca5296e4727d89bab4") + ], + [ + fromHex("61616161616161616161616161616161616161"), + 
fromHex("a0b4aaab8a966e2193ba172d68162c4656860197f256b5f45f0203397ff3f99c") + ], + [ + fromHex("6161616161616161616161616161616161616161"), + fromHex("42492da06234ad0ac76f5d5debdb6d1ae027cffbe746a1c13b89bb8bc0139137") + ], + [ + fromHex("616161616161616161616161616161616161616161"), + fromHex("7df8e299c834de198e264c3e374bc58ecd9382252a705c183beb02f275571e3b") + ], + [ + fromHex("61616161616161616161616161616161616161616161"), + fromHex("ec7c494df6d2a7ea36668d656e6b8979e33641bfea378c15038af3964db057a3") + ], + [ + fromHex("6161616161616161616161616161616161616161616161"), + fromHex("897d3e95b65f26676081f8b9f3a98b6ee4424566303e8d4e7c7522ebae219eab") + ], + [ + fromHex("616161616161616161616161616161616161616161616161"), + fromHex("09f61f8d9cd65e6a0c258087c485b6293541364e42bd97b2d7936580c8aa3c54") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161"), + fromHex("2f521e2a7d0bd812cbc035f4ed6806eb8d851793b04ba147e8f66b72f5d1f20f") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161"), + fromHex("9976d549a25115dab4e36d0c1fb8f31cb07da87dd83275977360eb7dc09e88de") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161"), + fromHex("cc0616e61cbd6e8e5e34e9fb2d320f37de915820206f5696c31f1fbd24aa16de") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161"), + fromHex("9c547cb8115a44883b9f70ba68f75117cd55359c92611875e386f8af98c172ab") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161"), + fromHex("6913c9c7fd42fe23df8b6bcd4dbaf1c17748948d97f2980b432319c39eddcf6c") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161"), + fromHex("3a54fc0cbc0b0ef48b6507b7788096235d10292dd3ae24e22f5aa062d4f9864a") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161"), + fromHex("61c60b487d1a921e0bcc9bf853dda0fb159b30bf57b2e2d2c753b00be15b5a09") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161"), + fromHex("3ba3f5f43b92602683c19aee62a20342b084dd5971ddd33808d81a328879a547") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("852785c805c77e71a22340a54e9d95933ed49121e7d2bf3c2d358854bc1359ea") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("a27c896c4859204843166af66f0e902b9c3b3ed6d2fd13d435abc020065c526f") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("629362afc62c74497caed2272e30f8125ecd0965f8d8d7cfc4e260f7f8dd319d") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("22c1d24bcd03e9aee9832efccd6da613fc702793178e5f12c945c7b67ddda933") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("21ec055b38ce759cd4d0f477e9bdec2c5b8199945db4439bae334a964df6246c") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("365a9c3e2c2af0a56e47a9dac51c2c5381bf8f41273bad3175e0e619126ad087") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("b4d5e56e929ba4cda349e9274e3603d0be246b82016bca20f363963c5f2d6845") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("e33cdf9c7f7120b98e8c78408953e07f2ecd183006b5606df349b4c212acf43e") + ], + [ + 
fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("c0f8bd4dbc2b0c03107c1c37913f2a7501f521467f45dd0fef6958e9a4692719") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("7a538607fdaab9296995929f451565bbb8142e1844117322aafd2b3d76b01aff") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("66d34fba71f8f450f7e45598853e53bfc23bbd129027cbb131a2f4ffd7878cd0") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("16849877c6c21ef0bfa68e4f6747300ddb171b170b9f00e189edc4c2fc4db93e") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("52789e3423b72beeb898456a4f49662e46b0cbb960784c5ef4b1399d327e7c27") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("6643110c5628fff59edf76d82d5bf573bf800f16a4d65dfb1e5d6f1a46296d0b") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("11eaed932c6c6fddfc2efc394e609facf4abe814fc6180d03b14fce13a07d0e5") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("97daac0ee9998dfcad6c9c0970da5ca411c86233a944c25b47566f6a7bc1ddd5") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("8f9bec6a62dd28ebd36d1227745592de6658b36974a3bb98a4c582f683ea6c42") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("160b4e433e384e05e537dc59b467f7cb2403f0214db15c5db58862a3f1156d2e") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("bfc5fe0e360152ca98c50fab4ed7e3078c17debc2917740d5000913b686ca129") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("6c1b3dc7a706b9dc81352a6716b9c666c608d8626272c64b914ab05572fc6e84") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("abe346a7259fc90b4c27185419628e5e6af6466b1ae9b5446cac4bfc26cf05c4") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("a3f01b6939256127582ac8ae9fb47a382a244680806a3f613a118851c1ca1d47") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("9f4390f8d30c2dd92ec9f095b65e2b9ae9b0a925a5258e241c9f1e910f734318") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("b35439a4ac6f0948b6d6f9e3c6af0f5f590ce20f1bde7090ef7970686ec6738a") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("f13b2d724659eb3bf47f2dd6af1accc87b81f09f59f2b75e5c0bed6589dfe8c6") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + 
fromHex("d5c039b748aa64665782974ec3dc3025c042edf54dcdc2b5de31385b094cb678") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("111bb261277afd65f0744b247cd3e47d386d71563d0ed995517807d5ebd4fba3") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("11ee391211c6256460b6ed375957fadd8061cafbb31daf967db875aebd5aaad4") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("35d5fc17cfbbadd00f5e710ada39f194c5ad7c766ad67072245f1fad45f0f530") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("f506898cc7c2e092f9eb9fadae7ba50383f5b46a2a4fe5597dbb553a78981268") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("7d3e74a05d7db15bce4ad9ec0658ea98e3f06eeecf16b4c6fff2da457ddc2f34") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("ffe054fe7ae0cb6dc65c3af9b61d5209f439851db43d0ba5997337df154668eb") + ], + [ + fromHex("de188941a3375d3a8a061e67576e926dc71a7fa3f0cceb97452b4d3227965f9ea8cc75076d9fb9c5417aa5cb30fc22198b34982dbb629e"), + fromHex("038051e9c324393bd1ca1978dd0952c2aa3742ca4f1bd5cd4611cea83892d382") + ], + [ + millionChars, + fromHex("cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0") + ], + [ + fromHex("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + fromHex("45ad4b37c6e2fc0a2cfcc1b5da524132ec707615c2cae1dbbc43c97aa521db81") + ] +]; +/** + * @see https://tools.ietf.org/html/rfc4231 + */ +export var hmacTestVectors = [ + [ + fromHex("0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b"), + fromHex("4869205468657265"), + fromHex("b0344c61d8db38535ca8afceaf0bf12b881dc200c9833da726e9376c2e32cff7") + ], + [ + fromHex("4a656665"), + fromHex("7768617420646f2079612077616e7420666f72206e6f7468696e673f"), + fromHex("5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843") + ], + [ + fromHex("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + fromHex("dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd"), + fromHex("773ea91e36800e46854db8ebd09181a72959098b3ef8c122d9635514ced565fe") + ], + [ + fromHex("0102030405060708090a0b0c0d0e0f10111213141516171819"), + fromHex("cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd"), + fromHex("82558a389a443c0ea4cc819899f2083a85f0faa3e578f8077a2e3ff46729665b") + ], + [ + fromHex("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + fromHex("54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a65204b6579202d2048617368204b6579204669727374"), + 
fromHex("60e431591ee0b67f0d8a26aacbf5b77f8e0bc6213728c5140546040f0ee37f54") + ], + [ + fromHex("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + fromHex("5468697320697320612074657374207573696e672061206c6172676572207468616e20626c6f636b2d73697a65206b657920616e642061206c6172676572207468616e20626c6f636b2d73697a6520646174612e20546865206b6579206e6565647320746f20626520686173686564206265666f7265206265696e6720757365642062792074686520484d414320616c676f726974686d2e"), + fromHex("9b09ffa71b942fcb27635fbcd5b0e944bfdc63644f0713938a7f51535c3a35e2") + ] +]; +//# sourceMappingURL=knownHashes.fixture.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js.map b/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js.map new file mode 100644 index 00000000..1232159d --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js.map @@ -0,0 +1 @@ +{"version":3,"file":"knownHashes.fixture.js","sourceRoot":"","sources":["../../src/knownHashes.fixture.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,4BAA4B,CAAC;AAErD,IAAM,YAAY,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,CAAC;AAC7C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,EAAE,CAAC,EAAE,EAAE;IAChC,YAAY,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC;CACtB;AAED,MAAM,CAAC,IAAM,eAAe,GAAoC;IAC9D;QACE,UAAU,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC;QAC7B,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAI,UAAU,CAAC,CAAC,CAAC;QACjB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,IAAI,CAAC;QACb,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,MAAM,CAAC;QACf,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,QAAQ,CAAC;QACjB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,UAAU,CAAC;QACnB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,YAAY,CAAC;QACrB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,cAAc,CAAC;QACvB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,gBAAgB,CAAC;QACzB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,kBAAkB,CAAC;QAC3B,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,oBAAoB,CAAC;QAC7B,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,sBAAsB,CAAC;QAC/B,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,wBAAwB,CAAC;QACjC,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,0BAA0B,CAAC;QACnC,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,4BAA4B,CAAC;QACrC,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,8BAA8B,CAAC;QACvC,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,gCAAgC,CAAC;QACzC,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,kCAAkC,CAAC;QAC3C,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,oCAAoC,CAAC;QAC7C,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,sCAAsC,CAAC;QAC/C,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,wCAAwC,CAAC;QACjD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,0CAA0C,CAAC;QACnD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,4CAA4C,CAAC;QACrD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,8CAA8C,CAAC;QACvD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,gDAAgD,CAAC;QACzD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,kDAAkD,CAAC;QAC3D,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,oDAAoD,CAAC;QAC7D,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,sDAAsD,CAAC;QAC/D,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,wDAAwD,CAAC;QACjE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,0DAA0D,CAAC;QACnE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE
,OAAO,CAAC,4DAA4D,CAAC;QACrE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,8DAA8D,CAAC;QACvE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,gEAAgE,CAAC;QACzE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,kEAAkE,CAAC;QAC3E,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,oEAAoE,CACrE;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,sEAAsE,CACvE;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wEAAwE,CACzE;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,0EAA0E,CAC3E;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,4EAA4E,CAC7E;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,8EAA8E,CAC/E;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,gFAAgF,CACjF;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,kFAAkF,CACnF;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,oFAAoF,CACrF;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,sFAAsF,CACvF;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wFAAwF,CACzF;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,0FAA0F,CAC3F;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,4FAA4F,CAC7F;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,8FAA8F,CAC/F;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,gGAAgG,CACjG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,kGAAkG,CACnG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,oGAAoG,CACrG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,sGAAsG,CACvG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wGAAwG,CACzG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,0GAA0G,CAC3G;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,4GAA4G,CAC7G;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,8GAA8G,CAC/G;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,gHAAgH,CACjH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,kHAAkH,CACnH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,oHAAoH,CACrH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,sHAAsH,CACvH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wHAAwH,CACzH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,0HAA0H,CAC3H;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,4HAA4H,CAC7H;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,8HAA8H,CAC/H;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,gIAAgI,CACjI;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,kIAAkI,CACnI;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,gHAAgH,CACjH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,YAAY;QACZ,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wQAAwQ,CACzQ;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;CACF,CAAC;AAEF;;GAEG;AACH,MAAM,CAAC,IAAM,eAAe,GAAgD;IAC1E;QACE,OAAO,CAAC,0CAA0C,CAAC;QACnD,OAAO,CAAC,kBAAkB,CAAC;QAC3B,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,UAAU,CAAC;QACnB,OAAO,CAAC,0DAA0D,CAAC;QACnE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,0CAA0C,CAAC;QACnD,OAAO,CACL,sGAAsG,CACvG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,oDAAoD,CAAC;QAC7D,OAAO,CACL,sGAAsG,CACvG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wQAAwQ,CACzQ;QACD,OAAO,CACL,8GAA8G,CAC/G;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wQAAwQ,CACzQ;QACD,OAAO,CACL,kTAAkT,CACnT;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;CACF,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/sha256-js/package.json b/node_modules/@aws-crypto/sha256-js/package.json new file mode 100644 index 00000000..e8ef52d7 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/package.json @@ -0,0 +1,32 @@ +{ + "name": "@aws-crypto/sha256-js", + "version": "5.2.0", + "scripts": { + "prepublishOnly": "tsc -p 
tsconfig.json && tsc -p tsconfig.module.json", + "pretest": "tsc -p tsconfig.test.json", + "test": "mocha --require ts-node/register test/**/*test.ts" + }, + "main": "./build/main/index.js", + "module": "./build/module/index.js", + "types": "./build/main/index.d.ts", + "repository": { + "type": "git", + "url": "git@github.com:aws/aws-sdk-js-crypto-helpers.git" + }, + "author": { + "name": "AWS Crypto Tools Team", + "email": "aws-cryptools@amazon.com", + "url": "https://docs.aws.amazon.com/aws-crypto-tools/index.html?id=docs_gateway#lang/en_us" + }, + "homepage": "https://github.com/aws/aws-sdk-js-crypto-helpers/tree/master/packages/sha256-js", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "gitHead": "c11b171b35ec5c093364f0e0d8dc4ab1af68e748" +} diff --git a/node_modules/@aws-crypto/sha256-js/src/RawSha256.ts b/node_modules/@aws-crypto/sha256-js/src/RawSha256.ts new file mode 100644 index 00000000..f4a385c0 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/src/RawSha256.ts @@ -0,0 +1,164 @@ +import { + BLOCK_SIZE, + DIGEST_LENGTH, + INIT, + KEY, + MAX_HASHABLE_LENGTH +} from "./constants"; + +/** + * @internal + */ +export class RawSha256 { + private state: Int32Array = Int32Array.from(INIT); + private temp: Int32Array = new Int32Array(64); + private buffer: Uint8Array = new Uint8Array(64); + private bufferLength: number = 0; + private bytesHashed: number = 0; + + /** + * @internal + */ + finished: boolean = false; + + update(data: Uint8Array): void { + if (this.finished) { + throw new Error("Attempted to update an already finished hash."); + } + + let position = 0; + let { byteLength } = data; + this.bytesHashed += byteLength; + + if (this.bytesHashed * 8 > MAX_HASHABLE_LENGTH) { + throw new Error("Cannot hash more than 2^53 - 1 bits"); + } + + while (byteLength > 0) { + this.buffer[this.bufferLength++] = data[position++]; + byteLength--; + + if (this.bufferLength === BLOCK_SIZE) { + this.hashBuffer(); + this.bufferLength = 0; + } + } + } + + digest(): Uint8Array { + if (!this.finished) { + const bitsHashed = this.bytesHashed * 8; + const bufferView = new DataView( + this.buffer.buffer, + this.buffer.byteOffset, + this.buffer.byteLength + ); + + const undecoratedLength = this.bufferLength; + bufferView.setUint8(this.bufferLength++, 0x80); + + // Ensure the final block has enough room for the hashed length + if (undecoratedLength % BLOCK_SIZE >= BLOCK_SIZE - 8) { + for (let i = this.bufferLength; i < BLOCK_SIZE; i++) { + bufferView.setUint8(i, 0); + } + this.hashBuffer(); + this.bufferLength = 0; + } + + for (let i = this.bufferLength; i < BLOCK_SIZE - 8; i++) { + bufferView.setUint8(i, 0); + } + bufferView.setUint32( + BLOCK_SIZE - 8, + Math.floor(bitsHashed / 0x100000000), + true + ); + bufferView.setUint32(BLOCK_SIZE - 4, bitsHashed); + + this.hashBuffer(); + + this.finished = true; + } + + // The value in state is little-endian rather than big-endian, so flip + // each word into a new Uint8Array + const out = new Uint8Array(DIGEST_LENGTH); + for (let i = 0; i < 8; i++) { + out[i * 4] = (this.state[i] >>> 24) & 0xff; + out[i * 4 + 1] = (this.state[i] >>> 16) & 0xff; + out[i * 4 + 2] = (this.state[i] >>> 8) & 0xff; + out[i * 4 + 3] = (this.state[i] >>> 0) & 0xff; + } + + return out; + } + + private hashBuffer(): void { + const { buffer, state } = this; + + let state0 = state[0], + state1 = state[1], + state2 = state[2], + state3 = state[3], + state4 = 
state[4], + state5 = state[5], + state6 = state[6], + state7 = state[7]; + + for (let i = 0; i < BLOCK_SIZE; i++) { + if (i < 16) { + this.temp[i] = + ((buffer[i * 4] & 0xff) << 24) | + ((buffer[i * 4 + 1] & 0xff) << 16) | + ((buffer[i * 4 + 2] & 0xff) << 8) | + (buffer[i * 4 + 3] & 0xff); + } else { + let u = this.temp[i - 2]; + const t1 = + ((u >>> 17) | (u << 15)) ^ ((u >>> 19) | (u << 13)) ^ (u >>> 10); + + u = this.temp[i - 15]; + const t2 = + ((u >>> 7) | (u << 25)) ^ ((u >>> 18) | (u << 14)) ^ (u >>> 3); + + this.temp[i] = + ((t1 + this.temp[i - 7]) | 0) + ((t2 + this.temp[i - 16]) | 0); + } + + const t1 = + ((((((state4 >>> 6) | (state4 << 26)) ^ + ((state4 >>> 11) | (state4 << 21)) ^ + ((state4 >>> 25) | (state4 << 7))) + + ((state4 & state5) ^ (~state4 & state6))) | + 0) + + ((state7 + ((KEY[i] + this.temp[i]) | 0)) | 0)) | + 0; + + const t2 = + ((((state0 >>> 2) | (state0 << 30)) ^ + ((state0 >>> 13) | (state0 << 19)) ^ + ((state0 >>> 22) | (state0 << 10))) + + ((state0 & state1) ^ (state0 & state2) ^ (state1 & state2))) | + 0; + + state7 = state6; + state6 = state5; + state5 = state4; + state4 = (state3 + t1) | 0; + state3 = state2; + state2 = state1; + state1 = state0; + state0 = (t1 + t2) | 0; + } + + state[0] += state0; + state[1] += state1; + state[2] += state2; + state[3] += state3; + state[4] += state4; + state[5] += state5; + state[6] += state6; + state[7] += state7; + } +} diff --git a/node_modules/@aws-crypto/sha256-js/src/constants.ts b/node_modules/@aws-crypto/sha256-js/src/constants.ts new file mode 100644 index 00000000..8cede572 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/src/constants.ts @@ -0,0 +1,98 @@ +/** + * @internal + */ +export const BLOCK_SIZE: number = 64; + +/** + * @internal + */ +export const DIGEST_LENGTH: number = 32; + +/** + * @internal + */ +export const KEY = new Uint32Array([ + 0x428a2f98, + 0x71374491, + 0xb5c0fbcf, + 0xe9b5dba5, + 0x3956c25b, + 0x59f111f1, + 0x923f82a4, + 0xab1c5ed5, + 0xd807aa98, + 0x12835b01, + 0x243185be, + 0x550c7dc3, + 0x72be5d74, + 0x80deb1fe, + 0x9bdc06a7, + 0xc19bf174, + 0xe49b69c1, + 0xefbe4786, + 0x0fc19dc6, + 0x240ca1cc, + 0x2de92c6f, + 0x4a7484aa, + 0x5cb0a9dc, + 0x76f988da, + 0x983e5152, + 0xa831c66d, + 0xb00327c8, + 0xbf597fc7, + 0xc6e00bf3, + 0xd5a79147, + 0x06ca6351, + 0x14292967, + 0x27b70a85, + 0x2e1b2138, + 0x4d2c6dfc, + 0x53380d13, + 0x650a7354, + 0x766a0abb, + 0x81c2c92e, + 0x92722c85, + 0xa2bfe8a1, + 0xa81a664b, + 0xc24b8b70, + 0xc76c51a3, + 0xd192e819, + 0xd6990624, + 0xf40e3585, + 0x106aa070, + 0x19a4c116, + 0x1e376c08, + 0x2748774c, + 0x34b0bcb5, + 0x391c0cb3, + 0x4ed8aa4a, + 0x5b9cca4f, + 0x682e6ff3, + 0x748f82ee, + 0x78a5636f, + 0x84c87814, + 0x8cc70208, + 0x90befffa, + 0xa4506ceb, + 0xbef9a3f7, + 0xc67178f2 +]); + +/** + * @internal + */ +export const INIT = [ + 0x6a09e667, + 0xbb67ae85, + 0x3c6ef372, + 0xa54ff53a, + 0x510e527f, + 0x9b05688c, + 0x1f83d9ab, + 0x5be0cd19 +]; + +/** + * @internal + */ +export const MAX_HASHABLE_LENGTH = 2 ** 53 - 1; diff --git a/node_modules/@aws-crypto/sha256-js/src/index.ts b/node_modules/@aws-crypto/sha256-js/src/index.ts new file mode 100644 index 00000000..4554d8a3 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/src/index.ts @@ -0,0 +1 @@ +export * from "./jsSha256"; diff --git a/node_modules/@aws-crypto/sha256-js/src/jsSha256.ts b/node_modules/@aws-crypto/sha256-js/src/jsSha256.ts new file mode 100644 index 00000000..f7bd9934 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/src/jsSha256.ts @@ -0,0 +1,94 @@ +import { BLOCK_SIZE } from 
"./constants"; +import { RawSha256 } from "./RawSha256"; +import { Checksum, SourceData } from "@aws-sdk/types"; +import { isEmptyData, convertToBuffer } from "@aws-crypto/util"; + +export class Sha256 implements Checksum { + private readonly secret?: SourceData; + private hash: RawSha256; + private outer?: RawSha256; + private error: any; + + constructor(secret?: SourceData) { + this.secret = secret; + this.hash = new RawSha256(); + this.reset(); + } + + update(toHash: SourceData): void { + if (isEmptyData(toHash) || this.error) { + return; + } + + try { + this.hash.update(convertToBuffer(toHash)); + } catch (e) { + this.error = e; + } + } + + /* This synchronous method keeps compatibility + * with the v2 aws-sdk. + */ + digestSync(): Uint8Array { + if (this.error) { + throw this.error; + } + + if (this.outer) { + if (!this.outer.finished) { + this.outer.update(this.hash.digest()); + } + + return this.outer.digest(); + } + + return this.hash.digest(); + } + + /* The underlying digest method here is synchronous. + * To keep the same interface with the other hash functions + * the default is to expose this as an async method. + * However, it can sometimes be useful to have a sync method. + */ + async digest(): Promise { + return this.digestSync(); + } + + reset(): void { + this.hash = new RawSha256(); + if (this.secret) { + this.outer = new RawSha256(); + const inner = bufferFromSecret(this.secret); + const outer = new Uint8Array(BLOCK_SIZE); + outer.set(inner); + + for (let i = 0; i < BLOCK_SIZE; i++) { + inner[i] ^= 0x36; + outer[i] ^= 0x5c; + } + + this.hash.update(inner); + this.outer.update(outer); + + // overwrite the copied key in memory + for (let i = 0; i < inner.byteLength; i++) { + inner[i] = 0; + } + } + } +} + +function bufferFromSecret(secret: SourceData): Uint8Array { + let input = convertToBuffer(secret); + + if (input.byteLength > BLOCK_SIZE) { + const bufferHash = new RawSha256(); + bufferHash.update(input); + input = bufferHash.digest(); + } + + const buffer = new Uint8Array(BLOCK_SIZE); + buffer.set(input); + return buffer; +} diff --git a/node_modules/@aws-crypto/sha256-js/src/knownHashes.fixture.ts b/node_modules/@aws-crypto/sha256-js/src/knownHashes.fixture.ts new file mode 100644 index 00000000..c83dae28 --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/src/knownHashes.fixture.ts @@ -0,0 +1,401 @@ +import { fromHex } from "@aws-sdk/util-hex-encoding"; + +const millionChars = new Uint8Array(1000000); +for (let i = 0; i < 1000000; i++) { + millionChars[i] = 97; +} + +export const hashTestVectors: Array<[Uint8Array, Uint8Array]> = [ + [ + Uint8Array.from([97, 98, 99]), + fromHex("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad") + ], + [ + new Uint8Array(0), + fromHex("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + ], + [ + fromHex("61"), + fromHex("ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb") + ], + [ + fromHex("6161"), + fromHex("961b6dd3ede3cb8ecbaacbd68de040cd78eb2ed5889130cceb4c49268ea4d506") + ], + [ + fromHex("616161"), + fromHex("9834876dcfb05cb167a5c24953eba58c4ac89b1adf57f28f2f9d09af107ee8f0") + ], + [ + fromHex("61616161"), + fromHex("61be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b4") + ], + [ + fromHex("6161616161"), + fromHex("ed968e840d10d2d313a870bc131a4e2c311d7ad09bdf32b3418147221f51a6e2") + ], + [ + fromHex("616161616161"), + fromHex("ed02457b5c41d964dbd2f2a609d63fe1bb7528dbe55e1abf5b52c249cd735797") + ], + [ + fromHex("61616161616161"), + 
fromHex("e46240714b5db3a23eee60479a623efba4d633d27fe4f03c904b9e219a7fbe60") + ], + [ + fromHex("6161616161616161"), + fromHex("1f3ce40415a2081fa3eee75fc39fff8e56c22270d1a978a7249b592dcebd20b4") + ], + [ + fromHex("616161616161616161"), + fromHex("f2aca93b80cae681221f0445fa4e2cae8a1f9f8fa1e1741d9639caad222f537d") + ], + [ + fromHex("61616161616161616161"), + fromHex("bf2cb58a68f684d95a3b78ef8f661c9a4e5b09e82cc8f9cc88cce90528caeb27") + ], + [ + fromHex("6161616161616161616161"), + fromHex("28cb017dfc99073aa1b47c1b30f413e3ce774c4991eb4158de50f9dbb36d8043") + ], + [ + fromHex("616161616161616161616161"), + fromHex("f24abc34b13fade76e805799f71187da6cd90b9cac373ae65ed57f143bd664e5") + ], + [ + fromHex("61616161616161616161616161"), + fromHex("a689d786e81340e45511dec6c7ab2d978434e5db123362450fe10cfac70d19d0") + ], + [ + fromHex("6161616161616161616161616161"), + fromHex("82cab7df0abfb9d95dca4e5937ce2968c798c726fea48c016bf9763221efda13") + ], + [ + fromHex("616161616161616161616161616161"), + fromHex("ef2df0b539c6c23de0f4cbe42648c301ae0e22e887340a4599fb4ef4e2678e48") + ], + [ + fromHex("61616161616161616161616161616161"), + fromHex("0c0beacef8877bbf2416eb00f2b5dc96354e26dd1df5517320459b1236860f8c") + ], + [ + fromHex("6161616161616161616161616161616161"), + fromHex("b860666ee2966dd8f903be44ee605c6e1366f926d9f17a8f49937d11624eb99d") + ], + [ + fromHex("616161616161616161616161616161616161"), + fromHex("c926defaaa3d13eda2fc63a553bb7fb7326bece6e7cb67ca5296e4727d89bab4") + ], + [ + fromHex("61616161616161616161616161616161616161"), + fromHex("a0b4aaab8a966e2193ba172d68162c4656860197f256b5f45f0203397ff3f99c") + ], + [ + fromHex("6161616161616161616161616161616161616161"), + fromHex("42492da06234ad0ac76f5d5debdb6d1ae027cffbe746a1c13b89bb8bc0139137") + ], + [ + fromHex("616161616161616161616161616161616161616161"), + fromHex("7df8e299c834de198e264c3e374bc58ecd9382252a705c183beb02f275571e3b") + ], + [ + fromHex("61616161616161616161616161616161616161616161"), + fromHex("ec7c494df6d2a7ea36668d656e6b8979e33641bfea378c15038af3964db057a3") + ], + [ + fromHex("6161616161616161616161616161616161616161616161"), + fromHex("897d3e95b65f26676081f8b9f3a98b6ee4424566303e8d4e7c7522ebae219eab") + ], + [ + fromHex("616161616161616161616161616161616161616161616161"), + fromHex("09f61f8d9cd65e6a0c258087c485b6293541364e42bd97b2d7936580c8aa3c54") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161"), + fromHex("2f521e2a7d0bd812cbc035f4ed6806eb8d851793b04ba147e8f66b72f5d1f20f") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161"), + fromHex("9976d549a25115dab4e36d0c1fb8f31cb07da87dd83275977360eb7dc09e88de") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161"), + fromHex("cc0616e61cbd6e8e5e34e9fb2d320f37de915820206f5696c31f1fbd24aa16de") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161"), + fromHex("9c547cb8115a44883b9f70ba68f75117cd55359c92611875e386f8af98c172ab") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161"), + fromHex("6913c9c7fd42fe23df8b6bcd4dbaf1c17748948d97f2980b432319c39eddcf6c") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161"), + fromHex("3a54fc0cbc0b0ef48b6507b7788096235d10292dd3ae24e22f5aa062d4f9864a") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161"), + fromHex("61c60b487d1a921e0bcc9bf853dda0fb159b30bf57b2e2d2c753b00be15b5a09") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161"), + 
fromHex("3ba3f5f43b92602683c19aee62a20342b084dd5971ddd33808d81a328879a547") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("852785c805c77e71a22340a54e9d95933ed49121e7d2bf3c2d358854bc1359ea") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("a27c896c4859204843166af66f0e902b9c3b3ed6d2fd13d435abc020065c526f") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("629362afc62c74497caed2272e30f8125ecd0965f8d8d7cfc4e260f7f8dd319d") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("22c1d24bcd03e9aee9832efccd6da613fc702793178e5f12c945c7b67ddda933") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("21ec055b38ce759cd4d0f477e9bdec2c5b8199945db4439bae334a964df6246c") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("365a9c3e2c2af0a56e47a9dac51c2c5381bf8f41273bad3175e0e619126ad087") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("b4d5e56e929ba4cda349e9274e3603d0be246b82016bca20f363963c5f2d6845") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("e33cdf9c7f7120b98e8c78408953e07f2ecd183006b5606df349b4c212acf43e") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("c0f8bd4dbc2b0c03107c1c37913f2a7501f521467f45dd0fef6958e9a4692719") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("7a538607fdaab9296995929f451565bbb8142e1844117322aafd2b3d76b01aff") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("66d34fba71f8f450f7e45598853e53bfc23bbd129027cbb131a2f4ffd7878cd0") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("16849877c6c21ef0bfa68e4f6747300ddb171b170b9f00e189edc4c2fc4db93e") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("52789e3423b72beeb898456a4f49662e46b0cbb960784c5ef4b1399d327e7c27") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("6643110c5628fff59edf76d82d5bf573bf800f16a4d65dfb1e5d6f1a46296d0b") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("11eaed932c6c6fddfc2efc394e609facf4abe814fc6180d03b14fce13a07d0e5") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("97daac0ee9998dfcad6c9c0970da5ca411c86233a944c25b47566f6a7bc1ddd5") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("8f9bec6a62dd28ebd36d1227745592de6658b36974a3bb98a4c582f683ea6c42") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("160b4e433e384e05e537dc59b467f7cb2403f0214db15c5db58862a3f1156d2e") + ], + [ + fromHex( + 
"616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("bfc5fe0e360152ca98c50fab4ed7e3078c17debc2917740d5000913b686ca129") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("6c1b3dc7a706b9dc81352a6716b9c666c608d8626272c64b914ab05572fc6e84") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("abe346a7259fc90b4c27185419628e5e6af6466b1ae9b5446cac4bfc26cf05c4") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("a3f01b6939256127582ac8ae9fb47a382a244680806a3f613a118851c1ca1d47") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("9f4390f8d30c2dd92ec9f095b65e2b9ae9b0a925a5258e241c9f1e910f734318") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("b35439a4ac6f0948b6d6f9e3c6af0f5f590ce20f1bde7090ef7970686ec6738a") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("f13b2d724659eb3bf47f2dd6af1accc87b81f09f59f2b75e5c0bed6589dfe8c6") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("d5c039b748aa64665782974ec3dc3025c042edf54dcdc2b5de31385b094cb678") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("111bb261277afd65f0744b247cd3e47d386d71563d0ed995517807d5ebd4fba3") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("11ee391211c6256460b6ed375957fadd8061cafbb31daf967db875aebd5aaad4") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("35d5fc17cfbbadd00f5e710ada39f194c5ad7c766ad67072245f1fad45f0f530") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("f506898cc7c2e092f9eb9fadae7ba50383f5b46a2a4fe5597dbb553a78981268") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("7d3e74a05d7db15bce4ad9ec0658ea98e3f06eeecf16b4c6fff2da457ddc2f34") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("ffe054fe7ae0cb6dc65c3af9b61d5209f439851db43d0ba5997337df154668eb") + ], + [ + fromHex( + "de188941a3375d3a8a061e67576e926dc71a7fa3f0cceb97452b4d3227965f9ea8cc75076d9fb9c5417aa5cb30fc22198b34982dbb629e" + ), + fromHex("038051e9c324393bd1ca1978dd0952c2aa3742ca4f1bd5cd4611cea83892d382") + ], + [ + millionChars, + fromHex("cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0") + ], + [ + fromHex( + 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + ), + fromHex("45ad4b37c6e2fc0a2cfcc1b5da524132ec707615c2cae1dbbc43c97aa521db81") + ] +]; + +/** + * @see https://tools.ietf.org/html/rfc4231 + */ +export const hmacTestVectors: Array<[Uint8Array, Uint8Array, Uint8Array]> = [ + [ + fromHex("0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b"), + fromHex("4869205468657265"), + fromHex("b0344c61d8db38535ca8afceaf0bf12b881dc200c9833da726e9376c2e32cff7") + ], + [ + fromHex("4a656665"), + fromHex("7768617420646f2079612077616e7420666f72206e6f7468696e673f"), + fromHex("5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843") + ], + [ + fromHex("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + fromHex( + "dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd" + ), + fromHex("773ea91e36800e46854db8ebd09181a72959098b3ef8c122d9635514ced565fe") + ], + [ + fromHex("0102030405060708090a0b0c0d0e0f10111213141516171819"), + fromHex( + "cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd" + ), + fromHex("82558a389a443c0ea4cc819899f2083a85f0faa3e578f8077a2e3ff46729665b") + ], + [ + fromHex( + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + ), + fromHex( + "54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a65204b6579202d2048617368204b6579204669727374" + ), + fromHex("60e431591ee0b67f0d8a26aacbf5b77f8e0bc6213728c5140546040f0ee37f54") + ], + [ + fromHex( + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + ), + fromHex( + "5468697320697320612074657374207573696e672061206c6172676572207468616e20626c6f636b2d73697a65206b657920616e642061206c6172676572207468616e20626c6f636b2d73697a6520646174612e20546865206b6579206e6565647320746f20626520686173686564206265666f7265206265696e6720757365642062792074686520484d414320616c676f726974686d2e" + ), + fromHex("9b09ffa71b942fcb27635fbcd5b0e944bfdc63644f0713938a7f51535c3a35e2") + ] +]; diff --git a/node_modules/@aws-crypto/sha256-js/tsconfig.json b/node_modules/@aws-crypto/sha256-js/tsconfig.json new file mode 100644 index 00000000..fb9aa95f --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "rootDir": "./src", + "outDir": "./build/main", + "lib": ["dom"], + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules/**"] +} diff --git a/node_modules/@aws-crypto/sha256-js/tsconfig.module.json b/node_modules/@aws-crypto/sha256-js/tsconfig.module.json new file mode 100644 index 00000000..7d0cfddc --- /dev/null +++ b/node_modules/@aws-crypto/sha256-js/tsconfig.module.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "outDir": "build/module", + "module": "esnext", + } +} diff --git a/node_modules/@aws-crypto/supports-web-crypto/CHANGELOG.md b/node_modules/@aws-crypto/supports-web-crypto/CHANGELOG.md new file mode 100644 index 
00000000..13023adf --- /dev/null +++ b/node_modules/@aws-crypto/supports-web-crypto/CHANGELOG.md @@ -0,0 +1,66 @@ +# Change Log + +All notable changes to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. + +# [5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.1.0...v5.2.0) (2023-10-16) + +### Features + +- support ESM artifacts in all packages ([#752](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/752)) ([e930ffb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e930ffba5cfef66dd242049e7d514ced232c1e3b)) + +# [5.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.0.0...v5.1.0) (2023-09-22) + +### Bug Fixes + +- Update tsc to 2.x ([#735](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/735)) ([782e0de](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/782e0de9f5fef41f694130580a69d940894b6b8c)) + +# [5.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v4.0.1...v5.0.0) (2023-07-13) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +# [4.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v3.0.0...v4.0.0) (2023-02-20) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +# [3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.2...v3.0.0) (2023-01-12) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +## [2.0.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.1...v2.0.2) (2022-09-07) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +# [2.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.2...v2.0.0) (2021-10-25) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +# [1.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/supports-web-crypto@1.0.0-alpha.0...@aws-crypto/supports-web-crypto@1.0.0) (2020-10-22) + +### Bug Fixes + +- replace `sourceRoot` -> `rootDir` in tsconfig ([#169](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/169)) ([d437167](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/d437167b51d1c56a4fcc2bb8a446b74a7e3b7e06)) + +# [1.0.0-alpha.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/supports-web-crypto@0.1.0-preview.4...@aws-crypto/supports-web-crypto@1.0.0-alpha.0) (2020-02-07) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +# [0.1.0-preview.4](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/supports-web-crypto@0.1.0-preview.2...@aws-crypto/supports-web-crypto@0.1.0-preview.4) (2020-01-16) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) ([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.3](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/supports-web-crypto@0.1.0-preview.2...@aws-crypto/supports-web-crypto@0.1.0-preview.3) (2019-11-15) + +### Bug Fixes + +- Changed package.json files 
to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) ([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.2](https://github.com/aws/aws-javascript-crypto-helpers/compare/@aws-crypto/supports-web-crypto@0.1.0-preview.1...@aws-crypto/supports-web-crypto@0.1.0-preview.2) (2019-10-30) + +### Bug Fixes + +- remove /src/ from .npmignore (for sourcemaps) ([#5](https://github.com/aws/aws-javascript-crypto-helpers/issues/5)) ([ec52056](https://github.com/aws/aws-javascript-crypto-helpers/commit/ec52056)) diff --git a/node_modules/@aws-crypto/supports-web-crypto/LICENSE b/node_modules/@aws-crypto/supports-web-crypto/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/node_modules/@aws-crypto/supports-web-crypto/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@aws-crypto/supports-web-crypto/README.md b/node_modules/@aws-crypto/supports-web-crypto/README.md new file mode 100644 index 00000000..78913571 --- /dev/null +++ b/node_modules/@aws-crypto/supports-web-crypto/README.md @@ -0,0 +1,36 @@ +# @aws-crypto/supports-web-crypto + +Functions to check web crypto support for browsers. + +## Usage + +``` +import {supportsWebCrypto} from '@aws-crypto/supports-web-crypto'; + +if (supportsWebCrypto(window)) { + // window.crypto.subtle.encrypt will exist +} + +``` + +## supportsWebCrypto + +Used to make sure that `window.crypto.subtle` exists and implements the required crypto functions, +and that a cryptographically secure random source exists. + +## supportsSecureRandom + +Used to make sure that a cryptographically secure random source exists. +Does not check for `window.crypto.subtle`. 
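To make these detection functions concrete, here is a minimal usage sketch (an editor's illustration, not part of the package): it assumes a browser context, uses the pure-JS `Sha256` from `@aws-crypto/sha256-js` (added earlier in this diff) as the fallback, and the `sha256` helper name is hypothetical.

```
import { supportsWebCrypto } from "@aws-crypto/supports-web-crypto";
import { Sha256 as JsSha256 } from "@aws-crypto/sha256-js";

// Hypothetical helper: prefer native WebCrypto when the environment
// fully supports it, otherwise fall back to the pure-JS implementation.
async function sha256(data: Uint8Array): Promise<Uint8Array> {
  if (typeof window !== "undefined" && supportsWebCrypto(window)) {
    // supportsWebCrypto has verified that window.crypto.subtle
    // implements digest() and that a secure random source exists.
    const digest = await window.crypto.subtle.digest("SHA-256", data);
    return new Uint8Array(digest);
  }
  const hash = new JsSha256();
  hash.update(data);
  return hash.digest(); // resolves to a Uint8Array
}
```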
+ + ## supportsSubtleCrypto + +Used to make sure a given `SubtleCrypto` object implements all of the methods this library relies on: `decrypt`, `digest`, `encrypt`, `exportKey`, `generateKey`, `importKey`, `sign`, and `verify`. + + ## supportsZeroByteGCM + +Used to verify, asynchronously, that AES-GCM encryption of a zero-byte payload produces the expected 16-byte authentication tag; some WebCrypto implementations cannot encrypt empty payloads. + + ## Test + + `npm test` diff --git a/node_modules/@aws-crypto/supports-web-crypto/build/main/index.d.ts b/node_modules/@aws-crypto/supports-web-crypto/build/main/index.d.ts new file mode 100644 index 00000000..9725c9c2 --- /dev/null +++ b/node_modules/@aws-crypto/supports-web-crypto/build/main/index.d.ts @@ -0,0 +1 @@ +export * from "./supportsWebCrypto"; diff --git a/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js b/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js new file mode 100644 index 00000000..cc4c93f7 --- /dev/null +++ b/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var tslib_1 = require("tslib"); +tslib_1.__exportStar(require("./supportsWebCrypto"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js.map b/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js.map new file mode 100644 index 00000000..df0dd2c9 --- /dev/null +++ b/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;AAAA,8DAAoC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.d.ts b/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.d.ts new file mode 100644 index 00000000..f2723dc6 --- /dev/null +++ b/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.d.ts @@ -0,0 +1,4 @@ +export declare function supportsWebCrypto(window: Window): boolean; +export declare function supportsSecureRandom(window: Window): boolean; +export declare function supportsSubtleCrypto(subtle: SubtleCrypto): boolean; +export declare function supportsZeroByteGCM(subtle: SubtleCrypto): Promise<boolean>; diff --git a/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js b/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js new file mode 100644 index 00000000..378f31e2 --- /dev/null +++ b/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js @@ -0,0 +1,69 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.supportsZeroByteGCM = exports.supportsSubtleCrypto = exports.supportsSecureRandom = exports.supportsWebCrypto = void 0; +var tslib_1 = require("tslib"); +var subtleCryptoMethods = [ + "decrypt", + "digest", + "encrypt", + "exportKey", + "generateKey", + "importKey", + "sign", + "verify" +]; +function supportsWebCrypto(window) { + if (supportsSecureRandom(window) && + typeof window.crypto.subtle === "object") { + var subtle = window.crypto.subtle; + return supportsSubtleCrypto(subtle); + } + return false; +} +exports.supportsWebCrypto = supportsWebCrypto; +function supportsSecureRandom(window) { + if (typeof window === "object" && typeof window.crypto === "object") { + var getRandomValues = window.crypto.getRandomValues; + return typeof getRandomValues === "function"; + } + return false; +} +exports.supportsSecureRandom = supportsSecureRandom; +function supportsSubtleCrypto(subtle) { + return (subtle && + subtleCryptoMethods.every(function (methodName) { return typeof subtle[methodName] === "function"; })); +} +exports.supportsSubtleCrypto = supportsSubtleCrypto; +function
supportsZeroByteGCM(subtle) { + return tslib_1.__awaiter(this, void 0, void 0, function () { + var key, zeroByteAuthTag, _a; + return tslib_1.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!supportsSubtleCrypto(subtle)) + return [2 /*return*/, false]; + _b.label = 1; + case 1: + _b.trys.push([1, 4, , 5]); + return [4 /*yield*/, subtle.generateKey({ name: "AES-GCM", length: 128 }, false, ["encrypt"])]; + case 2: + key = _b.sent(); + return [4 /*yield*/, subtle.encrypt({ + name: "AES-GCM", + iv: new Uint8Array(Array(12)), + additionalData: new Uint8Array(Array(16)), + tagLength: 128 + }, key, new Uint8Array(0))]; + case 3: + zeroByteAuthTag = _b.sent(); + return [2 /*return*/, zeroByteAuthTag.byteLength === 16]; + case 4: + _a = _b.sent(); + return [2 /*return*/, false]; + case 5: return [2 /*return*/]; + } + }); + }); +} +exports.supportsZeroByteGCM = supportsZeroByteGCM; +//# sourceMappingURL=supportsWebCrypto.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js.map b/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js.map new file mode 100644 index 00000000..1cc0ea36 --- /dev/null +++ b/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js.map @@ -0,0 +1 @@ +{"version":3,"file":"supportsWebCrypto.js","sourceRoot":"","sources":["../../src/supportsWebCrypto.ts"],"names":[],"mappings":";;;;AAUA,IAAM,mBAAmB,GAA8B;IACrD,SAAS;IACT,QAAQ;IACR,SAAS;IACT,WAAW;IACX,aAAa;IACb,WAAW;IACX,MAAM;IACN,QAAQ;CACT,CAAC;AAEF,SAAgB,iBAAiB,CAAC,MAAc;IAC9C,IACE,oBAAoB,CAAC,MAAM,CAAC;QAC5B,OAAO,MAAM,CAAC,MAAM,CAAC,MAAM,KAAK,QAAQ,EACxC;QACQ,IAAA,MAAM,GAAK,MAAM,CAAC,MAAM,OAAlB,CAAmB;QAEjC,OAAO,oBAAoB,CAAC,MAAM,CAAC,CAAC;KACrC;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAXD,8CAWC;AAED,SAAgB,oBAAoB,CAAC,MAAc;IACjD,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ,EAAE;QAC3D,IAAA,eAAe,GAAK,MAAM,CAAC,MAAM,gBAAlB,CAAmB;QAE1C,OAAO,OAAO,eAAe,KAAK,UAAU,CAAC;KAC9C;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AARD,oDAQC;AAED,SAAgB,oBAAoB,CAAC,MAAoB;IACvD,OAAO,CACL,MAAM;QACN,mBAAmB,CAAC,KAAK,CACvB,UAAA,UAAU,IAAI,OAAA,OAAO,MAAM,CAAC,UAAU,CAAC,KAAK,UAAU,EAAxC,CAAwC,CACvD,CACF,CAAC;AACJ,CAAC;AAPD,oDAOC;AAED,SAAsB,mBAAmB,CAAC,MAAoB;;;;;;oBAC5D,IAAI,CAAC,oBAAoB,CAAC,MAAM,CAAC;wBAAE,sBAAO,KAAK,EAAC;;;;oBAElC,qBAAM,MAAM,CAAC,WAAW,CAClC,EAAE,IAAI,EAAE,SAAS,EAAE,MAAM,EAAE,GAAG,EAAE,EAChC,KAAK,EACL,CAAC,SAAS,CAAC,CACZ,EAAA;;oBAJK,GAAG,GAAG,SAIX;oBACuB,qBAAM,MAAM,CAAC,OAAO,CAC1C;4BACE,IAAI,EAAE,SAAS;4BACf,EAAE,EAAE,IAAI,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;4BAC7B,cAAc,EAAE,IAAI,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;4BACzC,SAAS,EAAE,GAAG;yBACf,EACD,GAAG,EACH,IAAI,UAAU,CAAC,CAAC,CAAC,CAClB,EAAA;;oBATK,eAAe,GAAG,SASvB;oBACD,sBAAO,eAAe,CAAC,UAAU,KAAK,EAAE,EAAC;;;oBAEzC,sBAAO,KAAK,EAAC;;;;;CAEhB;AAtBD,kDAsBC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/supports-web-crypto/build/module/index.d.ts b/node_modules/@aws-crypto/supports-web-crypto/build/module/index.d.ts new file mode 100644 index 00000000..9725c9c2 --- /dev/null +++ b/node_modules/@aws-crypto/supports-web-crypto/build/module/index.d.ts @@ -0,0 +1 @@ +export * from "./supportsWebCrypto"; diff --git a/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js b/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js new file mode 100644 index 00000000..f5527ea5 --- /dev/null +++ b/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js @@ -0,0 +1,2 @@ +export * from "./supportsWebCrypto"; +//# 
sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js.map b/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js.map new file mode 100644 index 00000000..b2df4309 --- /dev/null +++ b/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,qBAAqB,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.d.ts b/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.d.ts new file mode 100644 index 00000000..f2723dc6 --- /dev/null +++ b/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.d.ts @@ -0,0 +1,4 @@ +export declare function supportsWebCrypto(window: Window): boolean; +export declare function supportsSecureRandom(window: Window): boolean; +export declare function supportsSubtleCrypto(subtle: SubtleCrypto): boolean; +export declare function supportsZeroByteGCM(subtle: SubtleCrypto): Promise<boolean>; diff --git a/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js b/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js new file mode 100644 index 00000000..70b46e6d --- /dev/null +++ b/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js @@ -0,0 +1,62 @@ +import { __awaiter, __generator } from "tslib"; +var subtleCryptoMethods = [ + "decrypt", + "digest", + "encrypt", + "exportKey", + "generateKey", + "importKey", + "sign", + "verify" +]; +export function supportsWebCrypto(window) { + if (supportsSecureRandom(window) && + typeof window.crypto.subtle === "object") { + var subtle = window.crypto.subtle; + return supportsSubtleCrypto(subtle); + } + return false; +} +export function supportsSecureRandom(window) { + if (typeof window === "object" && typeof window.crypto === "object") { + var getRandomValues = window.crypto.getRandomValues; + return typeof getRandomValues === "function"; + } + return false; +} +export function supportsSubtleCrypto(subtle) { + return (subtle && + subtleCryptoMethods.every(function (methodName) { return typeof subtle[methodName] === "function"; })); +} +export function supportsZeroByteGCM(subtle) { + return __awaiter(this, void 0, void 0, function () { + var key, zeroByteAuthTag, _a; + return __generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!supportsSubtleCrypto(subtle)) + return [2 /*return*/, false]; + _b.label = 1; + case 1: + _b.trys.push([1, 4, , 5]); + return [4 /*yield*/, subtle.generateKey({ name: "AES-GCM", length: 128 }, false, ["encrypt"])]; + case 2: + key = _b.sent(); + return [4 /*yield*/, subtle.encrypt({ + name: "AES-GCM", + iv: new Uint8Array(Array(12)), + additionalData: new Uint8Array(Array(16)), + tagLength: 128 + }, key, new Uint8Array(0))]; + case 3: + zeroByteAuthTag = _b.sent(); + return [2 /*return*/, zeroByteAuthTag.byteLength === 16]; + case 4: + _a = _b.sent(); + return [2 /*return*/, false]; + case 5: return [2 /*return*/]; + } + }); + }); +} +//# sourceMappingURL=supportsWebCrypto.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js.map b/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js.map new file mode 100644 index 00000000..967fc193 --- /dev/null +++ 
b/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js.map @@ -0,0 +1 @@ +{"version":3,"file":"supportsWebCrypto.js","sourceRoot":"","sources":["../../src/supportsWebCrypto.ts"],"names":[],"mappings":";AAUA,IAAM,mBAAmB,GAA8B;IACrD,SAAS;IACT,QAAQ;IACR,SAAS;IACT,WAAW;IACX,aAAa;IACb,WAAW;IACX,MAAM;IACN,QAAQ;CACT,CAAC;AAEF,MAAM,UAAU,iBAAiB,CAAC,MAAc;IAC9C,IACE,oBAAoB,CAAC,MAAM,CAAC;QAC5B,OAAO,MAAM,CAAC,MAAM,CAAC,MAAM,KAAK,QAAQ,EACxC;QACQ,IAAA,MAAM,GAAK,MAAM,CAAC,MAAM,OAAlB,CAAmB;QAEjC,OAAO,oBAAoB,CAAC,MAAM,CAAC,CAAC;KACrC;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED,MAAM,UAAU,oBAAoB,CAAC,MAAc;IACjD,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ,EAAE;QAC3D,IAAA,eAAe,GAAK,MAAM,CAAC,MAAM,gBAAlB,CAAmB;QAE1C,OAAO,OAAO,eAAe,KAAK,UAAU,CAAC;KAC9C;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED,MAAM,UAAU,oBAAoB,CAAC,MAAoB;IACvD,OAAO,CACL,MAAM;QACN,mBAAmB,CAAC,KAAK,CACvB,UAAA,UAAU,IAAI,OAAA,OAAO,MAAM,CAAC,UAAU,CAAC,KAAK,UAAU,EAAxC,CAAwC,CACvD,CACF,CAAC;AACJ,CAAC;AAED,MAAM,UAAgB,mBAAmB,CAAC,MAAoB;;;;;;oBAC5D,IAAI,CAAC,oBAAoB,CAAC,MAAM,CAAC;wBAAE,sBAAO,KAAK,EAAC;;;;oBAElC,qBAAM,MAAM,CAAC,WAAW,CAClC,EAAE,IAAI,EAAE,SAAS,EAAE,MAAM,EAAE,GAAG,EAAE,EAChC,KAAK,EACL,CAAC,SAAS,CAAC,CACZ,EAAA;;oBAJK,GAAG,GAAG,SAIX;oBACuB,qBAAM,MAAM,CAAC,OAAO,CAC1C;4BACE,IAAI,EAAE,SAAS;4BACf,EAAE,EAAE,IAAI,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;4BAC7B,cAAc,EAAE,IAAI,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;4BACzC,SAAS,EAAE,GAAG;yBACf,EACD,GAAG,EACH,IAAI,UAAU,CAAC,CAAC,CAAC,CAClB,EAAA;;oBATK,eAAe,GAAG,SASvB;oBACD,sBAAO,eAAe,CAAC,UAAU,KAAK,EAAE,EAAC;;;oBAEzC,sBAAO,KAAK,EAAC;;;;;CAEhB"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/supports-web-crypto/package.json b/node_modules/@aws-crypto/supports-web-crypto/package.json new file mode 100644 index 00000000..a97bf013 --- /dev/null +++ b/node_modules/@aws-crypto/supports-web-crypto/package.json @@ -0,0 +1,28 @@ +{ + "name": "@aws-crypto/supports-web-crypto", + "version": "5.2.0", + "description": "Provides functions for detecting if the host environment supports the WebCrypto API", + "scripts": { + "prepublishOnly": "tsc -p tsconfig.json && tsc -p tsconfig.module.json", + "pretest": "tsc -p tsconfig.test.json", + "test": "mocha --require ts-node/register test/**/*test.ts" + }, + "repository": { + "type": "git", + "url": "git@github.com:aws/aws-sdk-js-crypto-helpers.git" + }, + "author": { + "name": "AWS Crypto Tools Team", + "email": "aws-cryptools@amazon.com", + "url": "https://docs.aws.amazon.com/aws-crypto-tools/index.html?id=docs_gateway#lang/en_us" + }, + "homepage": "https://github.com/aws/aws-sdk-js-crypto-helpers/tree/master/packages/supports-web-crypto", + "license": "Apache-2.0", + "main": "./build/main/index.js", + "module": "./build/module/index.js", + "types": "./build/main/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "gitHead": "c11b171b35ec5c093364f0e0d8dc4ab1af68e748" +} diff --git a/node_modules/@aws-crypto/supports-web-crypto/src/index.ts b/node_modules/@aws-crypto/supports-web-crypto/src/index.ts new file mode 100644 index 00000000..9725c9c2 --- /dev/null +++ b/node_modules/@aws-crypto/supports-web-crypto/src/index.ts @@ -0,0 +1 @@ +export * from "./supportsWebCrypto"; diff --git a/node_modules/@aws-crypto/supports-web-crypto/src/supportsWebCrypto.ts b/node_modules/@aws-crypto/supports-web-crypto/src/supportsWebCrypto.ts new file mode 100644 index 00000000..7eef6291 --- /dev/null +++ b/node_modules/@aws-crypto/supports-web-crypto/src/supportsWebCrypto.ts @@ -0,0 +1,76 @@ +type SubtleCryptoMethod = + | "decrypt" + | "digest" + 
| "encrypt" + | "exportKey" + | "generateKey" + | "importKey" + | "sign" + | "verify"; + +const subtleCryptoMethods: Array = [ + "decrypt", + "digest", + "encrypt", + "exportKey", + "generateKey", + "importKey", + "sign", + "verify" +]; + +export function supportsWebCrypto(window: Window): boolean { + if ( + supportsSecureRandom(window) && + typeof window.crypto.subtle === "object" + ) { + const { subtle } = window.crypto; + + return supportsSubtleCrypto(subtle); + } + + return false; +} + +export function supportsSecureRandom(window: Window): boolean { + if (typeof window === "object" && typeof window.crypto === "object") { + const { getRandomValues } = window.crypto; + + return typeof getRandomValues === "function"; + } + + return false; +} + +export function supportsSubtleCrypto(subtle: SubtleCrypto) { + return ( + subtle && + subtleCryptoMethods.every( + methodName => typeof subtle[methodName] === "function" + ) + ); +} + +export async function supportsZeroByteGCM(subtle: SubtleCrypto) { + if (!supportsSubtleCrypto(subtle)) return false; + try { + const key = await subtle.generateKey( + { name: "AES-GCM", length: 128 }, + false, + ["encrypt"] + ); + const zeroByteAuthTag = await subtle.encrypt( + { + name: "AES-GCM", + iv: new Uint8Array(Array(12)), + additionalData: new Uint8Array(Array(16)), + tagLength: 128 + }, + key, + new Uint8Array(0) + ); + return zeroByteAuthTag.byteLength === 16; + } catch { + return false; + } +} diff --git a/node_modules/@aws-crypto/supports-web-crypto/tsconfig.json b/node_modules/@aws-crypto/supports-web-crypto/tsconfig.json new file mode 100644 index 00000000..efca6de8 --- /dev/null +++ b/node_modules/@aws-crypto/supports-web-crypto/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "lib": ["dom"], + "rootDir": "./src", + "outDir": "./build/main", + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules/**"] +} diff --git a/node_modules/@aws-crypto/supports-web-crypto/tsconfig.module.json b/node_modules/@aws-crypto/supports-web-crypto/tsconfig.module.json new file mode 100644 index 00000000..7d0cfddc --- /dev/null +++ b/node_modules/@aws-crypto/supports-web-crypto/tsconfig.module.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "outDir": "build/module", + "module": "esnext", + } +} diff --git a/node_modules/@aws-crypto/util/CHANGELOG.md b/node_modules/@aws-crypto/util/CHANGELOG.md new file mode 100644 index 00000000..df2cecbb --- /dev/null +++ b/node_modules/@aws-crypto/util/CHANGELOG.md @@ -0,0 +1,71 @@ +# Change Log + +All notable changes to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+ +# [5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.1.0...v5.2.0) (2023-10-16) + +### Features + +- support ESM artifacts in all packages ([#752](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/752)) ([e930ffb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e930ffba5cfef66dd242049e7d514ced232c1e3b)) + +# [5.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.0.0...v5.1.0) (2023-09-22) + +### Bug Fixes + +- Update tsc to 2.x ([#735](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/735)) ([782e0de](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/782e0de9f5fef41f694130580a69d940894b6b8c)) + +### Features + +- Use @smithy/util-utf8 ([#730](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/730)) ([00fb851](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/00fb851ca3559d5a1f370f9256814de1210826b8)), closes [#699](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/699) + +# [5.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v4.0.1...v5.0.0) (2023-07-13) + +**Note:** Version bump only for package @aws-crypto/util + +# [4.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v3.0.0...v4.0.0) (2023-02-20) + +**Note:** Version bump only for package @aws-crypto/util + +# [3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.2...v3.0.0) (2023-01-12) + +- feat!: replace Hash implementations with Checksum interface (#492) ([da43dc0](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/da43dc0fdf669d9ebb5bfb1b1f7c79e46c4aaae1)), closes [#492](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/492) + +### BREAKING CHANGES + +- All classes that implemented `Hash` now implement `Checksum`. + +## [2.0.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.1...v2.0.2) (2022-09-07) + +### Bug Fixes + +- **#337:** update @aws-sdk/types ([#373](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/373)) ([b26a811](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/b26a811a392f5209c7ec7e57251500d4d78f97ff)), closes [#337](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/337) +- **docs:** update README for packages/util ([#382](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/382)) ([f3e650e](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/f3e650e1b4792ffbea2e8a1a015fd55fb951a3a4)) + +## [2.0.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.0...v2.0.1) (2021-12-09) + +### Bug Fixes + +- **uint32ArrayFrom:** increment index & polyfill for Uint32Array ([#270](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/270)) ([a70d603](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/a70d603f3ba7600d3c1213f297d4160a4b3793bd)) + +# [2.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.2...v2.0.0) (2021-10-25) + +**Note:** Version bump only for package @aws-crypto/util + +## [1.2.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.1...v1.2.2) (2021-10-12) + +### Bug Fixes + +- **crc32c:** ie11 does not support Array.from ([#221](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/221)) ([5f49547](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/5f495472ab8988cf203e0f2a70a51f7e1fcd7e60)) + +## [1.2.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.0...v1.2.1) (2021-09-17) + +### Bug Fixes + +- better pollyfill check for Buffer ([#217](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/217)) 
([bc97da2](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/bc97da29aaf473943e4407c9a29cc30f74f15723)) + +# [1.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.1.1...v1.2.0) (2021-09-17) + +### Features + +- add @aws-crypto/util ([8f489cb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/8f489cbe4c0e134f826bac66f1bf5172597048b9)) diff --git a/node_modules/@aws-crypto/util/LICENSE b/node_modules/@aws-crypto/util/LICENSE new file mode 100644 index 00000000..980a15ac --- /dev/null +++ b/node_modules/@aws-crypto/util/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@aws-crypto/util/README.md b/node_modules/@aws-crypto/util/README.md new file mode 100644 index 00000000..4c1c8aab --- /dev/null +++ b/node_modules/@aws-crypto/util/README.md @@ -0,0 +1,16 @@ +# @aws-crypto/util + +Helper functions + +## Usage + +``` +import { convertToBuffer } from '@aws-crypto/util'; + +const data = "asdf"; +const utf8EncodedUint8Array = convertToBuffer(data); +``` + +## Test + +`npm test` diff --git a/node_modules/@aws-crypto/util/build/main/convertToBuffer.d.ts b/node_modules/@aws-crypto/util/build/main/convertToBuffer.d.ts new file mode 100644 index 00000000..697a5cde --- /dev/null +++ b/node_modules/@aws-crypto/util/build/main/convertToBuffer.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function convertToBuffer(data: SourceData): Uint8Array; diff --git a/node_modules/@aws-crypto/util/build/main/convertToBuffer.js b/node_modules/@aws-crypto/util/build/main/convertToBuffer.js new file mode 100644 index 00000000..85bc8af4 --- /dev/null +++ b/node_modules/@aws-crypto/util/build/main/convertToBuffer.js @@ -0,0 +1,24 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.convertToBuffer = void 0; +var util_utf8_1 = require("@smithy/util-utf8"); +// Quick polyfill +var fromUtf8 = typeof Buffer !== "undefined" && Buffer.from + ? 
function (input) { return Buffer.from(input, "utf8"); } + : util_utf8_1.fromUtf8; +function convertToBuffer(data) { + // Already a Uint8, do nothing + if (data instanceof Uint8Array) + return data; + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +} +exports.convertToBuffer = convertToBuffer; +//# sourceMappingURL=convertToBuffer.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/build/main/convertToBuffer.js.map b/node_modules/@aws-crypto/util/build/main/convertToBuffer.js.map new file mode 100644 index 00000000..916d7871 --- /dev/null +++ b/node_modules/@aws-crypto/util/build/main/convertToBuffer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"convertToBuffer.js","sourceRoot":"","sources":["../../src/convertToBuffer.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;AAGtC,+CAAgE;AAEhE,iBAAiB;AACjB,IAAM,QAAQ,GACZ,OAAO,MAAM,KAAK,WAAW,IAAI,MAAM,CAAC,IAAI;IAC1C,CAAC,CAAC,UAAC,KAAa,IAAK,OAAA,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,EAA1B,CAA0B;IAC/C,CAAC,CAAC,oBAAe,CAAC;AAEtB,SAAgB,eAAe,CAAC,IAAgB;IAC9C,8BAA8B;IAC9B,IAAI,IAAI,YAAY,UAAU;QAAE,OAAO,IAAI,CAAC;IAE5C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAC;KACvB;IAED,IAAI,WAAW,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;QAC5B,OAAO,IAAI,UAAU,CACnB,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,UAAU,EACf,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC,iBAAiB,CAC/C,CAAC;KACH;IAED,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC;AAC9B,CAAC;AAjBD,0CAiBC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/build/main/index.d.ts b/node_modules/@aws-crypto/util/build/main/index.d.ts new file mode 100644 index 00000000..783c73c4 --- /dev/null +++ b/node_modules/@aws-crypto/util/build/main/index.d.ts @@ -0,0 +1,4 @@ +export { convertToBuffer } from "./convertToBuffer"; +export { isEmptyData } from "./isEmptyData"; +export { numToUint8 } from "./numToUint8"; +export { uint32ArrayFrom } from './uint32ArrayFrom'; diff --git a/node_modules/@aws-crypto/util/build/main/index.js b/node_modules/@aws-crypto/util/build/main/index.js new file mode 100644 index 00000000..94e1ca90 --- /dev/null +++ b/node_modules/@aws-crypto/util/build/main/index.js @@ -0,0 +1,14 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.uint32ArrayFrom = exports.numToUint8 = exports.isEmptyData = exports.convertToBuffer = void 0; +var convertToBuffer_1 = require("./convertToBuffer"); +Object.defineProperty(exports, "convertToBuffer", { enumerable: true, get: function () { return convertToBuffer_1.convertToBuffer; } }); +var isEmptyData_1 = require("./isEmptyData"); +Object.defineProperty(exports, "isEmptyData", { enumerable: true, get: function () { return isEmptyData_1.isEmptyData; } }); +var numToUint8_1 = require("./numToUint8"); +Object.defineProperty(exports, "numToUint8", { enumerable: true, get: function () { return numToUint8_1.numToUint8; } }); +var uint32ArrayFrom_1 = require("./uint32ArrayFrom"); +Object.defineProperty(exports, "uint32ArrayFrom", { enumerable: true, get: function () { return uint32ArrayFrom_1.uint32ArrayFrom; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/build/main/index.js.map b/node_modules/@aws-crypto/util/build/main/index.js.map new file mode 100644 index 00000000..a1701724 --- /dev/null +++ b/node_modules/@aws-crypto/util/build/main/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;AAEtC,qDAAoD;AAA3C,kHAAA,eAAe,OAAA;AACxB,6CAA4C;AAAnC,0GAAA,WAAW,OAAA;AACpB,2CAA0C;AAAjC,wGAAA,UAAU,OAAA;AACnB,qDAAkD;AAA1C,kHAAA,eAAe,OAAA"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/build/main/isEmptyData.d.ts b/node_modules/@aws-crypto/util/build/main/isEmptyData.d.ts new file mode 100644 index 00000000..43ae4a7c --- /dev/null +++ b/node_modules/@aws-crypto/util/build/main/isEmptyData.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function isEmptyData(data: SourceData): boolean; diff --git a/node_modules/@aws-crypto/util/build/main/isEmptyData.js b/node_modules/@aws-crypto/util/build/main/isEmptyData.js new file mode 100644 index 00000000..6af1e89e --- /dev/null +++ b/node_modules/@aws-crypto/util/build/main/isEmptyData.js @@ -0,0 +1,13 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isEmptyData = void 0; +function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +exports.isEmptyData = isEmptyData; +//# sourceMappingURL=isEmptyData.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/build/main/isEmptyData.js.map b/node_modules/@aws-crypto/util/build/main/isEmptyData.js.map new file mode 100644 index 00000000..e1eaa02b --- /dev/null +++ b/node_modules/@aws-crypto/util/build/main/isEmptyData.js.map @@ -0,0 +1 @@ +{"version":3,"file":"isEmptyData.js","sourceRoot":"","sources":["../../src/isEmptyData.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;AAItC,SAAgB,WAAW,CAAC,IAAgB;IAC1C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC;KAC1B;IAED,OAAO,IAAI,CAAC,UAAU,KAAK,CAAC,CAAC;AAC/B,CAAC;AAND,kCAMC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/build/main/numToUint8.d.ts b/node_modules/@aws-crypto/util/build/main/numToUint8.d.ts new file mode 100644 index 00000000..5b702e8e --- /dev/null +++ b/node_modules/@aws-crypto/util/build/main/numToUint8.d.ts @@ -0,0 +1 @@ +export declare function numToUint8(num: number): Uint8Array; diff --git a/node_modules/@aws-crypto/util/build/main/numToUint8.js b/node_modules/@aws-crypto/util/build/main/numToUint8.js new file mode 100644 index 00000000..2f070e10 --- /dev/null +++ b/node_modules/@aws-crypto/util/build/main/numToUint8.js @@ -0,0 +1,15 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.numToUint8 = void 0; +function numToUint8(num) { + return new Uint8Array([ + (num & 0xff000000) >> 24, + (num & 0x00ff0000) >> 16, + (num & 0x0000ff00) >> 8, + num & 0x000000ff, + ]); +} +exports.numToUint8 = numToUint8; +//# sourceMappingURL=numToUint8.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/build/main/numToUint8.js.map b/node_modules/@aws-crypto/util/build/main/numToUint8.js.map new file mode 100644 index 00000000..fea3aca7 --- /dev/null +++ b/node_modules/@aws-crypto/util/build/main/numToUint8.js.map @@ -0,0 +1 @@ +{"version":3,"file":"numToUint8.js","sourceRoot":"","sources":["../../src/numToUint8.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;AAEtC,SAAgB,UAAU,CAAC,GAAW;IACpC,OAAO,IAAI,UAAU,CAAC;QACpB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,EAAE;QACxB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,EAAE;QACxB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,CAAC;QACvB,GAAG,GAAG,UAAU;KACjB,CAAC,CAAC;AACL,CAAC;AAPD,gCAOC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.d.ts b/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.d.ts new file mode 100644 index 00000000..fea66075 --- /dev/null +++ b/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.d.ts @@ -0,0 +1 @@ +export declare function uint32ArrayFrom(a_lookUpTable: Array<number>): Uint32Array; diff --git a/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js b/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js new file mode 100644 index 00000000..226cdc3d --- /dev/null +++ b/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js @@ -0,0 +1,20 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.uint32ArrayFrom = void 0; +// IE 11 does not support Array.from, so we do it manually +function uint32ArrayFrom(a_lookUpTable) { + if (!Uint32Array.from) { + var return_array = new Uint32Array(a_lookUpTable.length); + var a_index = 0; + while (a_index < a_lookUpTable.length) { + return_array[a_index] = a_lookUpTable[a_index]; + a_index += 1; + } + return return_array; + } + return Uint32Array.from(a_lookUpTable); +} +exports.uint32ArrayFrom = uint32ArrayFrom; +//# sourceMappingURL=uint32ArrayFrom.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js.map b/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js.map new file mode 100644 index 00000000..fe016e14 --- /dev/null +++ b/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js.map @@ -0,0 +1 @@ +{"version":3,"file":"uint32ArrayFrom.js","sourceRoot":"","sources":["../../src/uint32ArrayFrom.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;AAEtC,0DAA0D;AAC1D,SAAgB,eAAe,CAAC,aAA4B;IAC1D,IAAI,CAAC,WAAW,CAAC,IAAI,EAAE;QACrB,IAAM,YAAY,GAAG,IAAI,WAAW,CAAC,aAAa,CAAC,MAAM,CAAC,CAAA;QAC1D,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,OAAO,OAAO,GAAG,aAAa,CAAC,MAAM,EAAE;YACrC,YAAY,CAAC,OAAO,CAAC,GAAG,aAAa,CAAC,OAAO,CAAC,CAAA;YAC9C,OAAO,IAAI,CAAC,CAAA;SACb;QACD,OAAO,YAAY,CAAA;KACpB;IACD,OAAO,WAAW,CAAC,IAAI,CAAC,aAAa,CAAC,CAAA;AACxC,CAAC;AAXD,0CAWC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/build/module/convertToBuffer.d.ts b/node_modules/@aws-crypto/util/build/module/convertToBuffer.d.ts new file mode 100644 index 00000000..697a5cde --- /dev/null +++ b/node_modules/@aws-crypto/util/build/module/convertToBuffer.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function convertToBuffer(data: SourceData): Uint8Array; diff --git a/node_modules/@aws-crypto/util/build/module/convertToBuffer.js b/node_modules/@aws-crypto/util/build/module/convertToBuffer.js new file mode 100644 index 00000000..c700d1e2 --- /dev/null +++ b/node_modules/@aws-crypto/util/build/module/convertToBuffer.js @@ -0,0 +1,20 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { fromUtf8 as fromUtf8Browser } from "@smithy/util-utf8"; +// Quick polyfill +var fromUtf8 = typeof Buffer !== "undefined" && Buffer.from + ? 
function (input) { return Buffer.from(input, "utf8"); } + : fromUtf8Browser; +export function convertToBuffer(data) { + // Already a Uint8, do nothing + if (data instanceof Uint8Array) + return data; + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +} +//# sourceMappingURL=convertToBuffer.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/build/module/convertToBuffer.js.map b/node_modules/@aws-crypto/util/build/module/convertToBuffer.js.map new file mode 100644 index 00000000..92694a4e --- /dev/null +++ b/node_modules/@aws-crypto/util/build/module/convertToBuffer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"convertToBuffer.js","sourceRoot":"","sources":["../../src/convertToBuffer.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;AAGtC,OAAO,EAAE,QAAQ,IAAI,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAEhE,iBAAiB;AACjB,IAAM,QAAQ,GACZ,OAAO,MAAM,KAAK,WAAW,IAAI,MAAM,CAAC,IAAI;IAC1C,CAAC,CAAC,UAAC,KAAa,IAAK,OAAA,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,EAA1B,CAA0B;IAC/C,CAAC,CAAC,eAAe,CAAC;AAEtB,MAAM,UAAU,eAAe,CAAC,IAAgB;IAC9C,8BAA8B;IAC9B,IAAI,IAAI,YAAY,UAAU;QAAE,OAAO,IAAI,CAAC;IAE5C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAC;KACvB;IAED,IAAI,WAAW,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;QAC5B,OAAO,IAAI,UAAU,CACnB,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,UAAU,EACf,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC,iBAAiB,CAC/C,CAAC;KACH;IAED,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC;AAC9B,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/build/module/index.d.ts b/node_modules/@aws-crypto/util/build/module/index.d.ts new file mode 100644 index 00000000..783c73c4 --- /dev/null +++ b/node_modules/@aws-crypto/util/build/module/index.d.ts @@ -0,0 +1,4 @@ +export { convertToBuffer } from "./convertToBuffer"; +export { isEmptyData } from "./isEmptyData"; +export { numToUint8 } from "./numToUint8"; +export { uint32ArrayFrom } from './uint32ArrayFrom'; diff --git a/node_modules/@aws-crypto/util/build/module/index.js b/node_modules/@aws-crypto/util/build/module/index.js new file mode 100644 index 00000000..077e8b67 --- /dev/null +++ b/node_modules/@aws-crypto/util/build/module/index.js @@ -0,0 +1,7 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +export { convertToBuffer } from "./convertToBuffer"; +export { isEmptyData } from "./isEmptyData"; +export { numToUint8 } from "./numToUint8"; +export { uint32ArrayFrom } from './uint32ArrayFrom'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/build/module/index.js.map b/node_modules/@aws-crypto/util/build/module/index.js.map new file mode 100644 index 00000000..4ddb12d2 --- /dev/null +++ b/node_modules/@aws-crypto/util/build/module/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;AAEtC,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AACpD,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAC,eAAe,EAAC,MAAM,mBAAmB,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/build/module/isEmptyData.d.ts b/node_modules/@aws-crypto/util/build/module/isEmptyData.d.ts new file mode 100644 index 00000000..43ae4a7c --- /dev/null +++ b/node_modules/@aws-crypto/util/build/module/isEmptyData.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function isEmptyData(data: SourceData): boolean; diff --git a/node_modules/@aws-crypto/util/build/module/isEmptyData.js b/node_modules/@aws-crypto/util/build/module/isEmptyData.js new file mode 100644 index 00000000..13841c75 --- /dev/null +++ b/node_modules/@aws-crypto/util/build/module/isEmptyData.js @@ -0,0 +1,9 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +export function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +//# sourceMappingURL=isEmptyData.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/build/module/isEmptyData.js.map b/node_modules/@aws-crypto/util/build/module/isEmptyData.js.map new file mode 100644 index 00000000..fe0fa025 --- /dev/null +++ b/node_modules/@aws-crypto/util/build/module/isEmptyData.js.map @@ -0,0 +1 @@ +{"version":3,"file":"isEmptyData.js","sourceRoot":"","sources":["../../src/isEmptyData.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;AAItC,MAAM,UAAU,WAAW,CAAC,IAAgB;IAC1C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC;KAC1B;IAED,OAAO,IAAI,CAAC,UAAU,KAAK,CAAC,CAAC;AAC/B,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/build/module/numToUint8.d.ts b/node_modules/@aws-crypto/util/build/module/numToUint8.d.ts new file mode 100644 index 00000000..5b702e8e --- /dev/null +++ b/node_modules/@aws-crypto/util/build/module/numToUint8.d.ts @@ -0,0 +1 @@ +export declare function numToUint8(num: number): Uint8Array; diff --git a/node_modules/@aws-crypto/util/build/module/numToUint8.js b/node_modules/@aws-crypto/util/build/module/numToUint8.js new file mode 100644 index 00000000..0ca6e47d --- /dev/null +++ b/node_modules/@aws-crypto/util/build/module/numToUint8.js @@ -0,0 +1,11 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +export function numToUint8(num) { + return new Uint8Array([ + (num & 0xff000000) >> 24, + (num & 0x00ff0000) >> 16, + (num & 0x0000ff00) >> 8, + num & 0x000000ff, + ]); +} +//# sourceMappingURL=numToUint8.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/build/module/numToUint8.js.map b/node_modules/@aws-crypto/util/build/module/numToUint8.js.map new file mode 100644 index 00000000..ac53e334 --- /dev/null +++ b/node_modules/@aws-crypto/util/build/module/numToUint8.js.map @@ -0,0 +1 @@ +{"version":3,"file":"numToUint8.js","sourceRoot":"","sources":["../../src/numToUint8.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;AAEtC,MAAM,UAAU,UAAU,CAAC,GAAW;IACpC,OAAO,IAAI,UAAU,CAAC;QACpB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,EAAE;QACxB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,EAAE;QACxB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,CAAC;QACvB,GAAG,GAAG,UAAU;KACjB,CAAC,CAAC;AACL,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.d.ts b/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.d.ts new file mode 100644 index 00000000..fea66075 --- /dev/null +++ b/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.d.ts @@ -0,0 +1 @@ +export declare function uint32ArrayFrom(a_lookUpTable: Array<number>): Uint32Array; diff --git a/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js b/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js new file mode 100644 index 00000000..c69435e9 --- /dev/null +++ b/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js @@ -0,0 +1,16 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// IE 11 does not support Array.from, so we do it manually +export function uint32ArrayFrom(a_lookUpTable) { + if (!Uint32Array.from) { + var return_array = new Uint32Array(a_lookUpTable.length); + var a_index = 0; + while (a_index < a_lookUpTable.length) { + return_array[a_index] = a_lookUpTable[a_index]; + a_index += 1; + } + return return_array; + } + return Uint32Array.from(a_lookUpTable); +} +//# sourceMappingURL=uint32ArrayFrom.js.map \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js.map b/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js.map new file mode 100644 index 00000000..7384b0a4 --- /dev/null +++ b/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js.map @@ -0,0 +1 @@ +{"version":3,"file":"uint32ArrayFrom.js","sourceRoot":"","sources":["../../src/uint32ArrayFrom.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;AAEtC,0DAA0D;AAC1D,MAAM,UAAU,eAAe,CAAC,aAA4B;IAC1D,IAAI,CAAC,WAAW,CAAC,IAAI,EAAE;QACrB,IAAM,YAAY,GAAG,IAAI,WAAW,CAAC,aAAa,CAAC,MAAM,CAAC,CAAA;QAC1D,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,OAAO,OAAO,GAAG,aAAa,CAAC,MAAM,EAAE;YACrC,YAAY,CAAC,OAAO,CAAC,GAAG,aAAa,CAAC,OAAO,CAAC,CAAA;YAC9C,OAAO,IAAI,CAAC,CAAA;SACb;QACD,OAAO,YAAY,CAAA;KACpB;IACD,OAAO,WAAW,CAAC,IAAI,CAAC,aAAa,CAAC,CAAA;AACxC,CAAC"} \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/LICENSE b/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/README.md b/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/README.md new file mode 100644 index 00000000..31853f24 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/README.md @@ -0,0 +1,10 @@ +# @smithy/is-array-buffer + +[![NPM version](https://img.shields.io/npm/v/@smithy/is-array-buffer/latest.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/is-array-buffer.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-cjs/index.js b/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-cjs/index.js new file mode 100644 index 00000000..5d792e71 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-cjs/index.js @@ -0,0 +1,32 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isArrayBuffer: () => isArrayBuffer +}); +module.exports = __toCommonJS(src_exports); +var isArrayBuffer = /* @__PURE__ */ __name((arg) => typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer || Object.prototype.toString.call(arg) === "[object ArrayBuffer]", "isArrayBuffer"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + isArrayBuffer +}); + diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-es/index.js b/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-es/index.js new file mode 100644 index 00000000..8096cca3 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-es/index.js @@ -0,0 +1,2 @@ +export const isArrayBuffer = (arg) => (typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer) || + Object.prototype.toString.call(arg) === "[object ArrayBuffer]"; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts b/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts new file mode 100644 index 00000000..64f452e7 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts 
b/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..ca8fd6bd --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/package.json b/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/package.json new file mode 100644 index 00000000..ed8affc7 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/is-array-buffer", + "version": "2.2.0", + "description": "Provides a function for detecting if an argument is an ArrayBuffer", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline is-array-buffer", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/is-array-buffer", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/is-array-buffer" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/LICENSE b/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
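
The @smithy/util-buffer-from sources added below are thin guards around Buffer.from: fromArrayBuffer refuses anything that is not a real ArrayBuffer (via the is-array-buffer predicate above), so the offset/length overload can wrap memory without silently copying a view, and fromString rejects non-string input. A short usage sketch, assuming a Node runtime where Buffer is available:

import { fromArrayBuffer, fromString } from "@smithy/util-buffer-from";

const ab = new ArrayBuffer(4);
const windowed = fromArrayBuffer(ab, 1, 2); // Buffer sharing ab's memory, bytes 1..2
const utf8 = fromString("hello");           // encoding defaults to utf8
const hex = fromString("deadbeef", "hex");  // any StringEncoding from the .d.ts works
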
\ No newline at end of file diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/README.md b/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/README.md new file mode 100644 index 00000000..c896b04a --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/README.md @@ -0,0 +1,10 @@ +# @smithy/util-buffer-from + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-buffer-from/latest.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-buffer-from.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-cjs/index.js b/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-cjs/index.js new file mode 100644 index 00000000..c6738d94 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-cjs/index.js @@ -0,0 +1,47 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromArrayBuffer: () => fromArrayBuffer, + fromString: () => fromString +}); +module.exports = __toCommonJS(src_exports); +var import_is_array_buffer = require("@smithy/is-array-buffer"); +var import_buffer = require("buffer"); +var fromArrayBuffer = /* @__PURE__ */ __name((input, offset = 0, length = input.byteLength - offset) => { + if (!(0, import_is_array_buffer.isArrayBuffer)(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); + } + return import_buffer.Buffer.from(input, offset, length); +}, "fromArrayBuffer"); +var fromString = /* @__PURE__ */ __name((input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? 
import_buffer.Buffer.from(input, encoding) : import_buffer.Buffer.from(input); +}, "fromString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromArrayBuffer, + fromString +}); + diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-es/index.js b/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-es/index.js new file mode 100644 index 00000000..718f8315 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-es/index.js @@ -0,0 +1,14 @@ +import { isArrayBuffer } from "@smithy/is-array-buffer"; +import { Buffer } from "buffer"; +export const fromArrayBuffer = (input, offset = 0, length = input.byteLength - offset) => { + if (!isArrayBuffer(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); + } + return Buffer.from(input, offset, length); +}; +export const fromString = (input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? Buffer.from(input, encoding) : Buffer.from(input); +}; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts b/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts new file mode 100644 index 00000000..a523134a --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts b/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..f9173f74 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/package.json b/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/package.json new file mode 100644 index 00000000..a12e51cc --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/util-buffer-from", + "version": "2.2.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-buffer-from", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types 
dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^14.14.31", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-buffer-from", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-buffer-from" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/LICENSE b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
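
The @smithy/util-utf8 hunks below stitch the two packages above together: fromUtf8 encodes through Buffer in Node (the .browser variant uses TextEncoder instead), toUtf8 decodes a Uint8Array back to a string, and toUint8Array normalizes the string | ArrayBuffer | ArrayBufferView union that the SDK passes around. A round-trip sketch of the vendored exports:

import { fromUtf8, toUtf8, toUint8Array } from "@smithy/util-utf8";

const bytes = fromUtf8("héllo");  // Uint8Array holding the UTF-8 encoding
toUtf8(bytes);                    // "héllo" again; plain strings pass through
toUint8Array("abc");              // delegates to fromUtf8
toUint8Array(new DataView(new ArrayBuffer(4))); // re-wraps the view's bytes, no copy
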
\ No newline at end of file diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/README.md b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/README.md new file mode 100644 index 00000000..fc5db6d8 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/README.md @@ -0,0 +1,4 @@ +# @smithy/util-utf8 + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-utf8/latest.svg)](https://www.npmjs.com/package/@smithy/util-utf8) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-utf8.svg)](https://www.npmjs.com/package/@smithy/util-utf8) diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/index.js b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/index.js new file mode 100644 index 00000000..0b22680a --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/index.js @@ -0,0 +1,65 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromUtf8: () => fromUtf8, + toUint8Array: () => toUint8Array, + toUtf8: () => toUtf8 +}); +module.exports = __toCommonJS(src_exports); + +// src/fromUtf8.ts +var import_util_buffer_from = require("@smithy/util-buffer-from"); +var fromUtf8 = /* @__PURE__ */ __name((input) => { + const buf = (0, import_util_buffer_from.fromString)(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}, "fromUtf8"); + +// src/toUint8Array.ts +var toUint8Array = /* @__PURE__ */ __name((data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}, "toUint8Array"); + +// src/toUtf8.ts + 
+var toUtf8 = /* @__PURE__ */ __name((input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return (0, import_util_buffer_from.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}, "toUtf8"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromUtf8, + toUint8Array, + toUtf8 +}); + diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js new file mode 100644 index 00000000..73441900 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js @@ -0,0 +1 @@ +export const fromUtf8 = (input) => new TextEncoder().encode(input); diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js new file mode 100644 index 00000000..6dc438b3 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js @@ -0,0 +1,5 @@ +import { fromString } from "@smithy/util-buffer-from"; +export const fromUtf8 = (input) => { + const buf = fromString(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/index.js b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/index.js new file mode 100644 index 00000000..00ba4657 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js new file mode 100644 index 00000000..2cd36f75 --- /dev/null +++ 
b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js @@ -0,0 +1,10 @@ +import { fromUtf8 } from "./fromUtf8"; +export const toUint8Array = (data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js new file mode 100644 index 00000000..c2921278 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js @@ -0,0 +1,9 @@ +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return new TextDecoder("utf-8").decode(input); +}; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.js b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.js new file mode 100644 index 00000000..7be8745a --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.js @@ -0,0 +1,10 @@ +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return fromArrayBuffer(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts new file mode 100644 index 00000000..dd919817 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts new file mode 100644 index 00000000..dd919817 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/index.d.ts b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/index.d.ts new file mode 100644 index 00000000..00ba4657 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts new file mode 100644 index 00000000..11b6342e --- /dev/null +++ 
b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts new file mode 100644 index 00000000..8494acd8 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts new file mode 100644 index 00000000..8494acd8 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts new file mode 100644 index 00000000..39f3d6dd --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts new file mode 100644 index 00000000..39f3d6dd --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..ef9761d7 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts new file mode 100644 index 00000000..562fe101 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts new file mode 100644 index 00000000..33511ad7 --- /dev/null +++ 
b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts new file mode 100644 index 00000000..33511ad7 --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/package.json b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/package.json new file mode 100644 index 00000000..78bfb4df --- /dev/null +++ b/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-utf8", + "version": "2.3.0", + "description": "A UTF-8 string <-> UInt8Array converter", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-utf8", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/fromUtf8": "./dist-es/fromUtf8.browser", + "./dist-es/toUtf8": "./dist-es/toUtf8.browser" + }, + "react-native": {}, + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-utf8", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-utf8" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@aws-crypto/util/package.json b/node_modules/@aws-crypto/util/package.json new file mode 100644 index 00000000..431107a1 --- /dev/null +++ b/node_modules/@aws-crypto/util/package.json @@ -0,0 +1,32 @@ +{ + "name": 
"@aws-crypto/util", + "version": "5.2.0", + "scripts": { + "prepublishOnly": "tsc -p tsconfig.json && tsc -p tsconfig.module.json", + "pretest": "tsc -p tsconfig.test.json", + "test": "mocha --require ts-node/register test/**/*test.ts" + }, + "main": "./build/main/index.js", + "module": "./build/module/index.js", + "types": "./build/main/index.d.ts", + "repository": { + "type": "git", + "url": "git@github.com:aws/aws-sdk-js-crypto-helpers.git" + }, + "author": { + "name": "AWS Crypto Tools Team", + "email": "aws-cryptools@amazon.com", + "url": "https://docs.aws.amazon.com/aws-crypto-tools/index.html?id=docs_gateway#lang/en_us" + }, + "homepage": "https://github.com/aws/aws-sdk-js-crypto-helpers/tree/master/packages/util", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.222.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + }, + "publishConfig": { + "access": "public" + }, + "gitHead": "c11b171b35ec5c093364f0e0d8dc4ab1af68e748" +} diff --git a/node_modules/@aws-crypto/util/src/convertToBuffer.ts b/node_modules/@aws-crypto/util/src/convertToBuffer.ts new file mode 100644 index 00000000..f9f163e7 --- /dev/null +++ b/node_modules/@aws-crypto/util/src/convertToBuffer.ts @@ -0,0 +1,30 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { SourceData } from "@aws-sdk/types"; +import { fromUtf8 as fromUtf8Browser } from "@smithy/util-utf8"; + +// Quick polyfill +const fromUtf8 = + typeof Buffer !== "undefined" && Buffer.from + ? (input: string) => Buffer.from(input, "utf8") + : fromUtf8Browser; + +export function convertToBuffer(data: SourceData): Uint8Array { + // Already a Uint8, do nothing + if (data instanceof Uint8Array) return data; + + if (typeof data === "string") { + return fromUtf8(data); + } + + if (ArrayBuffer.isView(data)) { + return new Uint8Array( + data.buffer, + data.byteOffset, + data.byteLength / Uint8Array.BYTES_PER_ELEMENT + ); + } + + return new Uint8Array(data); +} diff --git a/node_modules/@aws-crypto/util/src/index.ts b/node_modules/@aws-crypto/util/src/index.ts new file mode 100644 index 00000000..2f6c62a7 --- /dev/null +++ b/node_modules/@aws-crypto/util/src/index.ts @@ -0,0 +1,7 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +export { convertToBuffer } from "./convertToBuffer"; +export { isEmptyData } from "./isEmptyData"; +export { numToUint8 } from "./numToUint8"; +export {uint32ArrayFrom} from './uint32ArrayFrom'; diff --git a/node_modules/@aws-crypto/util/src/isEmptyData.ts b/node_modules/@aws-crypto/util/src/isEmptyData.ts new file mode 100644 index 00000000..089764de --- /dev/null +++ b/node_modules/@aws-crypto/util/src/isEmptyData.ts @@ -0,0 +1,12 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { SourceData } from "@aws-sdk/types"; + +export function isEmptyData(data: SourceData): boolean { + if (typeof data === "string") { + return data.length === 0; + } + + return data.byteLength === 0; +} diff --git a/node_modules/@aws-crypto/util/src/numToUint8.ts b/node_modules/@aws-crypto/util/src/numToUint8.ts new file mode 100644 index 00000000..2f40aceb --- /dev/null +++ b/node_modules/@aws-crypto/util/src/numToUint8.ts @@ -0,0 +1,11 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0
+
+export function numToUint8(num: number) {
+  return new Uint8Array([
+    (num & 0xff000000) >> 24,
+    (num & 0x00ff0000) >> 16,
+    (num & 0x0000ff00) >> 8,
+    num & 0x000000ff,
+  ]);
+}
diff --git a/node_modules/@aws-crypto/util/src/uint32ArrayFrom.ts b/node_modules/@aws-crypto/util/src/uint32ArrayFrom.ts
new file mode 100644
index 00000000..b9b6d887
--- /dev/null
+++ b/node_modules/@aws-crypto/util/src/uint32ArrayFrom.ts
@@ -0,0 +1,16 @@
+// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// IE 11 does not support Array.from, so we do it manually
+export function uint32ArrayFrom(a_lookUpTable: Array<number>): Uint32Array {
+  if (!Uint32Array.from) {
+    const return_array = new Uint32Array(a_lookUpTable.length)
+    let a_index = 0
+    while (a_index < a_lookUpTable.length) {
+      return_array[a_index] = a_lookUpTable[a_index]
+      a_index += 1
+    }
+    return return_array
+  }
+  return Uint32Array.from(a_lookUpTable)
+}
diff --git a/node_modules/@aws-crypto/util/tsconfig.json b/node_modules/@aws-crypto/util/tsconfig.json
new file mode 100644
index 00000000..2b996d08
--- /dev/null
+++ b/node_modules/@aws-crypto/util/tsconfig.json
@@ -0,0 +1,9 @@
+{
+  "extends": "../tsconfig.json",
+  "compilerOptions": {
+    "rootDir": "./src",
+    "outDir": "./build/main",
+  },
+  "include": ["src/**/*.ts"],
+  "exclude": ["node_modules/**"]
+}
diff --git a/node_modules/@aws-crypto/util/tsconfig.module.json b/node_modules/@aws-crypto/util/tsconfig.module.json
new file mode 100644
index 00000000..7d0cfddc
--- /dev/null
+++ b/node_modules/@aws-crypto/util/tsconfig.module.json
@@ -0,0 +1,7 @@
+{
+  "extends": "./tsconfig",
+  "compilerOptions": {
+    "outDir": "build/module",
+    "module": "esnext",
+  }
+}
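
Taken together, the @aws-crypto/util sources above are what the crypto helpers call at runtime: convertToBuffer normalizes any SourceData (string, ArrayBuffer, or view) into a Uint8Array, preferring Buffer.from where it exists; isEmptyData short-circuits zero-length input; numToUint8 packs a 32-bit number big-endian; and uint32ArrayFrom builds lookup tables even on runtimes without Uint32Array.from. A sketch of the exports in use:

import { convertToBuffer, isEmptyData, numToUint8, uint32ArrayFrom } from "@aws-crypto/util";

convertToBuffer("abc");         // Uint8Array [ 97, 98, 99 ]
isEmptyData(new Uint8Array(0)); // true
numToUint8(0x01020304);         // Uint8Array [ 1, 2, 3, 4 ] (big-endian)
uint32ArrayFrom([1, 2, 3]);     // Uint32Array [ 1, 2, 3 ]

diff --git a/node_modules/@aws-sdk/client-s3/LICENSE b/node_modules/@aws-sdk/client-s3/LICENSE
new file mode 100644
index 00000000..dd65ae06
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/LICENSE
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.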
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@aws-sdk/client-s3/README.md b/node_modules/@aws-sdk/client-s3/README.md new file mode 100644 index 00000000..4f123cb3 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/README.md @@ -0,0 +1,989 @@ + + +# @aws-sdk/client-s3 + +## Description + +AWS SDK for JavaScript S3 Client for Node.js, Browser and React Native. + +

+
+## Installing
+
+To install this package, add or install `@aws-sdk/client-s3`
+using your favorite package manager:
+
+- `npm install @aws-sdk/client-s3`
+- `yarn add @aws-sdk/client-s3`
+- `pnpm add @aws-sdk/client-s3`
+
+## Getting Started
+
+### Import
+
+The AWS SDK is modularized by clients and commands.
+To send a request, you only need to import the `S3Client` and
+the commands you need, for example `ListBucketsCommand`:
+
+```js
+// ES5 example
+const { S3Client, ListBucketsCommand } = require("@aws-sdk/client-s3");
+```
+
+```ts
+// ES6+ example
+import { S3Client, ListBucketsCommand } from "@aws-sdk/client-s3";
+```
+
+### Usage
+
+To send a request, you:
+
+- Initiate the client with configuration (e.g. credentials, region).
+- Initiate the command with input parameters.
+- Call the `send` operation on the client with the command object as input.
+- If you are using a custom HTTP handler, you may call `destroy()` to close open connections.
+
+```js
+// A client can be shared by different commands.
+const client = new S3Client({ region: "REGION" });
+
+const params = {
+  /** input parameters */
+};
+const command = new ListBucketsCommand(params);
+```
+
+#### Async/await
+
+We recommend using the [await](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/await)
+operator to wait for the promise returned by the send operation, as follows:
+
+```js
+// async/await.
+try {
+  const data = await client.send(command);
+  // process data.
+} catch (error) {
+  // error handling.
+} finally {
+  // finally.
+}
+```
+
+Async/await is clean, concise, intuitive, easy to debug, and has better error handling
+than Promise chains or callbacks.
+
+#### Promises
+
+You can also use [Promise chaining](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Using_promises#chaining)
+to execute the send operation.
+
+```js
+client.send(command).then(
+  (data) => {
+    // process data.
+  },
+  (error) => {
+    // error handling.
+  }
+);
+```
+
+Promises can also be chained with `.catch()` and `.finally()` as follows:
+
+```js
+client
+  .send(command)
+  .then((data) => {
+    // process data.
+  })
+  .catch((error) => {
+    // error handling.
+  })
+  .finally(() => {
+    // finally.
+  });
+```
+
+#### Callbacks
+
+We do not recommend using callbacks because of [callback hell](http://callbackhell.com/),
+but they are supported by the send operation.
+
+```js
+// callbacks.
+client.send(command, (err, data) => {
+  // process err and data.
+});
+```
+
+#### v2 compatible style
+
+The client can also send requests using the v2-compatible style.
+However, it results in a bigger bundle size and may be dropped in the next major version. More details are in the blog post
+on [modular packages in AWS SDK for JavaScript](https://aws.amazon.com/blogs/developer/modular-packages-in-aws-sdk-for-javascript/).
+
+```ts
+import * as AWS from "@aws-sdk/client-s3";
+const client = new AWS.S3({ region: "REGION" });
+
+// async/await.
+try {
+  const data = await client.listBuckets(params);
+  // process data.
+} catch (error) {
+  // error handling.
+}
+
+// Promises.
+client
+  .listBuckets(params)
+  .then((data) => {
+    // process data.
+  })
+  .catch((error) => {
+    // error handling.
+  });
+
+// callbacks.
+client.listBuckets(params, (err, data) => {
+  // process err and data.
+});
+```
+
+### Troubleshooting
+
+When the service returns an exception, the error will include the exception information
+as well as response metadata (e.g. the request ID).
+
+```js
+try {
+  const data = await client.send(command);
+  // process data.
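+  // Illustrative note (not part of the upstream README): for a
+  // ListBucketsCommand, `data.Buckets` holds the returned bucket list.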
+} catch (error) {
+  const { requestId, cfId, extendedRequestId } = error.$metadata;
+  console.log({ requestId, cfId, extendedRequestId });
+  /**
+   * The keys within exceptions are also parsed.
+   * You can access them by specifying exception names:
+   * if (error.name === 'SomeServiceException') {
+   *     const value = error.specialKeyInException;
+   * }
+   */
+}
+```
+
+## Getting Help
+
+Please use these community resources for getting help.
+We use GitHub issues for tracking bugs and feature requests, but have limited bandwidth to address them.
+
+- Visit the [Developer Guide](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/welcome.html)
+  or the [API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/index.html).
+- Check out the blog posts tagged with [`aws-sdk-js`](https://aws.amazon.com/blogs/developer/tag/aws-sdk-js/)
+  on the AWS Developer Blog.
+- Ask a question on [StackOverflow](https://stackoverflow.com/questions/tagged/aws-sdk-js) and tag it with `aws-sdk-js`.
+- Join the AWS JavaScript community on [gitter](https://gitter.im/aws/aws-sdk-js-v3).
+- If it turns out that you may have found a bug, please [open an issue](https://github.com/aws/aws-sdk-js-v3/issues/new/choose).
+
+To test your universal JavaScript code in Node.js, browser, and react-native environments,
+visit our [code samples repo](https://github.com/aws-samples/aws-sdk-js-tests).
+
+## Contributing
+
+This client code is generated automatically. Any modifications will be overwritten the next time the `@aws-sdk/client-s3` package is updated.
+To contribute to the client, see our [generate clients scripts](https://github.com/aws/aws-sdk-js-v3/tree/main/scripts/generate-clients).
+
+## License
+
+This SDK is distributed under the
+[Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0);
+see LICENSE for more information.
+
+## Client Commands (Operations List)
+
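+All of the operations below follow the same pattern shown above: construct the command
+with its input and pass it to `client.send()`. As a quick illustration (a minimal sketch,
+not part of the upstream README; the bucket name and key are placeholders):
+
+```js
+// Hypothetical example: read an object's body as a string.
+import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3";
+
+const client = new S3Client({ region: "us-east-1" });
+const { Body } = await client.send(
+  new GetObjectCommand({ Bucket: "example-bucket", Key: "example.txt" })
+);
+console.log(await Body.transformToString());
+```
+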
+ +AbortMultipartUpload + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/AbortMultipartUploadCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/AbortMultipartUploadCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/AbortMultipartUploadCommandOutput/) + +
+
+ +CompleteMultipartUpload + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/CompleteMultipartUploadCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/CompleteMultipartUploadCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/CompleteMultipartUploadCommandOutput/) + +
+
+ +CopyObject + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/CopyObjectCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/CopyObjectCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/CopyObjectCommandOutput/) + +
+
+ +CreateBucket + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/CreateBucketCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/CreateBucketCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/CreateBucketCommandOutput/) + +
+
+ +CreateBucketMetadataTableConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/CreateBucketMetadataTableConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/CreateBucketMetadataTableConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/CreateBucketMetadataTableConfigurationCommandOutput/) + +
+
+ +CreateMultipartUpload + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/CreateMultipartUploadCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/CreateMultipartUploadCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/CreateMultipartUploadCommandOutput/) + +
+
+ +CreateSession + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/CreateSessionCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/CreateSessionCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/CreateSessionCommandOutput/) + +
+
+ +DeleteBucket + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/DeleteBucketCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketCommandOutput/) + +
+
+ +DeleteBucketAnalyticsConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/DeleteBucketAnalyticsConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketAnalyticsConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketAnalyticsConfigurationCommandOutput/) + +
+
+ +DeleteBucketCors + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/DeleteBucketCorsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketCorsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketCorsCommandOutput/) + +
+
+ +DeleteBucketEncryption + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/DeleteBucketEncryptionCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketEncryptionCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketEncryptionCommandOutput/) + +
+
+ +DeleteBucketIntelligentTieringConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/DeleteBucketIntelligentTieringConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketIntelligentTieringConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketIntelligentTieringConfigurationCommandOutput/) + +
+
+ +DeleteBucketInventoryConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/DeleteBucketInventoryConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketInventoryConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketInventoryConfigurationCommandOutput/) + +
+
+ +DeleteBucketLifecycle + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/DeleteBucketLifecycleCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketLifecycleCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketLifecycleCommandOutput/) + +
+
+ +DeleteBucketMetadataTableConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/DeleteBucketMetadataTableConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketMetadataTableConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketMetadataTableConfigurationCommandOutput/) + +
+
+ +DeleteBucketMetricsConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/DeleteBucketMetricsConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketMetricsConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketMetricsConfigurationCommandOutput/) + +
+
+ +DeleteBucketOwnershipControls + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/DeleteBucketOwnershipControlsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketOwnershipControlsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketOwnershipControlsCommandOutput/) + +
+
+ +DeleteBucketPolicy + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/DeleteBucketPolicyCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketPolicyCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketPolicyCommandOutput/) + +
+
+ +DeleteBucketReplication + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/DeleteBucketReplicationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketReplicationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketReplicationCommandOutput/) + +
+
+ +DeleteBucketTagging + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/DeleteBucketTaggingCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketTaggingCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketTaggingCommandOutput/) + +
+
+ +DeleteBucketWebsite + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/DeleteBucketWebsiteCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketWebsiteCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteBucketWebsiteCommandOutput/) + +
+
+ +DeleteObject + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/DeleteObjectCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteObjectCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteObjectCommandOutput/) + +
+
+ +DeleteObjects + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/DeleteObjectsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteObjectsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteObjectsCommandOutput/) + +
+
+ +DeleteObjectTagging + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/DeleteObjectTaggingCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteObjectTaggingCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeleteObjectTaggingCommandOutput/) + +
+
+ +DeletePublicAccessBlock + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/DeletePublicAccessBlockCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeletePublicAccessBlockCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/DeletePublicAccessBlockCommandOutput/) + +
+
+ +GetBucketAccelerateConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketAccelerateConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketAccelerateConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketAccelerateConfigurationCommandOutput/) + +
+
+ +GetBucketAcl + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketAclCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketAclCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketAclCommandOutput/) + +
+
+ +GetBucketAnalyticsConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketAnalyticsConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketAnalyticsConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketAnalyticsConfigurationCommandOutput/) + +
+
+ +GetBucketCors + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketCorsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketCorsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketCorsCommandOutput/) + +
+
+ +GetBucketEncryption + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketEncryptionCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketEncryptionCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketEncryptionCommandOutput/) + +
+
+ +GetBucketIntelligentTieringConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketIntelligentTieringConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketIntelligentTieringConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketIntelligentTieringConfigurationCommandOutput/) + +
+
+ +GetBucketInventoryConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketInventoryConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketInventoryConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketInventoryConfigurationCommandOutput/) + +
+
+ +GetBucketLifecycleConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketLifecycleConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketLifecycleConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketLifecycleConfigurationCommandOutput/) + +
+
+ +GetBucketLocation + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketLocationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketLocationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketLocationCommandOutput/) + +
+
+ +GetBucketLogging + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketLoggingCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketLoggingCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketLoggingCommandOutput/) + +
+
+ +GetBucketMetadataTableConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketMetadataTableConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketMetadataTableConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketMetadataTableConfigurationCommandOutput/) + +
+
+ +GetBucketMetricsConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketMetricsConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketMetricsConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketMetricsConfigurationCommandOutput/) + +
+
+ +GetBucketNotificationConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketNotificationConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketNotificationConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketNotificationConfigurationCommandOutput/) + +
+
+ +GetBucketOwnershipControls + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketOwnershipControlsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketOwnershipControlsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketOwnershipControlsCommandOutput/) + +
+
+ +GetBucketPolicy + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketPolicyCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketPolicyCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketPolicyCommandOutput/) + +
+
+ +GetBucketPolicyStatus + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketPolicyStatusCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketPolicyStatusCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketPolicyStatusCommandOutput/) + +
+
+ +GetBucketReplication + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketReplicationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketReplicationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketReplicationCommandOutput/) + +
+
+ +GetBucketRequestPayment + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketRequestPaymentCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketRequestPaymentCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketRequestPaymentCommandOutput/) + +
+
+ +GetBucketTagging + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketTaggingCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketTaggingCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketTaggingCommandOutput/) + +
+
+ +GetBucketVersioning + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketVersioningCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketVersioningCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketVersioningCommandOutput/) + +
+
+ +GetBucketWebsite + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetBucketWebsiteCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketWebsiteCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetBucketWebsiteCommandOutput/) + +
+
+ +GetObject + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetObjectCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetObjectCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetObjectCommandOutput/) + +
+
+ +GetObjectAcl + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetObjectAclCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetObjectAclCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetObjectAclCommandOutput/) + +
+
+ +GetObjectAttributes + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetObjectAttributesCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetObjectAttributesCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetObjectAttributesCommandOutput/) + +
+
+ +GetObjectLegalHold + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetObjectLegalHoldCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetObjectLegalHoldCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetObjectLegalHoldCommandOutput/) + +
+
+ +GetObjectLockConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetObjectLockConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetObjectLockConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetObjectLockConfigurationCommandOutput/) + +
+
+ +GetObjectRetention + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetObjectRetentionCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetObjectRetentionCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetObjectRetentionCommandOutput/) + +
+
+ +GetObjectTagging + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetObjectTaggingCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetObjectTaggingCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetObjectTaggingCommandOutput/) + +
+
+ +GetObjectTorrent + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetObjectTorrentCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetObjectTorrentCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetObjectTorrentCommandOutput/) + +
+
+ +GetPublicAccessBlock + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetPublicAccessBlockCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetPublicAccessBlockCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/GetPublicAccessBlockCommandOutput/) + +
+
+ +HeadBucket + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/HeadBucketCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/HeadBucketCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/HeadBucketCommandOutput/) + +
+
+ +HeadObject + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/HeadObjectCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/HeadObjectCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/HeadObjectCommandOutput/) + +
+
+ +ListBucketAnalyticsConfigurations + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/ListBucketAnalyticsConfigurationsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListBucketAnalyticsConfigurationsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListBucketAnalyticsConfigurationsCommandOutput/) + +
+
+ +ListBucketIntelligentTieringConfigurations + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/ListBucketIntelligentTieringConfigurationsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListBucketIntelligentTieringConfigurationsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListBucketIntelligentTieringConfigurationsCommandOutput/) + +
+
+ +ListBucketInventoryConfigurations + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/ListBucketInventoryConfigurationsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListBucketInventoryConfigurationsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListBucketInventoryConfigurationsCommandOutput/) + +
+
+ +ListBucketMetricsConfigurations + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/ListBucketMetricsConfigurationsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListBucketMetricsConfigurationsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListBucketMetricsConfigurationsCommandOutput/) + +
+
+ +ListBuckets + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/ListBucketsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListBucketsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListBucketsCommandOutput/) + +
+
+ +ListDirectoryBuckets + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/ListDirectoryBucketsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListDirectoryBucketsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListDirectoryBucketsCommandOutput/) + +
+
+ +ListMultipartUploads + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/ListMultipartUploadsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListMultipartUploadsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListMultipartUploadsCommandOutput/) + +
+
+ +ListObjects + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/ListObjectsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListObjectsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListObjectsCommandOutput/) + +
+
+ +ListObjectsV2 + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/ListObjectsV2Command/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListObjectsV2CommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListObjectsV2CommandOutput/) + +
+
+ +ListObjectVersions + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/ListObjectVersionsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListObjectVersionsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListObjectVersionsCommandOutput/) + +
+
+ +ListParts + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/ListPartsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListPartsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/ListPartsCommandOutput/) + +
+
+ +PutBucketAccelerateConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutBucketAccelerateConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketAccelerateConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketAccelerateConfigurationCommandOutput/) + +
+
+ +PutBucketAcl + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutBucketAclCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketAclCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketAclCommandOutput/) + +
+
+ +PutBucketAnalyticsConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutBucketAnalyticsConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketAnalyticsConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketAnalyticsConfigurationCommandOutput/) + +
+
+ +PutBucketCors + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutBucketCorsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketCorsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketCorsCommandOutput/) + +
+
+ +PutBucketEncryption + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutBucketEncryptionCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketEncryptionCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketEncryptionCommandOutput/) + +
+
+ +PutBucketIntelligentTieringConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutBucketIntelligentTieringConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketIntelligentTieringConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketIntelligentTieringConfigurationCommandOutput/) + +
+
+ +PutBucketInventoryConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutBucketInventoryConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketInventoryConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketInventoryConfigurationCommandOutput/) + +
+
+ +PutBucketLifecycleConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutBucketLifecycleConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketLifecycleConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketLifecycleConfigurationCommandOutput/) + +
+
+ +PutBucketLogging + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutBucketLoggingCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketLoggingCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketLoggingCommandOutput/) + +
+
+ +PutBucketMetricsConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutBucketMetricsConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketMetricsConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketMetricsConfigurationCommandOutput/) + +
+
+ +PutBucketNotificationConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutBucketNotificationConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketNotificationConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketNotificationConfigurationCommandOutput/) + +
+
+ +PutBucketOwnershipControls + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutBucketOwnershipControlsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketOwnershipControlsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketOwnershipControlsCommandOutput/) + +
+
+ +PutBucketPolicy + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutBucketPolicyCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketPolicyCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketPolicyCommandOutput/) + +
+
+ +PutBucketReplication + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutBucketReplicationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketReplicationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketReplicationCommandOutput/) + +
+
+ +PutBucketRequestPayment + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutBucketRequestPaymentCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketRequestPaymentCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketRequestPaymentCommandOutput/) + +
+
+ +PutBucketTagging + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutBucketTaggingCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketTaggingCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketTaggingCommandOutput/) + +
+
+ +PutBucketVersioning + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutBucketVersioningCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketVersioningCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketVersioningCommandOutput/) + +
+
+ +PutBucketWebsite + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutBucketWebsiteCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketWebsiteCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutBucketWebsiteCommandOutput/) + +
+
+ +PutObject + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutObjectCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutObjectCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutObjectCommandOutput/) + +
+
+ +PutObjectAcl + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutObjectAclCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutObjectAclCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutObjectAclCommandOutput/) + +
+
+ +PutObjectLegalHold + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutObjectLegalHoldCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutObjectLegalHoldCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutObjectLegalHoldCommandOutput/) + +
+
+ +PutObjectLockConfiguration + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutObjectLockConfigurationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutObjectLockConfigurationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutObjectLockConfigurationCommandOutput/) + +
+
+ +PutObjectRetention + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutObjectRetentionCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutObjectRetentionCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutObjectRetentionCommandOutput/) + +
+
+ +PutObjectTagging + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutObjectTaggingCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutObjectTaggingCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutObjectTaggingCommandOutput/) + +
+
+ +PutPublicAccessBlock + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/PutPublicAccessBlockCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutPublicAccessBlockCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/PutPublicAccessBlockCommandOutput/) + +
+
+ +RestoreObject + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/RestoreObjectCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/RestoreObjectCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/RestoreObjectCommandOutput/) + +
+
+ +SelectObjectContent + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/SelectObjectContentCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/SelectObjectContentCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/SelectObjectContentCommandOutput/) + +
+
+ +UploadPart + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/UploadPartCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/UploadPartCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/UploadPartCommandOutput/) + +
+
+ +UploadPartCopy + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/UploadPartCopyCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/UploadPartCopyCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/UploadPartCopyCommandOutput/) + +
+
+ +WriteGetObjectResponse + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/WriteGetObjectResponseCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/WriteGetObjectResponseCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-s3/Interface/WriteGetObjectResponseCommandOutput/) + +
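The command list above maps one-to-one onto the `*Command` classes exported by `@aws-sdk/client-s3`: every operation is invoked as `client.send(new SomeCommand(input))`. A minimal TypeScript sketch of that pattern against a MinIO-compatible backend follows; the endpoint value and the `MINIO_*` environment variable names are hypothetical illustrations, not part of this diff.

```ts
import { S3Client, PutObjectCommand, GetObjectCommand } from "@aws-sdk/client-s3";

// MinIO speaks the S3 wire protocol but lives at a custom endpoint and
// normally needs path-style addressing (bucket in the path, not the host).
const client = new S3Client({
  region: "us-east-1", // the SDK requires a region even where MinIO ignores it
  endpoint: process.env.MINIO_ENDPOINT, // hypothetical, e.g. "http://localhost:9000"
  forcePathStyle: true,
  credentials: {
    accessKeyId: process.env.MINIO_ACCESS_KEY!, // hypothetical env var names
    secretAccessKey: process.env.MINIO_SECRET_KEY!,
  },
});

// Write a small text object, then read it back.
export async function roundTrip(
  bucket: string,
  key: string,
  body: string
): Promise<string | undefined> {
  await client.send(new PutObjectCommand({ Bucket: bucket, Key: key, Body: body }));
  const res = await client.send(new GetObjectCommand({ Bucket: bucket, Key: key }));
  return res.Body?.transformToString(); // v3 response streams expose transformToString()
}
```

The `forcePathStyle: true` flag here corresponds to the `ForcePathStyle` parameter that the vendored endpoint ruleset below branches on.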
diff --git a/node_modules/@aws-sdk/client-s3/dist-cjs/auth/httpAuthSchemeProvider.js b/node_modules/@aws-sdk/client-s3/dist-cjs/auth/httpAuthSchemeProvider.js new file mode 100644 index 00000000..ab2db5e4 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-cjs/auth/httpAuthSchemeProvider.js @@ -0,0 +1,127 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveHttpAuthSchemeConfig = exports.defaultS3HttpAuthSchemeProvider = exports.defaultS3HttpAuthSchemeParametersProvider = void 0; +const core_1 = require("@aws-sdk/core"); +const signature_v4_multi_region_1 = require("@aws-sdk/signature-v4-multi-region"); +const middleware_endpoint_1 = require("@smithy/middleware-endpoint"); +const util_middleware_1 = require("@smithy/util-middleware"); +const endpointResolver_1 = require("../endpoint/endpointResolver"); +const createEndpointRuleSetHttpAuthSchemeParametersProvider = (defaultHttpAuthSchemeParametersProvider) => async (config, context, input) => { + if (!input) { + throw new Error(`Could not find \`input\` for \`defaultEndpointRuleSetHttpAuthSchemeParametersProvider\``); + } + const defaultParameters = await defaultHttpAuthSchemeParametersProvider(config, context, input); + const instructionsFn = (0, util_middleware_1.getSmithyContext)(context)?.commandInstance?.constructor + ?.getEndpointParameterInstructions; + if (!instructionsFn) { + throw new Error(`getEndpointParameterInstructions() is not defined on \`${context.commandName}\``); + } + const endpointParameters = await (0, middleware_endpoint_1.resolveParams)(input, { getEndpointParameterInstructions: instructionsFn }, config); + return Object.assign(defaultParameters, endpointParameters); +}; +const _defaultS3HttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: (0, util_middleware_1.getSmithyContext)(context).operation, + region: (await (0, util_middleware_1.normalizeProvider)(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +exports.defaultS3HttpAuthSchemeParametersProvider = createEndpointRuleSetHttpAuthSchemeParametersProvider(_defaultS3HttpAuthSchemeParametersProvider); +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "s3", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createAwsAuthSigv4aHttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4a", + signingProperties: { + name: "s3", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +const createEndpointRuleSetHttpAuthSchemeProvider = (defaultEndpointResolver, defaultHttpAuthSchemeResolver, createHttpAuthOptionFunctions) => { + const endpointRuleSetHttpAuthSchemeProvider = (authParameters) => { + const endpoint = defaultEndpointResolver(authParameters); + const authSchemes = endpoint.properties?.authSchemes; + if (!authSchemes) { + return defaultHttpAuthSchemeResolver(authParameters); + } + const options = []; + for (const scheme of authSchemes) { + const { name: resolvedName, properties = {}, ...rest } = scheme; + const name = resolvedName.toLowerCase(); + if (resolvedName !== name) { + console.warn(`HttpAuthScheme has been normalized with lowercasing: \`${resolvedName}\` to \`${name}\``); + } + let schemeId; + if 
(name === "sigv4a") { + schemeId = "aws.auth#sigv4a"; + const sigv4Present = authSchemes.find((s) => { + const name = s.name.toLowerCase(); + return name !== "sigv4a" && name.startsWith("sigv4"); + }); + if (!signature_v4_multi_region_1.signatureV4CrtContainer.CrtSignerV4 && sigv4Present) { + continue; + } + } + else if (name.startsWith("sigv4")) { + schemeId = "aws.auth#sigv4"; + } + else { + throw new Error(`Unknown HttpAuthScheme found in \`@smithy.rules#endpointRuleSet\`: \`${name}\``); + } + const createOption = createHttpAuthOptionFunctions[schemeId]; + if (!createOption) { + throw new Error(`Could not find HttpAuthOption create function for \`${schemeId}\``); + } + const option = createOption(authParameters); + option.schemeId = schemeId; + option.signingProperties = { ...(option.signingProperties || {}), ...rest, ...properties }; + options.push(option); + } + return options; + }; + return endpointRuleSetHttpAuthSchemeProvider; +}; +const _defaultS3HttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + options.push(createAwsAuthSigv4aHttpAuthOption(authParameters)); + } + } + return options; +}; +exports.defaultS3HttpAuthSchemeProvider = createEndpointRuleSetHttpAuthSchemeProvider(endpointResolver_1.defaultEndpointResolver, _defaultS3HttpAuthSchemeProvider, { + "aws.auth#sigv4": createAwsAuthSigv4HttpAuthOption, + "aws.auth#sigv4a": createAwsAuthSigv4aHttpAuthOption, +}); +const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = (0, core_1.resolveAwsSdkSigV4Config)(config); + const config_1 = (0, core_1.resolveAwsSdkSigV4AConfig)(config_0); + return Object.assign(config_1, { + authSchemePreference: (0, util_middleware_1.normalizeProvider)(config.authSchemePreference ?? 
[]), + }); +}; +exports.resolveHttpAuthSchemeConfig = resolveHttpAuthSchemeConfig; diff --git a/node_modules/@aws-sdk/client-s3/dist-cjs/endpoint/endpointResolver.js b/node_modules/@aws-sdk/client-s3/dist-cjs/endpoint/endpointResolver.js new file mode 100644 index 00000000..c5ecbab2 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-cjs/endpoint/endpointResolver.js @@ -0,0 +1,33 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = require("@aws-sdk/util-endpoints"); +const util_endpoints_2 = require("@smithy/util-endpoints"); +const ruleset_1 = require("./ruleset"); +const cache = new util_endpoints_2.EndpointCache({ + size: 50, + params: [ + "Accelerate", + "Bucket", + "DisableAccessPoints", + "DisableMultiRegionAccessPoints", + "DisableS3ExpressSessionAuth", + "Endpoint", + "ForcePathStyle", + "Region", + "UseArnRegion", + "UseDualStack", + "UseFIPS", + "UseGlobalEndpoint", + "UseObjectLambdaEndpoint", + "UseS3ExpressControlEndpoint", + ], +}); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => (0, util_endpoints_2.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; +util_endpoints_2.customEndpointFunctions.aws = util_endpoints_1.awsEndpointFunctions; diff --git a/node_modules/@aws-sdk/client-s3/dist-cjs/endpoint/ruleset.js b/node_modules/@aws-sdk/client-s3/dist-cjs/endpoint/ruleset.js new file mode 100644 index 00000000..fa0eed79 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-cjs/endpoint/ruleset.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ruleSet = void 0; +const cp = "required", cq = "type", cr = "rules", cs = "conditions", ct = "fn", cu = "argv", cv = "ref", cw = "assign", cx = "url", cy = "properties", cz = "backend", cA = "authSchemes", cB = "disableDoubleEncoding", cC = "signingName", cD = "signingRegion", cE = "headers", cF = "signingRegionSet"; +const a = 6, b = false, c = true, d = "isSet", e = "booleanEquals", f = "error", g = "aws.partition", h = "stringEquals", i = "getAttr", j = "name", k = "substring", l = "bucketSuffix", m = "parseURL", n = "endpoint", o = "tree", p = "aws.isVirtualHostableS3Bucket", q = "{url#scheme}://{Bucket}.{url#authority}{url#path}", r = "not", s = "accessPointSuffix", t = "{url#scheme}://{url#authority}{url#path}", u = "hardwareType", v = "regionPrefix", w = "bucketAliasSuffix", x = "outpostId", y = "isValidHostLabel", z = "sigv4a", A = "s3-outposts", B = "s3", C = "{url#scheme}://{url#authority}{url#normalizedPath}{Bucket}", D = "https://{Bucket}.s3-accelerate.{partitionResult#dnsSuffix}", E = "https://{Bucket}.s3.{partitionResult#dnsSuffix}", F = "aws.parseArn", G = "bucketArn", H = "arnType", I = "", J = "s3-object-lambda", K = "accesspoint", L = "accessPointName", M = "{url#scheme}://{accessPointName}-{bucketArn#accountId}.{url#authority}{url#path}", N = "mrapPartition", O = "outpostType", P = "arnPrefix", Q = "{url#scheme}://{url#authority}{url#normalizedPath}{uri_encoded_bucket}", R = "https://s3.{partitionResult#dnsSuffix}/{uri_encoded_bucket}", S = "https://s3.{partitionResult#dnsSuffix}", T = { [cp]: false, [cq]: "String" }, U = { [cp]: true, "default": false, [cq]: "Boolean" }, V = { [cp]: false, [cq]: "Boolean" }, W = { [ct]: e, [cu]: [{ [cv]: "Accelerate" }, true] }, X = { [ct]: e, [cu]: [{ 
[cv]: "UseFIPS" }, true] }, Y = { [ct]: e, [cu]: [{ [cv]: "UseDualStack" }, true] }, Z = { [ct]: d, [cu]: [{ [cv]: "Endpoint" }] }, aa = { [ct]: g, [cu]: [{ [cv]: "Region" }], [cw]: "partitionResult" }, ab = { [ct]: h, [cu]: [{ [ct]: i, [cu]: [{ [cv]: "partitionResult" }, j] }, "aws-cn"] }, ac = { [ct]: d, [cu]: [{ [cv]: "Bucket" }] }, ad = { [cv]: "Bucket" }, ae = { [cs]: [Y], [f]: "S3Express does not support Dual-stack.", [cq]: f }, af = { [cs]: [W], [f]: "S3Express does not support S3 Accelerate.", [cq]: f }, ag = { [cs]: [Z, { [ct]: m, [cu]: [{ [cv]: "Endpoint" }], [cw]: "url" }], [cr]: [{ [cs]: [{ [ct]: d, [cu]: [{ [cv]: "DisableS3ExpressSessionAuth" }] }, { [ct]: e, [cu]: [{ [cv]: "DisableS3ExpressSessionAuth" }, true] }], [cr]: [{ [cs]: [{ [ct]: e, [cu]: [{ [ct]: i, [cu]: [{ [cv]: "url" }, "isIp"] }, true] }], [cr]: [{ [cs]: [{ [ct]: "uriEncode", [cu]: [ad], [cw]: "uri_encoded_bucket" }], [cr]: [{ [n]: { [cx]: "{url#scheme}://{url#authority}/{uri_encoded_bucket}{url#path}", [cy]: { [cz]: "S3Express", [cA]: [{ [cB]: true, [j]: "sigv4", [cC]: "s3express", [cD]: "{Region}" }] }, [cE]: {} }, [cq]: n }], [cq]: o }], [cq]: o }, { [cs]: [{ [ct]: p, [cu]: [ad, false] }], [cr]: [{ [n]: { [cx]: q, [cy]: { [cz]: "S3Express", [cA]: [{ [cB]: true, [j]: "sigv4", [cC]: "s3express", [cD]: "{Region}" }] }, [cE]: {} }, [cq]: n }], [cq]: o }, { [f]: "S3Express bucket name is not a valid virtual hostable name.", [cq]: f }], [cq]: o }, { [cs]: [{ [ct]: e, [cu]: [{ [ct]: i, [cu]: [{ [cv]: "url" }, "isIp"] }, true] }], [cr]: [{ [cs]: [{ [ct]: "uriEncode", [cu]: [ad], [cw]: "uri_encoded_bucket" }], [cr]: [{ [n]: { [cx]: "{url#scheme}://{url#authority}/{uri_encoded_bucket}{url#path}", [cy]: { [cz]: "S3Express", [cA]: [{ [cB]: true, [j]: "sigv4-s3express", [cC]: "s3express", [cD]: "{Region}" }] }, [cE]: {} }, [cq]: n }], [cq]: o }], [cq]: o }, { [cs]: [{ [ct]: p, [cu]: [ad, false] }], [cr]: [{ [n]: { [cx]: q, [cy]: { [cz]: "S3Express", [cA]: [{ [cB]: true, [j]: "sigv4-s3express", [cC]: "s3express", [cD]: "{Region}" }] }, [cE]: {} }, [cq]: n }], [cq]: o }, { [f]: "S3Express bucket name is not a valid virtual hostable name.", [cq]: f }], [cq]: o }, ah = { [ct]: m, [cu]: [{ [cv]: "Endpoint" }], [cw]: "url" }, ai = { [ct]: e, [cu]: [{ [ct]: i, [cu]: [{ [cv]: "url" }, "isIp"] }, true] }, aj = { [cv]: "url" }, ak = { [ct]: "uriEncode", [cu]: [ad], [cw]: "uri_encoded_bucket" }, al = { [cz]: "S3Express", [cA]: [{ [cB]: true, [j]: "sigv4", [cC]: "s3express", [cD]: "{Region}" }] }, am = {}, an = { [ct]: p, [cu]: [ad, false] }, ao = { [f]: "S3Express bucket name is not a valid virtual hostable name.", [cq]: f }, ap = { [ct]: d, [cu]: [{ [cv]: "UseS3ExpressControlEndpoint" }] }, aq = { [ct]: e, [cu]: [{ [cv]: "UseS3ExpressControlEndpoint" }, true] }, ar = { [ct]: r, [cu]: [Z] }, as = { [f]: "Unrecognized S3Express bucket name format.", [cq]: f }, at = { [ct]: r, [cu]: [ac] }, au = { [cv]: u }, av = { [cs]: [ar], [f]: "Expected a endpoint to be specified but no endpoint was found", [cq]: f }, aw = { [cA]: [{ [cB]: true, [j]: z, [cC]: A, [cF]: ["*"] }, { [cB]: true, [j]: "sigv4", [cC]: A, [cD]: "{Region}" }] }, ax = { [ct]: e, [cu]: [{ [cv]: "ForcePathStyle" }, false] }, ay = { [cv]: "ForcePathStyle" }, az = { [ct]: e, [cu]: [{ [cv]: "Accelerate" }, false] }, aA = { [ct]: h, [cu]: [{ [cv]: "Region" }, "aws-global"] }, aB = { [cA]: [{ [cB]: true, [j]: "sigv4", [cC]: B, [cD]: "us-east-1" }] }, aC = { [ct]: r, [cu]: [aA] }, aD = { [ct]: e, [cu]: [{ [cv]: "UseGlobalEndpoint" }, true] }, aE = { [cx]: 
"https://{Bucket}.s3-fips.dualstack.{Region}.{partitionResult#dnsSuffix}", [cy]: { [cA]: [{ [cB]: true, [j]: "sigv4", [cC]: B, [cD]: "{Region}" }] }, [cE]: {} }, aF = { [cA]: [{ [cB]: true, [j]: "sigv4", [cC]: B, [cD]: "{Region}" }] }, aG = { [ct]: e, [cu]: [{ [cv]: "UseGlobalEndpoint" }, false] }, aH = { [ct]: e, [cu]: [{ [cv]: "UseDualStack" }, false] }, aI = { [cx]: "https://{Bucket}.s3-fips.{Region}.{partitionResult#dnsSuffix}", [cy]: aF, [cE]: {} }, aJ = { [ct]: e, [cu]: [{ [cv]: "UseFIPS" }, false] }, aK = { [cx]: "https://{Bucket}.s3-accelerate.dualstack.{partitionResult#dnsSuffix}", [cy]: aF, [cE]: {} }, aL = { [cx]: "https://{Bucket}.s3.dualstack.{Region}.{partitionResult#dnsSuffix}", [cy]: aF, [cE]: {} }, aM = { [ct]: e, [cu]: [{ [ct]: i, [cu]: [aj, "isIp"] }, false] }, aN = { [cx]: C, [cy]: aF, [cE]: {} }, aO = { [cx]: q, [cy]: aF, [cE]: {} }, aP = { [n]: aO, [cq]: n }, aQ = { [cx]: D, [cy]: aF, [cE]: {} }, aR = { [cx]: "https://{Bucket}.s3.{Region}.{partitionResult#dnsSuffix}", [cy]: aF, [cE]: {} }, aS = { [f]: "Invalid region: region was not a valid DNS name.", [cq]: f }, aT = { [cv]: G }, aU = { [cv]: H }, aV = { [ct]: i, [cu]: [aT, "service"] }, aW = { [cv]: L }, aX = { [cs]: [Y], [f]: "S3 Object Lambda does not support Dual-stack", [cq]: f }, aY = { [cs]: [W], [f]: "S3 Object Lambda does not support S3 Accelerate", [cq]: f }, aZ = { [cs]: [{ [ct]: d, [cu]: [{ [cv]: "DisableAccessPoints" }] }, { [ct]: e, [cu]: [{ [cv]: "DisableAccessPoints" }, true] }], [f]: "Access points are not supported for this operation", [cq]: f }, ba = { [cs]: [{ [ct]: d, [cu]: [{ [cv]: "UseArnRegion" }] }, { [ct]: e, [cu]: [{ [cv]: "UseArnRegion" }, false] }, { [ct]: r, [cu]: [{ [ct]: h, [cu]: [{ [ct]: i, [cu]: [aT, "region"] }, "{Region}"] }] }], [f]: "Invalid configuration: region from ARN `{bucketArn#region}` does not match client region `{Region}` and UseArnRegion is `false`", [cq]: f }, bb = { [ct]: i, [cu]: [{ [cv]: "bucketPartition" }, j] }, bc = { [ct]: i, [cu]: [aT, "accountId"] }, bd = { [cA]: [{ [cB]: true, [j]: "sigv4", [cC]: J, [cD]: "{bucketArn#region}" }] }, be = { [f]: "Invalid ARN: The access point name may only contain a-z, A-Z, 0-9 and `-`. Found: `{accessPointName}`", [cq]: f }, bf = { [f]: "Invalid ARN: The account id may only contain a-z, A-Z, 0-9 and `-`. 
Found: `{bucketArn#accountId}`", [cq]: f }, bg = { [f]: "Invalid region in ARN: `{bucketArn#region}` (invalid DNS name)", [cq]: f }, bh = { [f]: "Client was configured for partition `{partitionResult#name}` but ARN (`{Bucket}`) has `{bucketPartition#name}`", [cq]: f }, bi = { [f]: "Invalid ARN: The ARN may only contain a single resource component after `accesspoint`.", [cq]: f }, bj = { [f]: "Invalid ARN: Expected a resource of the format `accesspoint:` but no name was provided", [cq]: f }, bk = { [cA]: [{ [cB]: true, [j]: "sigv4", [cC]: B, [cD]: "{bucketArn#region}" }] }, bl = { [cA]: [{ [cB]: true, [j]: z, [cC]: A, [cF]: ["*"] }, { [cB]: true, [j]: "sigv4", [cC]: A, [cD]: "{bucketArn#region}" }] }, bm = { [ct]: F, [cu]: [ad] }, bn = { [cx]: "https://s3-fips.dualstack.{Region}.{partitionResult#dnsSuffix}/{uri_encoded_bucket}", [cy]: aF, [cE]: {} }, bo = { [cx]: "https://s3-fips.{Region}.{partitionResult#dnsSuffix}/{uri_encoded_bucket}", [cy]: aF, [cE]: {} }, bp = { [cx]: "https://s3.dualstack.{Region}.{partitionResult#dnsSuffix}/{uri_encoded_bucket}", [cy]: aF, [cE]: {} }, bq = { [cx]: Q, [cy]: aF, [cE]: {} }, br = { [cx]: "https://s3.{Region}.{partitionResult#dnsSuffix}/{uri_encoded_bucket}", [cy]: aF, [cE]: {} }, bs = { [cv]: "UseObjectLambdaEndpoint" }, bt = { [cA]: [{ [cB]: true, [j]: "sigv4", [cC]: J, [cD]: "{Region}" }] }, bu = { [cx]: "https://s3-fips.dualstack.{Region}.{partitionResult#dnsSuffix}", [cy]: aF, [cE]: {} }, bv = { [cx]: "https://s3-fips.{Region}.{partitionResult#dnsSuffix}", [cy]: aF, [cE]: {} }, bw = { [cx]: "https://s3.dualstack.{Region}.{partitionResult#dnsSuffix}", [cy]: aF, [cE]: {} }, bx = { [cx]: t, [cy]: aF, [cE]: {} }, by = { [cx]: "https://s3.{Region}.{partitionResult#dnsSuffix}", [cy]: aF, [cE]: {} }, bz = [{ [cv]: "Region" }], bA = [{ [cv]: "Endpoint" }], bB = [ad], bC = [Y], bD = [W], bE = [Z, ah], bF = [{ [ct]: d, [cu]: [{ [cv]: "DisableS3ExpressSessionAuth" }] }, { [ct]: e, [cu]: [{ [cv]: "DisableS3ExpressSessionAuth" }, true] }], bG = [ak], bH = [an], bI = [aa], bJ = [X], bK = [{ [ct]: k, [cu]: [ad, 6, 14, true], [cw]: "s3expressAvailabilityZoneId" }, { [ct]: k, [cu]: [ad, 14, 16, true], [cw]: "s3expressAvailabilityZoneDelim" }, { [ct]: h, [cu]: [{ [cv]: "s3expressAvailabilityZoneDelim" }, "--"] }], bL = [{ [cs]: [X], [n]: { [cx]: "https://{Bucket}.s3express-fips-{s3expressAvailabilityZoneId}.{Region}.{partitionResult#dnsSuffix}", [cy]: al, [cE]: {} }, [cq]: n }, { [n]: { [cx]: "https://{Bucket}.s3express-{s3expressAvailabilityZoneId}.{Region}.{partitionResult#dnsSuffix}", [cy]: al, [cE]: {} }, [cq]: n }], bM = [{ [ct]: k, [cu]: [ad, 6, 15, true], [cw]: "s3expressAvailabilityZoneId" }, { [ct]: k, [cu]: [ad, 15, 17, true], [cw]: "s3expressAvailabilityZoneDelim" }, { [ct]: h, [cu]: [{ [cv]: "s3expressAvailabilityZoneDelim" }, "--"] }], bN = [{ [ct]: k, [cu]: [ad, 6, 19, true], [cw]: "s3expressAvailabilityZoneId" }, { [ct]: k, [cu]: [ad, 19, 21, true], [cw]: "s3expressAvailabilityZoneDelim" }, { [ct]: h, [cu]: [{ [cv]: "s3expressAvailabilityZoneDelim" }, "--"] }], bO = [{ [ct]: k, [cu]: [ad, 6, 20, true], [cw]: "s3expressAvailabilityZoneId" }, { [ct]: k, [cu]: [ad, 20, 22, true], [cw]: "s3expressAvailabilityZoneDelim" }, { [ct]: h, [cu]: [{ [cv]: "s3expressAvailabilityZoneDelim" }, "--"] }], bP = [{ [ct]: k, [cu]: [ad, 6, 26, true], [cw]: "s3expressAvailabilityZoneId" }, { [ct]: k, [cu]: [ad, 26, 28, true], [cw]: "s3expressAvailabilityZoneDelim" }, { [ct]: h, [cu]: [{ [cv]: "s3expressAvailabilityZoneDelim" }, "--"] }], bQ = [{ [cs]: [X], [n]: { [cx]: 
"https://{Bucket}.s3express-fips-{s3expressAvailabilityZoneId}.{Region}.{partitionResult#dnsSuffix}", [cy]: { [cz]: "S3Express", [cA]: [{ [cB]: true, [j]: "sigv4-s3express", [cC]: "s3express", [cD]: "{Region}" }] }, [cE]: {} }, [cq]: n }, { [n]: { [cx]: "https://{Bucket}.s3express-{s3expressAvailabilityZoneId}.{Region}.{partitionResult#dnsSuffix}", [cy]: { [cz]: "S3Express", [cA]: [{ [cB]: true, [j]: "sigv4-s3express", [cC]: "s3express", [cD]: "{Region}" }] }, [cE]: {} }, [cq]: n }], bR = [ad, 0, 7, true], bS = [{ [ct]: k, [cu]: [ad, 7, 15, true], [cw]: "s3expressAvailabilityZoneId" }, { [ct]: k, [cu]: [ad, 15, 17, true], [cw]: "s3expressAvailabilityZoneDelim" }, { [ct]: h, [cu]: [{ [cv]: "s3expressAvailabilityZoneDelim" }, "--"] }], bT = [{ [ct]: k, [cu]: [ad, 7, 16, true], [cw]: "s3expressAvailabilityZoneId" }, { [ct]: k, [cu]: [ad, 16, 18, true], [cw]: "s3expressAvailabilityZoneDelim" }, { [ct]: h, [cu]: [{ [cv]: "s3expressAvailabilityZoneDelim" }, "--"] }], bU = [{ [ct]: k, [cu]: [ad, 7, 20, true], [cw]: "s3expressAvailabilityZoneId" }, { [ct]: k, [cu]: [ad, 20, 22, true], [cw]: "s3expressAvailabilityZoneDelim" }, { [ct]: h, [cu]: [{ [cv]: "s3expressAvailabilityZoneDelim" }, "--"] }], bV = [{ [ct]: k, [cu]: [ad, 7, 21, true], [cw]: "s3expressAvailabilityZoneId" }, { [ct]: k, [cu]: [ad, 21, 23, true], [cw]: "s3expressAvailabilityZoneDelim" }, { [ct]: h, [cu]: [{ [cv]: "s3expressAvailabilityZoneDelim" }, "--"] }], bW = [{ [ct]: k, [cu]: [ad, 7, 27, true], [cw]: "s3expressAvailabilityZoneId" }, { [ct]: k, [cu]: [ad, 27, 29, true], [cw]: "s3expressAvailabilityZoneDelim" }, { [ct]: h, [cu]: [{ [cv]: "s3expressAvailabilityZoneDelim" }, "--"] }], bX = [ac], bY = [{ [ct]: y, [cu]: [{ [cv]: x }, false] }], bZ = [{ [ct]: h, [cu]: [{ [cv]: v }, "beta"] }], ca = ["*"], cb = [{ [ct]: y, [cu]: [{ [cv]: "Region" }, false] }], cc = [{ [ct]: h, [cu]: [{ [cv]: "Region" }, "us-east-1"] }], cd = [{ [ct]: h, [cu]: [aU, K] }], ce = [{ [ct]: i, [cu]: [aT, "resourceId[1]"], [cw]: L }, { [ct]: r, [cu]: [{ [ct]: h, [cu]: [aW, I] }] }], cf = [aT, "resourceId[1]"], cg = [{ [ct]: r, [cu]: [{ [ct]: h, [cu]: [{ [ct]: i, [cu]: [aT, "region"] }, I] }] }], ch = [{ [ct]: r, [cu]: [{ [ct]: d, [cu]: [{ [ct]: i, [cu]: [aT, "resourceId[2]"] }] }] }], ci = [aT, "resourceId[2]"], cj = [{ [ct]: g, [cu]: [{ [ct]: i, [cu]: [aT, "region"] }], [cw]: "bucketPartition" }], ck = [{ [ct]: h, [cu]: [bb, { [ct]: i, [cu]: [{ [cv]: "partitionResult" }, j] }] }], cl = [{ [ct]: y, [cu]: [{ [ct]: i, [cu]: [aT, "region"] }, true] }], cm = [{ [ct]: y, [cu]: [bc, false] }], cn = [{ [ct]: y, [cu]: [aW, false] }], co = [{ [ct]: y, [cu]: [{ [cv]: "Region" }, true] }]; +const _data = { version: "1.0", parameters: { Bucket: T, Region: T, UseFIPS: U, UseDualStack: U, Endpoint: T, ForcePathStyle: U, Accelerate: U, UseGlobalEndpoint: U, UseObjectLambdaEndpoint: V, Key: T, Prefix: T, CopySource: T, DisableAccessPoints: V, DisableMultiRegionAccessPoints: U, UseArnRegion: V, UseS3ExpressControlEndpoint: V, DisableS3ExpressSessionAuth: V }, [cr]: [{ [cs]: [{ [ct]: d, [cu]: bz }], [cr]: [{ [cs]: [W, X], error: "Accelerate cannot be used with FIPS", [cq]: f }, { [cs]: [Y, Z], error: "Cannot set dual-stack in combination with a custom endpoint.", [cq]: f }, { [cs]: [Z, X], error: "A custom endpoint cannot be combined with FIPS", [cq]: f }, { [cs]: [Z, W], error: "A custom endpoint cannot be combined with S3 Accelerate", [cq]: f }, { [cs]: [X, aa, ab], error: "Partition does not support FIPS", [cq]: f }, { [cs]: [ac, { [ct]: k, [cu]: [ad, 0, a, c], [cw]: l }, 
{ [ct]: h, [cu]: [{ [cv]: l }, "--x-s3"] }], [cr]: [ae, af, ag, { [cs]: [ap, aq], [cr]: [{ [cs]: bI, [cr]: [{ [cs]: [ak, ar], [cr]: [{ [cs]: bJ, endpoint: { [cx]: "https://s3express-control-fips.{Region}.{partitionResult#dnsSuffix}/{uri_encoded_bucket}", [cy]: al, [cE]: am }, [cq]: n }, { endpoint: { [cx]: "https://s3express-control.{Region}.{partitionResult#dnsSuffix}/{uri_encoded_bucket}", [cy]: al, [cE]: am }, [cq]: n }], [cq]: o }], [cq]: o }], [cq]: o }, { [cs]: bH, [cr]: [{ [cs]: bI, [cr]: [{ [cs]: bF, [cr]: [{ [cs]: bK, [cr]: bL, [cq]: o }, { [cs]: bM, [cr]: bL, [cq]: o }, { [cs]: bN, [cr]: bL, [cq]: o }, { [cs]: bO, [cr]: bL, [cq]: o }, { [cs]: bP, [cr]: bL, [cq]: o }, as], [cq]: o }, { [cs]: bK, [cr]: bQ, [cq]: o }, { [cs]: bM, [cr]: bQ, [cq]: o }, { [cs]: bN, [cr]: bQ, [cq]: o }, { [cs]: bO, [cr]: bQ, [cq]: o }, { [cs]: bP, [cr]: bQ, [cq]: o }, as], [cq]: o }], [cq]: o }, ao], [cq]: o }, { [cs]: [ac, { [ct]: k, [cu]: bR, [cw]: s }, { [ct]: h, [cu]: [{ [cv]: s }, "--xa-s3"] }], [cr]: [ae, af, ag, { [cs]: bH, [cr]: [{ [cs]: bI, [cr]: [{ [cs]: bF, [cr]: [{ [cs]: bS, [cr]: bL, [cq]: o }, { [cs]: bT, [cr]: bL, [cq]: o }, { [cs]: bU, [cr]: bL, [cq]: o }, { [cs]: bV, [cr]: bL, [cq]: o }, { [cs]: bW, [cr]: bL, [cq]: o }, as], [cq]: o }, { [cs]: bS, [cr]: bQ, [cq]: o }, { [cs]: bT, [cr]: bQ, [cq]: o }, { [cs]: bU, [cr]: bQ, [cq]: o }, { [cs]: bV, [cr]: bQ, [cq]: o }, { [cs]: bW, [cr]: bQ, [cq]: o }, as], [cq]: o }], [cq]: o }, ao], [cq]: o }, { [cs]: [at, ap, aq], [cr]: [{ [cs]: bI, [cr]: [{ [cs]: bE, endpoint: { [cx]: t, [cy]: al, [cE]: am }, [cq]: n }, { [cs]: bJ, endpoint: { [cx]: "https://s3express-control-fips.{Region}.{partitionResult#dnsSuffix}", [cy]: al, [cE]: am }, [cq]: n }, { endpoint: { [cx]: "https://s3express-control.{Region}.{partitionResult#dnsSuffix}", [cy]: al, [cE]: am }, [cq]: n }], [cq]: o }], [cq]: o }, { [cs]: [ac, { [ct]: k, [cu]: [ad, 49, 50, c], [cw]: u }, { [ct]: k, [cu]: [ad, 8, 12, c], [cw]: v }, { [ct]: k, [cu]: bR, [cw]: w }, { [ct]: k, [cu]: [ad, 32, 49, c], [cw]: x }, { [ct]: g, [cu]: bz, [cw]: "regionPartition" }, { [ct]: h, [cu]: [{ [cv]: w }, "--op-s3"] }], [cr]: [{ [cs]: bY, [cr]: [{ [cs]: [{ [ct]: h, [cu]: [au, "e"] }], [cr]: [{ [cs]: bZ, [cr]: [av, { [cs]: bE, endpoint: { [cx]: "https://{Bucket}.ec2.{url#authority}", [cy]: aw, [cE]: am }, [cq]: n }], [cq]: o }, { endpoint: { [cx]: "https://{Bucket}.ec2.s3-outposts.{Region}.{regionPartition#dnsSuffix}", [cy]: aw, [cE]: am }, [cq]: n }], [cq]: o }, { [cs]: [{ [ct]: h, [cu]: [au, "o"] }], [cr]: [{ [cs]: bZ, [cr]: [av, { [cs]: bE, endpoint: { [cx]: "https://{Bucket}.op-{outpostId}.{url#authority}", [cy]: aw, [cE]: am }, [cq]: n }], [cq]: o }, { endpoint: { [cx]: "https://{Bucket}.op-{outpostId}.s3-outposts.{Region}.{regionPartition#dnsSuffix}", [cy]: aw, [cE]: am }, [cq]: n }], [cq]: o }, { error: "Unrecognized hardware type: \"Expected hardware type o or e but got {hardwareType}\"", [cq]: f }], [cq]: o }, { error: "Invalid ARN: The outpost Id must only contain a-z, A-Z, 0-9 and `-`.", [cq]: f }], [cq]: o }, { [cs]: bX, [cr]: [{ [cs]: [Z, { [ct]: r, [cu]: [{ [ct]: d, [cu]: [{ [ct]: m, [cu]: bA }] }] }], error: "Custom endpoint `{Endpoint}` was not a valid URI", [cq]: f }, { [cs]: [ax, an], [cr]: [{ [cs]: bI, [cr]: [{ [cs]: cb, [cr]: [{ [cs]: [W, ab], error: "S3 Accelerate cannot be used in this region", [cq]: f }, { [cs]: [Y, X, az, ar, aA], endpoint: { [cx]: "https://{Bucket}.s3-fips.dualstack.us-east-1.{partitionResult#dnsSuffix}", [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [Y, X, az, ar, aC, aD], [cr]: 
[{ endpoint: aE, [cq]: n }], [cq]: o }, { [cs]: [Y, X, az, ar, aC, aG], endpoint: aE, [cq]: n }, { [cs]: [aH, X, az, ar, aA], endpoint: { [cx]: "https://{Bucket}.s3-fips.us-east-1.{partitionResult#dnsSuffix}", [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aH, X, az, ar, aC, aD], [cr]: [{ endpoint: aI, [cq]: n }], [cq]: o }, { [cs]: [aH, X, az, ar, aC, aG], endpoint: aI, [cq]: n }, { [cs]: [Y, aJ, W, ar, aA], endpoint: { [cx]: "https://{Bucket}.s3-accelerate.dualstack.us-east-1.{partitionResult#dnsSuffix}", [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [Y, aJ, W, ar, aC, aD], [cr]: [{ endpoint: aK, [cq]: n }], [cq]: o }, { [cs]: [Y, aJ, W, ar, aC, aG], endpoint: aK, [cq]: n }, { [cs]: [Y, aJ, az, ar, aA], endpoint: { [cx]: "https://{Bucket}.s3.dualstack.us-east-1.{partitionResult#dnsSuffix}", [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [Y, aJ, az, ar, aC, aD], [cr]: [{ endpoint: aL, [cq]: n }], [cq]: o }, { [cs]: [Y, aJ, az, ar, aC, aG], endpoint: aL, [cq]: n }, { [cs]: [aH, aJ, az, Z, ah, ai, aA], endpoint: { [cx]: C, [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aH, aJ, az, Z, ah, aM, aA], endpoint: { [cx]: q, [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aH, aJ, az, Z, ah, ai, aC, aD], [cr]: [{ [cs]: cc, endpoint: aN, [cq]: n }, { endpoint: aN, [cq]: n }], [cq]: o }, { [cs]: [aH, aJ, az, Z, ah, aM, aC, aD], [cr]: [{ [cs]: cc, endpoint: aO, [cq]: n }, aP], [cq]: o }, { [cs]: [aH, aJ, az, Z, ah, ai, aC, aG], endpoint: aN, [cq]: n }, { [cs]: [aH, aJ, az, Z, ah, aM, aC, aG], endpoint: aO, [cq]: n }, { [cs]: [aH, aJ, W, ar, aA], endpoint: { [cx]: D, [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aH, aJ, W, ar, aC, aD], [cr]: [{ [cs]: cc, endpoint: aQ, [cq]: n }, { endpoint: aQ, [cq]: n }], [cq]: o }, { [cs]: [aH, aJ, W, ar, aC, aG], endpoint: aQ, [cq]: n }, { [cs]: [aH, aJ, az, ar, aA], endpoint: { [cx]: E, [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aH, aJ, az, ar, aC, aD], [cr]: [{ [cs]: cc, endpoint: { [cx]: E, [cy]: aF, [cE]: am }, [cq]: n }, { endpoint: aR, [cq]: n }], [cq]: o }, { [cs]: [aH, aJ, az, ar, aC, aG], endpoint: aR, [cq]: n }], [cq]: o }, aS], [cq]: o }], [cq]: o }, { [cs]: [Z, ah, { [ct]: h, [cu]: [{ [ct]: i, [cu]: [aj, "scheme"] }, "http"] }, { [ct]: p, [cu]: [ad, c] }, ax, aJ, aH, az], [cr]: [{ [cs]: bI, [cr]: [{ [cs]: cb, [cr]: [aP], [cq]: o }, aS], [cq]: o }], [cq]: o }, { [cs]: [ax, { [ct]: F, [cu]: bB, [cw]: G }], [cr]: [{ [cs]: [{ [ct]: i, [cu]: [aT, "resourceId[0]"], [cw]: H }, { [ct]: r, [cu]: [{ [ct]: h, [cu]: [aU, I] }] }], [cr]: [{ [cs]: [{ [ct]: h, [cu]: [aV, J] }], [cr]: [{ [cs]: cd, [cr]: [{ [cs]: ce, [cr]: [aX, aY, { [cs]: cg, [cr]: [aZ, { [cs]: ch, [cr]: [ba, { [cs]: cj, [cr]: [{ [cs]: bI, [cr]: [{ [cs]: ck, [cr]: [{ [cs]: cl, [cr]: [{ [cs]: [{ [ct]: h, [cu]: [bc, I] }], error: "Invalid ARN: Missing account id", [cq]: f }, { [cs]: cm, [cr]: [{ [cs]: cn, [cr]: [{ [cs]: bE, endpoint: { [cx]: M, [cy]: bd, [cE]: am }, [cq]: n }, { [cs]: bJ, endpoint: { [cx]: "https://{accessPointName}-{bucketArn#accountId}.s3-object-lambda-fips.{bucketArn#region}.{bucketPartition#dnsSuffix}", [cy]: bd, [cE]: am }, [cq]: n }, { endpoint: { [cx]: "https://{accessPointName}-{bucketArn#accountId}.s3-object-lambda.{bucketArn#region}.{bucketPartition#dnsSuffix}", [cy]: bd, [cE]: am }, [cq]: n }], [cq]: o }, be], [cq]: o }, bf], [cq]: o }, bg], [cq]: o }, bh], [cq]: o }], [cq]: o }], [cq]: o }, bi], [cq]: o }, { error: "Invalid ARN: bucket ARN is missing a region", [cq]: f }], [cq]: o }, bj], [cq]: o }, { error: "Invalid ARN: Object Lambda ARNs only support `accesspoint` arn types, but found: `{arnType}`", [cq]: 
f }], [cq]: o }, { [cs]: cd, [cr]: [{ [cs]: ce, [cr]: [{ [cs]: cg, [cr]: [{ [cs]: cd, [cr]: [{ [cs]: cg, [cr]: [aZ, { [cs]: ch, [cr]: [ba, { [cs]: cj, [cr]: [{ [cs]: bI, [cr]: [{ [cs]: [{ [ct]: h, [cu]: [bb, "{partitionResult#name}"] }], [cr]: [{ [cs]: cl, [cr]: [{ [cs]: [{ [ct]: h, [cu]: [aV, B] }], [cr]: [{ [cs]: cm, [cr]: [{ [cs]: cn, [cr]: [{ [cs]: bD, error: "Access Points do not support S3 Accelerate", [cq]: f }, { [cs]: [X, Y], endpoint: { [cx]: "https://{accessPointName}-{bucketArn#accountId}.s3-accesspoint-fips.dualstack.{bucketArn#region}.{bucketPartition#dnsSuffix}", [cy]: bk, [cE]: am }, [cq]: n }, { [cs]: [X, aH], endpoint: { [cx]: "https://{accessPointName}-{bucketArn#accountId}.s3-accesspoint-fips.{bucketArn#region}.{bucketPartition#dnsSuffix}", [cy]: bk, [cE]: am }, [cq]: n }, { [cs]: [aJ, Y], endpoint: { [cx]: "https://{accessPointName}-{bucketArn#accountId}.s3-accesspoint.dualstack.{bucketArn#region}.{bucketPartition#dnsSuffix}", [cy]: bk, [cE]: am }, [cq]: n }, { [cs]: [aJ, aH, Z, ah], endpoint: { [cx]: M, [cy]: bk, [cE]: am }, [cq]: n }, { [cs]: [aJ, aH], endpoint: { [cx]: "https://{accessPointName}-{bucketArn#accountId}.s3-accesspoint.{bucketArn#region}.{bucketPartition#dnsSuffix}", [cy]: bk, [cE]: am }, [cq]: n }], [cq]: o }, be], [cq]: o }, bf], [cq]: o }, { error: "Invalid ARN: The ARN was not for the S3 service, found: {bucketArn#service}", [cq]: f }], [cq]: o }, bg], [cq]: o }, bh], [cq]: o }], [cq]: o }], [cq]: o }, bi], [cq]: o }], [cq]: o }], [cq]: o }, { [cs]: [{ [ct]: y, [cu]: [aW, c] }], [cr]: [{ [cs]: bC, error: "S3 MRAP does not support dual-stack", [cq]: f }, { [cs]: bJ, error: "S3 MRAP does not support FIPS", [cq]: f }, { [cs]: bD, error: "S3 MRAP does not support S3 Accelerate", [cq]: f }, { [cs]: [{ [ct]: e, [cu]: [{ [cv]: "DisableMultiRegionAccessPoints" }, c] }], error: "Invalid configuration: Multi-Region Access Point ARNs are disabled.", [cq]: f }, { [cs]: [{ [ct]: g, [cu]: bz, [cw]: N }], [cr]: [{ [cs]: [{ [ct]: h, [cu]: [{ [ct]: i, [cu]: [{ [cv]: N }, j] }, { [ct]: i, [cu]: [aT, "partition"] }] }], [cr]: [{ endpoint: { [cx]: "https://{accessPointName}.accesspoint.s3-global.{mrapPartition#dnsSuffix}", [cy]: { [cA]: [{ [cB]: c, name: z, [cC]: B, [cF]: ca }] }, [cE]: am }, [cq]: n }], [cq]: o }, { error: "Client was configured for partition `{mrapPartition#name}` but bucket referred to partition `{bucketArn#partition}`", [cq]: f }], [cq]: o }], [cq]: o }, { error: "Invalid Access Point Name", [cq]: f }], [cq]: o }, bj], [cq]: o }, { [cs]: [{ [ct]: h, [cu]: [aV, A] }], [cr]: [{ [cs]: bC, error: "S3 Outposts does not support Dual-stack", [cq]: f }, { [cs]: bJ, error: "S3 Outposts does not support FIPS", [cq]: f }, { [cs]: bD, error: "S3 Outposts does not support S3 Accelerate", [cq]: f }, { [cs]: [{ [ct]: d, [cu]: [{ [ct]: i, [cu]: [aT, "resourceId[4]"] }] }], error: "Invalid Arn: Outpost Access Point ARN contains sub resources", [cq]: f }, { [cs]: [{ [ct]: i, [cu]: cf, [cw]: x }], [cr]: [{ [cs]: bY, [cr]: [ba, { [cs]: cj, [cr]: [{ [cs]: bI, [cr]: [{ [cs]: ck, [cr]: [{ [cs]: cl, [cr]: [{ [cs]: cm, [cr]: [{ [cs]: [{ [ct]: i, [cu]: ci, [cw]: O }], [cr]: [{ [cs]: [{ [ct]: i, [cu]: [aT, "resourceId[3]"], [cw]: L }], [cr]: [{ [cs]: [{ [ct]: h, [cu]: [{ [cv]: O }, K] }], [cr]: [{ [cs]: bE, endpoint: { [cx]: "https://{accessPointName}-{bucketArn#accountId}.{outpostId}.{url#authority}", [cy]: bl, [cE]: am }, [cq]: n }, { endpoint: { [cx]: "https://{accessPointName}-{bucketArn#accountId}.{outpostId}.s3-outposts.{bucketArn#region}.{bucketPartition#dnsSuffix}", 
[cy]: bl, [cE]: am }, [cq]: n }], [cq]: o }, { error: "Expected an outpost type `accesspoint`, found {outpostType}", [cq]: f }], [cq]: o }, { error: "Invalid ARN: expected an access point name", [cq]: f }], [cq]: o }, { error: "Invalid ARN: Expected a 4-component resource", [cq]: f }], [cq]: o }, bf], [cq]: o }, bg], [cq]: o }, bh], [cq]: o }], [cq]: o }], [cq]: o }, { error: "Invalid ARN: The outpost Id may only contain a-z, A-Z, 0-9 and `-`. Found: `{outpostId}`", [cq]: f }], [cq]: o }, { error: "Invalid ARN: The Outpost Id was not set", [cq]: f }], [cq]: o }, { error: "Invalid ARN: Unrecognized format: {Bucket} (type: {arnType})", [cq]: f }], [cq]: o }, { error: "Invalid ARN: No ARN type specified", [cq]: f }], [cq]: o }, { [cs]: [{ [ct]: k, [cu]: [ad, 0, 4, b], [cw]: P }, { [ct]: h, [cu]: [{ [cv]: P }, "arn:"] }, { [ct]: r, [cu]: [{ [ct]: d, [cu]: [bm] }] }], error: "Invalid ARN: `{Bucket}` was not a valid ARN", [cq]: f }, { [cs]: [{ [ct]: e, [cu]: [ay, c] }, bm], error: "Path-style addressing cannot be used with ARN buckets", [cq]: f }, { [cs]: bG, [cr]: [{ [cs]: bI, [cr]: [{ [cs]: [az], [cr]: [{ [cs]: [Y, ar, X, aA], endpoint: { [cx]: "https://s3-fips.dualstack.us-east-1.{partitionResult#dnsSuffix}/{uri_encoded_bucket}", [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [Y, ar, X, aC, aD], [cr]: [{ endpoint: bn, [cq]: n }], [cq]: o }, { [cs]: [Y, ar, X, aC, aG], endpoint: bn, [cq]: n }, { [cs]: [aH, ar, X, aA], endpoint: { [cx]: "https://s3-fips.us-east-1.{partitionResult#dnsSuffix}/{uri_encoded_bucket}", [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aH, ar, X, aC, aD], [cr]: [{ endpoint: bo, [cq]: n }], [cq]: o }, { [cs]: [aH, ar, X, aC, aG], endpoint: bo, [cq]: n }, { [cs]: [Y, ar, aJ, aA], endpoint: { [cx]: "https://s3.dualstack.us-east-1.{partitionResult#dnsSuffix}/{uri_encoded_bucket}", [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [Y, ar, aJ, aC, aD], [cr]: [{ endpoint: bp, [cq]: n }], [cq]: o }, { [cs]: [Y, ar, aJ, aC, aG], endpoint: bp, [cq]: n }, { [cs]: [aH, Z, ah, aJ, aA], endpoint: { [cx]: Q, [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aH, Z, ah, aJ, aC, aD], [cr]: [{ [cs]: cc, endpoint: bq, [cq]: n }, { endpoint: bq, [cq]: n }], [cq]: o }, { [cs]: [aH, Z, ah, aJ, aC, aG], endpoint: bq, [cq]: n }, { [cs]: [aH, ar, aJ, aA], endpoint: { [cx]: R, [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aH, ar, aJ, aC, aD], [cr]: [{ [cs]: cc, endpoint: { [cx]: R, [cy]: aF, [cE]: am }, [cq]: n }, { endpoint: br, [cq]: n }], [cq]: o }, { [cs]: [aH, ar, aJ, aC, aG], endpoint: br, [cq]: n }], [cq]: o }, { error: "Path-style addressing cannot be used with S3 Accelerate", [cq]: f }], [cq]: o }], [cq]: o }], [cq]: o }, { [cs]: [{ [ct]: d, [cu]: [bs] }, { [ct]: e, [cu]: [bs, c] }], [cr]: [{ [cs]: bI, [cr]: [{ [cs]: co, [cr]: [aX, aY, { [cs]: bE, endpoint: { [cx]: t, [cy]: bt, [cE]: am }, [cq]: n }, { [cs]: bJ, endpoint: { [cx]: "https://s3-object-lambda-fips.{Region}.{partitionResult#dnsSuffix}", [cy]: bt, [cE]: am }, [cq]: n }, { endpoint: { [cx]: "https://s3-object-lambda.{Region}.{partitionResult#dnsSuffix}", [cy]: bt, [cE]: am }, [cq]: n }], [cq]: o }, aS], [cq]: o }], [cq]: o }, { [cs]: [at], [cr]: [{ [cs]: bI, [cr]: [{ [cs]: co, [cr]: [{ [cs]: [X, Y, ar, aA], endpoint: { [cx]: "https://s3-fips.dualstack.us-east-1.{partitionResult#dnsSuffix}", [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [X, Y, ar, aC, aD], [cr]: [{ endpoint: bu, [cq]: n }], [cq]: o }, { [cs]: [X, Y, ar, aC, aG], endpoint: bu, [cq]: n }, { [cs]: [X, aH, ar, aA], endpoint: { [cx]: "https://s3-fips.us-east-1.{partitionResult#dnsSuffix}", [cy]: aB, 
[cE]: am }, [cq]: n }, { [cs]: [X, aH, ar, aC, aD], [cr]: [{ endpoint: bv, [cq]: n }], [cq]: o }, { [cs]: [X, aH, ar, aC, aG], endpoint: bv, [cq]: n }, { [cs]: [aJ, Y, ar, aA], endpoint: { [cx]: "https://s3.dualstack.us-east-1.{partitionResult#dnsSuffix}", [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aJ, Y, ar, aC, aD], [cr]: [{ endpoint: bw, [cq]: n }], [cq]: o }, { [cs]: [aJ, Y, ar, aC, aG], endpoint: bw, [cq]: n }, { [cs]: [aJ, aH, Z, ah, aA], endpoint: { [cx]: t, [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aJ, aH, Z, ah, aC, aD], [cr]: [{ [cs]: cc, endpoint: bx, [cq]: n }, { endpoint: bx, [cq]: n }], [cq]: o }, { [cs]: [aJ, aH, Z, ah, aC, aG], endpoint: bx, [cq]: n }, { [cs]: [aJ, aH, ar, aA], endpoint: { [cx]: S, [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aJ, aH, ar, aC, aD], [cr]: [{ [cs]: cc, endpoint: { [cx]: S, [cy]: aF, [cE]: am }, [cq]: n }, { endpoint: by, [cq]: n }], [cq]: o }, { [cs]: [aJ, aH, ar, aC, aG], endpoint: by, [cq]: n }], [cq]: o }, aS], [cq]: o }], [cq]: o }], [cq]: o }, { error: "A region must be set when sending requests to S3.", [cq]: f }] }; +exports.ruleSet = _data; diff --git a/node_modules/@aws-sdk/client-s3/dist-cjs/index.js b/node_modules/@aws-sdk/client-s3/dist-cjs/index.js new file mode 100644 index 00000000..2d5e846d --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-cjs/index.js @@ -0,0 +1,11430 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + AbortMultipartUploadCommand: () => AbortMultipartUploadCommand, + AnalyticsFilter: () => AnalyticsFilter, + AnalyticsS3ExportFileFormat: () => AnalyticsS3ExportFileFormat, + ArchiveStatus: () => ArchiveStatus, + BucketAccelerateStatus: () => BucketAccelerateStatus, + BucketAlreadyExists: () => BucketAlreadyExists, + BucketAlreadyOwnedByYou: () => BucketAlreadyOwnedByYou, + BucketCannedACL: () => BucketCannedACL, + BucketLocationConstraint: () => BucketLocationConstraint, + BucketLogsPermission: () => BucketLogsPermission, + BucketType: () => BucketType, + BucketVersioningStatus: () => BucketVersioningStatus, + ChecksumAlgorithm: () => ChecksumAlgorithm, + ChecksumMode: () => ChecksumMode, + ChecksumType: () => ChecksumType, + CompleteMultipartUploadCommand: () => CompleteMultipartUploadCommand, + CompleteMultipartUploadOutputFilterSensitiveLog: () => CompleteMultipartUploadOutputFilterSensitiveLog, + CompleteMultipartUploadRequestFilterSensitiveLog: () => CompleteMultipartUploadRequestFilterSensitiveLog, + CompressionType: () => CompressionType, + CopyObjectCommand: () => CopyObjectCommand, + CopyObjectOutputFilterSensitiveLog: () => CopyObjectOutputFilterSensitiveLog, + CopyObjectRequestFilterSensitiveLog: () => 
CopyObjectRequestFilterSensitiveLog, + CreateBucketCommand: () => CreateBucketCommand, + CreateBucketMetadataTableConfigurationCommand: () => CreateBucketMetadataTableConfigurationCommand, + CreateMultipartUploadCommand: () => CreateMultipartUploadCommand, + CreateMultipartUploadOutputFilterSensitiveLog: () => CreateMultipartUploadOutputFilterSensitiveLog, + CreateMultipartUploadRequestFilterSensitiveLog: () => CreateMultipartUploadRequestFilterSensitiveLog, + CreateSessionCommand: () => CreateSessionCommand, + CreateSessionOutputFilterSensitiveLog: () => CreateSessionOutputFilterSensitiveLog, + CreateSessionRequestFilterSensitiveLog: () => CreateSessionRequestFilterSensitiveLog, + DataRedundancy: () => DataRedundancy, + DeleteBucketAnalyticsConfigurationCommand: () => DeleteBucketAnalyticsConfigurationCommand, + DeleteBucketCommand: () => DeleteBucketCommand, + DeleteBucketCorsCommand: () => DeleteBucketCorsCommand, + DeleteBucketEncryptionCommand: () => DeleteBucketEncryptionCommand, + DeleteBucketIntelligentTieringConfigurationCommand: () => DeleteBucketIntelligentTieringConfigurationCommand, + DeleteBucketInventoryConfigurationCommand: () => DeleteBucketInventoryConfigurationCommand, + DeleteBucketLifecycleCommand: () => DeleteBucketLifecycleCommand, + DeleteBucketMetadataTableConfigurationCommand: () => DeleteBucketMetadataTableConfigurationCommand, + DeleteBucketMetricsConfigurationCommand: () => DeleteBucketMetricsConfigurationCommand, + DeleteBucketOwnershipControlsCommand: () => DeleteBucketOwnershipControlsCommand, + DeleteBucketPolicyCommand: () => DeleteBucketPolicyCommand, + DeleteBucketReplicationCommand: () => DeleteBucketReplicationCommand, + DeleteBucketTaggingCommand: () => DeleteBucketTaggingCommand, + DeleteBucketWebsiteCommand: () => DeleteBucketWebsiteCommand, + DeleteMarkerReplicationStatus: () => DeleteMarkerReplicationStatus, + DeleteObjectCommand: () => DeleteObjectCommand, + DeleteObjectTaggingCommand: () => DeleteObjectTaggingCommand, + DeleteObjectsCommand: () => DeleteObjectsCommand, + DeletePublicAccessBlockCommand: () => DeletePublicAccessBlockCommand, + EncodingType: () => EncodingType, + EncryptionFilterSensitiveLog: () => EncryptionFilterSensitiveLog, + EncryptionTypeMismatch: () => EncryptionTypeMismatch, + Event: () => Event, + ExistingObjectReplicationStatus: () => ExistingObjectReplicationStatus, + ExpirationStatus: () => ExpirationStatus, + ExpressionType: () => ExpressionType, + FileHeaderInfo: () => FileHeaderInfo, + FilterRuleName: () => FilterRuleName, + GetBucketAccelerateConfigurationCommand: () => GetBucketAccelerateConfigurationCommand, + GetBucketAclCommand: () => GetBucketAclCommand, + GetBucketAnalyticsConfigurationCommand: () => GetBucketAnalyticsConfigurationCommand, + GetBucketCorsCommand: () => GetBucketCorsCommand, + GetBucketEncryptionCommand: () => GetBucketEncryptionCommand, + GetBucketEncryptionOutputFilterSensitiveLog: () => GetBucketEncryptionOutputFilterSensitiveLog, + GetBucketIntelligentTieringConfigurationCommand: () => GetBucketIntelligentTieringConfigurationCommand, + GetBucketInventoryConfigurationCommand: () => GetBucketInventoryConfigurationCommand, + GetBucketInventoryConfigurationOutputFilterSensitiveLog: () => GetBucketInventoryConfigurationOutputFilterSensitiveLog, + GetBucketLifecycleConfigurationCommand: () => GetBucketLifecycleConfigurationCommand, + GetBucketLocationCommand: () => GetBucketLocationCommand, + GetBucketLoggingCommand: () => GetBucketLoggingCommand, + GetBucketMetadataTableConfigurationCommand: () 
=> GetBucketMetadataTableConfigurationCommand, + GetBucketMetricsConfigurationCommand: () => GetBucketMetricsConfigurationCommand, + GetBucketNotificationConfigurationCommand: () => GetBucketNotificationConfigurationCommand, + GetBucketOwnershipControlsCommand: () => GetBucketOwnershipControlsCommand, + GetBucketPolicyCommand: () => GetBucketPolicyCommand, + GetBucketPolicyStatusCommand: () => GetBucketPolicyStatusCommand, + GetBucketReplicationCommand: () => GetBucketReplicationCommand, + GetBucketRequestPaymentCommand: () => GetBucketRequestPaymentCommand, + GetBucketTaggingCommand: () => GetBucketTaggingCommand, + GetBucketVersioningCommand: () => GetBucketVersioningCommand, + GetBucketWebsiteCommand: () => GetBucketWebsiteCommand, + GetObjectAclCommand: () => GetObjectAclCommand, + GetObjectAttributesCommand: () => GetObjectAttributesCommand, + GetObjectAttributesRequestFilterSensitiveLog: () => GetObjectAttributesRequestFilterSensitiveLog, + GetObjectCommand: () => GetObjectCommand, + GetObjectLegalHoldCommand: () => GetObjectLegalHoldCommand, + GetObjectLockConfigurationCommand: () => GetObjectLockConfigurationCommand, + GetObjectOutputFilterSensitiveLog: () => GetObjectOutputFilterSensitiveLog, + GetObjectRequestFilterSensitiveLog: () => GetObjectRequestFilterSensitiveLog, + GetObjectRetentionCommand: () => GetObjectRetentionCommand, + GetObjectTaggingCommand: () => GetObjectTaggingCommand, + GetObjectTorrentCommand: () => GetObjectTorrentCommand, + GetObjectTorrentOutputFilterSensitiveLog: () => GetObjectTorrentOutputFilterSensitiveLog, + GetPublicAccessBlockCommand: () => GetPublicAccessBlockCommand, + HeadBucketCommand: () => HeadBucketCommand, + HeadObjectCommand: () => HeadObjectCommand, + HeadObjectOutputFilterSensitiveLog: () => HeadObjectOutputFilterSensitiveLog, + HeadObjectRequestFilterSensitiveLog: () => HeadObjectRequestFilterSensitiveLog, + IntelligentTieringAccessTier: () => IntelligentTieringAccessTier, + IntelligentTieringStatus: () => IntelligentTieringStatus, + InvalidObjectState: () => InvalidObjectState, + InvalidRequest: () => InvalidRequest, + InvalidWriteOffset: () => InvalidWriteOffset, + InventoryConfigurationFilterSensitiveLog: () => InventoryConfigurationFilterSensitiveLog, + InventoryDestinationFilterSensitiveLog: () => InventoryDestinationFilterSensitiveLog, + InventoryEncryptionFilterSensitiveLog: () => InventoryEncryptionFilterSensitiveLog, + InventoryFormat: () => InventoryFormat, + InventoryFrequency: () => InventoryFrequency, + InventoryIncludedObjectVersions: () => InventoryIncludedObjectVersions, + InventoryOptionalField: () => InventoryOptionalField, + InventoryS3BucketDestinationFilterSensitiveLog: () => InventoryS3BucketDestinationFilterSensitiveLog, + JSONType: () => JSONType, + ListBucketAnalyticsConfigurationsCommand: () => ListBucketAnalyticsConfigurationsCommand, + ListBucketIntelligentTieringConfigurationsCommand: () => ListBucketIntelligentTieringConfigurationsCommand, + ListBucketInventoryConfigurationsCommand: () => ListBucketInventoryConfigurationsCommand, + ListBucketInventoryConfigurationsOutputFilterSensitiveLog: () => ListBucketInventoryConfigurationsOutputFilterSensitiveLog, + ListBucketMetricsConfigurationsCommand: () => ListBucketMetricsConfigurationsCommand, + ListBucketsCommand: () => ListBucketsCommand, + ListDirectoryBucketsCommand: () => ListDirectoryBucketsCommand, + ListMultipartUploadsCommand: () => ListMultipartUploadsCommand, + ListObjectVersionsCommand: () => ListObjectVersionsCommand, + ListObjectsCommand: () => 
ListObjectsCommand, + ListObjectsV2Command: () => ListObjectsV2Command, + ListPartsCommand: () => ListPartsCommand, + ListPartsRequestFilterSensitiveLog: () => ListPartsRequestFilterSensitiveLog, + LocationType: () => LocationType, + MFADelete: () => MFADelete, + MFADeleteStatus: () => MFADeleteStatus, + MetadataDirective: () => MetadataDirective, + MetricsFilter: () => MetricsFilter, + MetricsStatus: () => MetricsStatus, + NoSuchBucket: () => NoSuchBucket, + NoSuchKey: () => NoSuchKey, + NoSuchUpload: () => NoSuchUpload, + NotFound: () => NotFound, + ObjectAlreadyInActiveTierError: () => ObjectAlreadyInActiveTierError, + ObjectAttributes: () => ObjectAttributes, + ObjectCannedACL: () => ObjectCannedACL, + ObjectLockEnabled: () => ObjectLockEnabled, + ObjectLockLegalHoldStatus: () => ObjectLockLegalHoldStatus, + ObjectLockMode: () => ObjectLockMode, + ObjectLockRetentionMode: () => ObjectLockRetentionMode, + ObjectNotInActiveTierError: () => ObjectNotInActiveTierError, + ObjectOwnership: () => ObjectOwnership, + ObjectStorageClass: () => ObjectStorageClass, + ObjectVersionStorageClass: () => ObjectVersionStorageClass, + OptionalObjectAttributes: () => OptionalObjectAttributes, + OutputLocationFilterSensitiveLog: () => OutputLocationFilterSensitiveLog, + OwnerOverride: () => OwnerOverride, + PartitionDateSource: () => PartitionDateSource, + Payer: () => Payer, + Permission: () => Permission, + Protocol: () => Protocol, + PutBucketAccelerateConfigurationCommand: () => PutBucketAccelerateConfigurationCommand, + PutBucketAclCommand: () => PutBucketAclCommand, + PutBucketAnalyticsConfigurationCommand: () => PutBucketAnalyticsConfigurationCommand, + PutBucketCorsCommand: () => PutBucketCorsCommand, + PutBucketEncryptionCommand: () => PutBucketEncryptionCommand, + PutBucketEncryptionRequestFilterSensitiveLog: () => PutBucketEncryptionRequestFilterSensitiveLog, + PutBucketIntelligentTieringConfigurationCommand: () => PutBucketIntelligentTieringConfigurationCommand, + PutBucketInventoryConfigurationCommand: () => PutBucketInventoryConfigurationCommand, + PutBucketInventoryConfigurationRequestFilterSensitiveLog: () => PutBucketInventoryConfigurationRequestFilterSensitiveLog, + PutBucketLifecycleConfigurationCommand: () => PutBucketLifecycleConfigurationCommand, + PutBucketLoggingCommand: () => PutBucketLoggingCommand, + PutBucketMetricsConfigurationCommand: () => PutBucketMetricsConfigurationCommand, + PutBucketNotificationConfigurationCommand: () => PutBucketNotificationConfigurationCommand, + PutBucketOwnershipControlsCommand: () => PutBucketOwnershipControlsCommand, + PutBucketPolicyCommand: () => PutBucketPolicyCommand, + PutBucketReplicationCommand: () => PutBucketReplicationCommand, + PutBucketRequestPaymentCommand: () => PutBucketRequestPaymentCommand, + PutBucketTaggingCommand: () => PutBucketTaggingCommand, + PutBucketVersioningCommand: () => PutBucketVersioningCommand, + PutBucketWebsiteCommand: () => PutBucketWebsiteCommand, + PutObjectAclCommand: () => PutObjectAclCommand, + PutObjectCommand: () => PutObjectCommand, + PutObjectLegalHoldCommand: () => PutObjectLegalHoldCommand, + PutObjectLockConfigurationCommand: () => PutObjectLockConfigurationCommand, + PutObjectOutputFilterSensitiveLog: () => PutObjectOutputFilterSensitiveLog, + PutObjectRequestFilterSensitiveLog: () => PutObjectRequestFilterSensitiveLog, + PutObjectRetentionCommand: () => PutObjectRetentionCommand, + PutObjectTaggingCommand: () => PutObjectTaggingCommand, + PutPublicAccessBlockCommand: () => 
PutPublicAccessBlockCommand,
+  QuoteFields: () => QuoteFields,
+  ReplicaModificationsStatus: () => ReplicaModificationsStatus,
+  ReplicationRuleStatus: () => ReplicationRuleStatus,
+  ReplicationStatus: () => ReplicationStatus,
+  ReplicationTimeStatus: () => ReplicationTimeStatus,
+  RequestCharged: () => RequestCharged,
+  RequestPayer: () => RequestPayer,
+  RestoreObjectCommand: () => RestoreObjectCommand,
+  RestoreObjectRequestFilterSensitiveLog: () => RestoreObjectRequestFilterSensitiveLog,
+  RestoreRequestFilterSensitiveLog: () => RestoreRequestFilterSensitiveLog,
+  RestoreRequestType: () => RestoreRequestType,
+  S3: () => S3,
+  S3Client: () => S3Client,
+  S3LocationFilterSensitiveLog: () => S3LocationFilterSensitiveLog,
+  S3ServiceException: () => S3ServiceException,
+  SSEKMSFilterSensitiveLog: () => SSEKMSFilterSensitiveLog,
+  SelectObjectContentCommand: () => SelectObjectContentCommand,
+  SelectObjectContentEventStream: () => SelectObjectContentEventStream,
+  SelectObjectContentEventStreamFilterSensitiveLog: () => SelectObjectContentEventStreamFilterSensitiveLog,
+  SelectObjectContentOutputFilterSensitiveLog: () => SelectObjectContentOutputFilterSensitiveLog,
+  SelectObjectContentRequestFilterSensitiveLog: () => SelectObjectContentRequestFilterSensitiveLog,
+  ServerSideEncryption: () => ServerSideEncryption,
+  ServerSideEncryptionByDefaultFilterSensitiveLog: () => ServerSideEncryptionByDefaultFilterSensitiveLog,
+  ServerSideEncryptionConfigurationFilterSensitiveLog: () => ServerSideEncryptionConfigurationFilterSensitiveLog,
+  ServerSideEncryptionRuleFilterSensitiveLog: () => ServerSideEncryptionRuleFilterSensitiveLog,
+  SessionCredentialsFilterSensitiveLog: () => SessionCredentialsFilterSensitiveLog,
+  SessionMode: () => SessionMode,
+  SseKmsEncryptedObjectsStatus: () => SseKmsEncryptedObjectsStatus,
+  StorageClass: () => StorageClass,
+  StorageClassAnalysisSchemaVersion: () => StorageClassAnalysisSchemaVersion,
+  TaggingDirective: () => TaggingDirective,
+  Tier: () => Tier,
+  TooManyParts: () => TooManyParts,
+  TransitionDefaultMinimumObjectSize: () => TransitionDefaultMinimumObjectSize,
+  TransitionStorageClass: () => TransitionStorageClass,
+  Type: () => Type,
+  UploadPartCommand: () => UploadPartCommand,
+  UploadPartCopyCommand: () => UploadPartCopyCommand,
+  UploadPartCopyOutputFilterSensitiveLog: () => UploadPartCopyOutputFilterSensitiveLog,
+  UploadPartCopyRequestFilterSensitiveLog: () => UploadPartCopyRequestFilterSensitiveLog,
+  UploadPartOutputFilterSensitiveLog: () => UploadPartOutputFilterSensitiveLog,
+  UploadPartRequestFilterSensitiveLog: () => UploadPartRequestFilterSensitiveLog,
+  WriteGetObjectResponseCommand: () => WriteGetObjectResponseCommand,
+  WriteGetObjectResponseRequestFilterSensitiveLog: () => WriteGetObjectResponseRequestFilterSensitiveLog,
+  __Client: () => import_smithy_client.Client,
+  paginateListBuckets: () => paginateListBuckets,
+  paginateListDirectoryBuckets: () => paginateListDirectoryBuckets,
+  paginateListObjectsV2: () => paginateListObjectsV2,
+  paginateListParts: () => paginateListParts,
+  waitForBucketExists: () => waitForBucketExists,
+  waitForBucketNotExists: () => waitForBucketNotExists,
+  waitForObjectExists: () => waitForObjectExists,
+  waitForObjectNotExists: () => waitForObjectNotExists,
+  waitUntilBucketExists: () => waitUntilBucketExists,
+  waitUntilBucketNotExists: () => waitUntilBucketNotExists,
+  waitUntilObjectExists: () => waitUntilObjectExists,
+  waitUntilObjectNotExists: () => waitUntilObjectNotExists
+});
+module.exports = __toCommonJS(index_exports);
+
+// src/S3Client.ts
+var import_middleware_expect_continue = require("@aws-sdk/middleware-expect-continue");
+var import_middleware_flexible_checksums = require("@aws-sdk/middleware-flexible-checksums");
+var import_middleware_host_header = require("@aws-sdk/middleware-host-header");
+var import_middleware_logger = require("@aws-sdk/middleware-logger");
+var import_middleware_recursion_detection = require("@aws-sdk/middleware-recursion-detection");
+var import_middleware_sdk_s32 = require("@aws-sdk/middleware-sdk-s3");
+var import_middleware_user_agent = require("@aws-sdk/middleware-user-agent");
+var import_config_resolver = require("@smithy/config-resolver");
+var import_core3 = require("@smithy/core");
+var import_eventstream_serde_config_resolver = require("@smithy/eventstream-serde-config-resolver");
+var import_middleware_content_length = require("@smithy/middleware-content-length");
+
+var import_middleware_retry = require("@smithy/middleware-retry");
+
+var import_httpAuthSchemeProvider = require("./auth/httpAuthSchemeProvider");
+
+// src/commands/CreateSessionCommand.ts
+var import_middleware_sdk_s3 = require("@aws-sdk/middleware-sdk-s3");
+var import_middleware_endpoint = require("@smithy/middleware-endpoint");
+var import_middleware_serde = require("@smithy/middleware-serde");
+
+
+// src/endpoint/EndpointParameters.ts
+var resolveClientEndpointParameters = /* @__PURE__ */ __name((options) => {
+  return Object.assign(options, {
+    useFipsEndpoint: options.useFipsEndpoint ?? false,
+    useDualstackEndpoint: options.useDualstackEndpoint ?? false,
+    forcePathStyle: options.forcePathStyle ?? false,
+    useAccelerateEndpoint: options.useAccelerateEndpoint ?? false,
+    useGlobalEndpoint: options.useGlobalEndpoint ?? false,
+    disableMultiregionAccessPoints: options.disableMultiregionAccessPoints ?? false,
+    defaultSigningName: "s3"
+  });
+}, "resolveClientEndpointParameters");
+var commonParams = {
+  ForcePathStyle: { type: "clientContextParams", name: "forcePathStyle" },
+  UseArnRegion: { type: "clientContextParams", name: "useArnRegion" },
+  DisableMultiRegionAccessPoints: { type: "clientContextParams", name: "disableMultiregionAccessPoints" },
+  Accelerate: { type: "clientContextParams", name: "useAccelerateEndpoint" },
+  DisableS3ExpressSessionAuth: { type: "clientContextParams", name: "disableS3ExpressSessionAuth" },
+  UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" },
+  UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" },
+  Endpoint: { type: "builtInParams", name: "endpoint" },
+  Region: { type: "builtInParams", name: "region" },
+  UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }
+};
+
+// src/models/models_0.ts
+
+
+// src/models/S3ServiceException.ts
+var import_smithy_client = require("@smithy/smithy-client");
+var S3ServiceException = class _S3ServiceException extends import_smithy_client.ServiceException {
+  static {
+    __name(this, "S3ServiceException");
+  }
+  /**
+   * @internal
+   */
+  constructor(options) {
+    super(options);
+    Object.setPrototypeOf(this, _S3ServiceException.prototype);
+  }
+};
+
+// src/models/models_0.ts
+var RequestCharged = {
+  requester: "requester"
+};
+var RequestPayer = {
+  requester: "requester"
+};
+var NoSuchUpload = class _NoSuchUpload extends S3ServiceException {
+  static {
+    __name(this, "NoSuchUpload");
+  }
+  name = "NoSuchUpload";
+  $fault = "client";
+  /**
+   * @internal
+   */
+  constructor(opts) {
+    super({
+      name: "NoSuchUpload",
+      $fault: "client",
+      ...opts
+    });
+    Object.setPrototypeOf(this, _NoSuchUpload.prototype);
+  }
+};
+var BucketAccelerateStatus = {
+  Enabled: "Enabled",
+  Suspended: "Suspended"
+};
+var Type = {
+  AmazonCustomerByEmail: "AmazonCustomerByEmail",
+  CanonicalUser: "CanonicalUser",
+  Group: "Group"
+};
+var Permission = {
+  FULL_CONTROL: "FULL_CONTROL",
+  READ: "READ",
+  READ_ACP: "READ_ACP",
+  WRITE: "WRITE",
+  WRITE_ACP: "WRITE_ACP"
+};
+var OwnerOverride = {
+  Destination: "Destination"
+};
+var ChecksumType = {
+  COMPOSITE: "COMPOSITE",
+  FULL_OBJECT: "FULL_OBJECT"
+};
+var ServerSideEncryption = {
+  AES256: "AES256",
+  aws_kms: "aws:kms",
+  aws_kms_dsse: "aws:kms:dsse"
+};
+var ObjectCannedACL = {
+  authenticated_read: "authenticated-read",
+  aws_exec_read: "aws-exec-read",
+  bucket_owner_full_control: "bucket-owner-full-control",
+  bucket_owner_read: "bucket-owner-read",
+  private: "private",
+  public_read: "public-read",
+  public_read_write: "public-read-write"
+};
+var ChecksumAlgorithm = {
+  CRC32: "CRC32",
+  CRC32C: "CRC32C",
+  CRC64NVME: "CRC64NVME",
+  SHA1: "SHA1",
+  SHA256: "SHA256"
+};
+var MetadataDirective = {
+  COPY: "COPY",
+  REPLACE: "REPLACE"
+};
+var ObjectLockLegalHoldStatus = {
+  OFF: "OFF",
+  ON: "ON"
+};
+var ObjectLockMode = {
+  COMPLIANCE: "COMPLIANCE",
+  GOVERNANCE: "GOVERNANCE"
+};
+var StorageClass = {
+  DEEP_ARCHIVE: "DEEP_ARCHIVE",
+  EXPRESS_ONEZONE: "EXPRESS_ONEZONE",
+  GLACIER: "GLACIER",
+  GLACIER_IR: "GLACIER_IR",
+  INTELLIGENT_TIERING: "INTELLIGENT_TIERING",
+  ONEZONE_IA: "ONEZONE_IA",
+  OUTPOSTS: "OUTPOSTS",
+  REDUCED_REDUNDANCY: "REDUCED_REDUNDANCY",
+  SNOW: "SNOW",
+  STANDARD: "STANDARD",
+  STANDARD_IA: "STANDARD_IA"
+};
+var TaggingDirective = {
+  COPY: "COPY",
+  REPLACE: "REPLACE"
+};
+var ObjectNotInActiveTierError = class _ObjectNotInActiveTierError extends S3ServiceException {
+  static {
+    __name(this, "ObjectNotInActiveTierError");
+  }
+  name = "ObjectNotInActiveTierError";
+  $fault = "client";
+  /**
+   * @internal
+   */
+  constructor(opts) {
+    super({
+      name: "ObjectNotInActiveTierError",
+      $fault: "client",
+      ...opts
+    });
+    Object.setPrototypeOf(this, _ObjectNotInActiveTierError.prototype);
+  }
+};
+var BucketAlreadyExists = class _BucketAlreadyExists extends S3ServiceException {
+  static {
+    __name(this, "BucketAlreadyExists");
+  }
+  name = "BucketAlreadyExists";
+  $fault = "client";
+  /**
+   * @internal
+   */
+  constructor(opts) {
+    super({
+      name: "BucketAlreadyExists",
+      $fault: "client",
+      ...opts
+    });
+    Object.setPrototypeOf(this, _BucketAlreadyExists.prototype);
+  }
+};
+var BucketAlreadyOwnedByYou = class _BucketAlreadyOwnedByYou extends S3ServiceException {
+  static {
+    __name(this, "BucketAlreadyOwnedByYou");
+  }
+  name = "BucketAlreadyOwnedByYou";
+  $fault = "client";
+  /**
+   * @internal
+   */
+  constructor(opts) {
+    super({
+      name: "BucketAlreadyOwnedByYou",
+      $fault: "client",
+      ...opts
+    });
+    Object.setPrototypeOf(this, _BucketAlreadyOwnedByYou.prototype);
+  }
+};
+var BucketCannedACL = {
+  authenticated_read: "authenticated-read",
+  private: "private",
+  public_read: "public-read",
+  public_read_write: "public-read-write"
+};
+var DataRedundancy = {
+  SingleAvailabilityZone: "SingleAvailabilityZone",
+  SingleLocalZone: "SingleLocalZone"
+};
+var BucketType = {
+  Directory: "Directory"
+};
+var LocationType = {
+  AvailabilityZone: "AvailabilityZone",
+  LocalZone: "LocalZone"
+};
+var BucketLocationConstraint = {
+  EU: "EU",
+  af_south_1: "af-south-1",
+  ap_east_1: "ap-east-1",
+  ap_northeast_1: "ap-northeast-1",
+  ap_northeast_2: "ap-northeast-2",
+  ap_northeast_3: "ap-northeast-3",
+  ap_south_1: "ap-south-1",
+  ap_south_2: "ap-south-2",
+  ap_southeast_1: "ap-southeast-1",
+  ap_southeast_2: "ap-southeast-2",
+  ap_southeast_3: "ap-southeast-3",
+  ap_southeast_4: "ap-southeast-4",
+  ap_southeast_5: "ap-southeast-5",
+  ca_central_1: "ca-central-1",
+  cn_north_1: "cn-north-1",
+  cn_northwest_1: "cn-northwest-1",
+  eu_central_1: "eu-central-1",
+  eu_central_2: "eu-central-2",
+  eu_north_1: "eu-north-1",
+  eu_south_1: "eu-south-1",
+  eu_south_2: "eu-south-2",
+  eu_west_1: "eu-west-1",
+  eu_west_2: "eu-west-2",
+  eu_west_3: "eu-west-3",
+  il_central_1: "il-central-1",
+  me_central_1: "me-central-1",
+  me_south_1: "me-south-1",
+  sa_east_1: "sa-east-1",
+  us_east_2: "us-east-2",
+  us_gov_east_1: "us-gov-east-1",
+  us_gov_west_1: "us-gov-west-1",
+  us_west_1: "us-west-1",
+  us_west_2: "us-west-2"
+};
+var ObjectOwnership = {
+  BucketOwnerEnforced: "BucketOwnerEnforced",
+  BucketOwnerPreferred: "BucketOwnerPreferred",
+  ObjectWriter: "ObjectWriter"
+};
+var SessionMode = {
+  ReadOnly: "ReadOnly",
+  ReadWrite: "ReadWrite"
+};
+var NoSuchBucket = class _NoSuchBucket extends S3ServiceException {
+  static {
+    __name(this, "NoSuchBucket");
+  }
+  name = "NoSuchBucket";
+  $fault = "client";
+  /**
+   * @internal
+   */
+  constructor(opts) {
+    super({
+      name: "NoSuchBucket",
+      $fault: "client",
+      ...opts
+    });
+    Object.setPrototypeOf(this, _NoSuchBucket.prototype);
+  }
+};
+var AnalyticsFilter;
+((AnalyticsFilter2) => {
+  AnalyticsFilter2.visit = /* @__PURE__ */ __name((value, visitor) => {
+    if (value.Prefix !== void 0) return visitor.Prefix(value.Prefix);
+    if (value.Tag !== void 0) return visitor.Tag(value.Tag);
+    if (value.And !== void 0) return visitor.And(value.And);
+    return visitor._(value.$unknown[0], value.$unknown[1]);
+  }, "visit");
+})(AnalyticsFilter || (AnalyticsFilter = {}));
+var AnalyticsS3ExportFileFormat = {
+  CSV: "CSV"
+};
+var StorageClassAnalysisSchemaVersion = {
+  V_1: "V_1"
+};
+var IntelligentTieringStatus = {
+  Disabled: "Disabled",
+  Enabled: "Enabled"
+};
+var IntelligentTieringAccessTier = {
+  ARCHIVE_ACCESS: "ARCHIVE_ACCESS",
+  DEEP_ARCHIVE_ACCESS: "DEEP_ARCHIVE_ACCESS"
+};
+var InventoryFormat = {
+  CSV: "CSV",
+  ORC: "ORC",
+  Parquet: "Parquet"
+};
+var InventoryIncludedObjectVersions = {
+  All: "All",
+  Current: "Current"
+};
+var InventoryOptionalField = {
+  BucketKeyStatus: "BucketKeyStatus",
+  ChecksumAlgorithm: "ChecksumAlgorithm",
+  ETag: "ETag",
+  EncryptionStatus: "EncryptionStatus",
+  IntelligentTieringAccessTier: "IntelligentTieringAccessTier",
+  IsMultipartUploaded: "IsMultipartUploaded",
+  LastModifiedDate: "LastModifiedDate",
+  ObjectAccessControlList: "ObjectAccessControlList",
+  ObjectLockLegalHoldStatus: "ObjectLockLegalHoldStatus",
+  ObjectLockMode: "ObjectLockMode",
+  ObjectLockRetainUntilDate: "ObjectLockRetainUntilDate",
+  ObjectOwner: "ObjectOwner",
+  ReplicationStatus: "ReplicationStatus",
+  Size: "Size",
+  StorageClass: "StorageClass"
+};
+var InventoryFrequency = {
+  Daily: "Daily",
+  Weekly: "Weekly"
+};
+var TransitionStorageClass = {
+  DEEP_ARCHIVE: "DEEP_ARCHIVE",
+  GLACIER: "GLACIER",
+  GLACIER_IR: "GLACIER_IR",
+  INTELLIGENT_TIERING: "INTELLIGENT_TIERING",
+  ONEZONE_IA: "ONEZONE_IA",
+  STANDARD_IA: "STANDARD_IA"
+};
+var ExpirationStatus = {
+  Disabled: "Disabled",
+  Enabled: "Enabled"
+};
+var TransitionDefaultMinimumObjectSize = {
+  all_storage_classes_128K: "all_storage_classes_128K",
+  varies_by_storage_class: "varies_by_storage_class"
+};
+var BucketLogsPermission = {
+  FULL_CONTROL: "FULL_CONTROL",
+  READ: "READ",
+  WRITE: "WRITE"
+};
+var PartitionDateSource = {
+  DeliveryTime: "DeliveryTime",
+  EventTime: "EventTime"
+};
+var MetricsFilter;
+((MetricsFilter2) => {
+  MetricsFilter2.visit = /* @__PURE__ */ __name((value, visitor) => {
+    if (value.Prefix !== void 0) return visitor.Prefix(value.Prefix);
+    if (value.Tag !== void 0) return visitor.Tag(value.Tag);
+    if (value.AccessPointArn !== void 0) return visitor.AccessPointArn(value.AccessPointArn);
+    if (value.And !== void 0) return visitor.And(value.And);
+    return visitor._(value.$unknown[0], value.$unknown[1]);
+  }, "visit");
+})(MetricsFilter || (MetricsFilter = {}));
+var Event = {
+  s3_IntelligentTiering: "s3:IntelligentTiering",
+  s3_LifecycleExpiration_: "s3:LifecycleExpiration:*",
+  s3_LifecycleExpiration_Delete: "s3:LifecycleExpiration:Delete",
+  s3_LifecycleExpiration_DeleteMarkerCreated: "s3:LifecycleExpiration:DeleteMarkerCreated",
+  s3_LifecycleTransition: "s3:LifecycleTransition",
+  s3_ObjectAcl_Put: "s3:ObjectAcl:Put",
+  s3_ObjectCreated_: "s3:ObjectCreated:*",
+  s3_ObjectCreated_CompleteMultipartUpload: "s3:ObjectCreated:CompleteMultipartUpload",
+  s3_ObjectCreated_Copy: "s3:ObjectCreated:Copy",
+  s3_ObjectCreated_Post: "s3:ObjectCreated:Post",
+  s3_ObjectCreated_Put: "s3:ObjectCreated:Put",
+  s3_ObjectRemoved_: "s3:ObjectRemoved:*",
+  s3_ObjectRemoved_Delete: "s3:ObjectRemoved:Delete",
+  s3_ObjectRemoved_DeleteMarkerCreated: "s3:ObjectRemoved:DeleteMarkerCreated",
+  s3_ObjectRestore_: "s3:ObjectRestore:*",
+  s3_ObjectRestore_Completed: "s3:ObjectRestore:Completed",
+  s3_ObjectRestore_Delete: "s3:ObjectRestore:Delete",
+  s3_ObjectRestore_Post: "s3:ObjectRestore:Post",
+  s3_ObjectTagging_: "s3:ObjectTagging:*",
+  s3_ObjectTagging_Delete: "s3:ObjectTagging:Delete",
+  s3_ObjectTagging_Put: "s3:ObjectTagging:Put",
+  s3_ReducedRedundancyLostObject: "s3:ReducedRedundancyLostObject",
+  s3_Replication_: "s3:Replication:*",
+  s3_Replication_OperationFailedReplication: "s3:Replication:OperationFailedReplication",
+  s3_Replication_OperationMissedThreshold: "s3:Replication:OperationMissedThreshold",
+  s3_Replication_OperationNotTracked: "s3:Replication:OperationNotTracked",
+  s3_Replication_OperationReplicatedAfterThreshold: "s3:Replication:OperationReplicatedAfterThreshold"
+};
+var FilterRuleName = {
+  prefix: "prefix",
+  suffix: "suffix"
+};
+var DeleteMarkerReplicationStatus = {
+  Disabled: "Disabled",
+  Enabled: "Enabled"
+};
+var MetricsStatus = {
+  Disabled: "Disabled",
+  Enabled: "Enabled"
+};
+var ReplicationTimeStatus = {
+  Disabled: "Disabled",
+  Enabled: "Enabled"
+};
+var ExistingObjectReplicationStatus = {
+  Disabled: "Disabled",
+  Enabled: "Enabled"
+};
+var ReplicaModificationsStatus = {
+  Disabled: "Disabled",
+  Enabled: "Enabled"
+};
+var SseKmsEncryptedObjectsStatus = {
+  Disabled: "Disabled",
+  Enabled: "Enabled"
+};
+var ReplicationRuleStatus = {
+  Disabled: "Disabled",
+  Enabled: "Enabled"
+};
+var Payer = {
+  BucketOwner: "BucketOwner",
+  Requester: "Requester"
+};
+var MFADeleteStatus = {
+  Disabled: "Disabled",
+  Enabled: "Enabled"
+};
+var BucketVersioningStatus = {
+  Enabled: "Enabled",
+  Suspended: "Suspended"
+};
+var Protocol = {
+  http: "http",
+  https: "https"
+};
+var ReplicationStatus = {
+  COMPLETE: "COMPLETE",
+  COMPLETED: "COMPLETED",
+  FAILED: "FAILED",
+  PENDING: "PENDING",
+  REPLICA: "REPLICA"
+};
+var ChecksumMode = {
+  ENABLED: "ENABLED"
+};
+var InvalidObjectState = class _InvalidObjectState extends S3ServiceException {
+  static {
+    __name(this, "InvalidObjectState");
+  }
+  name = "InvalidObjectState";
+  $fault = "client";
+  StorageClass;
+  AccessTier;
+  /**
+   * @internal
+   */
+  constructor(opts) {
+    super({
+      name: "InvalidObjectState",
+      $fault: "client",
+      ...opts
+    });
+    Object.setPrototypeOf(this, _InvalidObjectState.prototype);
+    this.StorageClass = opts.StorageClass;
+    this.AccessTier = opts.AccessTier;
+  }
+};
+var NoSuchKey = class _NoSuchKey extends S3ServiceException {
+  static {
+    __name(this, "NoSuchKey");
+  }
+  name = "NoSuchKey";
+  $fault = "client";
+  /**
+   * @internal
+   */
+  constructor(opts) {
+    super({
+      name: "NoSuchKey",
+      $fault: "client",
+      ...opts
+    });
+    Object.setPrototypeOf(this, _NoSuchKey.prototype);
+  }
+};
+var ObjectAttributes = {
+  CHECKSUM: "Checksum",
+  ETAG: "ETag",
+  OBJECT_PARTS: "ObjectParts",
+  OBJECT_SIZE: "ObjectSize",
+  STORAGE_CLASS: "StorageClass"
+};
+var ObjectLockEnabled = {
+  Enabled: "Enabled"
+};
+var ObjectLockRetentionMode = {
+  COMPLIANCE: "COMPLIANCE",
+  GOVERNANCE: "GOVERNANCE"
+};
+var NotFound = class _NotFound extends S3ServiceException {
+  static {
+    __name(this, "NotFound");
+  }
+  name = "NotFound";
+  $fault = "client";
+  /**
+   * @internal
+   */
+  constructor(opts) {
+    super({
+      name: "NotFound",
+      $fault: "client",
+      ...opts
+    });
+    Object.setPrototypeOf(this, _NotFound.prototype);
+  }
+};
+var ArchiveStatus = {
+  ARCHIVE_ACCESS: "ARCHIVE_ACCESS",
+  DEEP_ARCHIVE_ACCESS: "DEEP_ARCHIVE_ACCESS"
+};
+var EncodingType = {
+  url: "url"
+};
+var ObjectStorageClass = {
+  DEEP_ARCHIVE: "DEEP_ARCHIVE",
+  EXPRESS_ONEZONE: "EXPRESS_ONEZONE",
+  GLACIER: "GLACIER",
+  GLACIER_IR: "GLACIER_IR",
+  INTELLIGENT_TIERING: "INTELLIGENT_TIERING",
+  ONEZONE_IA: "ONEZONE_IA",
+  OUTPOSTS: "OUTPOSTS",
+  REDUCED_REDUNDANCY: "REDUCED_REDUNDANCY",
+  SNOW: "SNOW",
+  STANDARD: "STANDARD",
+  STANDARD_IA: "STANDARD_IA"
+};
+var OptionalObjectAttributes = {
+  RESTORE_STATUS: "RestoreStatus"
+};
+var ObjectVersionStorageClass = {
+  STANDARD: "STANDARD"
+};
+var CompleteMultipartUploadOutputFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSEKMSKeyId && { SSEKMSKeyId: import_smithy_client.SENSITIVE_STRING }
+}), "CompleteMultipartUploadOutputFilterSensitiveLog");
+var CompleteMultipartUploadRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSECustomerKey && { SSECustomerKey: import_smithy_client.SENSITIVE_STRING }
+}), "CompleteMultipartUploadRequestFilterSensitiveLog");
+var CopyObjectOutputFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSEKMSKeyId && { SSEKMSKeyId: import_smithy_client.SENSITIVE_STRING },
+  ...obj.SSEKMSEncryptionContext && { SSEKMSEncryptionContext: import_smithy_client.SENSITIVE_STRING }
+}), "CopyObjectOutputFilterSensitiveLog");
+var CopyObjectRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSECustomerKey && { SSECustomerKey: import_smithy_client.SENSITIVE_STRING },
+  ...obj.SSEKMSKeyId && { SSEKMSKeyId: import_smithy_client.SENSITIVE_STRING },
+  ...obj.SSEKMSEncryptionContext && { SSEKMSEncryptionContext: import_smithy_client.SENSITIVE_STRING },
+  ...obj.CopySourceSSECustomerKey && { CopySourceSSECustomerKey: import_smithy_client.SENSITIVE_STRING }
+}), "CopyObjectRequestFilterSensitiveLog");
+var CreateMultipartUploadOutputFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSEKMSKeyId && { SSEKMSKeyId: import_smithy_client.SENSITIVE_STRING },
+  ...obj.SSEKMSEncryptionContext && { SSEKMSEncryptionContext: import_smithy_client.SENSITIVE_STRING }
+}), "CreateMultipartUploadOutputFilterSensitiveLog");
+var CreateMultipartUploadRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSECustomerKey && { SSECustomerKey: import_smithy_client.SENSITIVE_STRING },
+  ...obj.SSEKMSKeyId && { SSEKMSKeyId: import_smithy_client.SENSITIVE_STRING },
+  ...obj.SSEKMSEncryptionContext && { SSEKMSEncryptionContext: import_smithy_client.SENSITIVE_STRING }
+}), "CreateMultipartUploadRequestFilterSensitiveLog");
+var SessionCredentialsFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SecretAccessKey && { SecretAccessKey: import_smithy_client.SENSITIVE_STRING },
+  ...obj.SessionToken && { SessionToken: import_smithy_client.SENSITIVE_STRING }
+}), "SessionCredentialsFilterSensitiveLog");
+var CreateSessionOutputFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSEKMSKeyId && { SSEKMSKeyId: import_smithy_client.SENSITIVE_STRING },
+  ...obj.SSEKMSEncryptionContext && { SSEKMSEncryptionContext: import_smithy_client.SENSITIVE_STRING },
+  ...obj.Credentials && { Credentials: SessionCredentialsFilterSensitiveLog(obj.Credentials) }
+}), "CreateSessionOutputFilterSensitiveLog");
+var CreateSessionRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSEKMSKeyId && { SSEKMSKeyId: import_smithy_client.SENSITIVE_STRING },
+  ...obj.SSEKMSEncryptionContext && { SSEKMSEncryptionContext: import_smithy_client.SENSITIVE_STRING }
+}), "CreateSessionRequestFilterSensitiveLog");
+var ServerSideEncryptionByDefaultFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.KMSMasterKeyID && { KMSMasterKeyID: import_smithy_client.SENSITIVE_STRING }
+}), "ServerSideEncryptionByDefaultFilterSensitiveLog");
+var ServerSideEncryptionRuleFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.ApplyServerSideEncryptionByDefault && {
+    ApplyServerSideEncryptionByDefault: ServerSideEncryptionByDefaultFilterSensitiveLog(
+      obj.ApplyServerSideEncryptionByDefault
+    )
+  }
+}), "ServerSideEncryptionRuleFilterSensitiveLog");
+var ServerSideEncryptionConfigurationFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.Rules && { Rules: obj.Rules.map((item) => ServerSideEncryptionRuleFilterSensitiveLog(item)) }
+}), "ServerSideEncryptionConfigurationFilterSensitiveLog");
+var GetBucketEncryptionOutputFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.ServerSideEncryptionConfiguration && {
+    ServerSideEncryptionConfiguration: ServerSideEncryptionConfigurationFilterSensitiveLog(
+      obj.ServerSideEncryptionConfiguration
+    )
+  }
+}), "GetBucketEncryptionOutputFilterSensitiveLog");
+var SSEKMSFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.KeyId && { KeyId: import_smithy_client.SENSITIVE_STRING }
+}), "SSEKMSFilterSensitiveLog");
+var InventoryEncryptionFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSEKMS && { SSEKMS: SSEKMSFilterSensitiveLog(obj.SSEKMS) }
+}), "InventoryEncryptionFilterSensitiveLog");
+var InventoryS3BucketDestinationFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.Encryption && { Encryption: InventoryEncryptionFilterSensitiveLog(obj.Encryption) }
+}), "InventoryS3BucketDestinationFilterSensitiveLog");
+var InventoryDestinationFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.S3BucketDestination && {
+    S3BucketDestination: InventoryS3BucketDestinationFilterSensitiveLog(obj.S3BucketDestination)
+  }
+}), "InventoryDestinationFilterSensitiveLog");
+var InventoryConfigurationFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.Destination && { Destination: InventoryDestinationFilterSensitiveLog(obj.Destination) }
+}), "InventoryConfigurationFilterSensitiveLog");
+var GetBucketInventoryConfigurationOutputFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.InventoryConfiguration && {
+    InventoryConfiguration: InventoryConfigurationFilterSensitiveLog(obj.InventoryConfiguration)
+  }
+}), "GetBucketInventoryConfigurationOutputFilterSensitiveLog");
+var GetObjectOutputFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSEKMSKeyId && { SSEKMSKeyId: import_smithy_client.SENSITIVE_STRING }
+}), "GetObjectOutputFilterSensitiveLog");
+var GetObjectRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSECustomerKey && { SSECustomerKey: import_smithy_client.SENSITIVE_STRING }
+}), "GetObjectRequestFilterSensitiveLog");
+var GetObjectAttributesRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSECustomerKey && { SSECustomerKey: import_smithy_client.SENSITIVE_STRING }
+}), "GetObjectAttributesRequestFilterSensitiveLog");
+var GetObjectTorrentOutputFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj
+}), "GetObjectTorrentOutputFilterSensitiveLog");
+var HeadObjectOutputFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSEKMSKeyId && { SSEKMSKeyId: import_smithy_client.SENSITIVE_STRING }
+}), "HeadObjectOutputFilterSensitiveLog");
+var HeadObjectRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSECustomerKey && { SSECustomerKey: import_smithy_client.SENSITIVE_STRING }
+}), "HeadObjectRequestFilterSensitiveLog");
+var ListBucketInventoryConfigurationsOutputFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.InventoryConfigurationList && {
+    InventoryConfigurationList: obj.InventoryConfigurationList.map(
+      (item) => InventoryConfigurationFilterSensitiveLog(item)
+    )
+  }
+}), "ListBucketInventoryConfigurationsOutputFilterSensitiveLog");
+var ListPartsRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSECustomerKey && { SSECustomerKey: import_smithy_client.SENSITIVE_STRING }
+}), "ListPartsRequestFilterSensitiveLog");
+
+// src/protocols/Aws_restXml.ts
+var import_core = require("@aws-sdk/core");
+var import_xml_builder = require("@aws-sdk/xml-builder");
+var import_core2 = require("@smithy/core");
+var import_protocol_http = require("@smithy/protocol-http");
+
+
+// src/models/models_1.ts
+
+var MFADelete = {
+  Disabled: "Disabled",
+  Enabled: "Enabled"
+};
+var EncryptionTypeMismatch = class _EncryptionTypeMismatch extends S3ServiceException {
+  static {
+    __name(this, "EncryptionTypeMismatch");
+  }
+  name = "EncryptionTypeMismatch";
+  $fault = "client";
+  /**
+   * @internal
+   */
+  constructor(opts) {
+    super({
+      name: "EncryptionTypeMismatch",
+      $fault: "client",
+      ...opts
+    });
+    Object.setPrototypeOf(this, _EncryptionTypeMismatch.prototype);
+  }
+};
+var InvalidRequest = class _InvalidRequest extends S3ServiceException {
+  static {
+    __name(this, "InvalidRequest");
+  }
+  name = "InvalidRequest";
+  $fault = "client";
+  /**
+   * @internal
+   */
+  constructor(opts) {
+    super({
+      name: "InvalidRequest",
+      $fault: "client",
+      ...opts
+    });
+    Object.setPrototypeOf(this, _InvalidRequest.prototype);
+  }
+};
+var InvalidWriteOffset = class _InvalidWriteOffset extends S3ServiceException {
+  static {
+    __name(this, "InvalidWriteOffset");
+  }
+  name = "InvalidWriteOffset";
+  $fault = "client";
+  /**
+   * @internal
+   */
+  constructor(opts) {
+    super({
+      name: "InvalidWriteOffset",
+      $fault: "client",
+      ...opts
+    });
+    Object.setPrototypeOf(this, _InvalidWriteOffset.prototype);
+  }
+};
+var TooManyParts = class _TooManyParts extends S3ServiceException {
+  static {
+    __name(this, "TooManyParts");
+  }
+  name = "TooManyParts";
+  $fault = "client";
+  /**
+   * @internal
+   */
+  constructor(opts) {
+    super({
+      name: "TooManyParts",
+      $fault: "client",
+      ...opts
+    });
+    Object.setPrototypeOf(this, _TooManyParts.prototype);
+  }
+};
+var ObjectAlreadyInActiveTierError = class _ObjectAlreadyInActiveTierError extends S3ServiceException {
+  static {
+    __name(this, "ObjectAlreadyInActiveTierError");
+  }
+  name = "ObjectAlreadyInActiveTierError";
+  $fault = "client";
+  /**
+   * @internal
+   */
+  constructor(opts) {
+    super({
+      name: "ObjectAlreadyInActiveTierError",
+      $fault: "client",
+      ...opts
+    });
+    Object.setPrototypeOf(this, _ObjectAlreadyInActiveTierError.prototype);
+  }
+};
+var Tier = {
+  Bulk: "Bulk",
+  Expedited: "Expedited",
+  Standard: "Standard"
+};
+var ExpressionType = {
+  SQL: "SQL"
+};
+var CompressionType = {
+  BZIP2: "BZIP2",
+  GZIP: "GZIP",
+  NONE: "NONE"
+};
+var FileHeaderInfo = {
+  IGNORE: "IGNORE",
+  NONE: "NONE",
+  USE: "USE"
+};
+var JSONType = {
+  DOCUMENT: "DOCUMENT",
+  LINES: "LINES"
+};
+var QuoteFields = {
+  ALWAYS: "ALWAYS",
+  ASNEEDED: "ASNEEDED"
+};
+var RestoreRequestType = {
+  SELECT: "SELECT"
+};
+var SelectObjectContentEventStream;
+((SelectObjectContentEventStream3) => {
+  SelectObjectContentEventStream3.visit = /* @__PURE__ */ __name((value, visitor) => {
+    if (value.Records !== void 0) return visitor.Records(value.Records);
+    if (value.Stats !== void 0) return visitor.Stats(value.Stats);
+    if (value.Progress !== void 0) return visitor.Progress(value.Progress);
+    if (value.Cont !== void 0) return visitor.Cont(value.Cont);
+    if (value.End !== void 0) return visitor.End(value.End);
+    return visitor._(value.$unknown[0], value.$unknown[1]);
+  }, "visit");
+})(SelectObjectContentEventStream || (SelectObjectContentEventStream = {}));
+var PutBucketEncryptionRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.ServerSideEncryptionConfiguration && {
+    ServerSideEncryptionConfiguration: ServerSideEncryptionConfigurationFilterSensitiveLog(
+      obj.ServerSideEncryptionConfiguration
+    )
+  }
+}), "PutBucketEncryptionRequestFilterSensitiveLog");
+var PutBucketInventoryConfigurationRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.InventoryConfiguration && {
+    InventoryConfiguration: InventoryConfigurationFilterSensitiveLog(obj.InventoryConfiguration)
+  }
+}), "PutBucketInventoryConfigurationRequestFilterSensitiveLog");
+var PutObjectOutputFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSEKMSKeyId && { SSEKMSKeyId: import_smithy_client.SENSITIVE_STRING },
+  ...obj.SSEKMSEncryptionContext && { SSEKMSEncryptionContext: import_smithy_client.SENSITIVE_STRING }
+}), "PutObjectOutputFilterSensitiveLog");
+var PutObjectRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSECustomerKey && { SSECustomerKey: import_smithy_client.SENSITIVE_STRING },
+  ...obj.SSEKMSKeyId && { SSEKMSKeyId: import_smithy_client.SENSITIVE_STRING },
+  ...obj.SSEKMSEncryptionContext && { SSEKMSEncryptionContext: import_smithy_client.SENSITIVE_STRING }
+}), "PutObjectRequestFilterSensitiveLog");
+var EncryptionFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.KMSKeyId && { KMSKeyId: import_smithy_client.SENSITIVE_STRING }
+}), "EncryptionFilterSensitiveLog");
+var S3LocationFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.Encryption && { Encryption: EncryptionFilterSensitiveLog(obj.Encryption) }
+}), "S3LocationFilterSensitiveLog");
+var OutputLocationFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.S3 && { S3: S3LocationFilterSensitiveLog(obj.S3) }
+}), "OutputLocationFilterSensitiveLog");
+var RestoreRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.OutputLocation && { OutputLocation: OutputLocationFilterSensitiveLog(obj.OutputLocation) }
+}), "RestoreRequestFilterSensitiveLog");
+var RestoreObjectRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.RestoreRequest && { RestoreRequest: RestoreRequestFilterSensitiveLog(obj.RestoreRequest) }
+}), "RestoreObjectRequestFilterSensitiveLog");
+var SelectObjectContentEventStreamFilterSensitiveLog = /* @__PURE__ */ __name((obj) => {
+  if (obj.Records !== void 0) return { Records: obj.Records };
+  if (obj.Stats !== void 0) return { Stats: obj.Stats };
+  if (obj.Progress !== void 0) return { Progress: obj.Progress };
+  if (obj.Cont !== void 0) return { Cont: obj.Cont };
+  if (obj.End !== void 0) return { End: obj.End };
+  if (obj.$unknown !== void 0) return { [obj.$unknown[0]]: "UNKNOWN" };
+}, "SelectObjectContentEventStreamFilterSensitiveLog");
+var SelectObjectContentOutputFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.Payload && { Payload: "STREAMING_CONTENT" }
+}), "SelectObjectContentOutputFilterSensitiveLog");
+var SelectObjectContentRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSECustomerKey && { SSECustomerKey: import_smithy_client.SENSITIVE_STRING }
+}), "SelectObjectContentRequestFilterSensitiveLog");
+var UploadPartOutputFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSEKMSKeyId && { SSEKMSKeyId: import_smithy_client.SENSITIVE_STRING }
+}), "UploadPartOutputFilterSensitiveLog");
+var UploadPartRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSECustomerKey && { SSECustomerKey: import_smithy_client.SENSITIVE_STRING }
+}), "UploadPartRequestFilterSensitiveLog");
+var UploadPartCopyOutputFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSEKMSKeyId && { SSEKMSKeyId: import_smithy_client.SENSITIVE_STRING }
+}), "UploadPartCopyOutputFilterSensitiveLog");
+var UploadPartCopyRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSECustomerKey && { SSECustomerKey: import_smithy_client.SENSITIVE_STRING },
+  ...obj.CopySourceSSECustomerKey && { CopySourceSSECustomerKey: import_smithy_client.SENSITIVE_STRING }
+}), "UploadPartCopyRequestFilterSensitiveLog");
+var WriteGetObjectResponseRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({
+  ...obj,
+  ...obj.SSEKMSKeyId && { SSEKMSKeyId: import_smithy_client.SENSITIVE_STRING }
+}), "WriteGetObjectResponseRequestFilterSensitiveLog");
+
+// src/protocols/Aws_restXml.ts
+var se_AbortMultipartUploadCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xarp]: input[_RP],
+    [_xaebo]: input[_EBO],
+    [_xaimit]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_IMIT]), () => (0, import_smithy_client.dateToUtcString)(input[_IMIT]).toString()]
+  });
+  b.bp("/{Key+}");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  b.p("Key", () => input.Key, "{Key+}", true);
+  const query = (0, import_smithy_client.map)({
+    [_xi]: [, "AbortMultipartUpload"],
+    [_uI]: [, (0, import_smithy_client.expectNonNull)(input[_UI], `UploadId`)]
+  });
+  let body;
+  b.m("DELETE").h(headers).q(query).b(body);
+  return b.build();
+}, "se_AbortMultipartUploadCommand");
+var se_CompleteMultipartUploadCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    "content-type": "application/xml",
+    [_xacc]: input[_CCRC],
+    [_xacc_]: input[_CCRCC],
+    [_xacc__]: input[_CCRCNVME],
+    [_xacs]: input[_CSHA],
+    [_xacs_]: input[_CSHAh],
+    [_xact]: input[_CT],
+    [_xamos]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_MOS]), () => input[_MOS].toString()],
+    [_xarp]: input[_RP],
+    [_xaebo]: input[_EBO],
+    [_im]: input[_IM],
+    [_inm]: input[_INM],
+    [_xasseca]: input[_SSECA],
+    [_xasseck]: input[_SSECK],
+    [_xasseckm]: input[_SSECKMD]
+  });
+  b.bp("/{Key+}");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  b.p("Key", () => input.Key, "{Key+}", true);
+  const query = (0, import_smithy_client.map)({
+    [_uI]: [, (0, import_smithy_client.expectNonNull)(input[_UI], `UploadId`)]
+  });
+  let body;
+  let contents;
+  if (input.MultipartUpload !== void 0) {
+    contents = se_CompletedMultipartUpload(input.MultipartUpload, context);
+    contents = contents.n("CompleteMultipartUpload");
+    body = _ve;
+    contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/");
+    body += contents.toString();
+  }
+  b.m("POST").h(headers).q(query).b(body);
+  return b.build();
+}, "se_CompleteMultipartUploadCommand");
+var se_CopyObjectCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    ...input.Metadata !== void 0 && Object.keys(input.Metadata).reduce((acc, suffix) => {
+      acc[`x-amz-meta-${suffix.toLowerCase()}`] = input.Metadata[suffix];
+      return acc;
+    }, {}),
+    [_xaa]: input[_ACL],
+    [_cc]: input[_CC],
+    [_xaca]: input[_CA],
+    [_cd]: input[_CD],
+    [_ce]: input[_CE],
+    [_cl]: input[_CL],
+    [_ct]: input[_CTo],
+    [_xacs__]: input[_CS],
+    [_xacsim]: input[_CSIM],
+    [_xacsims]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_CSIMS]), () => (0, import_smithy_client.dateToUtcString)(input[_CSIMS]).toString()],
+    [_xacsinm]: input[_CSINM],
+    [_xacsius]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_CSIUS]), () => (0, import_smithy_client.dateToUtcString)(input[_CSIUS]).toString()],
+    [_e]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_E]), () => (0, import_smithy_client.dateToUtcString)(input[_E]).toString()],
+    [_xagfc]: input[_GFC],
+    [_xagr]: input[_GR],
+    [_xagra]: input[_GRACP],
+    [_xagwa]: input[_GWACP],
+    [_xamd]: input[_MD],
+    [_xatd]: input[_TD],
+    [_xasse]: input[_SSE],
+    [_xasc]: input[_SC],
+    [_xawrl]: input[_WRL],
+    [_xasseca]: input[_SSECA],
+    [_xasseck]: input[_SSECK],
+    [_xasseckm]: input[_SSECKMD],
+    [_xasseakki]: input[_SSEKMSKI],
+    [_xassec]: input[_SSEKMSEC],
+    [_xassebke]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_BKE]), () => input[_BKE].toString()],
+    [_xacssseca]: input[_CSSSECA],
+    [_xacssseck]: input[_CSSSECK],
+    [_xacssseckm]: input[_CSSSECKMD],
+    [_xarp]: input[_RP],
+    [_xat]: input[_T],
+    [_xaolm]: input[_OLM],
+    [_xaolrud]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_OLRUD]), () => (0, import_smithy_client.serializeDateTime)(input[_OLRUD]).toString()],
+    [_xaollh]: input[_OLLHS],
+    [_xaebo]: input[_EBO],
+    [_xasebo]: input[_ESBO]
+  });
+  b.bp("/{Key+}");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  b.p("Key", () => input.Key, "{Key+}", true);
+  const query = (0, import_smithy_client.map)({
+    [_xi]: [, "CopyObject"]
+  });
+  let body;
+  b.m("PUT").h(headers).q(query).b(body);
+  return b.build();
+}, "se_CopyObjectCommand");
+var se_CreateBucketCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    "content-type": "application/xml",
+    [_xaa]: input[_ACL],
+    [_xagfc]: input[_GFC],
+    [_xagr]: input[_GR],
+    [_xagra]: input[_GRACP],
+    [_xagw]: input[_GW],
+    [_xagwa]: input[_GWACP],
+    [_xabole]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_OLEFB]), () => input[_OLEFB].toString()],
+    [_xaoo]: input[_OO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  let body;
+  let contents;
+  if (input.CreateBucketConfiguration !== void 0) {
+    contents = se_CreateBucketConfiguration(input.CreateBucketConfiguration, context);
+    body = _ve;
+    contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/");
+    body += contents.toString();
+  }
+  b.m("PUT").h(headers).b(body);
+  return b.build();
+}, "se_CreateBucketCommand");
+var se_CreateBucketMetadataTableConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    "content-type": "application/xml",
+    [_cm]: input[_CMD],
+    [_xasca]: input[_CA],
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_mT]: [, ""]
+  });
+  let body;
+  let contents;
+  if (input.MetadataTableConfiguration !== void 0) {
+    contents = se_MetadataTableConfiguration(input.MetadataTableConfiguration, context);
+    body = _ve;
+    contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/");
+    body += contents.toString();
+  }
+  b.m("POST").h(headers).q(query).b(body);
+  return b.build();
+}, "se_CreateBucketMetadataTableConfigurationCommand");
+var se_CreateMultipartUploadCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    ...input.Metadata !== void 0 && Object.keys(input.Metadata).reduce((acc, suffix) => {
+      acc[`x-amz-meta-${suffix.toLowerCase()}`] = input.Metadata[suffix];
+      return acc;
+    }, {}),
+    [_xaa]: input[_ACL],
+    [_cc]: input[_CC],
+    [_cd]: input[_CD],
+    [_ce]: input[_CE],
+    [_cl]: input[_CL],
+    [_ct]: input[_CTo],
+    [_e]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_E]), () => (0, import_smithy_client.dateToUtcString)(input[_E]).toString()],
+    [_xagfc]: input[_GFC],
+    [_xagr]: input[_GR],
+    [_xagra]: input[_GRACP],
+    [_xagwa]: input[_GWACP],
+    [_xasse]: input[_SSE],
+    [_xasc]: input[_SC],
+    [_xawrl]: input[_WRL],
+    [_xasseca]: input[_SSECA],
+    [_xasseck]: input[_SSECK],
+    [_xasseckm]: input[_SSECKMD],
+    [_xasseakki]: input[_SSEKMSKI],
+    [_xassec]: input[_SSEKMSEC],
+    [_xassebke]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_BKE]), () => input[_BKE].toString()],
+    [_xarp]: input[_RP],
+    [_xat]: input[_T],
+    [_xaolm]: input[_OLM],
+    [_xaolrud]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_OLRUD]), () => (0, import_smithy_client.serializeDateTime)(input[_OLRUD]).toString()],
+    [_xaollh]: input[_OLLHS],
+    [_xaebo]: input[_EBO],
+    [_xaca]: input[_CA],
+    [_xact]: input[_CT]
+  });
+  b.bp("/{Key+}");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  b.p("Key", () => input.Key, "{Key+}", true);
+  const query = (0, import_smithy_client.map)({
+    [_u]: [, ""]
+  });
+  let body;
+  b.m("POST").h(headers).q(query).b(body);
+  return b.build();
+}, "se_CreateMultipartUploadCommand");
+var se_CreateSessionCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xacsm]: input[_SM],
+    [_xasse]: input[_SSE],
+    [_xasseakki]: input[_SSEKMSKI],
+    [_xassec]: input[_SSEKMSEC],
+    [_xassebke]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_BKE]), () => input[_BKE].toString()]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_s]: [, ""]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_CreateSessionCommand");
+var se_DeleteBucketCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  let body;
+  b.m("DELETE").h(headers).b(body);
+  return b.build();
+}, "se_DeleteBucketCommand");
+var se_DeleteBucketAnalyticsConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_a]: [, ""],
+    [_i]: [, (0, import_smithy_client.expectNonNull)(input[_I], `Id`)]
+  });
+  let body;
+  b.m("DELETE").h(headers).q(query).b(body);
+  return b.build();
+}, "se_DeleteBucketAnalyticsConfigurationCommand");
+var se_DeleteBucketCorsCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_c]: [, ""]
+  });
+  let body;
+  b.m("DELETE").h(headers).q(query).b(body);
+  return b.build();
+}, "se_DeleteBucketCorsCommand");
+var se_DeleteBucketEncryptionCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_en]: [, ""]
+  });
+  let body;
+  b.m("DELETE").h(headers).q(query).b(body);
+  return b.build();
+}, "se_DeleteBucketEncryptionCommand");
+var se_DeleteBucketIntelligentTieringConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = {};
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_it]: [, ""],
+    [_i]: [, (0, import_smithy_client.expectNonNull)(input[_I], `Id`)]
+  });
+  let body;
+  b.m("DELETE").h(headers).q(query).b(body);
+  return b.build();
+}, "se_DeleteBucketIntelligentTieringConfigurationCommand");
+var se_DeleteBucketInventoryConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_in]: [, ""],
+    [_i]: [, (0, import_smithy_client.expectNonNull)(input[_I], `Id`)]
+  });
+  let body;
+  b.m("DELETE").h(headers).q(query).b(body);
+  return b.build();
+}, "se_DeleteBucketInventoryConfigurationCommand");
+var se_DeleteBucketLifecycleCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_l]: [, ""]
+  });
+  let body;
+  b.m("DELETE").h(headers).q(query).b(body);
+  return b.build();
+}, "se_DeleteBucketLifecycleCommand");
+var se_DeleteBucketMetadataTableConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_mT]: [, ""]
+  });
+  let body;
+  b.m("DELETE").h(headers).q(query).b(body);
+  return b.build();
+}, "se_DeleteBucketMetadataTableConfigurationCommand");
+var se_DeleteBucketMetricsConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_m]: [, ""],
+    [_i]: [, (0, import_smithy_client.expectNonNull)(input[_I], `Id`)]
+  });
+  let body;
+  b.m("DELETE").h(headers).q(query).b(body);
+  return b.build();
+}, "se_DeleteBucketMetricsConfigurationCommand");
+var se_DeleteBucketOwnershipControlsCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_oC]: [, ""]
+  });
+  let body;
+  b.m("DELETE").h(headers).q(query).b(body);
+  return b.build();
+}, "se_DeleteBucketOwnershipControlsCommand");
+var se_DeleteBucketPolicyCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_p]: [, ""]
+  });
+  let body;
+  b.m("DELETE").h(headers).q(query).b(body);
+  return b.build();
+}, "se_DeleteBucketPolicyCommand");
+var se_DeleteBucketReplicationCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_r]: [, ""]
+  });
+  let body;
+  b.m("DELETE").h(headers).q(query).b(body);
+  return b.build();
+}, "se_DeleteBucketReplicationCommand");
+var se_DeleteBucketTaggingCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_t]: [, ""]
+  });
+  let body;
+  b.m("DELETE").h(headers).q(query).b(body);
+  return b.build();
+}, "se_DeleteBucketTaggingCommand");
+var se_DeleteBucketWebsiteCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_w]: [, ""]
+  });
+  let body;
+  b.m("DELETE").h(headers).q(query).b(body);
+  return b.build();
+}, "se_DeleteBucketWebsiteCommand");
+var se_DeleteObjectCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xam]: input[_MFA],
+    [_xarp]: input[_RP],
+    [_xabgr]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_BGR]), () => input[_BGR].toString()],
+    [_xaebo]: input[_EBO],
+    [_im]: input[_IM],
+    [_xaimlmt]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_IMLMT]), () => (0, import_smithy_client.dateToUtcString)(input[_IMLMT]).toString()],
+    [_xaims]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_IMS]), () => input[_IMS].toString()]
+  });
+  b.bp("/{Key+}");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  b.p("Key", () => input.Key, "{Key+}", true);
+  const query = (0, import_smithy_client.map)({
+    [_xi]: [, "DeleteObject"],
+    [_vI]: [, input[_VI]]
+  });
+  let body;
+  b.m("DELETE").h(headers).q(query).b(body);
+  return b.build();
+}, "se_DeleteObjectCommand");
+var se_DeleteObjectsCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    "content-type": "application/xml",
+    [_xam]: input[_MFA],
+    [_xarp]: input[_RP],
+    [_xabgr]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_BGR]), () => input[_BGR].toString()],
+    [_xaebo]: input[_EBO],
+    [_xasca]: input[_CA]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_d]: [, ""]
+  });
+  let body;
+  let contents;
+  if (input.Delete !== void 0) {
+    contents = se_Delete(input.Delete, context);
+    body = _ve;
+    contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/");
+    body += contents.toString();
+  }
+  b.m("POST").h(headers).q(query).b(body);
+  return b.build();
+}, "se_DeleteObjectsCommand");
+var se_DeleteObjectTaggingCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/{Key+}");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  b.p("Key", () => input.Key, "{Key+}", true);
+  const query = (0, import_smithy_client.map)({
+    [_t]: [, ""],
+    [_vI]: [, input[_VI]]
+  });
+  let body;
+  b.m("DELETE").h(headers).q(query).b(body);
+  return b.build();
+}, "se_DeleteObjectTaggingCommand");
+var se_DeletePublicAccessBlockCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_pAB]: [, ""]
+  });
+  let body;
+  b.m("DELETE").h(headers).q(query).b(body);
+  return b.build();
+}, "se_DeletePublicAccessBlockCommand");
+var se_GetBucketAccelerateConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO],
+    [_xarp]: input[_RP]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_ac]: [, ""]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketAccelerateConfigurationCommand");
+var se_GetBucketAclCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_acl]: [, ""]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketAclCommand");
+var se_GetBucketAnalyticsConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_a]: [, ""],
+    [_xi]: [, "GetBucketAnalyticsConfiguration"],
+    [_i]: [, (0, import_smithy_client.expectNonNull)(input[_I], `Id`)]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketAnalyticsConfigurationCommand");
+var se_GetBucketCorsCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_c]: [, ""]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketCorsCommand");
+var se_GetBucketEncryptionCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_en]: [, ""]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketEncryptionCommand");
+var se_GetBucketIntelligentTieringConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = {};
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_it]: [, ""],
+    [_xi]: [, "GetBucketIntelligentTieringConfiguration"],
+    [_i]: [, (0, import_smithy_client.expectNonNull)(input[_I], `Id`)]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketIntelligentTieringConfigurationCommand");
+var se_GetBucketInventoryConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_in]: [, ""],
+    [_xi]: [, "GetBucketInventoryConfiguration"],
+    [_i]: [, (0, import_smithy_client.expectNonNull)(input[_I], `Id`)]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketInventoryConfigurationCommand");
+var se_GetBucketLifecycleConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_l]: [, ""]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketLifecycleConfigurationCommand");
+var se_GetBucketLocationCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_lo]: [, ""]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketLocationCommand");
+var se_GetBucketLoggingCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_log]: [, ""]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketLoggingCommand");
+var se_GetBucketMetadataTableConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_mT]: [, ""]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketMetadataTableConfigurationCommand");
+var se_GetBucketMetricsConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_m]: [, ""],
+    [_xi]: [, "GetBucketMetricsConfiguration"],
+    [_i]: [, (0, import_smithy_client.expectNonNull)(input[_I], `Id`)]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketMetricsConfigurationCommand");
+var se_GetBucketNotificationConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_n]: [, ""]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketNotificationConfigurationCommand");
+var se_GetBucketOwnershipControlsCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_oC]: [, ""]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketOwnershipControlsCommand");
+var se_GetBucketPolicyCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_p]: [, ""]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketPolicyCommand");
+var se_GetBucketPolicyStatusCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_pS]: [, ""]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketPolicyStatusCommand");
+var se_GetBucketReplicationCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_r]: [, ""]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketReplicationCommand");
+var se_GetBucketRequestPaymentCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_rP]: [, ""]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketRequestPaymentCommand");
+var se_GetBucketTaggingCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_t]: [, ""]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketTaggingCommand");
+var se_GetBucketVersioningCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_v]: [, ""]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketVersioningCommand");
+var se_GetBucketWebsiteCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  const query = (0, import_smithy_client.map)({
+    [_w]: [, ""]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetBucketWebsiteCommand");
+var se_GetObjectCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_im]: input[_IM],
+    [_ims]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_IMSf]), () => (0, import_smithy_client.dateToUtcString)(input[_IMSf]).toString()],
+    [_inm]: input[_INM],
+    [_ius]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_IUS]), () => (0, import_smithy_client.dateToUtcString)(input[_IUS]).toString()],
+    [_ra]: input[_R],
+    [_xasseca]: input[_SSECA],
+    [_xasseck]: input[_SSECK],
+    [_xasseckm]: input[_SSECKMD],
+    [_xarp]: input[_RP],
+    [_xaebo]: input[_EBO],
+    [_xacm]: input[_CM]
+  });
+  b.bp("/{Key+}");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  b.p("Key", () => input.Key, "{Key+}", true);
+  const query = (0, import_smithy_client.map)({
+    [_xi]: [, "GetObject"],
+    [_rcc]: [, input[_RCC]],
+    [_rcd]: [, input[_RCD]],
+    [_rce]: [, input[_RCE]],
+    [_rcl]: [, input[_RCL]],
+    [_rct]: [, input[_RCT]],
+    [_re]: [() => input.ResponseExpires !== void 0, () => (0, import_smithy_client.dateToUtcString)(input[_RE]).toString()],
+    [_vI]: [, input[_VI]],
+    [_pN]: [() => input.PartNumber !== void 0, () => input[_PN].toString()]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetObjectCommand");
+var se_GetObjectAclCommand = /* @__PURE__ */ __name(async (input, context) => {
+  const b = (0, import_core2.requestBuilder)(input, context);
+  const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, {
+    [_xarp]: input[_RP],
+    [_xaebo]: input[_EBO]
+  });
+  b.bp("/{Key+}");
+  b.p("Bucket", () => input.Bucket, "{Bucket}", false);
+  b.p("Key", () => input.Key, "{Key+}", true);
+  const query = (0, import_smithy_client.map)({
+    [_acl]: [, ""],
+    [_vI]: [, input[_VI]]
+  });
+  let body;
+  b.m("GET").h(headers).q(query).b(body);
+  return b.build();
+}, "se_GetObjectAclCommand");
+var se_GetObjectAttributesCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xamp]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_MP]), () => input[_MP].toString()], + [_xapnm]: input[_PNM], + [_xasseca]: input[_SSECA], + [_xasseck]: input[_SSECK], + [_xasseckm]: input[_SSECKMD], + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + [_xaoa]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_OA]), () => (input[_OA] || []).map(import_smithy_client.quoteHeader).join(", ")] + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = (0, import_smithy_client.map)({ + [_at]: [, ""], + [_vI]: [, input[_VI]] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_GetObjectAttributesCommand"); +var se_GetObjectLegalHoldCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xarp]: input[_RP], + [_xaebo]: input[_EBO] + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = (0, import_smithy_client.map)({ + [_lh]: [, ""], + [_vI]: [, input[_VI]] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_GetObjectLegalHoldCommand"); +var se_GetObjectLockConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_ol]: [, ""] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_GetObjectLockConfigurationCommand"); +var se_GetObjectRetentionCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xarp]: input[_RP], + [_xaebo]: input[_EBO] + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = (0, import_smithy_client.map)({ + [_ret]: [, ""], + [_vI]: [, input[_VI]] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_GetObjectRetentionCommand"); +var se_GetObjectTaggingCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + [_xarp]: input[_RP] + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = (0, import_smithy_client.map)({ + [_t]: [, ""], + [_vI]: [, input[_VI]] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_GetObjectTaggingCommand"); +var se_GetObjectTorrentCommand = /* @__PURE__ */ __name(async (input, context) => { + const b 
= (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xarp]: input[_RP], + [_xaebo]: input[_EBO] + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = (0, import_smithy_client.map)({ + [_to]: [, ""] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_GetObjectTorrentCommand"); +var se_GetPublicAccessBlockCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_pAB]: [, ""] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_GetPublicAccessBlockCommand"); +var se_HeadBucketCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + let body; + b.m("HEAD").h(headers).b(body); + return b.build(); +}, "se_HeadBucketCommand"); +var se_HeadObjectCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_im]: input[_IM], + [_ims]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_IMSf]), () => (0, import_smithy_client.dateToUtcString)(input[_IMSf]).toString()], + [_inm]: input[_INM], + [_ius]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_IUS]), () => (0, import_smithy_client.dateToUtcString)(input[_IUS]).toString()], + [_ra]: input[_R], + [_xasseca]: input[_SSECA], + [_xasseck]: input[_SSECK], + [_xasseckm]: input[_SSECKMD], + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + [_xacm]: input[_CM] + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = (0, import_smithy_client.map)({ + [_rcc]: [, input[_RCC]], + [_rcd]: [, input[_RCD]], + [_rce]: [, input[_RCE]], + [_rcl]: [, input[_RCL]], + [_rct]: [, input[_RCT]], + [_re]: [() => input.ResponseExpires !== void 0, () => (0, import_smithy_client.dateToUtcString)(input[_RE]).toString()], + [_vI]: [, input[_VI]], + [_pN]: [() => input.PartNumber !== void 0, () => input[_PN].toString()] + }); + let body; + b.m("HEAD").h(headers).q(query).b(body); + return b.build(); +}, "se_HeadObjectCommand"); +var se_ListBucketAnalyticsConfigurationsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_a]: [, ""], + [_xi]: [, "ListBucketAnalyticsConfigurations"], + [_ct_]: [, input[_CTon]] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_ListBucketAnalyticsConfigurationsCommand"); +var 
se_ListBucketIntelligentTieringConfigurationsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = {}; + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_it]: [, ""], + [_xi]: [, "ListBucketIntelligentTieringConfigurations"], + [_ct_]: [, input[_CTon]] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_ListBucketIntelligentTieringConfigurationsCommand"); +var se_ListBucketInventoryConfigurationsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_in]: [, ""], + [_xi]: [, "ListBucketInventoryConfigurations"], + [_ct_]: [, input[_CTon]] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_ListBucketInventoryConfigurationsCommand"); +var se_ListBucketMetricsConfigurationsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_m]: [, ""], + [_xi]: [, "ListBucketMetricsConfigurations"], + [_ct_]: [, input[_CTon]] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_ListBucketMetricsConfigurationsCommand"); +var se_ListBucketsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = {}; + b.bp("/"); + const query = (0, import_smithy_client.map)({ + [_xi]: [, "ListBuckets"], + [_mb]: [() => input.MaxBuckets !== void 0, () => input[_MB].toString()], + [_ct_]: [, input[_CTon]], + [_pr]: [, input[_P]], + [_br]: [, input[_BR]] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_ListBucketsCommand"); +var se_ListDirectoryBucketsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = {}; + b.bp("/"); + const query = (0, import_smithy_client.map)({ + [_xi]: [, "ListDirectoryBuckets"], + [_ct_]: [, input[_CTon]], + [_mdb]: [() => input.MaxDirectoryBuckets !== void 0, () => input[_MDB].toString()] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_ListDirectoryBucketsCommand"); +var se_ListMultipartUploadsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + [_xarp]: input[_RP] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_u]: [, ""], + [_de]: [, input[_D]], + [_et]: [, input[_ET]], + [_km]: [, input[_KM]], + [_mu]: [() => input.MaxUploads !== void 0, () => input[_MU].toString()], + [_pr]: [, input[_P]], + [_uim]: [, input[_UIM]] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return 
b.build(); +}, "se_ListMultipartUploadsCommand"); +var se_ListObjectsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + [_xaooa]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_OOA]), () => (input[_OOA] || []).map(import_smithy_client.quoteHeader).join(", ")] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_de]: [, input[_D]], + [_et]: [, input[_ET]], + [_ma]: [, input[_M]], + [_mk]: [() => input.MaxKeys !== void 0, () => input[_MK].toString()], + [_pr]: [, input[_P]] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_ListObjectsCommand"); +var se_ListObjectsV2Command = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + [_xaooa]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_OOA]), () => (input[_OOA] || []).map(import_smithy_client.quoteHeader).join(", ")] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_lt]: [, "2"], + [_de]: [, input[_D]], + [_et]: [, input[_ET]], + [_mk]: [() => input.MaxKeys !== void 0, () => input[_MK].toString()], + [_pr]: [, input[_P]], + [_ct_]: [, input[_CTon]], + [_fo]: [() => input.FetchOwner !== void 0, () => input[_FO].toString()], + [_sa]: [, input[_SA]] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_ListObjectsV2Command"); +var se_ListObjectVersionsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + [_xarp]: input[_RP], + [_xaooa]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_OOA]), () => (input[_OOA] || []).map(import_smithy_client.quoteHeader).join(", ")] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_ver]: [, ""], + [_de]: [, input[_D]], + [_et]: [, input[_ET]], + [_km]: [, input[_KM]], + [_mk]: [() => input.MaxKeys !== void 0, () => input[_MK].toString()], + [_pr]: [, input[_P]], + [_vim]: [, input[_VIM]] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_ListObjectVersionsCommand"); +var se_ListPartsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + [_xasseca]: input[_SSECA], + [_xasseck]: input[_SSECK], + [_xasseckm]: input[_SSECKMD] + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = (0, import_smithy_client.map)({ + [_xi]: [, "ListParts"], + [_mp]: [() => input.MaxParts !== void 0, () => input[_MP].toString()], + [_pnm]: [, input[_PNM]], + [_uI]: [, (0, import_smithy_client.expectNonNull)(input[_UI], 
`UploadId`)] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_ListPartsCommand"); +var se_PutBucketAccelerateConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_xaebo]: input[_EBO], + [_xasca]: input[_CA] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_ac]: [, ""] + }); + let body; + let contents; + if (input.AccelerateConfiguration !== void 0) { + contents = se_AccelerateConfiguration(input.AccelerateConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutBucketAccelerateConfigurationCommand"); +var se_PutBucketAclCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_xaa]: input[_ACL], + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xagfc]: input[_GFC], + [_xagr]: input[_GR], + [_xagra]: input[_GRACP], + [_xagw]: input[_GW], + [_xagwa]: input[_GWACP], + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_acl]: [, ""] + }); + let body; + let contents; + if (input.AccessControlPolicy !== void 0) { + contents = se_AccessControlPolicy(input.AccessControlPolicy, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutBucketAclCommand"); +var se_PutBucketAnalyticsConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_a]: [, ""], + [_i]: [, (0, import_smithy_client.expectNonNull)(input[_I], `Id`)] + }); + let body; + let contents; + if (input.AnalyticsConfiguration !== void 0) { + contents = se_AnalyticsConfiguration(input.AnalyticsConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutBucketAnalyticsConfigurationCommand"); +var se_PutBucketCorsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_c]: [, ""] + }); + let body; + let contents; + if (input.CORSConfiguration !== void 0) { + contents = se_CORSConfiguration(input.CORSConfiguration, 
context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutBucketCorsCommand"); +var se_PutBucketEncryptionCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_en]: [, ""] + }); + let body; + let contents; + if (input.ServerSideEncryptionConfiguration !== void 0) { + contents = se_ServerSideEncryptionConfiguration(input.ServerSideEncryptionConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutBucketEncryptionCommand"); +var se_PutBucketIntelligentTieringConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = { + "content-type": "application/xml" + }; + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_it]: [, ""], + [_i]: [, (0, import_smithy_client.expectNonNull)(input[_I], `Id`)] + }); + let body; + let contents; + if (input.IntelligentTieringConfiguration !== void 0) { + contents = se_IntelligentTieringConfiguration(input.IntelligentTieringConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutBucketIntelligentTieringConfigurationCommand"); +var se_PutBucketInventoryConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_in]: [, ""], + [_i]: [, (0, import_smithy_client.expectNonNull)(input[_I], `Id`)] + }); + let body; + let contents; + if (input.InventoryConfiguration !== void 0) { + contents = se_InventoryConfiguration(input.InventoryConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutBucketInventoryConfigurationCommand"); +var se_PutBucketLifecycleConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_xasca]: input[_CA], + [_xaebo]: input[_EBO], + [_xatdmos]: input[_TDMOS] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_l]: [, ""] + }); + let body; + let contents; + if (input.LifecycleConfiguration !== void 0) { + contents = 
se_BucketLifecycleConfiguration(input.LifecycleConfiguration, context); + contents = contents.n("LifecycleConfiguration"); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutBucketLifecycleConfigurationCommand"); +var se_PutBucketLoggingCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_log]: [, ""] + }); + let body; + let contents; + if (input.BucketLoggingStatus !== void 0) { + contents = se_BucketLoggingStatus(input.BucketLoggingStatus, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutBucketLoggingCommand"); +var se_PutBucketMetricsConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_m]: [, ""], + [_i]: [, (0, import_smithy_client.expectNonNull)(input[_I], `Id`)] + }); + let body; + let contents; + if (input.MetricsConfiguration !== void 0) { + contents = se_MetricsConfiguration(input.MetricsConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutBucketMetricsConfigurationCommand"); +var se_PutBucketNotificationConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_xaebo]: input[_EBO], + [_xasdv]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_SDV]), () => input[_SDV].toString()] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_n]: [, ""] + }); + let body; + let contents; + if (input.NotificationConfiguration !== void 0) { + contents = se_NotificationConfiguration(input.NotificationConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutBucketNotificationConfigurationCommand"); +var se_PutBucketOwnershipControlsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, 
import_smithy_client.map)({ + [_oC]: [, ""] + }); + let body; + let contents; + if (input.OwnershipControls !== void 0) { + contents = se_OwnershipControls(input.OwnershipControls, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutBucketOwnershipControlsCommand"); +var se_PutBucketPolicyCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "text/plain", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xacrsba]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_CRSBA]), () => input[_CRSBA].toString()], + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_p]: [, ""] + }); + let body; + let contents; + if (input.Policy !== void 0) { + contents = input.Policy; + body = contents; + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutBucketPolicyCommand"); +var se_PutBucketReplicationCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xabolt]: input[_To], + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_r]: [, ""] + }); + let body; + let contents; + if (input.ReplicationConfiguration !== void 0) { + contents = se_ReplicationConfiguration(input.ReplicationConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutBucketReplicationCommand"); +var se_PutBucketRequestPaymentCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_rP]: [, ""] + }); + let body; + let contents; + if (input.RequestPaymentConfiguration !== void 0) { + contents = se_RequestPaymentConfiguration(input.RequestPaymentConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutBucketRequestPaymentCommand"); +var se_PutBucketTaggingCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_t]: [, ""] + }); + let body; + 
let contents; + if (input.Tagging !== void 0) { + contents = se_Tagging(input.Tagging, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutBucketTaggingCommand"); +var se_PutBucketVersioningCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xam]: input[_MFA], + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_v]: [, ""] + }); + let body; + let contents; + if (input.VersioningConfiguration !== void 0) { + contents = se_VersioningConfiguration(input.VersioningConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutBucketVersioningCommand"); +var se_PutBucketWebsiteCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_w]: [, ""] + }); + let body; + let contents; + if (input.WebsiteConfiguration !== void 0) { + contents = se_WebsiteConfiguration(input.WebsiteConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutBucketWebsiteCommand"); +var se_PutObjectCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + ...input.Metadata !== void 0 && Object.keys(input.Metadata).reduce((acc, suffix) => { + acc[`x-amz-meta-${suffix.toLowerCase()}`] = input.Metadata[suffix]; + return acc; + }, {}), + [_ct]: input[_CTo] || "application/octet-stream", + [_xaa]: input[_ACL], + [_cc]: input[_CC], + [_cd]: input[_CD], + [_ce]: input[_CE], + [_cl]: input[_CL], + [_cl_]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_CLo]), () => input[_CLo].toString()], + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xacc]: input[_CCRC], + [_xacc_]: input[_CCRCC], + [_xacc__]: input[_CCRCNVME], + [_xacs]: input[_CSHA], + [_xacs_]: input[_CSHAh], + [_e]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_E]), () => (0, import_smithy_client.dateToUtcString)(input[_E]).toString()], + [_im]: input[_IM], + [_inm]: input[_INM], + [_xagfc]: input[_GFC], + [_xagr]: input[_GR], + [_xagra]: input[_GRACP], + [_xagwa]: input[_GWACP], + [_xawob]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_WOB]), () => input[_WOB].toString()], + [_xasse]: input[_SSE], + [_xasc]: input[_SC], + [_xawrl]: input[_WRL], + [_xasseca]: input[_SSECA], + [_xasseck]: input[_SSECK], + [_xasseckm]: input[_SSECKMD], + [_xasseakki]: 
input[_SSEKMSKI], + [_xassec]: input[_SSEKMSEC], + [_xassebke]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_BKE]), () => input[_BKE].toString()], + [_xarp]: input[_RP], + [_xat]: input[_T], + [_xaolm]: input[_OLM], + [_xaolrud]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_OLRUD]), () => (0, import_smithy_client.serializeDateTime)(input[_OLRUD]).toString()], + [_xaollh]: input[_OLLHS], + [_xaebo]: input[_EBO] + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = (0, import_smithy_client.map)({ + [_xi]: [, "PutObject"] + }); + let body; + let contents; + if (input.Body !== void 0) { + contents = input.Body; + body = contents; + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutObjectCommand"); +var se_PutObjectAclCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_xaa]: input[_ACL], + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xagfc]: input[_GFC], + [_xagr]: input[_GR], + [_xagra]: input[_GRACP], + [_xagw]: input[_GW], + [_xagwa]: input[_GWACP], + [_xarp]: input[_RP], + [_xaebo]: input[_EBO] + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = (0, import_smithy_client.map)({ + [_acl]: [, ""], + [_vI]: [, input[_VI]] + }); + let body; + let contents; + if (input.AccessControlPolicy !== void 0) { + contents = se_AccessControlPolicy(input.AccessControlPolicy, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutObjectAclCommand"); +var se_PutObjectLegalHoldCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_xarp]: input[_RP], + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO] + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = (0, import_smithy_client.map)({ + [_lh]: [, ""], + [_vI]: [, input[_VI]] + }); + let body; + let contents; + if (input.LegalHold !== void 0) { + contents = se_ObjectLockLegalHold(input.LegalHold, context); + contents = contents.n("LegalHold"); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutObjectLegalHoldCommand"); +var se_PutObjectLockConfigurationCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_xarp]: input[_RP], + [_xabolt]: input[_To], + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_ol]: [, ""] + }); + let body; + let contents; + if 
(input.ObjectLockConfiguration !== void 0) { + contents = se_ObjectLockConfiguration(input.ObjectLockConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutObjectLockConfigurationCommand"); +var se_PutObjectRetentionCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_xarp]: input[_RP], + [_xabgr]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_BGR]), () => input[_BGR].toString()], + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO] + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = (0, import_smithy_client.map)({ + [_ret]: [, ""], + [_vI]: [, input[_VI]] + }); + let body; + let contents; + if (input.Retention !== void 0) { + contents = se_ObjectLockRetention(input.Retention, context); + contents = contents.n("Retention"); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutObjectRetentionCommand"); +var se_PutObjectTaggingCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO], + [_xarp]: input[_RP] + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = (0, import_smithy_client.map)({ + [_t]: [, ""], + [_vI]: [, input[_VI]] + }); + let body; + let contents; + if (input.Tagging !== void 0) { + contents = se_Tagging(input.Tagging, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutObjectTaggingCommand"); +var se_PutPublicAccessBlockCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO] + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = (0, import_smithy_client.map)({ + [_pAB]: [, ""] + }); + let body; + let contents; + if (input.PublicAccessBlockConfiguration !== void 0) { + contents = se_PublicAccessBlockConfiguration(input.PublicAccessBlockConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_PutPublicAccessBlockCommand"); +var se_RestoreObjectCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": 
"application/xml", + [_xarp]: input[_RP], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO] + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = (0, import_smithy_client.map)({ + [_res]: [, ""], + [_vI]: [, input[_VI]] + }); + let body; + let contents; + if (input.RestoreRequest !== void 0) { + contents = se_RestoreRequest(input.RestoreRequest, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("POST").h(headers).q(query).b(body); + return b.build(); +}, "se_RestoreObjectCommand"); +var se_SelectObjectContentCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/xml", + [_xasseca]: input[_SSECA], + [_xasseck]: input[_SSECK], + [_xasseckm]: input[_SSECKMD], + [_xaebo]: input[_EBO] + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = (0, import_smithy_client.map)({ + [_se]: [, ""], + [_st]: [, "2"] + }); + let body; + body = _ve; + const bn = new import_xml_builder.XmlNode(_SOCR); + bn.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + bn.cc(input, _Ex); + bn.cc(input, _ETx); + if (input[_IS] != null) { + bn.c(se_InputSerialization(input[_IS], context).n(_IS)); + } + if (input[_OS] != null) { + bn.c(se_OutputSerialization(input[_OS], context).n(_OS)); + } + if (input[_RPe] != null) { + bn.c(se_RequestProgress(input[_RPe], context).n(_RPe)); + } + if (input[_SR] != null) { + bn.c(se_ScanRange(input[_SR], context).n(_SR)); + } + body += bn.toString(); + b.m("POST").h(headers).q(query).b(body); + return b.build(); +}, "se_SelectObjectContentCommand"); +var se_UploadPartCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "content-type": "application/octet-stream", + [_cl_]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_CLo]), () => input[_CLo].toString()], + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xacc]: input[_CCRC], + [_xacc_]: input[_CCRCC], + [_xacc__]: input[_CCRCNVME], + [_xacs]: input[_CSHA], + [_xacs_]: input[_CSHAh], + [_xasseca]: input[_SSECA], + [_xasseck]: input[_SSECK], + [_xasseckm]: input[_SSECKMD], + [_xarp]: input[_RP], + [_xaebo]: input[_EBO] + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = (0, import_smithy_client.map)({ + [_xi]: [, "UploadPart"], + [_pN]: [(0, import_smithy_client.expectNonNull)(input.PartNumber, `PartNumber`) != null, () => input[_PN].toString()], + [_uI]: [, (0, import_smithy_client.expectNonNull)(input[_UI], `UploadId`)] + }); + let body; + let contents; + if (input.Body !== void 0) { + contents = input.Body; + body = contents; + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_UploadPartCommand"); +var se_UploadPartCopyCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xacs__]: input[_CS], + [_xacsim]: 
input[_CSIM], + [_xacsims]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_CSIMS]), () => (0, import_smithy_client.dateToUtcString)(input[_CSIMS]).toString()], + [_xacsinm]: input[_CSINM], + [_xacsius]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_CSIUS]), () => (0, import_smithy_client.dateToUtcString)(input[_CSIUS]).toString()], + [_xacsr]: input[_CSR], + [_xasseca]: input[_SSECA], + [_xasseck]: input[_SSECK], + [_xasseckm]: input[_SSECKMD], + [_xacssseca]: input[_CSSSECA], + [_xacssseck]: input[_CSSSECK], + [_xacssseckm]: input[_CSSSECKMD], + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + [_xasebo]: input[_ESBO] + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = (0, import_smithy_client.map)({ + [_xi]: [, "UploadPartCopy"], + [_pN]: [(0, import_smithy_client.expectNonNull)(input.PartNumber, `PartNumber`) != null, () => input[_PN].toString()], + [_uI]: [, (0, import_smithy_client.expectNonNull)(input[_UI], `UploadId`)] + }); + let body; + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}, "se_UploadPartCopyCommand"); +var se_WriteGetObjectResponseCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core2.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + "x-amz-content-sha256": "UNSIGNED-PAYLOAD", + ...input.Metadata !== void 0 && Object.keys(input.Metadata).reduce((acc, suffix) => { + acc[`x-amz-meta-${suffix.toLowerCase()}`] = input.Metadata[suffix]; + return acc; + }, {}), + "content-type": "application/octet-stream", + [_xarr]: input[_RR], + [_xart]: input[_RT], + [_xafs]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_SCt]), () => input[_SCt].toString()], + [_xafec]: input[_EC], + [_xafem]: input[_EM], + [_xafhar]: input[_AR], + [_xafhcc]: input[_CC], + [_xafhcd]: input[_CD], + [_xafhce]: input[_CE], + [_xafhcl]: input[_CL], + [_cl_]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_CLo]), () => input[_CLo].toString()], + [_xafhcr]: input[_CR], + [_xafhct]: input[_CTo], + [_xafhxacc]: input[_CCRC], + [_xafhxacc_]: input[_CCRCC], + [_xafhxacc__]: input[_CCRCNVME], + [_xafhxacs]: input[_CSHA], + [_xafhxacs_]: input[_CSHAh], + [_xafhxadm]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_DM]), () => input[_DM].toString()], + [_xafhe]: input[_ETa], + [_xafhe_]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_E]), () => (0, import_smithy_client.dateToUtcString)(input[_E]).toString()], + [_xafhxae]: input[_Exp], + [_xafhlm]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_LM]), () => (0, import_smithy_client.dateToUtcString)(input[_LM]).toString()], + [_xafhxamm]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_MM]), () => input[_MM].toString()], + [_xafhxaolm]: input[_OLM], + [_xafhxaollh]: input[_OLLHS], + [_xafhxaolrud]: [ + () => (0, import_smithy_client.isSerializableHeaderValue)(input[_OLRUD]), + () => (0, import_smithy_client.serializeDateTime)(input[_OLRUD]).toString() + ], + [_xafhxampc]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_PC]), () => input[_PC].toString()], + [_xafhxars]: input[_RS], + [_xafhxarc]: input[_RC], + [_xafhxar]: input[_Re], + [_xafhxasse]: input[_SSE], + [_xafhxasseca]: input[_SSECA], + [_xafhxasseakki]: input[_SSEKMSKI], + [_xafhxasseckm]: input[_SSECKMD], + [_xafhxasc]: 
input[_SC], + [_xafhxatc]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_TC]), () => input[_TC].toString()], + [_xafhxavi]: input[_VI], + [_xafhxassebke]: [() => (0, import_smithy_client.isSerializableHeaderValue)(input[_BKE]), () => input[_BKE].toString()] + }); + b.bp("/WriteGetObjectResponse"); + let body; + let contents; + if (input.Body !== void 0) { + contents = input.Body; + body = contents; + } + let { hostname: resolvedHostname } = await context.endpoint(); + if (context.disableHostPrefix !== true) { + resolvedHostname = "{RequestRoute}." + resolvedHostname; + if (input.RequestRoute === void 0) { + throw new Error("Empty value provided for input host prefix: RequestRoute."); + } + resolvedHostname = resolvedHostname.replace("{RequestRoute}", input.RequestRoute); + if (!(0, import_protocol_http.isValidHostname)(resolvedHostname)) { + throw new Error("ValidationError: prefixed hostname must be hostname compatible."); + } + } + b.hn(resolvedHostname); + b.m("POST").h(headers).b(body); + return b.build(); +}, "se_WriteGetObjectResponseCommand"); +var de_AbortMultipartUploadCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 204 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output), + [_RC]: [, output.headers[_xarc]] + }); + await (0, import_smithy_client.collectBody)(output.body, context); + return contents; +}, "de_AbortMultipartUploadCommand"); +var de_CompleteMultipartUploadCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output), + [_Exp]: [, output.headers[_xae]], + [_SSE]: [, output.headers[_xasse]], + [_VI]: [, output.headers[_xavi]], + [_SSEKMSKI]: [, output.headers[_xasseakki]], + [_BKE]: [() => void 0 !== output.headers[_xassebke], () => (0, import_smithy_client.parseBoolean)(output.headers[_xassebke])], + [_RC]: [, output.headers[_xarc]] + }); + const data = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.expectObject)(await (0, import_core.parseXmlBody)(output.body, context)), "body"); + if (data[_B] != null) { + contents[_B] = (0, import_smithy_client.expectString)(data[_B]); + } + if (data[_CCRC] != null) { + contents[_CCRC] = (0, import_smithy_client.expectString)(data[_CCRC]); + } + if (data[_CCRCC] != null) { + contents[_CCRCC] = (0, import_smithy_client.expectString)(data[_CCRCC]); + } + if (data[_CCRCNVME] != null) { + contents[_CCRCNVME] = (0, import_smithy_client.expectString)(data[_CCRCNVME]); + } + if (data[_CSHA] != null) { + contents[_CSHA] = (0, import_smithy_client.expectString)(data[_CSHA]); + } + if (data[_CSHAh] != null) { + contents[_CSHAh] = (0, import_smithy_client.expectString)(data[_CSHAh]); + } + if (data[_CT] != null) { + contents[_CT] = (0, import_smithy_client.expectString)(data[_CT]); + } + if (data[_ETa] != null) { + contents[_ETa] = (0, import_smithy_client.expectString)(data[_ETa]); + } + if (data[_K] != null) { + contents[_K] = (0, import_smithy_client.expectString)(data[_K]); + } + if (data[_L] != null) { + contents[_L] = (0, import_smithy_client.expectString)(data[_L]); + } + return contents; +}, "de_CompleteMultipartUploadCommand"); +var de_CopyObjectCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && 
[… vendored @aws-sdk/client-s3 (dist-cjs) bundle output omitted from this hunk: the added lines are the package's machine-generated build artifact, not hand-written application code. The omitted section adds the REST-XML response deserializers (de_CopyObjectCommand, de_CreateBucketCommand, de_CreateMultipartUploadCommand, de_CreateSessionCommand, the de_DeleteBucket*Command and de_DeleteObject*Command family, the de_GetBucket*Command / de_GetObject*Command family, de_HeadBucketCommand, de_HeadObjectCommand, the de_List*Command family, the de_PutBucket*Command / de_PutObject*Command family, de_RestoreObjectCommand, de_SelectObjectContentCommand, de_UploadPartCommand, de_UploadPartCopyCommand, de_WriteGetObjectResponseCommand), the shared de_CommandError dispatcher that maps error codes such as NoSuchUpload, NoSuchBucket, NoSuchKey, and InvalidObjectState to modeled exceptions, and the corresponding de_*Res error deserializers. Every generated deserializer follows the same pattern: reject non-2xx status codes via de_CommandError, map selected x-amz-* response headers into the result with lazily parsed typed values, then parse the XML body (or drain it for header-only commands) and return the result object. …]
deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidRequestRes"); +var de_InvalidWriteOffsetRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const exception = new InvalidWriteOffset({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidWriteOffsetRes"); +var de_NoSuchBucketRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const exception = new NoSuchBucket({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_NoSuchBucketRes"); +var de_NoSuchKeyRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const exception = new NoSuchKey({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_NoSuchKeyRes"); +var de_NoSuchUploadRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const exception = new NoSuchUpload({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_NoSuchUploadRes"); +var de_NotFoundRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const exception = new NotFound({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_NotFoundRes"); +var de_ObjectAlreadyInActiveTierErrorRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const exception = new ObjectAlreadyInActiveTierError({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_ObjectAlreadyInActiveTierErrorRes"); +var de_ObjectNotInActiveTierErrorRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const exception = new ObjectNotInActiveTierError({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_ObjectNotInActiveTierErrorRes"); +var de_TooManyPartsRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const exception = new TooManyParts({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_TooManyPartsRes"); +var de_SelectObjectContentEventStream = /* @__PURE__ */ __name((output, context) => { + return 
context.eventStreamMarshaller.deserialize(output, async (event) => { + if (event["Records"] != null) { + return { + Records: await de_RecordsEvent_event(event["Records"], context) + }; + } + if (event["Stats"] != null) { + return { + Stats: await de_StatsEvent_event(event["Stats"], context) + }; + } + if (event["Progress"] != null) { + return { + Progress: await de_ProgressEvent_event(event["Progress"], context) + }; + } + if (event["Cont"] != null) { + return { + Cont: await de_ContinuationEvent_event(event["Cont"], context) + }; + } + if (event["End"] != null) { + return { + End: await de_EndEvent_event(event["End"], context) + }; + } + return { $unknown: output }; + }); +}, "de_SelectObjectContentEventStream"); +var de_ContinuationEvent_event = /* @__PURE__ */ __name(async (output, context) => { + const contents = {}; + const data = await (0, import_core.parseXmlBody)(output.body, context); + Object.assign(contents, de_ContinuationEvent(data, context)); + return contents; +}, "de_ContinuationEvent_event"); +var de_EndEvent_event = /* @__PURE__ */ __name(async (output, context) => { + const contents = {}; + const data = await (0, import_core.parseXmlBody)(output.body, context); + Object.assign(contents, de_EndEvent(data, context)); + return contents; +}, "de_EndEvent_event"); +var de_ProgressEvent_event = /* @__PURE__ */ __name(async (output, context) => { + const contents = {}; + const data = await (0, import_core.parseXmlBody)(output.body, context); + contents.Details = de_Progress(data, context); + return contents; +}, "de_ProgressEvent_event"); +var de_RecordsEvent_event = /* @__PURE__ */ __name(async (output, context) => { + const contents = {}; + contents.Payload = output.body; + return contents; +}, "de_RecordsEvent_event"); +var de_StatsEvent_event = /* @__PURE__ */ __name(async (output, context) => { + const contents = {}; + const data = await (0, import_core.parseXmlBody)(output.body, context); + contents.Details = de_Stats(data, context); + return contents; +}, "de_StatsEvent_event"); +var se_AbortIncompleteMultipartUpload = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_AIMU); + if (input[_DAI] != null) { + bn.c(import_xml_builder.XmlNode.of(_DAI, String(input[_DAI])).n(_DAI)); + } + return bn; +}, "se_AbortIncompleteMultipartUpload"); +var se_AccelerateConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_ACc); + if (input[_S] != null) { + bn.c(import_xml_builder.XmlNode.of(_BAS, input[_S]).n(_S)); + } + return bn; +}, "se_AccelerateConfiguration"); +var se_AccessControlPolicy = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_ACP); + bn.lc(input, "Grants", "AccessControlList", () => se_Grants(input[_Gr], context)); + if (input[_O] != null) { + bn.c(se_Owner(input[_O], context).n(_O)); + } + return bn; +}, "se_AccessControlPolicy"); +var se_AccessControlTranslation = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_ACT); + if (input[_O] != null) { + bn.c(import_xml_builder.XmlNode.of(_OOw, input[_O]).n(_O)); + } + return bn; +}, "se_AccessControlTranslation"); +var se_AllowedHeaders = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = import_xml_builder.XmlNode.of(_AH, entry); + return n.n(_me); + }); +}, "se_AllowedHeaders"); +var se_AllowedMethods = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != 
null).map((entry) => { + const n = import_xml_builder.XmlNode.of(_AM, entry); + return n.n(_me); + }); +}, "se_AllowedMethods"); +var se_AllowedOrigins = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = import_xml_builder.XmlNode.of(_AO, entry); + return n.n(_me); + }); +}, "se_AllowedOrigins"); +var se_AnalyticsAndOperator = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_AAO); + bn.cc(input, _P); + bn.l(input, "Tags", "Tag", () => se_TagSet(input[_Tag], context)); + return bn; +}, "se_AnalyticsAndOperator"); +var se_AnalyticsConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_AC); + if (input[_I] != null) { + bn.c(import_xml_builder.XmlNode.of(_AI, input[_I]).n(_I)); + } + if (input[_F] != null) { + bn.c(se_AnalyticsFilter(input[_F], context).n(_F)); + } + if (input[_SCA] != null) { + bn.c(se_StorageClassAnalysis(input[_SCA], context).n(_SCA)); + } + return bn; +}, "se_AnalyticsConfiguration"); +var se_AnalyticsExportDestination = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_AED); + if (input[_SBD] != null) { + bn.c(se_AnalyticsS3BucketDestination(input[_SBD], context).n(_SBD)); + } + return bn; +}, "se_AnalyticsExportDestination"); +var se_AnalyticsFilter = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_AF); + AnalyticsFilter.visit(input, { + Prefix: /* @__PURE__ */ __name((value) => { + if (input[_P] != null) { + bn.c(import_xml_builder.XmlNode.of(_P, value).n(_P)); + } + }, "Prefix"), + Tag: /* @__PURE__ */ __name((value) => { + if (input[_Ta] != null) { + bn.c(se_Tag(value, context).n(_Ta)); + } + }, "Tag"), + And: /* @__PURE__ */ __name((value) => { + if (input[_A] != null) { + bn.c(se_AnalyticsAndOperator(value, context).n(_A)); + } + }, "And"), + _: /* @__PURE__ */ __name((name, value) => { + if (!(value instanceof import_xml_builder.XmlNode || value instanceof import_xml_builder.XmlText)) { + throw new Error("Unable to serialize unknown union members in XML."); + } + bn.c(new import_xml_builder.XmlNode(name).c(value)); + }, "_") + }); + return bn; +}, "se_AnalyticsFilter"); +var se_AnalyticsS3BucketDestination = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_ASBD); + if (input[_Fo] != null) { + bn.c(import_xml_builder.XmlNode.of(_ASEFF, input[_Fo]).n(_Fo)); + } + if (input[_BAI] != null) { + bn.c(import_xml_builder.XmlNode.of(_AIc, input[_BAI]).n(_BAI)); + } + if (input[_B] != null) { + bn.c(import_xml_builder.XmlNode.of(_BN, input[_B]).n(_B)); + } + bn.cc(input, _P); + return bn; +}, "se_AnalyticsS3BucketDestination"); +var se_BucketInfo = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_BI); + bn.cc(input, _DR); + if (input[_Ty] != null) { + bn.c(import_xml_builder.XmlNode.of(_BT, input[_Ty]).n(_Ty)); + } + return bn; +}, "se_BucketInfo"); +var se_BucketLifecycleConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_BLC); + bn.l(input, "Rules", "Rule", () => se_LifecycleRules(input[_Rul], context)); + return bn; +}, "se_BucketLifecycleConfiguration"); +var se_BucketLoggingStatus = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_BLS); + if (input[_LE] != null) { + bn.c(se_LoggingEnabled(input[_LE], context).n(_LE)); + } + return bn; +}, 
"se_BucketLoggingStatus"); +var se_CompletedMultipartUpload = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_CMU); + bn.l(input, "Parts", "Part", () => se_CompletedPartList(input[_Part], context)); + return bn; +}, "se_CompletedMultipartUpload"); +var se_CompletedPart = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_CPo); + bn.cc(input, _ETa); + bn.cc(input, _CCRC); + bn.cc(input, _CCRCC); + bn.cc(input, _CCRCNVME); + bn.cc(input, _CSHA); + bn.cc(input, _CSHAh); + if (input[_PN] != null) { + bn.c(import_xml_builder.XmlNode.of(_PN, String(input[_PN])).n(_PN)); + } + return bn; +}, "se_CompletedPart"); +var se_CompletedPartList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = se_CompletedPart(entry, context); + return n.n(_me); + }); +}, "se_CompletedPartList"); +var se_Condition = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_Con); + bn.cc(input, _HECRE); + bn.cc(input, _KPE); + return bn; +}, "se_Condition"); +var se_CORSConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_CORSC); + bn.l(input, "CORSRules", "CORSRule", () => se_CORSRules(input[_CORSRu], context)); + return bn; +}, "se_CORSConfiguration"); +var se_CORSRule = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_CORSR); + bn.cc(input, _ID_); + bn.l(input, "AllowedHeaders", "AllowedHeader", () => se_AllowedHeaders(input[_AHl], context)); + bn.l(input, "AllowedMethods", "AllowedMethod", () => se_AllowedMethods(input[_AMl], context)); + bn.l(input, "AllowedOrigins", "AllowedOrigin", () => se_AllowedOrigins(input[_AOl], context)); + bn.l(input, "ExposeHeaders", "ExposeHeader", () => se_ExposeHeaders(input[_EH], context)); + if (input[_MAS] != null) { + bn.c(import_xml_builder.XmlNode.of(_MAS, String(input[_MAS])).n(_MAS)); + } + return bn; +}, "se_CORSRule"); +var se_CORSRules = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = se_CORSRule(entry, context); + return n.n(_me); + }); +}, "se_CORSRules"); +var se_CreateBucketConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_CBC); + if (input[_LC] != null) { + bn.c(import_xml_builder.XmlNode.of(_BLCu, input[_LC]).n(_LC)); + } + if (input[_L] != null) { + bn.c(se_LocationInfo(input[_L], context).n(_L)); + } + if (input[_B] != null) { + bn.c(se_BucketInfo(input[_B], context).n(_B)); + } + return bn; +}, "se_CreateBucketConfiguration"); +var se_CSVInput = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_CSVIn); + bn.cc(input, _FHI); + bn.cc(input, _Com); + bn.cc(input, _QEC); + bn.cc(input, _RD); + bn.cc(input, _FD); + bn.cc(input, _QCuo); + if (input[_AQRD] != null) { + bn.c(import_xml_builder.XmlNode.of(_AQRD, String(input[_AQRD])).n(_AQRD)); + } + return bn; +}, "se_CSVInput"); +var se_CSVOutput = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_CSVO); + bn.cc(input, _QF); + bn.cc(input, _QEC); + bn.cc(input, _RD); + bn.cc(input, _FD); + bn.cc(input, _QCuo); + return bn; +}, "se_CSVOutput"); +var se_DefaultRetention = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_DRe); + if (input[_Mo] != null) { + bn.c(import_xml_builder.XmlNode.of(_OLRM, input[_Mo]).n(_Mo)); + 
} + if (input[_Da] != null) { + bn.c(import_xml_builder.XmlNode.of(_Da, String(input[_Da])).n(_Da)); + } + if (input[_Y] != null) { + bn.c(import_xml_builder.XmlNode.of(_Y, String(input[_Y])).n(_Y)); + } + return bn; +}, "se_DefaultRetention"); +var se_Delete = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_Del); + bn.l(input, "Objects", "Object", () => se_ObjectIdentifierList(input[_Ob], context)); + if (input[_Q] != null) { + bn.c(import_xml_builder.XmlNode.of(_Q, String(input[_Q])).n(_Q)); + } + return bn; +}, "se_Delete"); +var se_DeleteMarkerReplication = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_DMR); + if (input[_S] != null) { + bn.c(import_xml_builder.XmlNode.of(_DMRS, input[_S]).n(_S)); + } + return bn; +}, "se_DeleteMarkerReplication"); +var se_Destination = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_Des); + if (input[_B] != null) { + bn.c(import_xml_builder.XmlNode.of(_BN, input[_B]).n(_B)); + } + if (input[_Ac] != null) { + bn.c(import_xml_builder.XmlNode.of(_AIc, input[_Ac]).n(_Ac)); + } + bn.cc(input, _SC); + if (input[_ACT] != null) { + bn.c(se_AccessControlTranslation(input[_ACT], context).n(_ACT)); + } + if (input[_ECn] != null) { + bn.c(se_EncryptionConfiguration(input[_ECn], context).n(_ECn)); + } + if (input[_RTe] != null) { + bn.c(se_ReplicationTime(input[_RTe], context).n(_RTe)); + } + if (input[_Me] != null) { + bn.c(se_Metrics(input[_Me], context).n(_Me)); + } + return bn; +}, "se_Destination"); +var se_Encryption = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_En); + if (input[_ETn] != null) { + bn.c(import_xml_builder.XmlNode.of(_SSE, input[_ETn]).n(_ETn)); + } + if (input[_KMSKI] != null) { + bn.c(import_xml_builder.XmlNode.of(_SSEKMSKI, input[_KMSKI]).n(_KMSKI)); + } + bn.cc(input, _KMSC); + return bn; +}, "se_Encryption"); +var se_EncryptionConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_ECn); + bn.cc(input, _RKKID); + return bn; +}, "se_EncryptionConfiguration"); +var se_ErrorDocument = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_ED); + if (input[_K] != null) { + bn.c(import_xml_builder.XmlNode.of(_OK, input[_K]).n(_K)); + } + return bn; +}, "se_ErrorDocument"); +var se_EventBridgeConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_EBC); + return bn; +}, "se_EventBridgeConfiguration"); +var se_EventList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = import_xml_builder.XmlNode.of(_Ev, entry); + return n.n(_me); + }); +}, "se_EventList"); +var se_ExistingObjectReplication = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_EOR); + if (input[_S] != null) { + bn.c(import_xml_builder.XmlNode.of(_EORS, input[_S]).n(_S)); + } + return bn; +}, "se_ExistingObjectReplication"); +var se_ExposeHeaders = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = import_xml_builder.XmlNode.of(_EHx, entry); + return n.n(_me); + }); +}, "se_ExposeHeaders"); +var se_FilterRule = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_FR); + if (input[_N] != null) { + bn.c(import_xml_builder.XmlNode.of(_FRN, input[_N]).n(_N)); + } + if (input[_Va] != 
null) { + bn.c(import_xml_builder.XmlNode.of(_FRV, input[_Va]).n(_Va)); + } + return bn; +}, "se_FilterRule"); +var se_FilterRuleList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = se_FilterRule(entry, context); + return n.n(_me); + }); +}, "se_FilterRuleList"); +var se_GlacierJobParameters = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_GJP); + bn.cc(input, _Ti); + return bn; +}, "se_GlacierJobParameters"); +var se_Grant = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_G); + if (input[_Gra] != null) { + const n = se_Grantee(input[_Gra], context).n(_Gra); + n.a("xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance"); + bn.c(n); + } + bn.cc(input, _Pe); + return bn; +}, "se_Grant"); +var se_Grantee = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_Gra); + bn.cc(input, _DN); + bn.cc(input, _EA); + bn.cc(input, _ID_); + bn.cc(input, _URI); + bn.a("xsi:type", input[_Ty]); + return bn; +}, "se_Grantee"); +var se_Grants = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = se_Grant(entry, context); + return n.n(_G); + }); +}, "se_Grants"); +var se_IndexDocument = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_ID); + bn.cc(input, _Su); + return bn; +}, "se_IndexDocument"); +var se_InputSerialization = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_IS); + if (input[_CSV] != null) { + bn.c(se_CSVInput(input[_CSV], context).n(_CSV)); + } + bn.cc(input, _CTom); + if (input[_JSON] != null) { + bn.c(se_JSONInput(input[_JSON], context).n(_JSON)); + } + if (input[_Parq] != null) { + bn.c(se_ParquetInput(input[_Parq], context).n(_Parq)); + } + return bn; +}, "se_InputSerialization"); +var se_IntelligentTieringAndOperator = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_ITAO); + bn.cc(input, _P); + bn.l(input, "Tags", "Tag", () => se_TagSet(input[_Tag], context)); + return bn; +}, "se_IntelligentTieringAndOperator"); +var se_IntelligentTieringConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_ITC); + if (input[_I] != null) { + bn.c(import_xml_builder.XmlNode.of(_ITI, input[_I]).n(_I)); + } + if (input[_F] != null) { + bn.c(se_IntelligentTieringFilter(input[_F], context).n(_F)); + } + if (input[_S] != null) { + bn.c(import_xml_builder.XmlNode.of(_ITS, input[_S]).n(_S)); + } + bn.l(input, "Tierings", "Tiering", () => se_TieringList(input[_Tie], context)); + return bn; +}, "se_IntelligentTieringConfiguration"); +var se_IntelligentTieringFilter = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_ITF); + bn.cc(input, _P); + if (input[_Ta] != null) { + bn.c(se_Tag(input[_Ta], context).n(_Ta)); + } + if (input[_A] != null) { + bn.c(se_IntelligentTieringAndOperator(input[_A], context).n(_A)); + } + return bn; +}, "se_IntelligentTieringFilter"); +var se_InventoryConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_IC); + if (input[_Des] != null) { + bn.c(se_InventoryDestination(input[_Des], context).n(_Des)); + } + if (input[_IE] != null) { + bn.c(import_xml_builder.XmlNode.of(_IE, String(input[_IE])).n(_IE)); + } + if (input[_F] != null) { + bn.c(se_InventoryFilter(input[_F], 
context).n(_F)); + } + if (input[_I] != null) { + bn.c(import_xml_builder.XmlNode.of(_II, input[_I]).n(_I)); + } + if (input[_IOV] != null) { + bn.c(import_xml_builder.XmlNode.of(_IIOV, input[_IOV]).n(_IOV)); + } + bn.lc(input, "OptionalFields", "OptionalFields", () => se_InventoryOptionalFields(input[_OF], context)); + if (input[_Sc] != null) { + bn.c(se_InventorySchedule(input[_Sc], context).n(_Sc)); + } + return bn; +}, "se_InventoryConfiguration"); +var se_InventoryDestination = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_IDn); + if (input[_SBD] != null) { + bn.c(se_InventoryS3BucketDestination(input[_SBD], context).n(_SBD)); + } + return bn; +}, "se_InventoryDestination"); +var se_InventoryEncryption = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_IEn); + if (input[_SSES] != null) { + bn.c(se_SSES3(input[_SSES], context).n(_SS)); + } + if (input[_SSEKMS] != null) { + bn.c(se_SSEKMS(input[_SSEKMS], context).n(_SK)); + } + return bn; +}, "se_InventoryEncryption"); +var se_InventoryFilter = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_IF); + bn.cc(input, _P); + return bn; +}, "se_InventoryFilter"); +var se_InventoryOptionalFields = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = import_xml_builder.XmlNode.of(_IOF, entry); + return n.n(_Fi); + }); +}, "se_InventoryOptionalFields"); +var se_InventoryS3BucketDestination = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_ISBD); + bn.cc(input, _AIc); + if (input[_B] != null) { + bn.c(import_xml_builder.XmlNode.of(_BN, input[_B]).n(_B)); + } + if (input[_Fo] != null) { + bn.c(import_xml_builder.XmlNode.of(_IFn, input[_Fo]).n(_Fo)); + } + bn.cc(input, _P); + if (input[_En] != null) { + bn.c(se_InventoryEncryption(input[_En], context).n(_En)); + } + return bn; +}, "se_InventoryS3BucketDestination"); +var se_InventorySchedule = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_ISn); + if (input[_Fr] != null) { + bn.c(import_xml_builder.XmlNode.of(_IFnv, input[_Fr]).n(_Fr)); + } + return bn; +}, "se_InventorySchedule"); +var se_JSONInput = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_JSONI); + if (input[_Ty] != null) { + bn.c(import_xml_builder.XmlNode.of(_JSONT, input[_Ty]).n(_Ty)); + } + return bn; +}, "se_JSONInput"); +var se_JSONOutput = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_JSONO); + bn.cc(input, _RD); + return bn; +}, "se_JSONOutput"); +var se_LambdaFunctionConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_LFCa); + if (input[_I] != null) { + bn.c(import_xml_builder.XmlNode.of(_NI, input[_I]).n(_I)); + } + if (input[_LFA] != null) { + bn.c(import_xml_builder.XmlNode.of(_LFA, input[_LFA]).n(_CF)); + } + bn.l(input, "Events", "Event", () => se_EventList(input[_Eve], context)); + if (input[_F] != null) { + bn.c(se_NotificationConfigurationFilter(input[_F], context).n(_F)); + } + return bn; +}, "se_LambdaFunctionConfiguration"); +var se_LambdaFunctionConfigurationList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = se_LambdaFunctionConfiguration(entry, context); + return n.n(_me); + }); +}, "se_LambdaFunctionConfigurationList"); +var 
se_LifecycleExpiration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_LEi); + if (input[_Dat] != null) { + bn.c(import_xml_builder.XmlNode.of(_Dat, (0, import_smithy_client.serializeDateTime)(input[_Dat]).toString()).n(_Dat)); + } + if (input[_Da] != null) { + bn.c(import_xml_builder.XmlNode.of(_Da, String(input[_Da])).n(_Da)); + } + if (input[_EODM] != null) { + bn.c(import_xml_builder.XmlNode.of(_EODM, String(input[_EODM])).n(_EODM)); + } + return bn; +}, "se_LifecycleExpiration"); +var se_LifecycleRule = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_LR); + if (input[_Exp] != null) { + bn.c(se_LifecycleExpiration(input[_Exp], context).n(_Exp)); + } + bn.cc(input, _ID_); + bn.cc(input, _P); + if (input[_F] != null) { + bn.c(se_LifecycleRuleFilter(input[_F], context).n(_F)); + } + if (input[_S] != null) { + bn.c(import_xml_builder.XmlNode.of(_ESx, input[_S]).n(_S)); + } + bn.l(input, "Transitions", "Transition", () => se_TransitionList(input[_Tr], context)); + bn.l( + input, + "NoncurrentVersionTransitions", + "NoncurrentVersionTransition", + () => se_NoncurrentVersionTransitionList(input[_NVT], context) + ); + if (input[_NVE] != null) { + bn.c(se_NoncurrentVersionExpiration(input[_NVE], context).n(_NVE)); + } + if (input[_AIMU] != null) { + bn.c(se_AbortIncompleteMultipartUpload(input[_AIMU], context).n(_AIMU)); + } + return bn; +}, "se_LifecycleRule"); +var se_LifecycleRuleAndOperator = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_LRAO); + bn.cc(input, _P); + bn.l(input, "Tags", "Tag", () => se_TagSet(input[_Tag], context)); + if (input[_OSGT] != null) { + bn.c(import_xml_builder.XmlNode.of(_OSGTB, String(input[_OSGT])).n(_OSGT)); + } + if (input[_OSLT] != null) { + bn.c(import_xml_builder.XmlNode.of(_OSLTB, String(input[_OSLT])).n(_OSLT)); + } + return bn; +}, "se_LifecycleRuleAndOperator"); +var se_LifecycleRuleFilter = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_LRF); + bn.cc(input, _P); + if (input[_Ta] != null) { + bn.c(se_Tag(input[_Ta], context).n(_Ta)); + } + if (input[_OSGT] != null) { + bn.c(import_xml_builder.XmlNode.of(_OSGTB, String(input[_OSGT])).n(_OSGT)); + } + if (input[_OSLT] != null) { + bn.c(import_xml_builder.XmlNode.of(_OSLTB, String(input[_OSLT])).n(_OSLT)); + } + if (input[_A] != null) { + bn.c(se_LifecycleRuleAndOperator(input[_A], context).n(_A)); + } + return bn; +}, "se_LifecycleRuleFilter"); +var se_LifecycleRules = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = se_LifecycleRule(entry, context); + return n.n(_me); + }); +}, "se_LifecycleRules"); +var se_LocationInfo = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_LI); + if (input[_Ty] != null) { + bn.c(import_xml_builder.XmlNode.of(_LT, input[_Ty]).n(_Ty)); + } + if (input[_N] != null) { + bn.c(import_xml_builder.XmlNode.of(_LNAS, input[_N]).n(_N)); + } + return bn; +}, "se_LocationInfo"); +var se_LoggingEnabled = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_LE); + bn.cc(input, _TB); + bn.lc(input, "TargetGrants", "TargetGrants", () => se_TargetGrants(input[_TG], context)); + bn.cc(input, _TP); + if (input[_TOKF] != null) { + bn.c(se_TargetObjectKeyFormat(input[_TOKF], context).n(_TOKF)); + } + return bn; +}, "se_LoggingEnabled"); +var se_MetadataEntry = /* @__PURE__ */ 
__name((input, context) => { + const bn = new import_xml_builder.XmlNode(_ME); + if (input[_N] != null) { + bn.c(import_xml_builder.XmlNode.of(_MKe, input[_N]).n(_N)); + } + if (input[_Va] != null) { + bn.c(import_xml_builder.XmlNode.of(_MV, input[_Va]).n(_Va)); + } + return bn; +}, "se_MetadataEntry"); +var se_MetadataTableConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_MTC); + if (input[_STD] != null) { + bn.c(se_S3TablesDestination(input[_STD], context).n(_STD)); + } + return bn; +}, "se_MetadataTableConfiguration"); +var se_Metrics = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_Me); + if (input[_S] != null) { + bn.c(import_xml_builder.XmlNode.of(_MS, input[_S]).n(_S)); + } + if (input[_ETv] != null) { + bn.c(se_ReplicationTimeValue(input[_ETv], context).n(_ETv)); + } + return bn; +}, "se_Metrics"); +var se_MetricsAndOperator = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_MAO); + bn.cc(input, _P); + bn.l(input, "Tags", "Tag", () => se_TagSet(input[_Tag], context)); + bn.cc(input, _APAc); + return bn; +}, "se_MetricsAndOperator"); +var se_MetricsConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_MC); + if (input[_I] != null) { + bn.c(import_xml_builder.XmlNode.of(_MI, input[_I]).n(_I)); + } + if (input[_F] != null) { + bn.c(se_MetricsFilter(input[_F], context).n(_F)); + } + return bn; +}, "se_MetricsConfiguration"); +var se_MetricsFilter = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_MF); + MetricsFilter.visit(input, { + Prefix: /* @__PURE__ */ __name((value) => { + if (input[_P] != null) { + bn.c(import_xml_builder.XmlNode.of(_P, value).n(_P)); + } + }, "Prefix"), + Tag: /* @__PURE__ */ __name((value) => { + if (input[_Ta] != null) { + bn.c(se_Tag(value, context).n(_Ta)); + } + }, "Tag"), + AccessPointArn: /* @__PURE__ */ __name((value) => { + if (input[_APAc] != null) { + bn.c(import_xml_builder.XmlNode.of(_APAc, value).n(_APAc)); + } + }, "AccessPointArn"), + And: /* @__PURE__ */ __name((value) => { + if (input[_A] != null) { + bn.c(se_MetricsAndOperator(value, context).n(_A)); + } + }, "And"), + _: /* @__PURE__ */ __name((name, value) => { + if (!(value instanceof import_xml_builder.XmlNode || value instanceof import_xml_builder.XmlText)) { + throw new Error("Unable to serialize unknown union members in XML."); + } + bn.c(new import_xml_builder.XmlNode(name).c(value)); + }, "_") + }); + return bn; +}, "se_MetricsFilter"); +var se_NoncurrentVersionExpiration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_NVE); + if (input[_ND] != null) { + bn.c(import_xml_builder.XmlNode.of(_Da, String(input[_ND])).n(_ND)); + } + if (input[_NNV] != null) { + bn.c(import_xml_builder.XmlNode.of(_VC, String(input[_NNV])).n(_NNV)); + } + return bn; +}, "se_NoncurrentVersionExpiration"); +var se_NoncurrentVersionTransition = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_NVTo); + if (input[_ND] != null) { + bn.c(import_xml_builder.XmlNode.of(_Da, String(input[_ND])).n(_ND)); + } + if (input[_SC] != null) { + bn.c(import_xml_builder.XmlNode.of(_TSC, input[_SC]).n(_SC)); + } + if (input[_NNV] != null) { + bn.c(import_xml_builder.XmlNode.of(_VC, String(input[_NNV])).n(_NNV)); + } + return bn; +}, "se_NoncurrentVersionTransition"); +var se_NoncurrentVersionTransitionList = /* 
@__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = se_NoncurrentVersionTransition(entry, context); + return n.n(_me); + }); +}, "se_NoncurrentVersionTransitionList"); +var se_NotificationConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_NC); + bn.l(input, "TopicConfigurations", "TopicConfiguration", () => se_TopicConfigurationList(input[_TCop], context)); + bn.l(input, "QueueConfigurations", "QueueConfiguration", () => se_QueueConfigurationList(input[_QCu], context)); + bn.l( + input, + "LambdaFunctionConfigurations", + "CloudFunctionConfiguration", + () => se_LambdaFunctionConfigurationList(input[_LFC], context) + ); + if (input[_EBC] != null) { + bn.c(se_EventBridgeConfiguration(input[_EBC], context).n(_EBC)); + } + return bn; +}, "se_NotificationConfiguration"); +var se_NotificationConfigurationFilter = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_NCF); + if (input[_K] != null) { + bn.c(se_S3KeyFilter(input[_K], context).n(_SKe)); + } + return bn; +}, "se_NotificationConfigurationFilter"); +var se_ObjectIdentifier = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_OI); + if (input[_K] != null) { + bn.c(import_xml_builder.XmlNode.of(_OK, input[_K]).n(_K)); + } + if (input[_VI] != null) { + bn.c(import_xml_builder.XmlNode.of(_OVI, input[_VI]).n(_VI)); + } + bn.cc(input, _ETa); + if (input[_LMT] != null) { + bn.c(import_xml_builder.XmlNode.of(_LMT, (0, import_smithy_client.dateToUtcString)(input[_LMT]).toString()).n(_LMT)); + } + if (input[_Si] != null) { + bn.c(import_xml_builder.XmlNode.of(_Si, String(input[_Si])).n(_Si)); + } + return bn; +}, "se_ObjectIdentifier"); +var se_ObjectIdentifierList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = se_ObjectIdentifier(entry, context); + return n.n(_me); + }); +}, "se_ObjectIdentifierList"); +var se_ObjectLockConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_OLC); + bn.cc(input, _OLE); + if (input[_Ru] != null) { + bn.c(se_ObjectLockRule(input[_Ru], context).n(_Ru)); + } + return bn; +}, "se_ObjectLockConfiguration"); +var se_ObjectLockLegalHold = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_OLLH); + if (input[_S] != null) { + bn.c(import_xml_builder.XmlNode.of(_OLLHS, input[_S]).n(_S)); + } + return bn; +}, "se_ObjectLockLegalHold"); +var se_ObjectLockRetention = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_OLR); + if (input[_Mo] != null) { + bn.c(import_xml_builder.XmlNode.of(_OLRM, input[_Mo]).n(_Mo)); + } + if (input[_RUD] != null) { + bn.c(import_xml_builder.XmlNode.of(_Dat, (0, import_smithy_client.serializeDateTime)(input[_RUD]).toString()).n(_RUD)); + } + return bn; +}, "se_ObjectLockRetention"); +var se_ObjectLockRule = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_OLRb); + if (input[_DRe] != null) { + bn.c(se_DefaultRetention(input[_DRe], context).n(_DRe)); + } + return bn; +}, "se_ObjectLockRule"); +var se_OutputLocation = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_OL); + if (input[_S_] != null) { + bn.c(se_S3Location(input[_S_], context).n(_S_)); + } + return bn; +}, "se_OutputLocation"); +var se_OutputSerialization = /* @__PURE__ */ 
__name((input, context) => { + const bn = new import_xml_builder.XmlNode(_OS); + if (input[_CSV] != null) { + bn.c(se_CSVOutput(input[_CSV], context).n(_CSV)); + } + if (input[_JSON] != null) { + bn.c(se_JSONOutput(input[_JSON], context).n(_JSON)); + } + return bn; +}, "se_OutputSerialization"); +var se_Owner = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_O); + bn.cc(input, _DN); + bn.cc(input, _ID_); + return bn; +}, "se_Owner"); +var se_OwnershipControls = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_OC); + bn.l(input, "Rules", "Rule", () => se_OwnershipControlsRules(input[_Rul], context)); + return bn; +}, "se_OwnershipControls"); +var se_OwnershipControlsRule = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_OCR); + bn.cc(input, _OO); + return bn; +}, "se_OwnershipControlsRule"); +var se_OwnershipControlsRules = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = se_OwnershipControlsRule(entry, context); + return n.n(_me); + }); +}, "se_OwnershipControlsRules"); +var se_ParquetInput = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_PI); + return bn; +}, "se_ParquetInput"); +var se_PartitionedPrefix = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_PP); + bn.cc(input, _PDS); + return bn; +}, "se_PartitionedPrefix"); +var se_PublicAccessBlockConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_PABC); + if (input[_BPA] != null) { + bn.c(import_xml_builder.XmlNode.of(_Se, String(input[_BPA])).n(_BPA)); + } + if (input[_IPA] != null) { + bn.c(import_xml_builder.XmlNode.of(_Se, String(input[_IPA])).n(_IPA)); + } + if (input[_BPP] != null) { + bn.c(import_xml_builder.XmlNode.of(_Se, String(input[_BPP])).n(_BPP)); + } + if (input[_RPB] != null) { + bn.c(import_xml_builder.XmlNode.of(_Se, String(input[_RPB])).n(_RPB)); + } + return bn; +}, "se_PublicAccessBlockConfiguration"); +var se_QueueConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_QC); + if (input[_I] != null) { + bn.c(import_xml_builder.XmlNode.of(_NI, input[_I]).n(_I)); + } + if (input[_QA] != null) { + bn.c(import_xml_builder.XmlNode.of(_QA, input[_QA]).n(_Qu)); + } + bn.l(input, "Events", "Event", () => se_EventList(input[_Eve], context)); + if (input[_F] != null) { + bn.c(se_NotificationConfigurationFilter(input[_F], context).n(_F)); + } + return bn; +}, "se_QueueConfiguration"); +var se_QueueConfigurationList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = se_QueueConfiguration(entry, context); + return n.n(_me); + }); +}, "se_QueueConfigurationList"); +var se_Redirect = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_Red); + bn.cc(input, _HN); + bn.cc(input, _HRC); + bn.cc(input, _Pr); + bn.cc(input, _RKPW); + bn.cc(input, _RKW); + return bn; +}, "se_Redirect"); +var se_RedirectAllRequestsTo = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_RART); + bn.cc(input, _HN); + bn.cc(input, _Pr); + return bn; +}, "se_RedirectAllRequestsTo"); +var se_ReplicaModifications = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_RM); + if (input[_S] != null) { + 
bn.c(import_xml_builder.XmlNode.of(_RMS, input[_S]).n(_S)); + } + return bn; +}, "se_ReplicaModifications"); +var se_ReplicationConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_RCe); + bn.cc(input, _Ro); + bn.l(input, "Rules", "Rule", () => se_ReplicationRules(input[_Rul], context)); + return bn; +}, "se_ReplicationConfiguration"); +var se_ReplicationRule = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_RRe); + bn.cc(input, _ID_); + if (input[_Pri] != null) { + bn.c(import_xml_builder.XmlNode.of(_Pri, String(input[_Pri])).n(_Pri)); + } + bn.cc(input, _P); + if (input[_F] != null) { + bn.c(se_ReplicationRuleFilter(input[_F], context).n(_F)); + } + if (input[_S] != null) { + bn.c(import_xml_builder.XmlNode.of(_RRS, input[_S]).n(_S)); + } + if (input[_SSC] != null) { + bn.c(se_SourceSelectionCriteria(input[_SSC], context).n(_SSC)); + } + if (input[_EOR] != null) { + bn.c(se_ExistingObjectReplication(input[_EOR], context).n(_EOR)); + } + if (input[_Des] != null) { + bn.c(se_Destination(input[_Des], context).n(_Des)); + } + if (input[_DMR] != null) { + bn.c(se_DeleteMarkerReplication(input[_DMR], context).n(_DMR)); + } + return bn; +}, "se_ReplicationRule"); +var se_ReplicationRuleAndOperator = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_RRAO); + bn.cc(input, _P); + bn.l(input, "Tags", "Tag", () => se_TagSet(input[_Tag], context)); + return bn; +}, "se_ReplicationRuleAndOperator"); +var se_ReplicationRuleFilter = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_RRF); + bn.cc(input, _P); + if (input[_Ta] != null) { + bn.c(se_Tag(input[_Ta], context).n(_Ta)); + } + if (input[_A] != null) { + bn.c(se_ReplicationRuleAndOperator(input[_A], context).n(_A)); + } + return bn; +}, "se_ReplicationRuleFilter"); +var se_ReplicationRules = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = se_ReplicationRule(entry, context); + return n.n(_me); + }); +}, "se_ReplicationRules"); +var se_ReplicationTime = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_RTe); + if (input[_S] != null) { + bn.c(import_xml_builder.XmlNode.of(_RTS, input[_S]).n(_S)); + } + if (input[_Tim] != null) { + bn.c(se_ReplicationTimeValue(input[_Tim], context).n(_Tim)); + } + return bn; +}, "se_ReplicationTime"); +var se_ReplicationTimeValue = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_RTV); + if (input[_Mi] != null) { + bn.c(import_xml_builder.XmlNode.of(_Mi, String(input[_Mi])).n(_Mi)); + } + return bn; +}, "se_ReplicationTimeValue"); +var se_RequestPaymentConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_RPC); + bn.cc(input, _Pa); + return bn; +}, "se_RequestPaymentConfiguration"); +var se_RequestProgress = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_RPe); + if (input[_Ena] != null) { + bn.c(import_xml_builder.XmlNode.of(_ERP, String(input[_Ena])).n(_Ena)); + } + return bn; +}, "se_RequestProgress"); +var se_RestoreRequest = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_RRes); + if (input[_Da] != null) { + bn.c(import_xml_builder.XmlNode.of(_Da, String(input[_Da])).n(_Da)); + } + if (input[_GJP] != null) { + bn.c(se_GlacierJobParameters(input[_GJP], 
context).n(_GJP)); + } + if (input[_Ty] != null) { + bn.c(import_xml_builder.XmlNode.of(_RRT, input[_Ty]).n(_Ty)); + } + bn.cc(input, _Ti); + bn.cc(input, _Desc); + if (input[_SP] != null) { + bn.c(se_SelectParameters(input[_SP], context).n(_SP)); + } + if (input[_OL] != null) { + bn.c(se_OutputLocation(input[_OL], context).n(_OL)); + } + return bn; +}, "se_RestoreRequest"); +var se_RoutingRule = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_RRou); + if (input[_Con] != null) { + bn.c(se_Condition(input[_Con], context).n(_Con)); + } + if (input[_Red] != null) { + bn.c(se_Redirect(input[_Red], context).n(_Red)); + } + return bn; +}, "se_RoutingRule"); +var se_RoutingRules = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = se_RoutingRule(entry, context); + return n.n(_RRou); + }); +}, "se_RoutingRules"); +var se_S3KeyFilter = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_SKF); + bn.l(input, "FilterRules", "FilterRule", () => se_FilterRuleList(input[_FRi], context)); + return bn; +}, "se_S3KeyFilter"); +var se_S3Location = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_SL); + bn.cc(input, _BN); + if (input[_P] != null) { + bn.c(import_xml_builder.XmlNode.of(_LP, input[_P]).n(_P)); + } + if (input[_En] != null) { + bn.c(se_Encryption(input[_En], context).n(_En)); + } + if (input[_CACL] != null) { + bn.c(import_xml_builder.XmlNode.of(_OCACL, input[_CACL]).n(_CACL)); + } + bn.lc(input, "AccessControlList", "AccessControlList", () => se_Grants(input[_ACLc], context)); + if (input[_T] != null) { + bn.c(se_Tagging(input[_T], context).n(_T)); + } + bn.lc(input, "UserMetadata", "UserMetadata", () => se_UserMetadata(input[_UM], context)); + bn.cc(input, _SC); + return bn; +}, "se_S3Location"); +var se_S3TablesDestination = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_STD); + if (input[_TBA] != null) { + bn.c(import_xml_builder.XmlNode.of(_STBA, input[_TBA]).n(_TBA)); + } + if (input[_TN] != null) { + bn.c(import_xml_builder.XmlNode.of(_STN, input[_TN]).n(_TN)); + } + return bn; +}, "se_S3TablesDestination"); +var se_ScanRange = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_SR); + if (input[_St] != null) { + bn.c(import_xml_builder.XmlNode.of(_St, String(input[_St])).n(_St)); + } + if (input[_End] != null) { + bn.c(import_xml_builder.XmlNode.of(_End, String(input[_End])).n(_End)); + } + return bn; +}, "se_ScanRange"); +var se_SelectParameters = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_SP); + if (input[_IS] != null) { + bn.c(se_InputSerialization(input[_IS], context).n(_IS)); + } + bn.cc(input, _ETx); + bn.cc(input, _Ex); + if (input[_OS] != null) { + bn.c(se_OutputSerialization(input[_OS], context).n(_OS)); + } + return bn; +}, "se_SelectParameters"); +var se_ServerSideEncryptionByDefault = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_SSEBD); + if (input[_SSEA] != null) { + bn.c(import_xml_builder.XmlNode.of(_SSE, input[_SSEA]).n(_SSEA)); + } + if (input[_KMSMKID] != null) { + bn.c(import_xml_builder.XmlNode.of(_SSEKMSKI, input[_KMSMKID]).n(_KMSMKID)); + } + return bn; +}, "se_ServerSideEncryptionByDefault"); +var se_ServerSideEncryptionConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new 
import_xml_builder.XmlNode(_SSEC); + bn.l(input, "Rules", "Rule", () => se_ServerSideEncryptionRules(input[_Rul], context)); + return bn; +}, "se_ServerSideEncryptionConfiguration"); +var se_ServerSideEncryptionRule = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_SSER); + if (input[_ASSEBD] != null) { + bn.c(se_ServerSideEncryptionByDefault(input[_ASSEBD], context).n(_ASSEBD)); + } + if (input[_BKE] != null) { + bn.c(import_xml_builder.XmlNode.of(_BKE, String(input[_BKE])).n(_BKE)); + } + return bn; +}, "se_ServerSideEncryptionRule"); +var se_ServerSideEncryptionRules = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = se_ServerSideEncryptionRule(entry, context); + return n.n(_me); + }); +}, "se_ServerSideEncryptionRules"); +var se_SimplePrefix = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_SPi); + return bn; +}, "se_SimplePrefix"); +var se_SourceSelectionCriteria = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_SSC); + if (input[_SKEO] != null) { + bn.c(se_SseKmsEncryptedObjects(input[_SKEO], context).n(_SKEO)); + } + if (input[_RM] != null) { + bn.c(se_ReplicaModifications(input[_RM], context).n(_RM)); + } + return bn; +}, "se_SourceSelectionCriteria"); +var se_SSEKMS = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_SK); + if (input[_KI] != null) { + bn.c(import_xml_builder.XmlNode.of(_SSEKMSKI, input[_KI]).n(_KI)); + } + return bn; +}, "se_SSEKMS"); +var se_SseKmsEncryptedObjects = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_SKEO); + if (input[_S] != null) { + bn.c(import_xml_builder.XmlNode.of(_SKEOS, input[_S]).n(_S)); + } + return bn; +}, "se_SseKmsEncryptedObjects"); +var se_SSES3 = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_SS); + return bn; +}, "se_SSES3"); +var se_StorageClassAnalysis = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_SCA); + if (input[_DE] != null) { + bn.c(se_StorageClassAnalysisDataExport(input[_DE], context).n(_DE)); + } + return bn; +}, "se_StorageClassAnalysis"); +var se_StorageClassAnalysisDataExport = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_SCADE); + if (input[_OSV] != null) { + bn.c(import_xml_builder.XmlNode.of(_SCASV, input[_OSV]).n(_OSV)); + } + if (input[_Des] != null) { + bn.c(se_AnalyticsExportDestination(input[_Des], context).n(_Des)); + } + return bn; +}, "se_StorageClassAnalysisDataExport"); +var se_Tag = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_Ta); + if (input[_K] != null) { + bn.c(import_xml_builder.XmlNode.of(_OK, input[_K]).n(_K)); + } + bn.cc(input, _Va); + return bn; +}, "se_Tag"); +var se_Tagging = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_T); + bn.lc(input, "TagSet", "TagSet", () => se_TagSet(input[_TS], context)); + return bn; +}, "se_Tagging"); +var se_TagSet = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = se_Tag(entry, context); + return n.n(_Ta); + }); +}, "se_TagSet"); +var se_TargetGrant = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_TGa); + if (input[_Gra] != null) { + const n = se_Grantee(input[_Gra], 
context).n(_Gra); + n.a("xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance"); + bn.c(n); + } + if (input[_Pe] != null) { + bn.c(import_xml_builder.XmlNode.of(_BLP, input[_Pe]).n(_Pe)); + } + return bn; +}, "se_TargetGrant"); +var se_TargetGrants = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = se_TargetGrant(entry, context); + return n.n(_G); + }); +}, "se_TargetGrants"); +var se_TargetObjectKeyFormat = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_TOKF); + if (input[_SPi] != null) { + bn.c(se_SimplePrefix(input[_SPi], context).n(_SPi)); + } + if (input[_PP] != null) { + bn.c(se_PartitionedPrefix(input[_PP], context).n(_PP)); + } + return bn; +}, "se_TargetObjectKeyFormat"); +var se_Tiering = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_Tier); + if (input[_Da] != null) { + bn.c(import_xml_builder.XmlNode.of(_ITD, String(input[_Da])).n(_Da)); + } + if (input[_AT] != null) { + bn.c(import_xml_builder.XmlNode.of(_ITAT, input[_AT]).n(_AT)); + } + return bn; +}, "se_Tiering"); +var se_TieringList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = se_Tiering(entry, context); + return n.n(_me); + }); +}, "se_TieringList"); +var se_TopicConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_TCo); + if (input[_I] != null) { + bn.c(import_xml_builder.XmlNode.of(_NI, input[_I]).n(_I)); + } + if (input[_TA] != null) { + bn.c(import_xml_builder.XmlNode.of(_TA, input[_TA]).n(_Top)); + } + bn.l(input, "Events", "Event", () => se_EventList(input[_Eve], context)); + if (input[_F] != null) { + bn.c(se_NotificationConfigurationFilter(input[_F], context).n(_F)); + } + return bn; +}, "se_TopicConfiguration"); +var se_TopicConfigurationList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = se_TopicConfiguration(entry, context); + return n.n(_me); + }); +}, "se_TopicConfigurationList"); +var se_Transition = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_Tra); + if (input[_Dat] != null) { + bn.c(import_xml_builder.XmlNode.of(_Dat, (0, import_smithy_client.serializeDateTime)(input[_Dat]).toString()).n(_Dat)); + } + if (input[_Da] != null) { + bn.c(import_xml_builder.XmlNode.of(_Da, String(input[_Da])).n(_Da)); + } + if (input[_SC] != null) { + bn.c(import_xml_builder.XmlNode.of(_TSC, input[_SC]).n(_SC)); + } + return bn; +}, "se_Transition"); +var se_TransitionList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = se_Transition(entry, context); + return n.n(_me); + }); +}, "se_TransitionList"); +var se_UserMetadata = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + const n = se_MetadataEntry(entry, context); + return n.n(_ME); + }); +}, "se_UserMetadata"); +var se_VersioningConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new import_xml_builder.XmlNode(_VCe); + if (input[_MFAD] != null) { + bn.c(import_xml_builder.XmlNode.of(_MFAD, input[_MFAD]).n(_MDf)); + } + if (input[_S] != null) { + bn.c(import_xml_builder.XmlNode.of(_BVS, input[_S]).n(_S)); + } + return bn; +}, "se_VersioningConfiguration"); +var se_WebsiteConfiguration = /* @__PURE__ */ __name((input, context) => { + const bn = new 
import_xml_builder.XmlNode(_WC); + if (input[_ED] != null) { + bn.c(se_ErrorDocument(input[_ED], context).n(_ED)); + } + if (input[_ID] != null) { + bn.c(se_IndexDocument(input[_ID], context).n(_ID)); + } + if (input[_RART] != null) { + bn.c(se_RedirectAllRequestsTo(input[_RART], context).n(_RART)); + } + bn.lc(input, "RoutingRules", "RoutingRules", () => se_RoutingRules(input[_RRo], context)); + return bn; +}, "se_WebsiteConfiguration"); +var de_AbortIncompleteMultipartUpload = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_DAI] != null) { + contents[_DAI] = (0, import_smithy_client.strictParseInt32)(output[_DAI]); + } + return contents; +}, "de_AbortIncompleteMultipartUpload"); +var de_AccessControlTranslation = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_O] != null) { + contents[_O] = (0, import_smithy_client.expectString)(output[_O]); + } + return contents; +}, "de_AccessControlTranslation"); +var de_AllowedHeaders = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return (0, import_smithy_client.expectString)(entry); + }); +}, "de_AllowedHeaders"); +var de_AllowedMethods = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return (0, import_smithy_client.expectString)(entry); + }); +}, "de_AllowedMethods"); +var de_AllowedOrigins = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return (0, import_smithy_client.expectString)(entry); + }); +}, "de_AllowedOrigins"); +var de_AnalyticsAndOperator = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_P] != null) { + contents[_P] = (0, import_smithy_client.expectString)(output[_P]); + } + if (output.Tag === "") { + contents[_Tag] = []; + } else if (output[_Ta] != null) { + contents[_Tag] = de_TagSet((0, import_smithy_client.getArrayIfSingleItem)(output[_Ta]), context); + } + return contents; +}, "de_AnalyticsAndOperator"); +var de_AnalyticsConfiguration = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_I] != null) { + contents[_I] = (0, import_smithy_client.expectString)(output[_I]); + } + if (output.Filter === "") { + } else if (output[_F] != null) { + contents[_F] = de_AnalyticsFilter((0, import_smithy_client.expectUnion)(output[_F]), context); + } + if (output[_SCA] != null) { + contents[_SCA] = de_StorageClassAnalysis(output[_SCA], context); + } + return contents; +}, "de_AnalyticsConfiguration"); +var de_AnalyticsConfigurationList = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_AnalyticsConfiguration(entry, context); + }); +}, "de_AnalyticsConfigurationList"); +var de_AnalyticsExportDestination = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_SBD] != null) { + contents[_SBD] = de_AnalyticsS3BucketDestination(output[_SBD], context); + } + return contents; +}, "de_AnalyticsExportDestination"); +var de_AnalyticsFilter = /* @__PURE__ */ __name((output, context) => { + if (output[_P] != null) { + return { + Prefix: (0, import_smithy_client.expectString)(output[_P]) + }; + } + if (output[_Ta] != null) { + return { + Tag: de_Tag(output[_Ta], context) + }; + } + if (output[_A] != null) { + return { + And: de_AnalyticsAndOperator(output[_A], context) + }; + } + return { $unknown: Object.entries(output)[0] 
}; +}, "de_AnalyticsFilter"); +var de_AnalyticsS3BucketDestination = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_Fo] != null) { + contents[_Fo] = (0, import_smithy_client.expectString)(output[_Fo]); + } + if (output[_BAI] != null) { + contents[_BAI] = (0, import_smithy_client.expectString)(output[_BAI]); + } + if (output[_B] != null) { + contents[_B] = (0, import_smithy_client.expectString)(output[_B]); + } + if (output[_P] != null) { + contents[_P] = (0, import_smithy_client.expectString)(output[_P]); + } + return contents; +}, "de_AnalyticsS3BucketDestination"); +var de_Bucket = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_N] != null) { + contents[_N] = (0, import_smithy_client.expectString)(output[_N]); + } + if (output[_CDr] != null) { + contents[_CDr] = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseRfc3339DateTimeWithOffset)(output[_CDr])); + } + if (output[_BR] != null) { + contents[_BR] = (0, import_smithy_client.expectString)(output[_BR]); + } + return contents; +}, "de_Bucket"); +var de_Buckets = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_Bucket(entry, context); + }); +}, "de_Buckets"); +var de_Checksum = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_CCRC] != null) { + contents[_CCRC] = (0, import_smithy_client.expectString)(output[_CCRC]); + } + if (output[_CCRCC] != null) { + contents[_CCRCC] = (0, import_smithy_client.expectString)(output[_CCRCC]); + } + if (output[_CCRCNVME] != null) { + contents[_CCRCNVME] = (0, import_smithy_client.expectString)(output[_CCRCNVME]); + } + if (output[_CSHA] != null) { + contents[_CSHA] = (0, import_smithy_client.expectString)(output[_CSHA]); + } + if (output[_CSHAh] != null) { + contents[_CSHAh] = (0, import_smithy_client.expectString)(output[_CSHAh]); + } + if (output[_CT] != null) { + contents[_CT] = (0, import_smithy_client.expectString)(output[_CT]); + } + return contents; +}, "de_Checksum"); +var de_ChecksumAlgorithmList = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return (0, import_smithy_client.expectString)(entry); + }); +}, "de_ChecksumAlgorithmList"); +var de_CommonPrefix = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_P] != null) { + contents[_P] = (0, import_smithy_client.expectString)(output[_P]); + } + return contents; +}, "de_CommonPrefix"); +var de_CommonPrefixList = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_CommonPrefix(entry, context); + }); +}, "de_CommonPrefixList"); +var de_Condition = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_HECRE] != null) { + contents[_HECRE] = (0, import_smithy_client.expectString)(output[_HECRE]); + } + if (output[_KPE] != null) { + contents[_KPE] = (0, import_smithy_client.expectString)(output[_KPE]); + } + return contents; +}, "de_Condition"); +var de_ContinuationEvent = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + return contents; +}, "de_ContinuationEvent"); +var de_CopyObjectResult = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_ETa] != null) { + contents[_ETa] = (0, import_smithy_client.expectString)(output[_ETa]); + } + if (output[_LM] != null) { + contents[_LM] = (0, 
import_smithy_client.expectNonNull)((0, import_smithy_client.parseRfc3339DateTimeWithOffset)(output[_LM])); + } + if (output[_CT] != null) { + contents[_CT] = (0, import_smithy_client.expectString)(output[_CT]); + } + if (output[_CCRC] != null) { + contents[_CCRC] = (0, import_smithy_client.expectString)(output[_CCRC]); + } + if (output[_CCRCC] != null) { + contents[_CCRCC] = (0, import_smithy_client.expectString)(output[_CCRCC]); + } + if (output[_CCRCNVME] != null) { + contents[_CCRCNVME] = (0, import_smithy_client.expectString)(output[_CCRCNVME]); + } + if (output[_CSHA] != null) { + contents[_CSHA] = (0, import_smithy_client.expectString)(output[_CSHA]); + } + if (output[_CSHAh] != null) { + contents[_CSHAh] = (0, import_smithy_client.expectString)(output[_CSHAh]); + } + return contents; +}, "de_CopyObjectResult"); +var de_CopyPartResult = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_ETa] != null) { + contents[_ETa] = (0, import_smithy_client.expectString)(output[_ETa]); + } + if (output[_LM] != null) { + contents[_LM] = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseRfc3339DateTimeWithOffset)(output[_LM])); + } + if (output[_CCRC] != null) { + contents[_CCRC] = (0, import_smithy_client.expectString)(output[_CCRC]); + } + if (output[_CCRCC] != null) { + contents[_CCRCC] = (0, import_smithy_client.expectString)(output[_CCRCC]); + } + if (output[_CCRCNVME] != null) { + contents[_CCRCNVME] = (0, import_smithy_client.expectString)(output[_CCRCNVME]); + } + if (output[_CSHA] != null) { + contents[_CSHA] = (0, import_smithy_client.expectString)(output[_CSHA]); + } + if (output[_CSHAh] != null) { + contents[_CSHAh] = (0, import_smithy_client.expectString)(output[_CSHAh]); + } + return contents; +}, "de_CopyPartResult"); +var de_CORSRule = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_ID_] != null) { + contents[_ID_] = (0, import_smithy_client.expectString)(output[_ID_]); + } + if (output.AllowedHeader === "") { + contents[_AHl] = []; + } else if (output[_AH] != null) { + contents[_AHl] = de_AllowedHeaders((0, import_smithy_client.getArrayIfSingleItem)(output[_AH]), context); + } + if (output.AllowedMethod === "") { + contents[_AMl] = []; + } else if (output[_AM] != null) { + contents[_AMl] = de_AllowedMethods((0, import_smithy_client.getArrayIfSingleItem)(output[_AM]), context); + } + if (output.AllowedOrigin === "") { + contents[_AOl] = []; + } else if (output[_AO] != null) { + contents[_AOl] = de_AllowedOrigins((0, import_smithy_client.getArrayIfSingleItem)(output[_AO]), context); + } + if (output.ExposeHeader === "") { + contents[_EH] = []; + } else if (output[_EHx] != null) { + contents[_EH] = de_ExposeHeaders((0, import_smithy_client.getArrayIfSingleItem)(output[_EHx]), context); + } + if (output[_MAS] != null) { + contents[_MAS] = (0, import_smithy_client.strictParseInt32)(output[_MAS]); + } + return contents; +}, "de_CORSRule"); +var de_CORSRules = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_CORSRule(entry, context); + }); +}, "de_CORSRules"); +var de_DefaultRetention = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_Mo] != null) { + contents[_Mo] = (0, import_smithy_client.expectString)(output[_Mo]); + } + if (output[_Da] != null) { + contents[_Da] = (0, import_smithy_client.strictParseInt32)(output[_Da]); + } + if (output[_Y] != null) { + contents[_Y] = (0, 
import_smithy_client.strictParseInt32)(output[_Y]); + } + return contents; +}, "de_DefaultRetention"); +var de_DeletedObject = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_K] != null) { + contents[_K] = (0, import_smithy_client.expectString)(output[_K]); + } + if (output[_VI] != null) { + contents[_VI] = (0, import_smithy_client.expectString)(output[_VI]); + } + if (output[_DM] != null) { + contents[_DM] = (0, import_smithy_client.parseBoolean)(output[_DM]); + } + if (output[_DMVI] != null) { + contents[_DMVI] = (0, import_smithy_client.expectString)(output[_DMVI]); + } + return contents; +}, "de_DeletedObject"); +var de_DeletedObjects = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_DeletedObject(entry, context); + }); +}, "de_DeletedObjects"); +var de_DeleteMarkerEntry = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_O] != null) { + contents[_O] = de_Owner(output[_O], context); + } + if (output[_K] != null) { + contents[_K] = (0, import_smithy_client.expectString)(output[_K]); + } + if (output[_VI] != null) { + contents[_VI] = (0, import_smithy_client.expectString)(output[_VI]); + } + if (output[_IL] != null) { + contents[_IL] = (0, import_smithy_client.parseBoolean)(output[_IL]); + } + if (output[_LM] != null) { + contents[_LM] = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseRfc3339DateTimeWithOffset)(output[_LM])); + } + return contents; +}, "de_DeleteMarkerEntry"); +var de_DeleteMarkerReplication = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_S] != null) { + contents[_S] = (0, import_smithy_client.expectString)(output[_S]); + } + return contents; +}, "de_DeleteMarkerReplication"); +var de_DeleteMarkers = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_DeleteMarkerEntry(entry, context); + }); +}, "de_DeleteMarkers"); +var de_Destination = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_B] != null) { + contents[_B] = (0, import_smithy_client.expectString)(output[_B]); + } + if (output[_Ac] != null) { + contents[_Ac] = (0, import_smithy_client.expectString)(output[_Ac]); + } + if (output[_SC] != null) { + contents[_SC] = (0, import_smithy_client.expectString)(output[_SC]); + } + if (output[_ACT] != null) { + contents[_ACT] = de_AccessControlTranslation(output[_ACT], context); + } + if (output[_ECn] != null) { + contents[_ECn] = de_EncryptionConfiguration(output[_ECn], context); + } + if (output[_RTe] != null) { + contents[_RTe] = de_ReplicationTime(output[_RTe], context); + } + if (output[_Me] != null) { + contents[_Me] = de_Metrics(output[_Me], context); + } + return contents; +}, "de_Destination"); +var de_EncryptionConfiguration = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_RKKID] != null) { + contents[_RKKID] = (0, import_smithy_client.expectString)(output[_RKKID]); + } + return contents; +}, "de_EncryptionConfiguration"); +var de_EndEvent = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + return contents; +}, "de_EndEvent"); +var de__Error = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_K] != null) { + contents[_K] = (0, import_smithy_client.expectString)(output[_K]); + } + if (output[_VI] != null) { + contents[_VI] = (0, 
import_smithy_client.expectString)(output[_VI]); + } + if (output[_Cod] != null) { + contents[_Cod] = (0, import_smithy_client.expectString)(output[_Cod]); + } + if (output[_Mes] != null) { + contents[_Mes] = (0, import_smithy_client.expectString)(output[_Mes]); + } + return contents; +}, "de__Error"); +var de_ErrorDetails = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_EC] != null) { + contents[_EC] = (0, import_smithy_client.expectString)(output[_EC]); + } + if (output[_EM] != null) { + contents[_EM] = (0, import_smithy_client.expectString)(output[_EM]); + } + return contents; +}, "de_ErrorDetails"); +var de_ErrorDocument = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_K] != null) { + contents[_K] = (0, import_smithy_client.expectString)(output[_K]); + } + return contents; +}, "de_ErrorDocument"); +var de_Errors = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de__Error(entry, context); + }); +}, "de_Errors"); +var de_EventBridgeConfiguration = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + return contents; +}, "de_EventBridgeConfiguration"); +var de_EventList = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return (0, import_smithy_client.expectString)(entry); + }); +}, "de_EventList"); +var de_ExistingObjectReplication = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_S] != null) { + contents[_S] = (0, import_smithy_client.expectString)(output[_S]); + } + return contents; +}, "de_ExistingObjectReplication"); +var de_ExposeHeaders = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return (0, import_smithy_client.expectString)(entry); + }); +}, "de_ExposeHeaders"); +var de_FilterRule = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_N] != null) { + contents[_N] = (0, import_smithy_client.expectString)(output[_N]); + } + if (output[_Va] != null) { + contents[_Va] = (0, import_smithy_client.expectString)(output[_Va]); + } + return contents; +}, "de_FilterRule"); +var de_FilterRuleList = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_FilterRule(entry, context); + }); +}, "de_FilterRuleList"); +var de_GetBucketMetadataTableConfigurationResult = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_MTCR] != null) { + contents[_MTCR] = de_MetadataTableConfigurationResult(output[_MTCR], context); + } + if (output[_S] != null) { + contents[_S] = (0, import_smithy_client.expectString)(output[_S]); + } + if (output[_Er] != null) { + contents[_Er] = de_ErrorDetails(output[_Er], context); + } + return contents; +}, "de_GetBucketMetadataTableConfigurationResult"); +var de_GetObjectAttributesParts = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_PC] != null) { + contents[_TPC] = (0, import_smithy_client.strictParseInt32)(output[_PC]); + } + if (output[_PNM] != null) { + contents[_PNM] = (0, import_smithy_client.expectString)(output[_PNM]); + } + if (output[_NPNM] != null) { + contents[_NPNM] = (0, import_smithy_client.expectString)(output[_NPNM]); + } + if (output[_MP] != null) { + contents[_MP] = (0, import_smithy_client.strictParseInt32)(output[_MP]); + } + if (output[_IT] != null) { + 
contents[_IT] = (0, import_smithy_client.parseBoolean)(output[_IT]); + } + if (output.Part === "") { + contents[_Part] = []; + } else if (output[_Par] != null) { + contents[_Part] = de_PartsList((0, import_smithy_client.getArrayIfSingleItem)(output[_Par]), context); + } + return contents; +}, "de_GetObjectAttributesParts"); +var de_Grant = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_Gra] != null) { + contents[_Gra] = de_Grantee(output[_Gra], context); + } + if (output[_Pe] != null) { + contents[_Pe] = (0, import_smithy_client.expectString)(output[_Pe]); + } + return contents; +}, "de_Grant"); +var de_Grantee = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_DN] != null) { + contents[_DN] = (0, import_smithy_client.expectString)(output[_DN]); + } + if (output[_EA] != null) { + contents[_EA] = (0, import_smithy_client.expectString)(output[_EA]); + } + if (output[_ID_] != null) { + contents[_ID_] = (0, import_smithy_client.expectString)(output[_ID_]); + } + if (output[_URI] != null) { + contents[_URI] = (0, import_smithy_client.expectString)(output[_URI]); + } + if (output[_x] != null) { + contents[_Ty] = (0, import_smithy_client.expectString)(output[_x]); + } + return contents; +}, "de_Grantee"); +var de_Grants = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_Grant(entry, context); + }); +}, "de_Grants"); +var de_IndexDocument = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_Su] != null) { + contents[_Su] = (0, import_smithy_client.expectString)(output[_Su]); + } + return contents; +}, "de_IndexDocument"); +var de_Initiator = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_ID_] != null) { + contents[_ID_] = (0, import_smithy_client.expectString)(output[_ID_]); + } + if (output[_DN] != null) { + contents[_DN] = (0, import_smithy_client.expectString)(output[_DN]); + } + return contents; +}, "de_Initiator"); +var de_IntelligentTieringAndOperator = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_P] != null) { + contents[_P] = (0, import_smithy_client.expectString)(output[_P]); + } + if (output.Tag === "") { + contents[_Tag] = []; + } else if (output[_Ta] != null) { + contents[_Tag] = de_TagSet((0, import_smithy_client.getArrayIfSingleItem)(output[_Ta]), context); + } + return contents; +}, "de_IntelligentTieringAndOperator"); +var de_IntelligentTieringConfiguration = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_I] != null) { + contents[_I] = (0, import_smithy_client.expectString)(output[_I]); + } + if (output[_F] != null) { + contents[_F] = de_IntelligentTieringFilter(output[_F], context); + } + if (output[_S] != null) { + contents[_S] = (0, import_smithy_client.expectString)(output[_S]); + } + if (output.Tiering === "") { + contents[_Tie] = []; + } else if (output[_Tier] != null) { + contents[_Tie] = de_TieringList((0, import_smithy_client.getArrayIfSingleItem)(output[_Tier]), context); + } + return contents; +}, "de_IntelligentTieringConfiguration"); +var de_IntelligentTieringConfigurationList = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_IntelligentTieringConfiguration(entry, context); + }); +}, "de_IntelligentTieringConfigurationList"); +var de_IntelligentTieringFilter = /* @__PURE__ */ __name((output, context) => { + const 
contents = {}; + if (output[_P] != null) { + contents[_P] = (0, import_smithy_client.expectString)(output[_P]); + } + if (output[_Ta] != null) { + contents[_Ta] = de_Tag(output[_Ta], context); + } + if (output[_A] != null) { + contents[_A] = de_IntelligentTieringAndOperator(output[_A], context); + } + return contents; +}, "de_IntelligentTieringFilter"); +var de_InventoryConfiguration = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_Des] != null) { + contents[_Des] = de_InventoryDestination(output[_Des], context); + } + if (output[_IE] != null) { + contents[_IE] = (0, import_smithy_client.parseBoolean)(output[_IE]); + } + if (output[_F] != null) { + contents[_F] = de_InventoryFilter(output[_F], context); + } + if (output[_I] != null) { + contents[_I] = (0, import_smithy_client.expectString)(output[_I]); + } + if (output[_IOV] != null) { + contents[_IOV] = (0, import_smithy_client.expectString)(output[_IOV]); + } + if (output.OptionalFields === "") { + contents[_OF] = []; + } else if (output[_OF] != null && output[_OF][_Fi] != null) { + contents[_OF] = de_InventoryOptionalFields((0, import_smithy_client.getArrayIfSingleItem)(output[_OF][_Fi]), context); + } + if (output[_Sc] != null) { + contents[_Sc] = de_InventorySchedule(output[_Sc], context); + } + return contents; +}, "de_InventoryConfiguration"); +var de_InventoryConfigurationList = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_InventoryConfiguration(entry, context); + }); +}, "de_InventoryConfigurationList"); +var de_InventoryDestination = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_SBD] != null) { + contents[_SBD] = de_InventoryS3BucketDestination(output[_SBD], context); + } + return contents; +}, "de_InventoryDestination"); +var de_InventoryEncryption = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_SS] != null) { + contents[_SSES] = de_SSES3(output[_SS], context); + } + if (output[_SK] != null) { + contents[_SSEKMS] = de_SSEKMS(output[_SK], context); + } + return contents; +}, "de_InventoryEncryption"); +var de_InventoryFilter = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_P] != null) { + contents[_P] = (0, import_smithy_client.expectString)(output[_P]); + } + return contents; +}, "de_InventoryFilter"); +var de_InventoryOptionalFields = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return (0, import_smithy_client.expectString)(entry); + }); +}, "de_InventoryOptionalFields"); +var de_InventoryS3BucketDestination = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_AIc] != null) { + contents[_AIc] = (0, import_smithy_client.expectString)(output[_AIc]); + } + if (output[_B] != null) { + contents[_B] = (0, import_smithy_client.expectString)(output[_B]); + } + if (output[_Fo] != null) { + contents[_Fo] = (0, import_smithy_client.expectString)(output[_Fo]); + } + if (output[_P] != null) { + contents[_P] = (0, import_smithy_client.expectString)(output[_P]); + } + if (output[_En] != null) { + contents[_En] = de_InventoryEncryption(output[_En], context); + } + return contents; +}, "de_InventoryS3BucketDestination"); +var de_InventorySchedule = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_Fr] != null) { + contents[_Fr] = (0, import_smithy_client.expectString)(output[_Fr]); + } + 
return contents; +}, "de_InventorySchedule"); +var de_LambdaFunctionConfiguration = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_I] != null) { + contents[_I] = (0, import_smithy_client.expectString)(output[_I]); + } + if (output[_CF] != null) { + contents[_LFA] = (0, import_smithy_client.expectString)(output[_CF]); + } + if (output.Event === "") { + contents[_Eve] = []; + } else if (output[_Ev] != null) { + contents[_Eve] = de_EventList((0, import_smithy_client.getArrayIfSingleItem)(output[_Ev]), context); + } + if (output[_F] != null) { + contents[_F] = de_NotificationConfigurationFilter(output[_F], context); + } + return contents; +}, "de_LambdaFunctionConfiguration"); +var de_LambdaFunctionConfigurationList = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_LambdaFunctionConfiguration(entry, context); + }); +}, "de_LambdaFunctionConfigurationList"); +var de_LifecycleExpiration = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_Dat] != null) { + contents[_Dat] = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseRfc3339DateTimeWithOffset)(output[_Dat])); + } + if (output[_Da] != null) { + contents[_Da] = (0, import_smithy_client.strictParseInt32)(output[_Da]); + } + if (output[_EODM] != null) { + contents[_EODM] = (0, import_smithy_client.parseBoolean)(output[_EODM]); + } + return contents; +}, "de_LifecycleExpiration"); +var de_LifecycleRule = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_Exp] != null) { + contents[_Exp] = de_LifecycleExpiration(output[_Exp], context); + } + if (output[_ID_] != null) { + contents[_ID_] = (0, import_smithy_client.expectString)(output[_ID_]); + } + if (output[_P] != null) { + contents[_P] = (0, import_smithy_client.expectString)(output[_P]); + } + if (output[_F] != null) { + contents[_F] = de_LifecycleRuleFilter(output[_F], context); + } + if (output[_S] != null) { + contents[_S] = (0, import_smithy_client.expectString)(output[_S]); + } + if (output.Transition === "") { + contents[_Tr] = []; + } else if (output[_Tra] != null) { + contents[_Tr] = de_TransitionList((0, import_smithy_client.getArrayIfSingleItem)(output[_Tra]), context); + } + if (output.NoncurrentVersionTransition === "") { + contents[_NVT] = []; + } else if (output[_NVTo] != null) { + contents[_NVT] = de_NoncurrentVersionTransitionList((0, import_smithy_client.getArrayIfSingleItem)(output[_NVTo]), context); + } + if (output[_NVE] != null) { + contents[_NVE] = de_NoncurrentVersionExpiration(output[_NVE], context); + } + if (output[_AIMU] != null) { + contents[_AIMU] = de_AbortIncompleteMultipartUpload(output[_AIMU], context); + } + return contents; +}, "de_LifecycleRule"); +var de_LifecycleRuleAndOperator = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_P] != null) { + contents[_P] = (0, import_smithy_client.expectString)(output[_P]); + } + if (output.Tag === "") { + contents[_Tag] = []; + } else if (output[_Ta] != null) { + contents[_Tag] = de_TagSet((0, import_smithy_client.getArrayIfSingleItem)(output[_Ta]), context); + } + if (output[_OSGT] != null) { + contents[_OSGT] = (0, import_smithy_client.strictParseLong)(output[_OSGT]); + } + if (output[_OSLT] != null) { + contents[_OSLT] = (0, import_smithy_client.strictParseLong)(output[_OSLT]); + } + return contents; +}, "de_LifecycleRuleAndOperator"); +var de_LifecycleRuleFilter = /* @__PURE__ */ 
__name((output, context) => { + const contents = {}; + if (output[_P] != null) { + contents[_P] = (0, import_smithy_client.expectString)(output[_P]); + } + if (output[_Ta] != null) { + contents[_Ta] = de_Tag(output[_Ta], context); + } + if (output[_OSGT] != null) { + contents[_OSGT] = (0, import_smithy_client.strictParseLong)(output[_OSGT]); + } + if (output[_OSLT] != null) { + contents[_OSLT] = (0, import_smithy_client.strictParseLong)(output[_OSLT]); + } + if (output[_A] != null) { + contents[_A] = de_LifecycleRuleAndOperator(output[_A], context); + } + return contents; +}, "de_LifecycleRuleFilter"); +var de_LifecycleRules = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_LifecycleRule(entry, context); + }); +}, "de_LifecycleRules"); +var de_LoggingEnabled = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_TB] != null) { + contents[_TB] = (0, import_smithy_client.expectString)(output[_TB]); + } + if (output.TargetGrants === "") { + contents[_TG] = []; + } else if (output[_TG] != null && output[_TG][_G] != null) { + contents[_TG] = de_TargetGrants((0, import_smithy_client.getArrayIfSingleItem)(output[_TG][_G]), context); + } + if (output[_TP] != null) { + contents[_TP] = (0, import_smithy_client.expectString)(output[_TP]); + } + if (output[_TOKF] != null) { + contents[_TOKF] = de_TargetObjectKeyFormat(output[_TOKF], context); + } + return contents; +}, "de_LoggingEnabled"); +var de_MetadataTableConfigurationResult = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_STDR] != null) { + contents[_STDR] = de_S3TablesDestinationResult(output[_STDR], context); + } + return contents; +}, "de_MetadataTableConfigurationResult"); +var de_Metrics = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_S] != null) { + contents[_S] = (0, import_smithy_client.expectString)(output[_S]); + } + if (output[_ETv] != null) { + contents[_ETv] = de_ReplicationTimeValue(output[_ETv], context); + } + return contents; +}, "de_Metrics"); +var de_MetricsAndOperator = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_P] != null) { + contents[_P] = (0, import_smithy_client.expectString)(output[_P]); + } + if (output.Tag === "") { + contents[_Tag] = []; + } else if (output[_Ta] != null) { + contents[_Tag] = de_TagSet((0, import_smithy_client.getArrayIfSingleItem)(output[_Ta]), context); + } + if (output[_APAc] != null) { + contents[_APAc] = (0, import_smithy_client.expectString)(output[_APAc]); + } + return contents; +}, "de_MetricsAndOperator"); +var de_MetricsConfiguration = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_I] != null) { + contents[_I] = (0, import_smithy_client.expectString)(output[_I]); + } + if (output.Filter === "") { + } else if (output[_F] != null) { + contents[_F] = de_MetricsFilter((0, import_smithy_client.expectUnion)(output[_F]), context); + } + return contents; +}, "de_MetricsConfiguration"); +var de_MetricsConfigurationList = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_MetricsConfiguration(entry, context); + }); +}, "de_MetricsConfigurationList"); +var de_MetricsFilter = /* @__PURE__ */ __name((output, context) => { + if (output[_P] != null) { + return { + Prefix: (0, import_smithy_client.expectString)(output[_P]) + }; + } + if (output[_Ta] != null) { + return { + Tag: 
de_Tag(output[_Ta], context) + }; + } + if (output[_APAc] != null) { + return { + AccessPointArn: (0, import_smithy_client.expectString)(output[_APAc]) + }; + } + if (output[_A] != null) { + return { + And: de_MetricsAndOperator(output[_A], context) + }; + } + return { $unknown: Object.entries(output)[0] }; +}, "de_MetricsFilter"); +var de_MultipartUpload = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_UI] != null) { + contents[_UI] = (0, import_smithy_client.expectString)(output[_UI]); + } + if (output[_K] != null) { + contents[_K] = (0, import_smithy_client.expectString)(output[_K]); + } + if (output[_Ini] != null) { + contents[_Ini] = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseRfc3339DateTimeWithOffset)(output[_Ini])); + } + if (output[_SC] != null) { + contents[_SC] = (0, import_smithy_client.expectString)(output[_SC]); + } + if (output[_O] != null) { + contents[_O] = de_Owner(output[_O], context); + } + if (output[_In] != null) { + contents[_In] = de_Initiator(output[_In], context); + } + if (output[_CA] != null) { + contents[_CA] = (0, import_smithy_client.expectString)(output[_CA]); + } + if (output[_CT] != null) { + contents[_CT] = (0, import_smithy_client.expectString)(output[_CT]); + } + return contents; +}, "de_MultipartUpload"); +var de_MultipartUploadList = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_MultipartUpload(entry, context); + }); +}, "de_MultipartUploadList"); +var de_NoncurrentVersionExpiration = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_ND] != null) { + contents[_ND] = (0, import_smithy_client.strictParseInt32)(output[_ND]); + } + if (output[_NNV] != null) { + contents[_NNV] = (0, import_smithy_client.strictParseInt32)(output[_NNV]); + } + return contents; +}, "de_NoncurrentVersionExpiration"); +var de_NoncurrentVersionTransition = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_ND] != null) { + contents[_ND] = (0, import_smithy_client.strictParseInt32)(output[_ND]); + } + if (output[_SC] != null) { + contents[_SC] = (0, import_smithy_client.expectString)(output[_SC]); + } + if (output[_NNV] != null) { + contents[_NNV] = (0, import_smithy_client.strictParseInt32)(output[_NNV]); + } + return contents; +}, "de_NoncurrentVersionTransition"); +var de_NoncurrentVersionTransitionList = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_NoncurrentVersionTransition(entry, context); + }); +}, "de_NoncurrentVersionTransitionList"); +var de_NotificationConfigurationFilter = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_SKe] != null) { + contents[_K] = de_S3KeyFilter(output[_SKe], context); + } + return contents; +}, "de_NotificationConfigurationFilter"); +var de__Object = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_K] != null) { + contents[_K] = (0, import_smithy_client.expectString)(output[_K]); + } + if (output[_LM] != null) { + contents[_LM] = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseRfc3339DateTimeWithOffset)(output[_LM])); + } + if (output[_ETa] != null) { + contents[_ETa] = (0, import_smithy_client.expectString)(output[_ETa]); + } + if (output.ChecksumAlgorithm === "") { + contents[_CA] = []; + } else if (output[_CA] != null) { + contents[_CA] = de_ChecksumAlgorithmList((0, 
import_smithy_client.getArrayIfSingleItem)(output[_CA]), context); + } + if (output[_CT] != null) { + contents[_CT] = (0, import_smithy_client.expectString)(output[_CT]); + } + if (output[_Si] != null) { + contents[_Si] = (0, import_smithy_client.strictParseLong)(output[_Si]); + } + if (output[_SC] != null) { + contents[_SC] = (0, import_smithy_client.expectString)(output[_SC]); + } + if (output[_O] != null) { + contents[_O] = de_Owner(output[_O], context); + } + if (output[_RSe] != null) { + contents[_RSe] = de_RestoreStatus(output[_RSe], context); + } + return contents; +}, "de__Object"); +var de_ObjectList = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de__Object(entry, context); + }); +}, "de_ObjectList"); +var de_ObjectLockConfiguration = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_OLE] != null) { + contents[_OLE] = (0, import_smithy_client.expectString)(output[_OLE]); + } + if (output[_Ru] != null) { + contents[_Ru] = de_ObjectLockRule(output[_Ru], context); + } + return contents; +}, "de_ObjectLockConfiguration"); +var de_ObjectLockLegalHold = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_S] != null) { + contents[_S] = (0, import_smithy_client.expectString)(output[_S]); + } + return contents; +}, "de_ObjectLockLegalHold"); +var de_ObjectLockRetention = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_Mo] != null) { + contents[_Mo] = (0, import_smithy_client.expectString)(output[_Mo]); + } + if (output[_RUD] != null) { + contents[_RUD] = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseRfc3339DateTimeWithOffset)(output[_RUD])); + } + return contents; +}, "de_ObjectLockRetention"); +var de_ObjectLockRule = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_DRe] != null) { + contents[_DRe] = de_DefaultRetention(output[_DRe], context); + } + return contents; +}, "de_ObjectLockRule"); +var de_ObjectPart = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_PN] != null) { + contents[_PN] = (0, import_smithy_client.strictParseInt32)(output[_PN]); + } + if (output[_Si] != null) { + contents[_Si] = (0, import_smithy_client.strictParseLong)(output[_Si]); + } + if (output[_CCRC] != null) { + contents[_CCRC] = (0, import_smithy_client.expectString)(output[_CCRC]); + } + if (output[_CCRCC] != null) { + contents[_CCRCC] = (0, import_smithy_client.expectString)(output[_CCRCC]); + } + if (output[_CCRCNVME] != null) { + contents[_CCRCNVME] = (0, import_smithy_client.expectString)(output[_CCRCNVME]); + } + if (output[_CSHA] != null) { + contents[_CSHA] = (0, import_smithy_client.expectString)(output[_CSHA]); + } + if (output[_CSHAh] != null) { + contents[_CSHAh] = (0, import_smithy_client.expectString)(output[_CSHAh]); + } + return contents; +}, "de_ObjectPart"); +var de_ObjectVersion = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_ETa] != null) { + contents[_ETa] = (0, import_smithy_client.expectString)(output[_ETa]); + } + if (output.ChecksumAlgorithm === "") { + contents[_CA] = []; + } else if (output[_CA] != null) { + contents[_CA] = de_ChecksumAlgorithmList((0, import_smithy_client.getArrayIfSingleItem)(output[_CA]), context); + } + if (output[_CT] != null) { + contents[_CT] = (0, import_smithy_client.expectString)(output[_CT]); + } + if (output[_Si] != null) { + contents[_Si] = (0, 
import_smithy_client.strictParseLong)(output[_Si]); + } + if (output[_SC] != null) { + contents[_SC] = (0, import_smithy_client.expectString)(output[_SC]); + } + if (output[_K] != null) { + contents[_K] = (0, import_smithy_client.expectString)(output[_K]); + } + if (output[_VI] != null) { + contents[_VI] = (0, import_smithy_client.expectString)(output[_VI]); + } + if (output[_IL] != null) { + contents[_IL] = (0, import_smithy_client.parseBoolean)(output[_IL]); + } + if (output[_LM] != null) { + contents[_LM] = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseRfc3339DateTimeWithOffset)(output[_LM])); + } + if (output[_O] != null) { + contents[_O] = de_Owner(output[_O], context); + } + if (output[_RSe] != null) { + contents[_RSe] = de_RestoreStatus(output[_RSe], context); + } + return contents; +}, "de_ObjectVersion"); +var de_ObjectVersionList = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_ObjectVersion(entry, context); + }); +}, "de_ObjectVersionList"); +var de_Owner = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_DN] != null) { + contents[_DN] = (0, import_smithy_client.expectString)(output[_DN]); + } + if (output[_ID_] != null) { + contents[_ID_] = (0, import_smithy_client.expectString)(output[_ID_]); + } + return contents; +}, "de_Owner"); +var de_OwnershipControls = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output.Rule === "") { + contents[_Rul] = []; + } else if (output[_Ru] != null) { + contents[_Rul] = de_OwnershipControlsRules((0, import_smithy_client.getArrayIfSingleItem)(output[_Ru]), context); + } + return contents; +}, "de_OwnershipControls"); +var de_OwnershipControlsRule = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_OO] != null) { + contents[_OO] = (0, import_smithy_client.expectString)(output[_OO]); + } + return contents; +}, "de_OwnershipControlsRule"); +var de_OwnershipControlsRules = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_OwnershipControlsRule(entry, context); + }); +}, "de_OwnershipControlsRules"); +var de_Part = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_PN] != null) { + contents[_PN] = (0, import_smithy_client.strictParseInt32)(output[_PN]); + } + if (output[_LM] != null) { + contents[_LM] = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseRfc3339DateTimeWithOffset)(output[_LM])); + } + if (output[_ETa] != null) { + contents[_ETa] = (0, import_smithy_client.expectString)(output[_ETa]); + } + if (output[_Si] != null) { + contents[_Si] = (0, import_smithy_client.strictParseLong)(output[_Si]); + } + if (output[_CCRC] != null) { + contents[_CCRC] = (0, import_smithy_client.expectString)(output[_CCRC]); + } + if (output[_CCRCC] != null) { + contents[_CCRCC] = (0, import_smithy_client.expectString)(output[_CCRCC]); + } + if (output[_CCRCNVME] != null) { + contents[_CCRCNVME] = (0, import_smithy_client.expectString)(output[_CCRCNVME]); + } + if (output[_CSHA] != null) { + contents[_CSHA] = (0, import_smithy_client.expectString)(output[_CSHA]); + } + if (output[_CSHAh] != null) { + contents[_CSHAh] = (0, import_smithy_client.expectString)(output[_CSHAh]); + } + return contents; +}, "de_Part"); +var de_PartitionedPrefix = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_PDS] != null) { + 
contents[_PDS] = (0, import_smithy_client.expectString)(output[_PDS]); + } + return contents; +}, "de_PartitionedPrefix"); +var de_Parts = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_Part(entry, context); + }); +}, "de_Parts"); +var de_PartsList = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_ObjectPart(entry, context); + }); +}, "de_PartsList"); +var de_PolicyStatus = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_IP] != null) { + contents[_IP] = (0, import_smithy_client.parseBoolean)(output[_IP]); + } + return contents; +}, "de_PolicyStatus"); +var de_Progress = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_BS] != null) { + contents[_BS] = (0, import_smithy_client.strictParseLong)(output[_BS]); + } + if (output[_BP] != null) { + contents[_BP] = (0, import_smithy_client.strictParseLong)(output[_BP]); + } + if (output[_BRy] != null) { + contents[_BRy] = (0, import_smithy_client.strictParseLong)(output[_BRy]); + } + return contents; +}, "de_Progress"); +var de_PublicAccessBlockConfiguration = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_BPA] != null) { + contents[_BPA] = (0, import_smithy_client.parseBoolean)(output[_BPA]); + } + if (output[_IPA] != null) { + contents[_IPA] = (0, import_smithy_client.parseBoolean)(output[_IPA]); + } + if (output[_BPP] != null) { + contents[_BPP] = (0, import_smithy_client.parseBoolean)(output[_BPP]); + } + if (output[_RPB] != null) { + contents[_RPB] = (0, import_smithy_client.parseBoolean)(output[_RPB]); + } + return contents; +}, "de_PublicAccessBlockConfiguration"); +var de_QueueConfiguration = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_I] != null) { + contents[_I] = (0, import_smithy_client.expectString)(output[_I]); + } + if (output[_Qu] != null) { + contents[_QA] = (0, import_smithy_client.expectString)(output[_Qu]); + } + if (output.Event === "") { + contents[_Eve] = []; + } else if (output[_Ev] != null) { + contents[_Eve] = de_EventList((0, import_smithy_client.getArrayIfSingleItem)(output[_Ev]), context); + } + if (output[_F] != null) { + contents[_F] = de_NotificationConfigurationFilter(output[_F], context); + } + return contents; +}, "de_QueueConfiguration"); +var de_QueueConfigurationList = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_QueueConfiguration(entry, context); + }); +}, "de_QueueConfigurationList"); +var de_Redirect = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_HN] != null) { + contents[_HN] = (0, import_smithy_client.expectString)(output[_HN]); + } + if (output[_HRC] != null) { + contents[_HRC] = (0, import_smithy_client.expectString)(output[_HRC]); + } + if (output[_Pr] != null) { + contents[_Pr] = (0, import_smithy_client.expectString)(output[_Pr]); + } + if (output[_RKPW] != null) { + contents[_RKPW] = (0, import_smithy_client.expectString)(output[_RKPW]); + } + if (output[_RKW] != null) { + contents[_RKW] = (0, import_smithy_client.expectString)(output[_RKW]); + } + return contents; +}, "de_Redirect"); +var de_RedirectAllRequestsTo = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_HN] != null) { + contents[_HN] = (0, import_smithy_client.expectString)(output[_HN]); + } + if 
(output[_Pr] != null) { + contents[_Pr] = (0, import_smithy_client.expectString)(output[_Pr]); + } + return contents; +}, "de_RedirectAllRequestsTo"); +var de_ReplicaModifications = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_S] != null) { + contents[_S] = (0, import_smithy_client.expectString)(output[_S]); + } + return contents; +}, "de_ReplicaModifications"); +var de_ReplicationConfiguration = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_Ro] != null) { + contents[_Ro] = (0, import_smithy_client.expectString)(output[_Ro]); + } + if (output.Rule === "") { + contents[_Rul] = []; + } else if (output[_Ru] != null) { + contents[_Rul] = de_ReplicationRules((0, import_smithy_client.getArrayIfSingleItem)(output[_Ru]), context); + } + return contents; +}, "de_ReplicationConfiguration"); +var de_ReplicationRule = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_ID_] != null) { + contents[_ID_] = (0, import_smithy_client.expectString)(output[_ID_]); + } + if (output[_Pri] != null) { + contents[_Pri] = (0, import_smithy_client.strictParseInt32)(output[_Pri]); + } + if (output[_P] != null) { + contents[_P] = (0, import_smithy_client.expectString)(output[_P]); + } + if (output[_F] != null) { + contents[_F] = de_ReplicationRuleFilter(output[_F], context); + } + if (output[_S] != null) { + contents[_S] = (0, import_smithy_client.expectString)(output[_S]); + } + if (output[_SSC] != null) { + contents[_SSC] = de_SourceSelectionCriteria(output[_SSC], context); + } + if (output[_EOR] != null) { + contents[_EOR] = de_ExistingObjectReplication(output[_EOR], context); + } + if (output[_Des] != null) { + contents[_Des] = de_Destination(output[_Des], context); + } + if (output[_DMR] != null) { + contents[_DMR] = de_DeleteMarkerReplication(output[_DMR], context); + } + return contents; +}, "de_ReplicationRule"); +var de_ReplicationRuleAndOperator = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_P] != null) { + contents[_P] = (0, import_smithy_client.expectString)(output[_P]); + } + if (output.Tag === "") { + contents[_Tag] = []; + } else if (output[_Ta] != null) { + contents[_Tag] = de_TagSet((0, import_smithy_client.getArrayIfSingleItem)(output[_Ta]), context); + } + return contents; +}, "de_ReplicationRuleAndOperator"); +var de_ReplicationRuleFilter = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_P] != null) { + contents[_P] = (0, import_smithy_client.expectString)(output[_P]); + } + if (output[_Ta] != null) { + contents[_Ta] = de_Tag(output[_Ta], context); + } + if (output[_A] != null) { + contents[_A] = de_ReplicationRuleAndOperator(output[_A], context); + } + return contents; +}, "de_ReplicationRuleFilter"); +var de_ReplicationRules = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_ReplicationRule(entry, context); + }); +}, "de_ReplicationRules"); +var de_ReplicationTime = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_S] != null) { + contents[_S] = (0, import_smithy_client.expectString)(output[_S]); + } + if (output[_Tim] != null) { + contents[_Tim] = de_ReplicationTimeValue(output[_Tim], context); + } + return contents; +}, "de_ReplicationTime"); +var de_ReplicationTimeValue = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_Mi] != null) { + contents[_Mi] = (0, 
import_smithy_client.strictParseInt32)(output[_Mi]); + } + return contents; +}, "de_ReplicationTimeValue"); +var de_RestoreStatus = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_IRIP] != null) { + contents[_IRIP] = (0, import_smithy_client.parseBoolean)(output[_IRIP]); + } + if (output[_RED] != null) { + contents[_RED] = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseRfc3339DateTimeWithOffset)(output[_RED])); + } + return contents; +}, "de_RestoreStatus"); +var de_RoutingRule = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_Con] != null) { + contents[_Con] = de_Condition(output[_Con], context); + } + if (output[_Red] != null) { + contents[_Red] = de_Redirect(output[_Red], context); + } + return contents; +}, "de_RoutingRule"); +var de_RoutingRules = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_RoutingRule(entry, context); + }); +}, "de_RoutingRules"); +var de_S3KeyFilter = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output.FilterRule === "") { + contents[_FRi] = []; + } else if (output[_FR] != null) { + contents[_FRi] = de_FilterRuleList((0, import_smithy_client.getArrayIfSingleItem)(output[_FR]), context); + } + return contents; +}, "de_S3KeyFilter"); +var de_S3TablesDestinationResult = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_TBA] != null) { + contents[_TBA] = (0, import_smithy_client.expectString)(output[_TBA]); + } + if (output[_TN] != null) { + contents[_TN] = (0, import_smithy_client.expectString)(output[_TN]); + } + if (output[_TAa] != null) { + contents[_TAa] = (0, import_smithy_client.expectString)(output[_TAa]); + } + if (output[_TNa] != null) { + contents[_TNa] = (0, import_smithy_client.expectString)(output[_TNa]); + } + return contents; +}, "de_S3TablesDestinationResult"); +var de_ServerSideEncryptionByDefault = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_SSEA] != null) { + contents[_SSEA] = (0, import_smithy_client.expectString)(output[_SSEA]); + } + if (output[_KMSMKID] != null) { + contents[_KMSMKID] = (0, import_smithy_client.expectString)(output[_KMSMKID]); + } + return contents; +}, "de_ServerSideEncryptionByDefault"); +var de_ServerSideEncryptionConfiguration = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output.Rule === "") { + contents[_Rul] = []; + } else if (output[_Ru] != null) { + contents[_Rul] = de_ServerSideEncryptionRules((0, import_smithy_client.getArrayIfSingleItem)(output[_Ru]), context); + } + return contents; +}, "de_ServerSideEncryptionConfiguration"); +var de_ServerSideEncryptionRule = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_ASSEBD] != null) { + contents[_ASSEBD] = de_ServerSideEncryptionByDefault(output[_ASSEBD], context); + } + if (output[_BKE] != null) { + contents[_BKE] = (0, import_smithy_client.parseBoolean)(output[_BKE]); + } + return contents; +}, "de_ServerSideEncryptionRule"); +var de_ServerSideEncryptionRules = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_ServerSideEncryptionRule(entry, context); + }); +}, "de_ServerSideEncryptionRules"); +var de_SessionCredentials = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_AKI] != null) { + contents[_AKI] = (0, 
import_smithy_client.expectString)(output[_AKI]); + } + if (output[_SAK] != null) { + contents[_SAK] = (0, import_smithy_client.expectString)(output[_SAK]); + } + if (output[_ST] != null) { + contents[_ST] = (0, import_smithy_client.expectString)(output[_ST]); + } + if (output[_Exp] != null) { + contents[_Exp] = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseRfc3339DateTimeWithOffset)(output[_Exp])); + } + return contents; +}, "de_SessionCredentials"); +var de_SimplePrefix = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + return contents; +}, "de_SimplePrefix"); +var de_SourceSelectionCriteria = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_SKEO] != null) { + contents[_SKEO] = de_SseKmsEncryptedObjects(output[_SKEO], context); + } + if (output[_RM] != null) { + contents[_RM] = de_ReplicaModifications(output[_RM], context); + } + return contents; +}, "de_SourceSelectionCriteria"); +var de_SSEKMS = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_KI] != null) { + contents[_KI] = (0, import_smithy_client.expectString)(output[_KI]); + } + return contents; +}, "de_SSEKMS"); +var de_SseKmsEncryptedObjects = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_S] != null) { + contents[_S] = (0, import_smithy_client.expectString)(output[_S]); + } + return contents; +}, "de_SseKmsEncryptedObjects"); +var de_SSES3 = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + return contents; +}, "de_SSES3"); +var de_Stats = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_BS] != null) { + contents[_BS] = (0, import_smithy_client.strictParseLong)(output[_BS]); + } + if (output[_BP] != null) { + contents[_BP] = (0, import_smithy_client.strictParseLong)(output[_BP]); + } + if (output[_BRy] != null) { + contents[_BRy] = (0, import_smithy_client.strictParseLong)(output[_BRy]); + } + return contents; +}, "de_Stats"); +var de_StorageClassAnalysis = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_DE] != null) { + contents[_DE] = de_StorageClassAnalysisDataExport(output[_DE], context); + } + return contents; +}, "de_StorageClassAnalysis"); +var de_StorageClassAnalysisDataExport = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_OSV] != null) { + contents[_OSV] = (0, import_smithy_client.expectString)(output[_OSV]); + } + if (output[_Des] != null) { + contents[_Des] = de_AnalyticsExportDestination(output[_Des], context); + } + return contents; +}, "de_StorageClassAnalysisDataExport"); +var de_Tag = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_K] != null) { + contents[_K] = (0, import_smithy_client.expectString)(output[_K]); + } + if (output[_Va] != null) { + contents[_Va] = (0, import_smithy_client.expectString)(output[_Va]); + } + return contents; +}, "de_Tag"); +var de_TagSet = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_Tag(entry, context); + }); +}, "de_TagSet"); +var de_TargetGrant = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_Gra] != null) { + contents[_Gra] = de_Grantee(output[_Gra], context); + } + if (output[_Pe] != null) { + contents[_Pe] = (0, import_smithy_client.expectString)(output[_Pe]); + } + return contents; +}, "de_TargetGrant"); +var de_TargetGrants = /* @__PURE__ */ 
__name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_TargetGrant(entry, context); + }); +}, "de_TargetGrants"); +var de_TargetObjectKeyFormat = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_SPi] != null) { + contents[_SPi] = de_SimplePrefix(output[_SPi], context); + } + if (output[_PP] != null) { + contents[_PP] = de_PartitionedPrefix(output[_PP], context); + } + return contents; +}, "de_TargetObjectKeyFormat"); +var de_Tiering = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_Da] != null) { + contents[_Da] = (0, import_smithy_client.strictParseInt32)(output[_Da]); + } + if (output[_AT] != null) { + contents[_AT] = (0, import_smithy_client.expectString)(output[_AT]); + } + return contents; +}, "de_Tiering"); +var de_TieringList = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_Tiering(entry, context); + }); +}, "de_TieringList"); +var de_TopicConfiguration = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_I] != null) { + contents[_I] = (0, import_smithy_client.expectString)(output[_I]); + } + if (output[_Top] != null) { + contents[_TA] = (0, import_smithy_client.expectString)(output[_Top]); + } + if (output.Event === "") { + contents[_Eve] = []; + } else if (output[_Ev] != null) { + contents[_Eve] = de_EventList((0, import_smithy_client.getArrayIfSingleItem)(output[_Ev]), context); + } + if (output[_F] != null) { + contents[_F] = de_NotificationConfigurationFilter(output[_F], context); + } + return contents; +}, "de_TopicConfiguration"); +var de_TopicConfigurationList = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_TopicConfiguration(entry, context); + }); +}, "de_TopicConfigurationList"); +var de_Transition = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_Dat] != null) { + contents[_Dat] = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseRfc3339DateTimeWithOffset)(output[_Dat])); + } + if (output[_Da] != null) { + contents[_Da] = (0, import_smithy_client.strictParseInt32)(output[_Da]); + } + if (output[_SC] != null) { + contents[_SC] = (0, import_smithy_client.expectString)(output[_SC]); + } + return contents; +}, "de_Transition"); +var de_TransitionList = /* @__PURE__ */ __name((output, context) => { + return (output || []).filter((e) => e != null).map((entry) => { + return de_Transition(entry, context); + }); +}, "de_TransitionList"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); +var collectBodyString = /* @__PURE__ */ __name((streamBody, context) => (0, import_smithy_client.collectBody)(streamBody, context).then((body) => context.utf8Encoder(body)), "collectBodyString"); +var _A = "And"; +var _AAO = "AnalyticsAndOperator"; +var _AC = "AnalyticsConfiguration"; +var _ACL = "ACL"; +var _ACLc = "AccessControlList"; +var _ACLn = "AnalyticsConfigurationList"; +var _ACP = "AccessControlPolicy"; +var _ACT = "AccessControlTranslation"; +var _ACc = "AccelerateConfiguration"; +var _AD = "AbortDate"; +var _AED = "AnalyticsExportDestination"; +var _AF = "AnalyticsFilter"; +var _AH = "AllowedHeader"; +var _AHl = "AllowedHeaders"; +var _AI = "AnalyticsId"; +var _AIMU = "AbortIncompleteMultipartUpload"; +var _AIc = "AccountId"; +var _AKI = "AccessKeyId"; +var _AM = "AllowedMethod"; +var _AMl = "AllowedMethods"; +var _AO = "AllowedOrigin"; +var _AOl = "AllowedOrigins"; +var _APA = "AccessPointAlias"; +var _APAc = "AccessPointArn"; +var _AQRD = "AllowQuotedRecordDelimiter"; +var _AR = "AcceptRanges"; +var _ARI = "AbortRuleId"; +var _AS = "ArchiveStatus"; +var _ASBD = "AnalyticsS3BucketDestination"; +var _ASEFF = "AnalyticsS3ExportFileFormat"; +var _ASSEBD = "ApplyServerSideEncryptionByDefault"; +var _AT = "AccessTier"; +var _Ac = "Account"; +var _B = "Bucket"; +var _BAI = "BucketAccountId"; +var _BAS = "BucketAccelerateStatus"; +var _BGR = "BypassGovernanceRetention"; +var _BI = "BucketInfo"; +var _BKE = "BucketKeyEnabled"; +var _BLC = "BucketLifecycleConfiguration"; +var _BLCu = "BucketLocationConstraint"; +var _BLN = "BucketLocationName"; +var _BLP = "BucketLogsPermission"; +var _BLS = "BucketLoggingStatus"; +var _BLT = "BucketLocationType"; +var _BN = "BucketName"; +var _BP = "BytesProcessed"; +var _BPA = "BlockPublicAcls"; +var _BPP = "BlockPublicPolicy"; +var _BR = "BucketRegion"; +var _BRy = "BytesReturned"; +var _BS = "BytesScanned"; +var _BT = "BucketType"; +var _BVS = "BucketVersioningStatus"; +var _Bu = "Buckets"; +var _C = "Credentials"; +var _CA = "ChecksumAlgorithm"; +var _CACL = "CannedACL"; +var _CBC = "CreateBucketConfiguration"; +var _CC = "CacheControl"; +var _CCRC = "ChecksumCRC32"; +var _CCRCC = "ChecksumCRC32C"; +var _CCRCNVME = "ChecksumCRC64NVME"; +var _CD = "ContentDisposition"; +var _CDr = "CreationDate"; +var _CE = "ContentEncoding"; +var _CF = "CloudFunction"; +var _CFC = "CloudFunctionConfiguration"; +var _CL = "ContentLanguage"; +var _CLo = "ContentLength"; +var _CM = "ChecksumMode"; +var _CMD = "ContentMD5"; +var _CMU = "CompletedMultipartUpload"; +var _CORSC = "CORSConfiguration"; +var _CORSR = "CORSRule"; +var _CORSRu = "CORSRules"; +var _CP = "CommonPrefixes"; +var _CPo = "CompletedPart"; +var _CR = "ContentRange"; +var _CRSBA = "ConfirmRemoveSelfBucketAccess"; +var _CS = "CopySource"; +var _CSHA = "ChecksumSHA1"; +var _CSHAh = "ChecksumSHA256"; +var _CSIM = "CopySourceIfMatch"; +var _CSIMS = "CopySourceIfModifiedSince"; +var _CSINM = "CopySourceIfNoneMatch"; +var _CSIUS = "CopySourceIfUnmodifiedSince"; +var _CSR = "CopySourceRange"; +var _CSSSECA = "CopySourceSSECustomerAlgorithm"; +var _CSSSECK = "CopySourceSSECustomerKey"; +var _CSSSECKMD = "CopySourceSSECustomerKeyMD5"; +var _CSV = "CSV"; +var _CSVI = "CopySourceVersionId"; +var _CSVIn = "CSVInput"; +var _CSVO = "CSVOutput"; +var _CT = "ChecksumType"; +var _CTo = "ContentType"; +var _CTom = "CompressionType"; +var _CTon = "ContinuationToken"; 
+var _Ch = "Checksum"; +var _Co = "Contents"; +var _Cod = "Code"; +var _Com = "Comments"; +var _Con = "Condition"; +var _D = "Delimiter"; +var _DAI = "DaysAfterInitiation"; +var _DE = "DataExport"; +var _DM = "DeleteMarker"; +var _DMR = "DeleteMarkerReplication"; +var _DMRS = "DeleteMarkerReplicationStatus"; +var _DMVI = "DeleteMarkerVersionId"; +var _DMe = "DeleteMarkers"; +var _DN = "DisplayName"; +var _DR = "DataRedundancy"; +var _DRe = "DefaultRetention"; +var _Da = "Days"; +var _Dat = "Date"; +var _De = "Deleted"; +var _Del = "Delete"; +var _Des = "Destination"; +var _Desc = "Description"; +var _E = "Expires"; +var _EA = "EmailAddress"; +var _EBC = "EventBridgeConfiguration"; +var _EBO = "ExpectedBucketOwner"; +var _EC = "ErrorCode"; +var _ECn = "EncryptionConfiguration"; +var _ED = "ErrorDocument"; +var _EH = "ExposeHeaders"; +var _EHx = "ExposeHeader"; +var _EM = "ErrorMessage"; +var _EODM = "ExpiredObjectDeleteMarker"; +var _EOR = "ExistingObjectReplication"; +var _EORS = "ExistingObjectReplicationStatus"; +var _ERP = "EnableRequestProgress"; +var _ES = "ExpiresString"; +var _ESBO = "ExpectedSourceBucketOwner"; +var _ESx = "ExpirationStatus"; +var _ET = "EncodingType"; +var _ETa = "ETag"; +var _ETn = "EncryptionType"; +var _ETv = "EventThreshold"; +var _ETx = "ExpressionType"; +var _En = "Encryption"; +var _Ena = "Enabled"; +var _End = "End"; +var _Er = "Error"; +var _Err = "Errors"; +var _Ev = "Event"; +var _Eve = "Events"; +var _Ex = "Expression"; +var _Exp = "Expiration"; +var _F = "Filter"; +var _FD = "FieldDelimiter"; +var _FHI = "FileHeaderInfo"; +var _FO = "FetchOwner"; +var _FR = "FilterRule"; +var _FRN = "FilterRuleName"; +var _FRV = "FilterRuleValue"; +var _FRi = "FilterRules"; +var _Fi = "Field"; +var _Fo = "Format"; +var _Fr = "Frequency"; +var _G = "Grant"; +var _GFC = "GrantFullControl"; +var _GJP = "GlacierJobParameters"; +var _GR = "GrantRead"; +var _GRACP = "GrantReadACP"; +var _GW = "GrantWrite"; +var _GWACP = "GrantWriteACP"; +var _Gr = "Grants"; +var _Gra = "Grantee"; +var _HECRE = "HttpErrorCodeReturnedEquals"; +var _HN = "HostName"; +var _HRC = "HttpRedirectCode"; +var _I = "Id"; +var _IC = "InventoryConfiguration"; +var _ICL = "InventoryConfigurationList"; +var _ID = "IndexDocument"; +var _ID_ = "ID"; +var _IDn = "InventoryDestination"; +var _IE = "IsEnabled"; +var _IEn = "InventoryEncryption"; +var _IF = "InventoryFilter"; +var _IFn = "InventoryFormat"; +var _IFnv = "InventoryFrequency"; +var _II = "InventoryId"; +var _IIOV = "InventoryIncludedObjectVersions"; +var _IL = "IsLatest"; +var _IM = "IfMatch"; +var _IMIT = "IfMatchInitiatedTime"; +var _IMLMT = "IfMatchLastModifiedTime"; +var _IMS = "IfMatchSize"; +var _IMSf = "IfModifiedSince"; +var _INM = "IfNoneMatch"; +var _IOF = "InventoryOptionalField"; +var _IOV = "IncludedObjectVersions"; +var _IP = "IsPublic"; +var _IPA = "IgnorePublicAcls"; +var _IRIP = "IsRestoreInProgress"; +var _IS = "InputSerialization"; +var _ISBD = "InventoryS3BucketDestination"; +var _ISn = "InventorySchedule"; +var _IT = "IsTruncated"; +var _ITAO = "IntelligentTieringAndOperator"; +var _ITAT = "IntelligentTieringAccessTier"; +var _ITC = "IntelligentTieringConfiguration"; +var _ITCL = "IntelligentTieringConfigurationList"; +var _ITD = "IntelligentTieringDays"; +var _ITF = "IntelligentTieringFilter"; +var _ITI = "IntelligentTieringId"; +var _ITS = "IntelligentTieringStatus"; +var _IUS = "IfUnmodifiedSince"; +var _In = "Initiator"; +var _Ini = "Initiated"; +var _JSON = "JSON"; +var _JSONI = "JSONInput"; +var _JSONO = "JSONOutput"; 
+var _JSONT = "JSONType"; +var _K = "Key"; +var _KC = "KeyCount"; +var _KI = "KeyId"; +var _KM = "KeyMarker"; +var _KMSC = "KMSContext"; +var _KMSKI = "KMSKeyId"; +var _KMSMKID = "KMSMasterKeyID"; +var _KPE = "KeyPrefixEquals"; +var _L = "Location"; +var _LC = "LocationConstraint"; +var _LE = "LoggingEnabled"; +var _LEi = "LifecycleExpiration"; +var _LFA = "LambdaFunctionArn"; +var _LFC = "LambdaFunctionConfigurations"; +var _LFCa = "LambdaFunctionConfiguration"; +var _LI = "LocationInfo"; +var _LM = "LastModified"; +var _LMT = "LastModifiedTime"; +var _LNAS = "LocationNameAsString"; +var _LP = "LocationPrefix"; +var _LR = "LifecycleRule"; +var _LRAO = "LifecycleRuleAndOperator"; +var _LRF = "LifecycleRuleFilter"; +var _LT = "LocationType"; +var _M = "Marker"; +var _MAO = "MetricsAndOperator"; +var _MAS = "MaxAgeSeconds"; +var _MB = "MaxBuckets"; +var _MC = "MetricsConfiguration"; +var _MCL = "MetricsConfigurationList"; +var _MD = "MetadataDirective"; +var _MDB = "MaxDirectoryBuckets"; +var _MDf = "MfaDelete"; +var _ME = "MetadataEntry"; +var _MF = "MetricsFilter"; +var _MFA = "MFA"; +var _MFAD = "MFADelete"; +var _MI = "MetricsId"; +var _MK = "MaxKeys"; +var _MKe = "MetadataKey"; +var _MM = "MissingMeta"; +var _MOS = "MpuObjectSize"; +var _MP = "MaxParts"; +var _MS = "MetricsStatus"; +var _MTC = "MetadataTableConfiguration"; +var _MTCR = "MetadataTableConfigurationResult"; +var _MU = "MaxUploads"; +var _MV = "MetadataValue"; +var _Me = "Metrics"; +var _Mes = "Message"; +var _Mi = "Minutes"; +var _Mo = "Mode"; +var _N = "Name"; +var _NC = "NotificationConfiguration"; +var _NCF = "NotificationConfigurationFilter"; +var _NCT = "NextContinuationToken"; +var _ND = "NoncurrentDays"; +var _NI = "NotificationId"; +var _NKM = "NextKeyMarker"; +var _NM = "NextMarker"; +var _NNV = "NewerNoncurrentVersions"; +var _NPNM = "NextPartNumberMarker"; +var _NUIM = "NextUploadIdMarker"; +var _NVE = "NoncurrentVersionExpiration"; +var _NVIM = "NextVersionIdMarker"; +var _NVT = "NoncurrentVersionTransitions"; +var _NVTo = "NoncurrentVersionTransition"; +var _O = "Owner"; +var _OA = "ObjectAttributes"; +var _OC = "OwnershipControls"; +var _OCACL = "ObjectCannedACL"; +var _OCR = "OwnershipControlsRule"; +var _OF = "OptionalFields"; +var _OI = "ObjectIdentifier"; +var _OK = "ObjectKey"; +var _OL = "OutputLocation"; +var _OLC = "ObjectLockConfiguration"; +var _OLE = "ObjectLockEnabled"; +var _OLEFB = "ObjectLockEnabledForBucket"; +var _OLLH = "ObjectLockLegalHold"; +var _OLLHS = "ObjectLockLegalHoldStatus"; +var _OLM = "ObjectLockMode"; +var _OLR = "ObjectLockRetention"; +var _OLRM = "ObjectLockRetentionMode"; +var _OLRUD = "ObjectLockRetainUntilDate"; +var _OLRb = "ObjectLockRule"; +var _OO = "ObjectOwnership"; +var _OOA = "OptionalObjectAttributes"; +var _OOw = "OwnerOverride"; +var _OP = "ObjectParts"; +var _OS = "OutputSerialization"; +var _OSGT = "ObjectSizeGreaterThan"; +var _OSGTB = "ObjectSizeGreaterThanBytes"; +var _OSLT = "ObjectSizeLessThan"; +var _OSLTB = "ObjectSizeLessThanBytes"; +var _OSV = "OutputSchemaVersion"; +var _OSb = "ObjectSize"; +var _OVI = "ObjectVersionId"; +var _Ob = "Objects"; +var _P = "Prefix"; +var _PABC = "PublicAccessBlockConfiguration"; +var _PC = "PartsCount"; +var _PDS = "PartitionDateSource"; +var _PI = "ParquetInput"; +var _PN = "PartNumber"; +var _PNM = "PartNumberMarker"; +var _PP = "PartitionedPrefix"; +var _Pa = "Payer"; +var _Par = "Part"; +var _Parq = "Parquet"; +var _Part = "Parts"; +var _Pe = "Permission"; +var _Pr = "Protocol"; +var _Pri = "Priority"; +var _Q = 
"Quiet"; +var _QA = "QueueArn"; +var _QC = "QueueConfiguration"; +var _QCu = "QueueConfigurations"; +var _QCuo = "QuoteCharacter"; +var _QEC = "QuoteEscapeCharacter"; +var _QF = "QuoteFields"; +var _Qu = "Queue"; +var _R = "Range"; +var _RART = "RedirectAllRequestsTo"; +var _RC = "RequestCharged"; +var _RCC = "ResponseCacheControl"; +var _RCD = "ResponseContentDisposition"; +var _RCE = "ResponseContentEncoding"; +var _RCL = "ResponseContentLanguage"; +var _RCT = "ResponseContentType"; +var _RCe = "ReplicationConfiguration"; +var _RD = "RecordDelimiter"; +var _RE = "ResponseExpires"; +var _RED = "RestoreExpiryDate"; +var _RKKID = "ReplicaKmsKeyID"; +var _RKPW = "ReplaceKeyPrefixWith"; +var _RKW = "ReplaceKeyWith"; +var _RM = "ReplicaModifications"; +var _RMS = "ReplicaModificationsStatus"; +var _ROP = "RestoreOutputPath"; +var _RP = "RequestPayer"; +var _RPB = "RestrictPublicBuckets"; +var _RPC = "RequestPaymentConfiguration"; +var _RPe = "RequestProgress"; +var _RR = "RequestRoute"; +var _RRAO = "ReplicationRuleAndOperator"; +var _RRF = "ReplicationRuleFilter"; +var _RRS = "ReplicationRuleStatus"; +var _RRT = "RestoreRequestType"; +var _RRe = "ReplicationRule"; +var _RRes = "RestoreRequest"; +var _RRo = "RoutingRules"; +var _RRou = "RoutingRule"; +var _RS = "ReplicationStatus"; +var _RSe = "RestoreStatus"; +var _RT = "RequestToken"; +var _RTS = "ReplicationTimeStatus"; +var _RTV = "ReplicationTimeValue"; +var _RTe = "ReplicationTime"; +var _RUD = "RetainUntilDate"; +var _Re = "Restore"; +var _Red = "Redirect"; +var _Ro = "Role"; +var _Ru = "Rule"; +var _Rul = "Rules"; +var _S = "Status"; +var _SA = "StartAfter"; +var _SAK = "SecretAccessKey"; +var _SBD = "S3BucketDestination"; +var _SC = "StorageClass"; +var _SCA = "StorageClassAnalysis"; +var _SCADE = "StorageClassAnalysisDataExport"; +var _SCASV = "StorageClassAnalysisSchemaVersion"; +var _SCt = "StatusCode"; +var _SDV = "SkipDestinationValidation"; +var _SK = "SSE-KMS"; +var _SKEO = "SseKmsEncryptedObjects"; +var _SKEOS = "SseKmsEncryptedObjectsStatus"; +var _SKF = "S3KeyFilter"; +var _SKe = "S3Key"; +var _SL = "S3Location"; +var _SM = "SessionMode"; +var _SOCR = "SelectObjectContentRequest"; +var _SP = "SelectParameters"; +var _SPi = "SimplePrefix"; +var _SR = "ScanRange"; +var _SS = "SSE-S3"; +var _SSC = "SourceSelectionCriteria"; +var _SSE = "ServerSideEncryption"; +var _SSEA = "SSEAlgorithm"; +var _SSEBD = "ServerSideEncryptionByDefault"; +var _SSEC = "ServerSideEncryptionConfiguration"; +var _SSECA = "SSECustomerAlgorithm"; +var _SSECK = "SSECustomerKey"; +var _SSECKMD = "SSECustomerKeyMD5"; +var _SSEKMS = "SSEKMS"; +var _SSEKMSEC = "SSEKMSEncryptionContext"; +var _SSEKMSKI = "SSEKMSKeyId"; +var _SSER = "ServerSideEncryptionRule"; +var _SSES = "SSES3"; +var _ST = "SessionToken"; +var _STBA = "S3TablesBucketArn"; +var _STD = "S3TablesDestination"; +var _STDR = "S3TablesDestinationResult"; +var _STN = "S3TablesName"; +var _S_ = "S3"; +var _Sc = "Schedule"; +var _Se = "Setting"; +var _Si = "Size"; +var _St = "Start"; +var _Su = "Suffix"; +var _T = "Tagging"; +var _TA = "TopicArn"; +var _TAa = "TableArn"; +var _TB = "TargetBucket"; +var _TBA = "TableBucketArn"; +var _TC = "TagCount"; +var _TCo = "TopicConfiguration"; +var _TCop = "TopicConfigurations"; +var _TD = "TaggingDirective"; +var _TDMOS = "TransitionDefaultMinimumObjectSize"; +var _TG = "TargetGrants"; +var _TGa = "TargetGrant"; +var _TN = "TableName"; +var _TNa = "TableNamespace"; +var _TOKF = "TargetObjectKeyFormat"; +var _TP = "TargetPrefix"; +var _TPC = "TotalPartsCount"; 
+var _TS = "TagSet"; +var _TSC = "TransitionStorageClass"; +var _Ta = "Tag"; +var _Tag = "Tags"; +var _Ti = "Tier"; +var _Tie = "Tierings"; +var _Tier = "Tiering"; +var _Tim = "Time"; +var _To = "Token"; +var _Top = "Topic"; +var _Tr = "Transitions"; +var _Tra = "Transition"; +var _Ty = "Type"; +var _U = "Upload"; +var _UI = "UploadId"; +var _UIM = "UploadIdMarker"; +var _UM = "UserMetadata"; +var _URI = "URI"; +var _Up = "Uploads"; +var _V = "Version"; +var _VC = "VersionCount"; +var _VCe = "VersioningConfiguration"; +var _VI = "VersionId"; +var _VIM = "VersionIdMarker"; +var _Va = "Value"; +var _Ve = "Versions"; +var _WC = "WebsiteConfiguration"; +var _WOB = "WriteOffsetBytes"; +var _WRL = "WebsiteRedirectLocation"; +var _Y = "Years"; +var _a = "analytics"; +var _ac = "accelerate"; +var _acl = "acl"; +var _ar = "accept-ranges"; +var _at = "attributes"; +var _br = "bucket-region"; +var _c = "cors"; +var _cc = "cache-control"; +var _cd = "content-disposition"; +var _ce = "content-encoding"; +var _cl = "content-language"; +var _cl_ = "content-length"; +var _cm = "content-md5"; +var _cr = "content-range"; +var _ct = "content-type"; +var _ct_ = "continuation-token"; +var _d = "delete"; +var _de = "delimiter"; +var _e = "expires"; +var _en = "encryption"; +var _et = "encoding-type"; +var _eta = "etag"; +var _ex = "expiresstring"; +var _fo = "fetch-owner"; +var _i = "id"; +var _im = "if-match"; +var _ims = "if-modified-since"; +var _in = "inventory"; +var _inm = "if-none-match"; +var _it = "intelligent-tiering"; +var _ius = "if-unmodified-since"; +var _km = "key-marker"; +var _l = "lifecycle"; +var _lh = "legal-hold"; +var _lm = "last-modified"; +var _lo = "location"; +var _log = "logging"; +var _lt = "list-type"; +var _m = "metrics"; +var _mT = "metadataTable"; +var _ma = "marker"; +var _mb = "max-buckets"; +var _mdb = "max-directory-buckets"; +var _me = "member"; +var _mk = "max-keys"; +var _mp = "max-parts"; +var _mu = "max-uploads"; +var _n = "notification"; +var _oC = "ownershipControls"; +var _ol = "object-lock"; +var _p = "policy"; +var _pAB = "publicAccessBlock"; +var _pN = "partNumber"; +var _pS = "policyStatus"; +var _pnm = "part-number-marker"; +var _pr = "prefix"; +var _r = "replication"; +var _rP = "requestPayment"; +var _ra = "range"; +var _rcc = "response-cache-control"; +var _rcd = "response-content-disposition"; +var _rce = "response-content-encoding"; +var _rcl = "response-content-language"; +var _rct = "response-content-type"; +var _re = "response-expires"; +var _res = "restore"; +var _ret = "retention"; +var _s = "session"; +var _sa = "start-after"; +var _se = "select"; +var _st = "select-type"; +var _t = "tagging"; +var _to = "torrent"; +var _u = "uploads"; +var _uI = "uploadId"; +var _uim = "upload-id-marker"; +var _v = "versioning"; +var _vI = "versionId"; +var _ve = ''; +var _ver = "versions"; +var _vim = "version-id-marker"; +var _w = "website"; +var _x = "xsi:type"; +var _xaa = "x-amz-acl"; +var _xaad = "x-amz-abort-date"; +var _xaapa = "x-amz-access-point-alias"; +var _xaari = "x-amz-abort-rule-id"; +var _xaas = "x-amz-archive-status"; +var _xabgr = "x-amz-bypass-governance-retention"; +var _xabln = "x-amz-bucket-location-name"; +var _xablt = "x-amz-bucket-location-type"; +var _xabole = "x-amz-bucket-object-lock-enabled"; +var _xabolt = "x-amz-bucket-object-lock-token"; +var _xabr = "x-amz-bucket-region"; +var _xaca = "x-amz-checksum-algorithm"; +var _xacc = "x-amz-checksum-crc32"; +var _xacc_ = "x-amz-checksum-crc32c"; +var _xacc__ = "x-amz-checksum-crc64nvme"; +var 
_xacm = "x-amz-checksum-mode"; +var _xacrsba = "x-amz-confirm-remove-self-bucket-access"; +var _xacs = "x-amz-checksum-sha1"; +var _xacs_ = "x-amz-checksum-sha256"; +var _xacs__ = "x-amz-copy-source"; +var _xacsim = "x-amz-copy-source-if-match"; +var _xacsims = "x-amz-copy-source-if-modified-since"; +var _xacsinm = "x-amz-copy-source-if-none-match"; +var _xacsius = "x-amz-copy-source-if-unmodified-since"; +var _xacsm = "x-amz-create-session-mode"; +var _xacsr = "x-amz-copy-source-range"; +var _xacssseca = "x-amz-copy-source-server-side-encryption-customer-algorithm"; +var _xacssseck = "x-amz-copy-source-server-side-encryption-customer-key"; +var _xacssseckm = "x-amz-copy-source-server-side-encryption-customer-key-md5"; +var _xacsvi = "x-amz-copy-source-version-id"; +var _xact = "x-amz-checksum-type"; +var _xadm = "x-amz-delete-marker"; +var _xae = "x-amz-expiration"; +var _xaebo = "x-amz-expected-bucket-owner"; +var _xafec = "x-amz-fwd-error-code"; +var _xafem = "x-amz-fwd-error-message"; +var _xafhar = "x-amz-fwd-header-accept-ranges"; +var _xafhcc = "x-amz-fwd-header-cache-control"; +var _xafhcd = "x-amz-fwd-header-content-disposition"; +var _xafhce = "x-amz-fwd-header-content-encoding"; +var _xafhcl = "x-amz-fwd-header-content-language"; +var _xafhcr = "x-amz-fwd-header-content-range"; +var _xafhct = "x-amz-fwd-header-content-type"; +var _xafhe = "x-amz-fwd-header-etag"; +var _xafhe_ = "x-amz-fwd-header-expires"; +var _xafhlm = "x-amz-fwd-header-last-modified"; +var _xafhxacc = "x-amz-fwd-header-x-amz-checksum-crc32"; +var _xafhxacc_ = "x-amz-fwd-header-x-amz-checksum-crc32c"; +var _xafhxacc__ = "x-amz-fwd-header-x-amz-checksum-crc64nvme"; +var _xafhxacs = "x-amz-fwd-header-x-amz-checksum-sha1"; +var _xafhxacs_ = "x-amz-fwd-header-x-amz-checksum-sha256"; +var _xafhxadm = "x-amz-fwd-header-x-amz-delete-marker"; +var _xafhxae = "x-amz-fwd-header-x-amz-expiration"; +var _xafhxamm = "x-amz-fwd-header-x-amz-missing-meta"; +var _xafhxampc = "x-amz-fwd-header-x-amz-mp-parts-count"; +var _xafhxaollh = "x-amz-fwd-header-x-amz-object-lock-legal-hold"; +var _xafhxaolm = "x-amz-fwd-header-x-amz-object-lock-mode"; +var _xafhxaolrud = "x-amz-fwd-header-x-amz-object-lock-retain-until-date"; +var _xafhxar = "x-amz-fwd-header-x-amz-restore"; +var _xafhxarc = "x-amz-fwd-header-x-amz-request-charged"; +var _xafhxars = "x-amz-fwd-header-x-amz-replication-status"; +var _xafhxasc = "x-amz-fwd-header-x-amz-storage-class"; +var _xafhxasse = "x-amz-fwd-header-x-amz-server-side-encryption"; +var _xafhxasseakki = "x-amz-fwd-header-x-amz-server-side-encryption-aws-kms-key-id"; +var _xafhxassebke = "x-amz-fwd-header-x-amz-server-side-encryption-bucket-key-enabled"; +var _xafhxasseca = "x-amz-fwd-header-x-amz-server-side-encryption-customer-algorithm"; +var _xafhxasseckm = "x-amz-fwd-header-x-amz-server-side-encryption-customer-key-md5"; +var _xafhxatc = "x-amz-fwd-header-x-amz-tagging-count"; +var _xafhxavi = "x-amz-fwd-header-x-amz-version-id"; +var _xafs = "x-amz-fwd-status"; +var _xagfc = "x-amz-grant-full-control"; +var _xagr = "x-amz-grant-read"; +var _xagra = "x-amz-grant-read-acp"; +var _xagw = "x-amz-grant-write"; +var _xagwa = "x-amz-grant-write-acp"; +var _xaimit = "x-amz-if-match-initiated-time"; +var _xaimlmt = "x-amz-if-match-last-modified-time"; +var _xaims = "x-amz-if-match-size"; +var _xam = "x-amz-mfa"; +var _xamd = "x-amz-metadata-directive"; +var _xamm = "x-amz-missing-meta"; +var _xamos = "x-amz-mp-object-size"; +var _xamp = "x-amz-max-parts"; +var _xampc = "x-amz-mp-parts-count"; +var _xaoa = 
"x-amz-object-attributes"; +var _xaollh = "x-amz-object-lock-legal-hold"; +var _xaolm = "x-amz-object-lock-mode"; +var _xaolrud = "x-amz-object-lock-retain-until-date"; +var _xaoo = "x-amz-object-ownership"; +var _xaooa = "x-amz-optional-object-attributes"; +var _xaos = "x-amz-object-size"; +var _xapnm = "x-amz-part-number-marker"; +var _xar = "x-amz-restore"; +var _xarc = "x-amz-request-charged"; +var _xarop = "x-amz-restore-output-path"; +var _xarp = "x-amz-request-payer"; +var _xarr = "x-amz-request-route"; +var _xars = "x-amz-replication-status"; +var _xart = "x-amz-request-token"; +var _xasc = "x-amz-storage-class"; +var _xasca = "x-amz-sdk-checksum-algorithm"; +var _xasdv = "x-amz-skip-destination-validation"; +var _xasebo = "x-amz-source-expected-bucket-owner"; +var _xasse = "x-amz-server-side-encryption"; +var _xasseakki = "x-amz-server-side-encryption-aws-kms-key-id"; +var _xassebke = "x-amz-server-side-encryption-bucket-key-enabled"; +var _xassec = "x-amz-server-side-encryption-context"; +var _xasseca = "x-amz-server-side-encryption-customer-algorithm"; +var _xasseck = "x-amz-server-side-encryption-customer-key"; +var _xasseckm = "x-amz-server-side-encryption-customer-key-md5"; +var _xat = "x-amz-tagging"; +var _xatc = "x-amz-tagging-count"; +var _xatd = "x-amz-tagging-directive"; +var _xatdmos = "x-amz-transition-default-minimum-object-size"; +var _xavi = "x-amz-version-id"; +var _xawob = "x-amz-write-offset-bytes"; +var _xawrl = "x-amz-website-redirect-location"; +var _xi = "x-id"; + +// src/commands/CreateSessionCommand.ts +var CreateSessionCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + DisableS3ExpressSessionAuth: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s3.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "CreateSession", {}).n("S3Client", "CreateSessionCommand").f(CreateSessionRequestFilterSensitiveLog, CreateSessionOutputFilterSensitiveLog).ser(se_CreateSessionCommand).de(de_CreateSessionCommand).build() { + static { + __name(this, "CreateSessionCommand"); + } +}; + +// src/S3Client.ts +var import_runtimeConfig = require("././runtimeConfig"); + +// src/runtimeExtensions.ts +var import_region_config_resolver = require("@aws-sdk/region-config-resolver"); + + + +// src/auth/httpAuthExtensionConfiguration.ts +var getHttpAuthExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + } + }; +}, 
"getHttpAuthExtensionConfiguration"); +var resolveHttpAuthRuntimeConfig = /* @__PURE__ */ __name((config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials() + }; +}, "resolveHttpAuthRuntimeConfig"); + +// src/runtimeExtensions.ts +var resolveRuntimeExtensions = /* @__PURE__ */ __name((runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign( + (0, import_region_config_resolver.getAwsRegionExtensionConfiguration)(runtimeConfig), + (0, import_smithy_client.getDefaultExtensionConfiguration)(runtimeConfig), + (0, import_protocol_http.getHttpHandlerExtensionConfiguration)(runtimeConfig), + getHttpAuthExtensionConfiguration(runtimeConfig) + ); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign( + runtimeConfig, + (0, import_region_config_resolver.resolveAwsRegionExtensionConfiguration)(extensionConfiguration), + (0, import_smithy_client.resolveDefaultRuntimeConfig)(extensionConfiguration), + (0, import_protocol_http.resolveHttpHandlerRuntimeConfig)(extensionConfiguration), + resolveHttpAuthRuntimeConfig(extensionConfiguration) + ); +}, "resolveRuntimeExtensions"); + +// src/S3Client.ts +var S3Client = class extends import_smithy_client.Client { + static { + __name(this, "S3Client"); + } + /** + * The resolved configuration of S3Client class. This is resolved and normalized from the {@link S3ClientConfig | constructor configuration interface}. + */ + config; + constructor(...[configuration]) { + const _config_0 = (0, import_runtimeConfig.getRuntimeConfig)(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = (0, import_middleware_user_agent.resolveUserAgentConfig)(_config_1); + const _config_3 = (0, import_middleware_flexible_checksums.resolveFlexibleChecksumsConfig)(_config_2); + const _config_4 = (0, import_middleware_retry.resolveRetryConfig)(_config_3); + const _config_5 = (0, import_config_resolver.resolveRegionConfig)(_config_4); + const _config_6 = (0, import_middleware_host_header.resolveHostHeaderConfig)(_config_5); + const _config_7 = (0, import_middleware_endpoint.resolveEndpointConfig)(_config_6); + const _config_8 = (0, import_eventstream_serde_config_resolver.resolveEventStreamSerdeConfig)(_config_7); + const _config_9 = (0, import_httpAuthSchemeProvider.resolveHttpAuthSchemeConfig)(_config_8); + const _config_10 = (0, import_middleware_sdk_s32.resolveS3Config)(_config_9, { session: [() => this, CreateSessionCommand] }); + const _config_11 = resolveRuntimeExtensions(_config_10, configuration?.extensions || []); + this.config = _config_11; + this.middlewareStack.use((0, import_middleware_user_agent.getUserAgentPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_retry.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_content_length.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_host_header.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_logger.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_recursion_detection.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use( + (0, import_core3.getHttpAuthSchemeEndpointRuleSetPlugin)(this.config, { + httpAuthSchemeParametersProvider: 
import_httpAuthSchemeProvider.defaultS3HttpAuthSchemeParametersProvider, + identityProviderConfigProvider: /* @__PURE__ */ __name(async (config) => new import_core3.DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + "aws.auth#sigv4a": config.credentials + }), "identityProviderConfigProvider") + }) + ); + this.middlewareStack.use((0, import_core3.getHttpSigningPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_sdk_s32.getValidateBucketNamePlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_expect_continue.getAddExpectContinuePlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_sdk_s32.getRegionRedirectMiddlewarePlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_sdk_s32.getS3ExpressPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_sdk_s32.getS3ExpressHttpSigningPlugin)(this.config)); + } + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy() { + super.destroy(); + } +}; + +// src/S3.ts + + +// src/commands/AbortMultipartUploadCommand.ts +var import_middleware_sdk_s33 = require("@aws-sdk/middleware-sdk-s3"); + + + +var AbortMultipartUploadCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Key: { type: "contextParams", name: "Key" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s33.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "AbortMultipartUpload", {}).n("S3Client", "AbortMultipartUploadCommand").f(void 0, void 0).ser(se_AbortMultipartUploadCommand).de(de_AbortMultipartUploadCommand).build() { + static { + __name(this, "AbortMultipartUploadCommand"); + } +}; + +// src/commands/CompleteMultipartUploadCommand.ts +var import_middleware_sdk_s34 = require("@aws-sdk/middleware-sdk-s3"); +var import_middleware_ssec = require("@aws-sdk/middleware-ssec"); + + + +var CompleteMultipartUploadCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Key: { type: "contextParams", name: "Key" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s34.getThrow200ExceptionsPlugin)(config), + (0, import_middleware_ssec.getSsecPlugin)(config) + ]; +}).s("AmazonS3", "CompleteMultipartUpload", {}).n("S3Client", "CompleteMultipartUploadCommand").f(CompleteMultipartUploadRequestFilterSensitiveLog, CompleteMultipartUploadOutputFilterSensitiveLog).ser(se_CompleteMultipartUploadCommand).de(de_CompleteMultipartUploadCommand).build() { + static { + __name(this, "CompleteMultipartUploadCommand"); + } +}; + +// src/commands/CopyObjectCommand.ts +var import_middleware_sdk_s35 = require("@aws-sdk/middleware-sdk-s3"); + + + + +var CopyObjectCommand = class extends import_smithy_client.Command.classBuilder().ep({ 
+ ...commonParams, + DisableS3ExpressSessionAuth: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, + Key: { type: "contextParams", name: "Key" }, + CopySource: { type: "contextParams", name: "CopySource" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s35.getThrow200ExceptionsPlugin)(config), + (0, import_middleware_ssec.getSsecPlugin)(config) + ]; +}).s("AmazonS3", "CopyObject", {}).n("S3Client", "CopyObjectCommand").f(CopyObjectRequestFilterSensitiveLog, CopyObjectOutputFilterSensitiveLog).ser(se_CopyObjectCommand).de(de_CopyObjectCommand).build() { + static { + __name(this, "CopyObjectCommand"); + } +}; + +// src/commands/CreateBucketCommand.ts +var import_middleware_location_constraint = require("@aws-sdk/middleware-location-constraint"); +var import_middleware_sdk_s36 = require("@aws-sdk/middleware-sdk-s3"); + + + +var CreateBucketCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + DisableAccessPoints: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s36.getThrow200ExceptionsPlugin)(config), + (0, import_middleware_location_constraint.getLocationConstraintPlugin)(config) + ]; +}).s("AmazonS3", "CreateBucket", {}).n("S3Client", "CreateBucketCommand").f(void 0, void 0).ser(se_CreateBucketCommand).de(de_CreateBucketCommand).build() { + static { + __name(this, "CreateBucketCommand"); + } +}; + +// src/commands/CreateBucketMetadataTableConfigurationCommand.ts + + + + +var CreateBucketMetadataTableConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true + }) + ]; +}).s("AmazonS3", "CreateBucketMetadataTableConfiguration", {}).n("S3Client", "CreateBucketMetadataTableConfigurationCommand").f(void 0, void 0).ser(se_CreateBucketMetadataTableConfigurationCommand).de(de_CreateBucketMetadataTableConfigurationCommand).build() { + static { + __name(this, "CreateBucketMetadataTableConfigurationCommand"); + } +}; + +// src/commands/CreateMultipartUploadCommand.ts +var import_middleware_sdk_s37 = require("@aws-sdk/middleware-sdk-s3"); + + + + +var CreateMultipartUploadCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Key: { type: "contextParams", name: "Key" } 
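+// A minimal usage sketch (editorial and hypothetical: the endpoint and
+// credential values below are placeholders, not taken from this changeset):
+//   const client = new S3Client({
+//     region: "us-east-1",
+//     endpoint: "http://localhost:9000",   // any S3-compatible store
+//     forcePathStyle: true,                // commonly required off AWS
+//     credentials: { accessKeyId: "...", secretAccessKey: "..." },
+//   });
+//   await client.send(
+//     new CreateMultipartUploadCommand({ Bucket: "my-bucket", Key: "my-key" })
+//   );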
+}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s37.getThrow200ExceptionsPlugin)(config), + (0, import_middleware_ssec.getSsecPlugin)(config) + ]; +}).s("AmazonS3", "CreateMultipartUpload", {}).n("S3Client", "CreateMultipartUploadCommand").f(CreateMultipartUploadRequestFilterSensitiveLog, CreateMultipartUploadOutputFilterSensitiveLog).ser(se_CreateMultipartUploadCommand).de(de_CreateMultipartUploadCommand).build() { + static { + __name(this, "CreateMultipartUploadCommand"); + } +}; + +// src/commands/DeleteBucketAnalyticsConfigurationCommand.ts + + + +var DeleteBucketAnalyticsConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "DeleteBucketAnalyticsConfiguration", {}).n("S3Client", "DeleteBucketAnalyticsConfigurationCommand").f(void 0, void 0).ser(se_DeleteBucketAnalyticsConfigurationCommand).de(de_DeleteBucketAnalyticsConfigurationCommand).build() { + static { + __name(this, "DeleteBucketAnalyticsConfigurationCommand"); + } +}; + +// src/commands/DeleteBucketCommand.ts + + + +var DeleteBucketCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "DeleteBucket", {}).n("S3Client", "DeleteBucketCommand").f(void 0, void 0).ser(se_DeleteBucketCommand).de(de_DeleteBucketCommand).build() { + static { + __name(this, "DeleteBucketCommand"); + } +}; + +// src/commands/DeleteBucketCorsCommand.ts + + + +var DeleteBucketCorsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "DeleteBucketCors", {}).n("S3Client", "DeleteBucketCorsCommand").f(void 0, void 0).ser(se_DeleteBucketCorsCommand).de(de_DeleteBucketCorsCommand).build() { + static { + __name(this, "DeleteBucketCorsCommand"); + } +}; + +// src/commands/DeleteBucketEncryptionCommand.ts + + + +var DeleteBucketEncryptionCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, 
import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "DeleteBucketEncryption", {}).n("S3Client", "DeleteBucketEncryptionCommand").f(void 0, void 0).ser(se_DeleteBucketEncryptionCommand).de(de_DeleteBucketEncryptionCommand).build() { + static { + __name(this, "DeleteBucketEncryptionCommand"); + } +}; + +// src/commands/DeleteBucketIntelligentTieringConfigurationCommand.ts + + + +var DeleteBucketIntelligentTieringConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "DeleteBucketIntelligentTieringConfiguration", {}).n("S3Client", "DeleteBucketIntelligentTieringConfigurationCommand").f(void 0, void 0).ser(se_DeleteBucketIntelligentTieringConfigurationCommand).de(de_DeleteBucketIntelligentTieringConfigurationCommand).build() { + static { + __name(this, "DeleteBucketIntelligentTieringConfigurationCommand"); + } +}; + +// src/commands/DeleteBucketInventoryConfigurationCommand.ts + + + +var DeleteBucketInventoryConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "DeleteBucketInventoryConfiguration", {}).n("S3Client", "DeleteBucketInventoryConfigurationCommand").f(void 0, void 0).ser(se_DeleteBucketInventoryConfigurationCommand).de(de_DeleteBucketInventoryConfigurationCommand).build() { + static { + __name(this, "DeleteBucketInventoryConfigurationCommand"); + } +}; + +// src/commands/DeleteBucketLifecycleCommand.ts + + + +var DeleteBucketLifecycleCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "DeleteBucketLifecycle", {}).n("S3Client", "DeleteBucketLifecycleCommand").f(void 0, void 0).ser(se_DeleteBucketLifecycleCommand).de(de_DeleteBucketLifecycleCommand).build() { + static { + __name(this, "DeleteBucketLifecycleCommand"); + } +}; + +// src/commands/DeleteBucketMetadataTableConfigurationCommand.ts + + + +var DeleteBucketMetadataTableConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, 
import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "DeleteBucketMetadataTableConfiguration", {}).n("S3Client", "DeleteBucketMetadataTableConfigurationCommand").f(void 0, void 0).ser(se_DeleteBucketMetadataTableConfigurationCommand).de(de_DeleteBucketMetadataTableConfigurationCommand).build() { + static { + __name(this, "DeleteBucketMetadataTableConfigurationCommand"); + } +}; + +// src/commands/DeleteBucketMetricsConfigurationCommand.ts + + + +var DeleteBucketMetricsConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "DeleteBucketMetricsConfiguration", {}).n("S3Client", "DeleteBucketMetricsConfigurationCommand").f(void 0, void 0).ser(se_DeleteBucketMetricsConfigurationCommand).de(de_DeleteBucketMetricsConfigurationCommand).build() { + static { + __name(this, "DeleteBucketMetricsConfigurationCommand"); + } +}; + +// src/commands/DeleteBucketOwnershipControlsCommand.ts + + + +var DeleteBucketOwnershipControlsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "DeleteBucketOwnershipControls", {}).n("S3Client", "DeleteBucketOwnershipControlsCommand").f(void 0, void 0).ser(se_DeleteBucketOwnershipControlsCommand).de(de_DeleteBucketOwnershipControlsCommand).build() { + static { + __name(this, "DeleteBucketOwnershipControlsCommand"); + } +}; + +// src/commands/DeleteBucketPolicyCommand.ts + + + +var DeleteBucketPolicyCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "DeleteBucketPolicy", {}).n("S3Client", "DeleteBucketPolicyCommand").f(void 0, void 0).ser(se_DeleteBucketPolicyCommand).de(de_DeleteBucketPolicyCommand).build() { + static { + __name(this, "DeleteBucketPolicyCommand"); + } +}; + +// src/commands/DeleteBucketReplicationCommand.ts + + + +var DeleteBucketReplicationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, 
import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "DeleteBucketReplication", {}).n("S3Client", "DeleteBucketReplicationCommand").f(void 0, void 0).ser(se_DeleteBucketReplicationCommand).de(de_DeleteBucketReplicationCommand).build() { + static { + __name(this, "DeleteBucketReplicationCommand"); + } +}; + +// src/commands/DeleteBucketTaggingCommand.ts + + + +var DeleteBucketTaggingCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "DeleteBucketTagging", {}).n("S3Client", "DeleteBucketTaggingCommand").f(void 0, void 0).ser(se_DeleteBucketTaggingCommand).de(de_DeleteBucketTaggingCommand).build() { + static { + __name(this, "DeleteBucketTaggingCommand"); + } +}; + +// src/commands/DeleteBucketWebsiteCommand.ts + + + +var DeleteBucketWebsiteCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "DeleteBucketWebsite", {}).n("S3Client", "DeleteBucketWebsiteCommand").f(void 0, void 0).ser(se_DeleteBucketWebsiteCommand).de(de_DeleteBucketWebsiteCommand).build() { + static { + __name(this, "DeleteBucketWebsiteCommand"); + } +}; + +// src/commands/DeleteObjectCommand.ts +var import_middleware_sdk_s38 = require("@aws-sdk/middleware-sdk-s3"); + + + +var DeleteObjectCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Key: { type: "contextParams", name: "Key" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s38.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "DeleteObject", {}).n("S3Client", "DeleteObjectCommand").f(void 0, void 0).ser(se_DeleteObjectCommand).de(de_DeleteObjectCommand).build() { + static { + __name(this, "DeleteObjectCommand"); + } +}; + +// src/commands/DeleteObjectsCommand.ts + +var import_middleware_sdk_s39 = require("@aws-sdk/middleware-sdk-s3"); + + + +var DeleteObjectsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + 
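+// Illustrative note (editorial sketch, not part of the generated SDK output):
+// DeleteObjects is one of the few S3 operations whose request body must carry
+// an integrity checksum, so the flexible-checksums plugin is configured with
+// requestChecksumRequired: true and, in recent SDK releases, supplies one
+// (CRC32 by default) when the caller does not name an algorithm.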
requestChecksumRequired: true + }), + (0, import_middleware_sdk_s39.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "DeleteObjects", {}).n("S3Client", "DeleteObjectsCommand").f(void 0, void 0).ser(se_DeleteObjectsCommand).de(de_DeleteObjectsCommand).build() { + static { + __name(this, "DeleteObjectsCommand"); + } +}; + +// src/commands/DeleteObjectTaggingCommand.ts +var import_middleware_sdk_s310 = require("@aws-sdk/middleware-sdk-s3"); + + + +var DeleteObjectTaggingCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s310.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "DeleteObjectTagging", {}).n("S3Client", "DeleteObjectTaggingCommand").f(void 0, void 0).ser(se_DeleteObjectTaggingCommand).de(de_DeleteObjectTaggingCommand).build() { + static { + __name(this, "DeleteObjectTaggingCommand"); + } +}; + +// src/commands/DeletePublicAccessBlockCommand.ts + + + +var DeletePublicAccessBlockCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "DeletePublicAccessBlock", {}).n("S3Client", "DeletePublicAccessBlockCommand").f(void 0, void 0).ser(se_DeletePublicAccessBlockCommand).de(de_DeletePublicAccessBlockCommand).build() { + static { + __name(this, "DeletePublicAccessBlockCommand"); + } +}; + +// src/commands/GetBucketAccelerateConfigurationCommand.ts +var import_middleware_sdk_s311 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketAccelerateConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s311.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketAccelerateConfiguration", {}).n("S3Client", "GetBucketAccelerateConfigurationCommand").f(void 0, void 0).ser(se_GetBucketAccelerateConfigurationCommand).de(de_GetBucketAccelerateConfigurationCommand).build() { + static { + __name(this, "GetBucketAccelerateConfigurationCommand"); + } +}; + +// src/commands/GetBucketAclCommand.ts +var import_middleware_sdk_s312 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketAclCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, 
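+// Illustrative note (editorial sketch, not part of the generated SDK output):
+// the getThrow200ExceptionsPlugin wiring repeated across these commands exists
+// because S3 can answer certain operations with HTTP 200 while the XML body
+// actually carries an error; the plugin re-reads such responses and surfaces
+// them as exceptions. The UseS3ExpressControlEndpoint static context param on
+// the bucket-level configuration commands steers endpoint resolution toward
+// the control plane used for S3 Express directory buckets.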
this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s312.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketAcl", {}).n("S3Client", "GetBucketAclCommand").f(void 0, void 0).ser(se_GetBucketAclCommand).de(de_GetBucketAclCommand).build() { + static { + __name(this, "GetBucketAclCommand"); + } +}; + +// src/commands/GetBucketAnalyticsConfigurationCommand.ts +var import_middleware_sdk_s313 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketAnalyticsConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s313.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketAnalyticsConfiguration", {}).n("S3Client", "GetBucketAnalyticsConfigurationCommand").f(void 0, void 0).ser(se_GetBucketAnalyticsConfigurationCommand).de(de_GetBucketAnalyticsConfigurationCommand).build() { + static { + __name(this, "GetBucketAnalyticsConfigurationCommand"); + } +}; + +// src/commands/GetBucketCorsCommand.ts +var import_middleware_sdk_s314 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketCorsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s314.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketCors", {}).n("S3Client", "GetBucketCorsCommand").f(void 0, void 0).ser(se_GetBucketCorsCommand).de(de_GetBucketCorsCommand).build() { + static { + __name(this, "GetBucketCorsCommand"); + } +}; + +// src/commands/GetBucketEncryptionCommand.ts +var import_middleware_sdk_s315 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketEncryptionCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s315.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketEncryption", {}).n("S3Client", "GetBucketEncryptionCommand").f(void 0, GetBucketEncryptionOutputFilterSensitiveLog).ser(se_GetBucketEncryptionCommand).de(de_GetBucketEncryptionCommand).build() { + static { + __name(this, "GetBucketEncryptionCommand"); + } +}; + +// src/commands/GetBucketIntelligentTieringConfigurationCommand.ts +var import_middleware_sdk_s316 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketIntelligentTieringConfigurationCommand = class extends 
import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s316.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketIntelligentTieringConfiguration", {}).n("S3Client", "GetBucketIntelligentTieringConfigurationCommand").f(void 0, void 0).ser(se_GetBucketIntelligentTieringConfigurationCommand).de(de_GetBucketIntelligentTieringConfigurationCommand).build() { + static { + __name(this, "GetBucketIntelligentTieringConfigurationCommand"); + } +}; + +// src/commands/GetBucketInventoryConfigurationCommand.ts +var import_middleware_sdk_s317 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketInventoryConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s317.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketInventoryConfiguration", {}).n("S3Client", "GetBucketInventoryConfigurationCommand").f(void 0, GetBucketInventoryConfigurationOutputFilterSensitiveLog).ser(se_GetBucketInventoryConfigurationCommand).de(de_GetBucketInventoryConfigurationCommand).build() { + static { + __name(this, "GetBucketInventoryConfigurationCommand"); + } +}; + +// src/commands/GetBucketLifecycleConfigurationCommand.ts +var import_middleware_sdk_s318 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketLifecycleConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s318.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketLifecycleConfiguration", {}).n("S3Client", "GetBucketLifecycleConfigurationCommand").f(void 0, void 0).ser(se_GetBucketLifecycleConfigurationCommand).de(de_GetBucketLifecycleConfigurationCommand).build() { + static { + __name(this, "GetBucketLifecycleConfigurationCommand"); + } +}; + +// src/commands/GetBucketLocationCommand.ts +var import_middleware_sdk_s319 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketLocationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, 
Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s319.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketLocation", {}).n("S3Client", "GetBucketLocationCommand").f(void 0, void 0).ser(se_GetBucketLocationCommand).de(de_GetBucketLocationCommand).build() { + static { + __name(this, "GetBucketLocationCommand"); + } +}; + +// src/commands/GetBucketLoggingCommand.ts +var import_middleware_sdk_s320 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketLoggingCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s320.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketLogging", {}).n("S3Client", "GetBucketLoggingCommand").f(void 0, void 0).ser(se_GetBucketLoggingCommand).de(de_GetBucketLoggingCommand).build() { + static { + __name(this, "GetBucketLoggingCommand"); + } +}; + +// src/commands/GetBucketMetadataTableConfigurationCommand.ts +var import_middleware_sdk_s321 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketMetadataTableConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s321.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketMetadataTableConfiguration", {}).n("S3Client", "GetBucketMetadataTableConfigurationCommand").f(void 0, void 0).ser(se_GetBucketMetadataTableConfigurationCommand).de(de_GetBucketMetadataTableConfigurationCommand).build() { + static { + __name(this, "GetBucketMetadataTableConfigurationCommand"); + } +}; + +// src/commands/GetBucketMetricsConfigurationCommand.ts +var import_middleware_sdk_s322 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketMetricsConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s322.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketMetricsConfiguration", {}).n("S3Client", "GetBucketMetricsConfigurationCommand").f(void 0, void 0).ser(se_GetBucketMetricsConfigurationCommand).de(de_GetBucketMetricsConfigurationCommand).build() { + static { + __name(this, "GetBucketMetricsConfigurationCommand"); + } +}; + +// src/commands/GetBucketNotificationConfigurationCommand.ts +var import_middleware_sdk_s323 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketNotificationConfigurationCommand = class 
extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s323.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketNotificationConfiguration", {}).n("S3Client", "GetBucketNotificationConfigurationCommand").f(void 0, void 0).ser(se_GetBucketNotificationConfigurationCommand).de(de_GetBucketNotificationConfigurationCommand).build() { + static { + __name(this, "GetBucketNotificationConfigurationCommand"); + } +}; + +// src/commands/GetBucketOwnershipControlsCommand.ts +var import_middleware_sdk_s324 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketOwnershipControlsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s324.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketOwnershipControls", {}).n("S3Client", "GetBucketOwnershipControlsCommand").f(void 0, void 0).ser(se_GetBucketOwnershipControlsCommand).de(de_GetBucketOwnershipControlsCommand).build() { + static { + __name(this, "GetBucketOwnershipControlsCommand"); + } +}; + +// src/commands/GetBucketPolicyCommand.ts +var import_middleware_sdk_s325 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketPolicyCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s325.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketPolicy", {}).n("S3Client", "GetBucketPolicyCommand").f(void 0, void 0).ser(se_GetBucketPolicyCommand).de(de_GetBucketPolicyCommand).build() { + static { + __name(this, "GetBucketPolicyCommand"); + } +}; + +// src/commands/GetBucketPolicyStatusCommand.ts +var import_middleware_sdk_s326 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketPolicyStatusCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s326.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketPolicyStatus", {}).n("S3Client", 
"GetBucketPolicyStatusCommand").f(void 0, void 0).ser(se_GetBucketPolicyStatusCommand).de(de_GetBucketPolicyStatusCommand).build() { + static { + __name(this, "GetBucketPolicyStatusCommand"); + } +}; + +// src/commands/GetBucketReplicationCommand.ts +var import_middleware_sdk_s327 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketReplicationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s327.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketReplication", {}).n("S3Client", "GetBucketReplicationCommand").f(void 0, void 0).ser(se_GetBucketReplicationCommand).de(de_GetBucketReplicationCommand).build() { + static { + __name(this, "GetBucketReplicationCommand"); + } +}; + +// src/commands/GetBucketRequestPaymentCommand.ts +var import_middleware_sdk_s328 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketRequestPaymentCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s328.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketRequestPayment", {}).n("S3Client", "GetBucketRequestPaymentCommand").f(void 0, void 0).ser(se_GetBucketRequestPaymentCommand).de(de_GetBucketRequestPaymentCommand).build() { + static { + __name(this, "GetBucketRequestPaymentCommand"); + } +}; + +// src/commands/GetBucketTaggingCommand.ts +var import_middleware_sdk_s329 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketTaggingCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s329.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketTagging", {}).n("S3Client", "GetBucketTaggingCommand").f(void 0, void 0).ser(se_GetBucketTaggingCommand).de(de_GetBucketTaggingCommand).build() { + static { + __name(this, "GetBucketTaggingCommand"); + } +}; + +// src/commands/GetBucketVersioningCommand.ts +var import_middleware_sdk_s330 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketVersioningCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, 
this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s330.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketVersioning", {}).n("S3Client", "GetBucketVersioningCommand").f(void 0, void 0).ser(se_GetBucketVersioningCommand).de(de_GetBucketVersioningCommand).build() { + static { + __name(this, "GetBucketVersioningCommand"); + } +}; + +// src/commands/GetBucketWebsiteCommand.ts +var import_middleware_sdk_s331 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetBucketWebsiteCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s331.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetBucketWebsite", {}).n("S3Client", "GetBucketWebsiteCommand").f(void 0, void 0).ser(se_GetBucketWebsiteCommand).de(de_GetBucketWebsiteCommand).build() { + static { + __name(this, "GetBucketWebsiteCommand"); + } +}; + +// src/commands/GetObjectAclCommand.ts +var import_middleware_sdk_s332 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetObjectAclCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Key: { type: "contextParams", name: "Key" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s332.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetObjectAcl", {}).n("S3Client", "GetObjectAclCommand").f(void 0, void 0).ser(se_GetObjectAclCommand).de(de_GetObjectAclCommand).build() { + static { + __name(this, "GetObjectAclCommand"); + } +}; + +// src/commands/GetObjectAttributesCommand.ts +var import_middleware_sdk_s333 = require("@aws-sdk/middleware-sdk-s3"); + + + + +var GetObjectAttributesCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s333.getThrow200ExceptionsPlugin)(config), + (0, import_middleware_ssec.getSsecPlugin)(config) + ]; +}).s("AmazonS3", "GetObjectAttributes", {}).n("S3Client", "GetObjectAttributesCommand").f(GetObjectAttributesRequestFilterSensitiveLog, void 0).ser(se_GetObjectAttributesCommand).de(de_GetObjectAttributesCommand).build() { + static { + __name(this, "GetObjectAttributesCommand"); + } +}; + +// src/commands/GetObjectCommand.ts + +var import_middleware_sdk_s334 = require("@aws-sdk/middleware-sdk-s3"); + + + + +var GetObjectCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Key: { type: "contextParams", name: "Key" } +}).m(function(Command, 
cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestChecksumRequired: false, + requestValidationModeMember: "ChecksumMode", + responseAlgorithms: ["CRC64NVME", "CRC32", "CRC32C", "SHA256", "SHA1"] + }), + (0, import_middleware_ssec.getSsecPlugin)(config), + (0, import_middleware_sdk_s334.getS3ExpiresMiddlewarePlugin)(config) + ]; +}).s("AmazonS3", "GetObject", {}).n("S3Client", "GetObjectCommand").f(GetObjectRequestFilterSensitiveLog, GetObjectOutputFilterSensitiveLog).ser(se_GetObjectCommand).de(de_GetObjectCommand).build() { + static { + __name(this, "GetObjectCommand"); + } +}; + +// src/commands/GetObjectLegalHoldCommand.ts +var import_middleware_sdk_s335 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetObjectLegalHoldCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s335.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetObjectLegalHold", {}).n("S3Client", "GetObjectLegalHoldCommand").f(void 0, void 0).ser(se_GetObjectLegalHoldCommand).de(de_GetObjectLegalHoldCommand).build() { + static { + __name(this, "GetObjectLegalHoldCommand"); + } +}; + +// src/commands/GetObjectLockConfigurationCommand.ts +var import_middleware_sdk_s336 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetObjectLockConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s336.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetObjectLockConfiguration", {}).n("S3Client", "GetObjectLockConfigurationCommand").f(void 0, void 0).ser(se_GetObjectLockConfigurationCommand).de(de_GetObjectLockConfigurationCommand).build() { + static { + __name(this, "GetObjectLockConfigurationCommand"); + } +}; + +// src/commands/GetObjectRetentionCommand.ts +var import_middleware_sdk_s337 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetObjectRetentionCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s337.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetObjectRetention", {}).n("S3Client", "GetObjectRetentionCommand").f(void 0, void 0).ser(se_GetObjectRetentionCommand).de(de_GetObjectRetentionCommand).build() { + static { + __name(this, "GetObjectRetentionCommand"); + } +}; + +// src/commands/GetObjectTaggingCommand.ts +var 
import_middleware_sdk_s338 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetObjectTaggingCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s338.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetObjectTagging", {}).n("S3Client", "GetObjectTaggingCommand").f(void 0, void 0).ser(se_GetObjectTaggingCommand).de(de_GetObjectTaggingCommand).build() { + static { + __name(this, "GetObjectTaggingCommand"); + } +}; + +// src/commands/GetObjectTorrentCommand.ts + + + +var GetObjectTorrentCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "GetObjectTorrent", {}).n("S3Client", "GetObjectTorrentCommand").f(void 0, GetObjectTorrentOutputFilterSensitiveLog).ser(se_GetObjectTorrentCommand).de(de_GetObjectTorrentCommand).build() { + static { + __name(this, "GetObjectTorrentCommand"); + } +}; + +// src/commands/GetPublicAccessBlockCommand.ts +var import_middleware_sdk_s339 = require("@aws-sdk/middleware-sdk-s3"); + + + +var GetPublicAccessBlockCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s339.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "GetPublicAccessBlock", {}).n("S3Client", "GetPublicAccessBlockCommand").f(void 0, void 0).ser(se_GetPublicAccessBlockCommand).de(de_GetPublicAccessBlockCommand).build() { + static { + __name(this, "GetPublicAccessBlockCommand"); + } +}; + +// src/commands/HeadBucketCommand.ts +var import_middleware_sdk_s340 = require("@aws-sdk/middleware-sdk-s3"); + + + +var HeadBucketCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s340.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "HeadBucket", {}).n("S3Client", "HeadBucketCommand").f(void 0, void 0).ser(se_HeadBucketCommand).de(de_HeadBucketCommand).build() { + static { + __name(this, "HeadBucketCommand"); + } +}; + +// src/commands/HeadObjectCommand.ts +var import_middleware_sdk_s341 = require("@aws-sdk/middleware-sdk-s3"); + + + + +var HeadObjectCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", 
name: "Bucket" }, + Key: { type: "contextParams", name: "Key" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s341.getThrow200ExceptionsPlugin)(config), + (0, import_middleware_ssec.getSsecPlugin)(config), + (0, import_middleware_sdk_s341.getS3ExpiresMiddlewarePlugin)(config) + ]; +}).s("AmazonS3", "HeadObject", {}).n("S3Client", "HeadObjectCommand").f(HeadObjectRequestFilterSensitiveLog, HeadObjectOutputFilterSensitiveLog).ser(se_HeadObjectCommand).de(de_HeadObjectCommand).build() { + static { + __name(this, "HeadObjectCommand"); + } +}; + +// src/commands/ListBucketAnalyticsConfigurationsCommand.ts +var import_middleware_sdk_s342 = require("@aws-sdk/middleware-sdk-s3"); + + + +var ListBucketAnalyticsConfigurationsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s342.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "ListBucketAnalyticsConfigurations", {}).n("S3Client", "ListBucketAnalyticsConfigurationsCommand").f(void 0, void 0).ser(se_ListBucketAnalyticsConfigurationsCommand).de(de_ListBucketAnalyticsConfigurationsCommand).build() { + static { + __name(this, "ListBucketAnalyticsConfigurationsCommand"); + } +}; + +// src/commands/ListBucketIntelligentTieringConfigurationsCommand.ts +var import_middleware_sdk_s343 = require("@aws-sdk/middleware-sdk-s3"); + + + +var ListBucketIntelligentTieringConfigurationsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s343.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "ListBucketIntelligentTieringConfigurations", {}).n("S3Client", "ListBucketIntelligentTieringConfigurationsCommand").f(void 0, void 0).ser(se_ListBucketIntelligentTieringConfigurationsCommand).de(de_ListBucketIntelligentTieringConfigurationsCommand).build() { + static { + __name(this, "ListBucketIntelligentTieringConfigurationsCommand"); + } +}; + +// src/commands/ListBucketInventoryConfigurationsCommand.ts +var import_middleware_sdk_s344 = require("@aws-sdk/middleware-sdk-s3"); + + + +var ListBucketInventoryConfigurationsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, 
Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s344.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "ListBucketInventoryConfigurations", {}).n("S3Client", "ListBucketInventoryConfigurationsCommand").f(void 0, ListBucketInventoryConfigurationsOutputFilterSensitiveLog).ser(se_ListBucketInventoryConfigurationsCommand).de(de_ListBucketInventoryConfigurationsCommand).build() { + static { + __name(this, "ListBucketInventoryConfigurationsCommand"); + } +}; + +// src/commands/ListBucketMetricsConfigurationsCommand.ts +var import_middleware_sdk_s345 = require("@aws-sdk/middleware-sdk-s3"); + + + +var ListBucketMetricsConfigurationsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s345.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "ListBucketMetricsConfigurations", {}).n("S3Client", "ListBucketMetricsConfigurationsCommand").f(void 0, void 0).ser(se_ListBucketMetricsConfigurationsCommand).de(de_ListBucketMetricsConfigurationsCommand).build() { + static { + __name(this, "ListBucketMetricsConfigurationsCommand"); + } +}; + +// src/commands/ListBucketsCommand.ts +var import_middleware_sdk_s346 = require("@aws-sdk/middleware-sdk-s3"); + + + +var ListBucketsCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s346.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "ListBuckets", {}).n("S3Client", "ListBucketsCommand").f(void 0, void 0).ser(se_ListBucketsCommand).de(de_ListBucketsCommand).build() { + static { + __name(this, "ListBucketsCommand"); + } +}; + +// src/commands/ListDirectoryBucketsCommand.ts +var import_middleware_sdk_s347 = require("@aws-sdk/middleware-sdk-s3"); + + + +var ListDirectoryBucketsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s347.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "ListDirectoryBuckets", {}).n("S3Client", "ListDirectoryBucketsCommand").f(void 0, void 0).ser(se_ListDirectoryBucketsCommand).de(de_ListDirectoryBucketsCommand).build() { + static { + __name(this, "ListDirectoryBucketsCommand"); + } +}; + +// src/commands/ListMultipartUploadsCommand.ts +var import_middleware_sdk_s348 = require("@aws-sdk/middleware-sdk-s3"); + + + +var ListMultipartUploadsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Prefix: { type: "contextParams", name: "Prefix" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, 
this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s348.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "ListMultipartUploads", {}).n("S3Client", "ListMultipartUploadsCommand").f(void 0, void 0).ser(se_ListMultipartUploadsCommand).de(de_ListMultipartUploadsCommand).build() { + static { + __name(this, "ListMultipartUploadsCommand"); + } +}; + +// src/commands/ListObjectsCommand.ts +var import_middleware_sdk_s349 = require("@aws-sdk/middleware-sdk-s3"); + + + +var ListObjectsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Prefix: { type: "contextParams", name: "Prefix" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s349.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "ListObjects", {}).n("S3Client", "ListObjectsCommand").f(void 0, void 0).ser(se_ListObjectsCommand).de(de_ListObjectsCommand).build() { + static { + __name(this, "ListObjectsCommand"); + } +}; + +// src/commands/ListObjectsV2Command.ts +var import_middleware_sdk_s350 = require("@aws-sdk/middleware-sdk-s3"); + + + +var ListObjectsV2Command = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Prefix: { type: "contextParams", name: "Prefix" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s350.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "ListObjectsV2", {}).n("S3Client", "ListObjectsV2Command").f(void 0, void 0).ser(se_ListObjectsV2Command).de(de_ListObjectsV2Command).build() { + static { + __name(this, "ListObjectsV2Command"); + } +}; + +// src/commands/ListObjectVersionsCommand.ts +var import_middleware_sdk_s351 = require("@aws-sdk/middleware-sdk-s3"); + + + +var ListObjectVersionsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Prefix: { type: "contextParams", name: "Prefix" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s351.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "ListObjectVersions", {}).n("S3Client", "ListObjectVersionsCommand").f(void 0, void 0).ser(se_ListObjectVersionsCommand).de(de_ListObjectVersionsCommand).build() { + static { + __name(this, "ListObjectVersionsCommand"); + } +}; + +// src/commands/ListPartsCommand.ts +var import_middleware_sdk_s352 = require("@aws-sdk/middleware-sdk-s3"); + + + + +var ListPartsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Key: { type: "contextParams", name: "Key" } +}).m(function(Command, cs, config, o) { + return [ + (0, 
import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s352.getThrow200ExceptionsPlugin)(config), + (0, import_middleware_ssec.getSsecPlugin)(config) + ]; +}).s("AmazonS3", "ListParts", {}).n("S3Client", "ListPartsCommand").f(ListPartsRequestFilterSensitiveLog, void 0).ser(se_ListPartsCommand).de(de_ListPartsCommand).build() { + static { + __name(this, "ListPartsCommand"); + } +}; + +// src/commands/PutBucketAccelerateConfigurationCommand.ts + + + + +var PutBucketAccelerateConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: false + }) + ]; +}).s("AmazonS3", "PutBucketAccelerateConfiguration", {}).n("S3Client", "PutBucketAccelerateConfigurationCommand").f(void 0, void 0).ser(se_PutBucketAccelerateConfigurationCommand).de(de_PutBucketAccelerateConfigurationCommand).build() { + static { + __name(this, "PutBucketAccelerateConfigurationCommand"); + } +}; + +// src/commands/PutBucketAclCommand.ts + + + + +var PutBucketAclCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true + }) + ]; +}).s("AmazonS3", "PutBucketAcl", {}).n("S3Client", "PutBucketAclCommand").f(void 0, void 0).ser(se_PutBucketAclCommand).de(de_PutBucketAclCommand).build() { + static { + __name(this, "PutBucketAclCommand"); + } +}; + +// src/commands/PutBucketAnalyticsConfigurationCommand.ts + + + +var PutBucketAnalyticsConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "PutBucketAnalyticsConfiguration", {}).n("S3Client", "PutBucketAnalyticsConfigurationCommand").f(void 0, void 0).ser(se_PutBucketAnalyticsConfigurationCommand).de(de_PutBucketAnalyticsConfigurationCommand).build() { + static { + __name(this, "PutBucketAnalyticsConfigurationCommand"); + } +}; + +// 
src/commands/PutBucketCorsCommand.ts + + + + +var PutBucketCorsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true + }) + ]; +}).s("AmazonS3", "PutBucketCors", {}).n("S3Client", "PutBucketCorsCommand").f(void 0, void 0).ser(se_PutBucketCorsCommand).de(de_PutBucketCorsCommand).build() { + static { + __name(this, "PutBucketCorsCommand"); + } +}; + +// src/commands/PutBucketEncryptionCommand.ts + + + + +var PutBucketEncryptionCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true + }) + ]; +}).s("AmazonS3", "PutBucketEncryption", {}).n("S3Client", "PutBucketEncryptionCommand").f(PutBucketEncryptionRequestFilterSensitiveLog, void 0).ser(se_PutBucketEncryptionCommand).de(de_PutBucketEncryptionCommand).build() { + static { + __name(this, "PutBucketEncryptionCommand"); + } +}; + +// src/commands/PutBucketIntelligentTieringConfigurationCommand.ts + + + +var PutBucketIntelligentTieringConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "PutBucketIntelligentTieringConfiguration", {}).n("S3Client", "PutBucketIntelligentTieringConfigurationCommand").f(void 0, void 0).ser(se_PutBucketIntelligentTieringConfigurationCommand).de(de_PutBucketIntelligentTieringConfigurationCommand).build() { + static { + __name(this, "PutBucketIntelligentTieringConfigurationCommand"); + } +}; + +// src/commands/PutBucketInventoryConfigurationCommand.ts + + + +var PutBucketInventoryConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + 
]; +}).s("AmazonS3", "PutBucketInventoryConfiguration", {}).n("S3Client", "PutBucketInventoryConfigurationCommand").f(PutBucketInventoryConfigurationRequestFilterSensitiveLog, void 0).ser(se_PutBucketInventoryConfigurationCommand).de(de_PutBucketInventoryConfigurationCommand).build() { + static { + __name(this, "PutBucketInventoryConfigurationCommand"); + } +}; + +// src/commands/PutBucketLifecycleConfigurationCommand.ts + +var import_middleware_sdk_s353 = require("@aws-sdk/middleware-sdk-s3"); + + + +var PutBucketLifecycleConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true + }), + (0, import_middleware_sdk_s353.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "PutBucketLifecycleConfiguration", {}).n("S3Client", "PutBucketLifecycleConfigurationCommand").f(void 0, void 0).ser(se_PutBucketLifecycleConfigurationCommand).de(de_PutBucketLifecycleConfigurationCommand).build() { + static { + __name(this, "PutBucketLifecycleConfigurationCommand"); + } +}; + +// src/commands/PutBucketLoggingCommand.ts + + + + +var PutBucketLoggingCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true + }) + ]; +}).s("AmazonS3", "PutBucketLogging", {}).n("S3Client", "PutBucketLoggingCommand").f(void 0, void 0).ser(se_PutBucketLoggingCommand).de(de_PutBucketLoggingCommand).build() { + static { + __name(this, "PutBucketLoggingCommand"); + } +}; + +// src/commands/PutBucketMetricsConfigurationCommand.ts + + + +var PutBucketMetricsConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "PutBucketMetricsConfiguration", {}).n("S3Client", "PutBucketMetricsConfigurationCommand").f(void 0, void 0).ser(se_PutBucketMetricsConfigurationCommand).de(de_PutBucketMetricsConfigurationCommand).build() { + static { + __name(this, "PutBucketMetricsConfigurationCommand"); + } +}; + +// src/commands/PutBucketNotificationConfigurationCommand.ts + 
+ + +var PutBucketNotificationConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "PutBucketNotificationConfiguration", {}).n("S3Client", "PutBucketNotificationConfigurationCommand").f(void 0, void 0).ser(se_PutBucketNotificationConfigurationCommand).de(de_PutBucketNotificationConfigurationCommand).build() { + static { + __name(this, "PutBucketNotificationConfigurationCommand"); + } +}; + +// src/commands/PutBucketOwnershipControlsCommand.ts + + + + +var PutBucketOwnershipControlsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestChecksumRequired: true + }) + ]; +}).s("AmazonS3", "PutBucketOwnershipControls", {}).n("S3Client", "PutBucketOwnershipControlsCommand").f(void 0, void 0).ser(se_PutBucketOwnershipControlsCommand).de(de_PutBucketOwnershipControlsCommand).build() { + static { + __name(this, "PutBucketOwnershipControlsCommand"); + } +}; + +// src/commands/PutBucketPolicyCommand.ts + + + + +var PutBucketPolicyCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true + }) + ]; +}).s("AmazonS3", "PutBucketPolicy", {}).n("S3Client", "PutBucketPolicyCommand").f(void 0, void 0).ser(se_PutBucketPolicyCommand).de(de_PutBucketPolicyCommand).build() { + static { + __name(this, "PutBucketPolicyCommand"); + } +}; + +// src/commands/PutBucketReplicationCommand.ts + + + + +var PutBucketReplicationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + 
requestChecksumRequired: true + }) + ]; +}).s("AmazonS3", "PutBucketReplication", {}).n("S3Client", "PutBucketReplicationCommand").f(void 0, void 0).ser(se_PutBucketReplicationCommand).de(de_PutBucketReplicationCommand).build() { + static { + __name(this, "PutBucketReplicationCommand"); + } +}; + +// src/commands/PutBucketRequestPaymentCommand.ts + + + + +var PutBucketRequestPaymentCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true + }) + ]; +}).s("AmazonS3", "PutBucketRequestPayment", {}).n("S3Client", "PutBucketRequestPaymentCommand").f(void 0, void 0).ser(se_PutBucketRequestPaymentCommand).de(de_PutBucketRequestPaymentCommand).build() { + static { + __name(this, "PutBucketRequestPaymentCommand"); + } +}; + +// src/commands/PutBucketTaggingCommand.ts + + + + +var PutBucketTaggingCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true + }) + ]; +}).s("AmazonS3", "PutBucketTagging", {}).n("S3Client", "PutBucketTaggingCommand").f(void 0, void 0).ser(se_PutBucketTaggingCommand).de(de_PutBucketTaggingCommand).build() { + static { + __name(this, "PutBucketTaggingCommand"); + } +}; + +// src/commands/PutBucketVersioningCommand.ts + + + + +var PutBucketVersioningCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true + }) + ]; +}).s("AmazonS3", "PutBucketVersioning", {}).n("S3Client", "PutBucketVersioningCommand").f(void 0, void 0).ser(se_PutBucketVersioningCommand).de(de_PutBucketVersioningCommand).build() { + static { + __name(this, "PutBucketVersioningCommand"); + } +}; + +// src/commands/PutBucketWebsiteCommand.ts + + + + +var PutBucketWebsiteCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + 
UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true + }) + ]; +}).s("AmazonS3", "PutBucketWebsite", {}).n("S3Client", "PutBucketWebsiteCommand").f(void 0, void 0).ser(se_PutBucketWebsiteCommand).de(de_PutBucketWebsiteCommand).build() { + static { + __name(this, "PutBucketWebsiteCommand"); + } +}; + +// src/commands/PutObjectAclCommand.ts + +var import_middleware_sdk_s354 = require("@aws-sdk/middleware-sdk-s3"); + + + +var PutObjectAclCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Key: { type: "contextParams", name: "Key" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true + }), + (0, import_middleware_sdk_s354.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "PutObjectAcl", {}).n("S3Client", "PutObjectAclCommand").f(void 0, void 0).ser(se_PutObjectAclCommand).de(de_PutObjectAclCommand).build() { + static { + __name(this, "PutObjectAclCommand"); + } +}; + +// src/commands/PutObjectCommand.ts + +var import_middleware_sdk_s355 = require("@aws-sdk/middleware-sdk-s3"); + + + + +var PutObjectCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Key: { type: "contextParams", name: "Key" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: false + }), + (0, import_middleware_sdk_s355.getCheckContentLengthHeaderPlugin)(config), + (0, import_middleware_sdk_s355.getThrow200ExceptionsPlugin)(config), + (0, import_middleware_ssec.getSsecPlugin)(config) + ]; +}).s("AmazonS3", "PutObject", {}).n("S3Client", "PutObjectCommand").f(PutObjectRequestFilterSensitiveLog, PutObjectOutputFilterSensitiveLog).ser(se_PutObjectCommand).de(de_PutObjectCommand).build() { + static { + __name(this, "PutObjectCommand"); + } +}; + +// src/commands/PutObjectLegalHoldCommand.ts + +var import_middleware_sdk_s356 = require("@aws-sdk/middleware-sdk-s3"); + + + +var PutObjectLegalHoldCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, 
import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true + }), + (0, import_middleware_sdk_s356.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "PutObjectLegalHold", {}).n("S3Client", "PutObjectLegalHoldCommand").f(void 0, void 0).ser(se_PutObjectLegalHoldCommand).de(de_PutObjectLegalHoldCommand).build() { + static { + __name(this, "PutObjectLegalHoldCommand"); + } +}; + +// src/commands/PutObjectLockConfigurationCommand.ts + +var import_middleware_sdk_s357 = require("@aws-sdk/middleware-sdk-s3"); + + + +var PutObjectLockConfigurationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true + }), + (0, import_middleware_sdk_s357.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "PutObjectLockConfiguration", {}).n("S3Client", "PutObjectLockConfigurationCommand").f(void 0, void 0).ser(se_PutObjectLockConfigurationCommand).de(de_PutObjectLockConfigurationCommand).build() { + static { + __name(this, "PutObjectLockConfigurationCommand"); + } +}; + +// src/commands/PutObjectRetentionCommand.ts + +var import_middleware_sdk_s358 = require("@aws-sdk/middleware-sdk-s3"); + + + +var PutObjectRetentionCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true + }), + (0, import_middleware_sdk_s358.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "PutObjectRetention", {}).n("S3Client", "PutObjectRetentionCommand").f(void 0, void 0).ser(se_PutObjectRetentionCommand).de(de_PutObjectRetentionCommand).build() { + static { + __name(this, "PutObjectRetentionCommand"); + } +}; + +// src/commands/PutObjectTaggingCommand.ts + +var import_middleware_sdk_s359 = require("@aws-sdk/middleware-sdk-s3"); + + + +var PutObjectTaggingCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, 
import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true + }), + (0, import_middleware_sdk_s359.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "PutObjectTagging", {}).n("S3Client", "PutObjectTaggingCommand").f(void 0, void 0).ser(se_PutObjectTaggingCommand).de(de_PutObjectTaggingCommand).build() { + static { + __name(this, "PutObjectTaggingCommand"); + } +}; + +// src/commands/PutPublicAccessBlockCommand.ts + + + + +var PutPublicAccessBlockCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true + }) + ]; +}).s("AmazonS3", "PutPublicAccessBlock", {}).n("S3Client", "PutPublicAccessBlockCommand").f(void 0, void 0).ser(se_PutPublicAccessBlockCommand).de(de_PutPublicAccessBlockCommand).build() { + static { + __name(this, "PutPublicAccessBlockCommand"); + } +}; + +// src/commands/RestoreObjectCommand.ts + +var import_middleware_sdk_s360 = require("@aws-sdk/middleware-sdk-s3"); + + + +var RestoreObjectCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: false + }), + (0, import_middleware_sdk_s360.getThrow200ExceptionsPlugin)(config) + ]; +}).s("AmazonS3", "RestoreObject", {}).n("S3Client", "RestoreObjectCommand").f(RestoreObjectRequestFilterSensitiveLog, void 0).ser(se_RestoreObjectCommand).de(de_RestoreObjectCommand).build() { + static { + __name(this, "RestoreObjectCommand"); + } +}; + +// src/commands/SelectObjectContentCommand.ts +var import_middleware_sdk_s361 = require("@aws-sdk/middleware-sdk-s3"); + + + + +var SelectObjectContentCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s361.getThrow200ExceptionsPlugin)(config), + (0, import_middleware_ssec.getSsecPlugin)(config) + ]; +}).s("AmazonS3", "SelectObjectContent", { + /** + * @internal + */ + eventStream: { + output: true + } +}).n("S3Client", "SelectObjectContentCommand").f(SelectObjectContentRequestFilterSensitiveLog, 
SelectObjectContentOutputFilterSensitiveLog).ser(se_SelectObjectContentCommand).de(de_SelectObjectContentCommand).build() { + static { + __name(this, "SelectObjectContentCommand"); + } +}; + +// src/commands/UploadPartCommand.ts + +var import_middleware_sdk_s362 = require("@aws-sdk/middleware-sdk-s3"); + + + + +var UploadPartCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Key: { type: "contextParams", name: "Key" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_flexible_checksums.getFlexibleChecksumsPlugin)(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: false + }), + (0, import_middleware_sdk_s362.getThrow200ExceptionsPlugin)(config), + (0, import_middleware_ssec.getSsecPlugin)(config) + ]; +}).s("AmazonS3", "UploadPart", {}).n("S3Client", "UploadPartCommand").f(UploadPartRequestFilterSensitiveLog, UploadPartOutputFilterSensitiveLog).ser(se_UploadPartCommand).de(de_UploadPartCommand).build() { + static { + __name(this, "UploadPartCommand"); + } +}; + +// src/commands/UploadPartCopyCommand.ts +var import_middleware_sdk_s363 = require("@aws-sdk/middleware-sdk-s3"); + + + + +var UploadPartCopyCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + DisableS3ExpressSessionAuth: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()), + (0, import_middleware_sdk_s363.getThrow200ExceptionsPlugin)(config), + (0, import_middleware_ssec.getSsecPlugin)(config) + ]; +}).s("AmazonS3", "UploadPartCopy", {}).n("S3Client", "UploadPartCopyCommand").f(UploadPartCopyRequestFilterSensitiveLog, UploadPartCopyOutputFilterSensitiveLog).ser(se_UploadPartCopyCommand).de(de_UploadPartCopyCommand).build() { + static { + __name(this, "UploadPartCopyCommand"); + } +}; + +// src/commands/WriteGetObjectResponseCommand.ts + + + +var WriteGetObjectResponseCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + UseObjectLambdaEndpoint: { type: "staticContextParams", value: true } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AmazonS3", "WriteGetObjectResponse", {}).n("S3Client", "WriteGetObjectResponseCommand").f(WriteGetObjectResponseRequestFilterSensitiveLog, void 0).ser(se_WriteGetObjectResponseCommand).de(de_WriteGetObjectResponseCommand).build() { + static { + __name(this, "WriteGetObjectResponseCommand"); + } +}; + +// src/S3.ts +var commands = { + AbortMultipartUploadCommand, + CompleteMultipartUploadCommand, + CopyObjectCommand, + CreateBucketCommand, + CreateBucketMetadataTableConfigurationCommand, + CreateMultipartUploadCommand, + CreateSessionCommand, + DeleteBucketCommand, + DeleteBucketAnalyticsConfigurationCommand, + DeleteBucketCorsCommand, 
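
The Put* builders above register getFlexibleChecksumsPlugin with requestChecksumRequired: true, so those operations always send an integrity checksum and the x-amz-sdk-checksum-algorithm header is driven by the optional ChecksumAlgorithm input member (UploadPart and RestoreObject set it to false, making the checksum opt-in). A minimal caller-side sketch; the bucket, key, and tag values are placeholders, not taken from this diff:

import { S3Client, PutObjectTaggingCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});

// ChecksumAlgorithm is optional: when omitted, the flexible-checksums middleware
// still computes a default checksum because requestChecksumRequired is true here.
await client.send(
  new PutObjectTaggingCommand({
    Bucket: "my-bucket", // placeholder
    Key: "example.txt", // placeholder
    ChecksumAlgorithm: "SHA256",
    Tagging: { TagSet: [{ Key: "env", Value: "dev" }] },
  })
);
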
+ DeleteBucketEncryptionCommand, + DeleteBucketIntelligentTieringConfigurationCommand, + DeleteBucketInventoryConfigurationCommand, + DeleteBucketLifecycleCommand, + DeleteBucketMetadataTableConfigurationCommand, + DeleteBucketMetricsConfigurationCommand, + DeleteBucketOwnershipControlsCommand, + DeleteBucketPolicyCommand, + DeleteBucketReplicationCommand, + DeleteBucketTaggingCommand, + DeleteBucketWebsiteCommand, + DeleteObjectCommand, + DeleteObjectsCommand, + DeleteObjectTaggingCommand, + DeletePublicAccessBlockCommand, + GetBucketAccelerateConfigurationCommand, + GetBucketAclCommand, + GetBucketAnalyticsConfigurationCommand, + GetBucketCorsCommand, + GetBucketEncryptionCommand, + GetBucketIntelligentTieringConfigurationCommand, + GetBucketInventoryConfigurationCommand, + GetBucketLifecycleConfigurationCommand, + GetBucketLocationCommand, + GetBucketLoggingCommand, + GetBucketMetadataTableConfigurationCommand, + GetBucketMetricsConfigurationCommand, + GetBucketNotificationConfigurationCommand, + GetBucketOwnershipControlsCommand, + GetBucketPolicyCommand, + GetBucketPolicyStatusCommand, + GetBucketReplicationCommand, + GetBucketRequestPaymentCommand, + GetBucketTaggingCommand, + GetBucketVersioningCommand, + GetBucketWebsiteCommand, + GetObjectCommand, + GetObjectAclCommand, + GetObjectAttributesCommand, + GetObjectLegalHoldCommand, + GetObjectLockConfigurationCommand, + GetObjectRetentionCommand, + GetObjectTaggingCommand, + GetObjectTorrentCommand, + GetPublicAccessBlockCommand, + HeadBucketCommand, + HeadObjectCommand, + ListBucketAnalyticsConfigurationsCommand, + ListBucketIntelligentTieringConfigurationsCommand, + ListBucketInventoryConfigurationsCommand, + ListBucketMetricsConfigurationsCommand, + ListBucketsCommand, + ListDirectoryBucketsCommand, + ListMultipartUploadsCommand, + ListObjectsCommand, + ListObjectsV2Command, + ListObjectVersionsCommand, + ListPartsCommand, + PutBucketAccelerateConfigurationCommand, + PutBucketAclCommand, + PutBucketAnalyticsConfigurationCommand, + PutBucketCorsCommand, + PutBucketEncryptionCommand, + PutBucketIntelligentTieringConfigurationCommand, + PutBucketInventoryConfigurationCommand, + PutBucketLifecycleConfigurationCommand, + PutBucketLoggingCommand, + PutBucketMetricsConfigurationCommand, + PutBucketNotificationConfigurationCommand, + PutBucketOwnershipControlsCommand, + PutBucketPolicyCommand, + PutBucketReplicationCommand, + PutBucketRequestPaymentCommand, + PutBucketTaggingCommand, + PutBucketVersioningCommand, + PutBucketWebsiteCommand, + PutObjectCommand, + PutObjectAclCommand, + PutObjectLegalHoldCommand, + PutObjectLockConfigurationCommand, + PutObjectRetentionCommand, + PutObjectTaggingCommand, + PutPublicAccessBlockCommand, + RestoreObjectCommand, + SelectObjectContentCommand, + UploadPartCommand, + UploadPartCopyCommand, + WriteGetObjectResponseCommand +}; +var S3 = class extends S3Client { + static { + __name(this, "S3"); + } +}; +(0, import_smithy_client.createAggregatedClient)(commands, S3); + +// src/pagination/ListBucketsPaginator.ts +var import_core4 = require("@smithy/core"); +var paginateListBuckets = (0, import_core4.createPaginator)(S3Client, ListBucketsCommand, "ContinuationToken", "ContinuationToken", "MaxBuckets"); + +// src/pagination/ListDirectoryBucketsPaginator.ts +var import_core5 = require("@smithy/core"); +var paginateListDirectoryBuckets = (0, import_core5.createPaginator)(S3Client, ListDirectoryBucketsCommand, "ContinuationToken", "ContinuationToken", "MaxDirectoryBuckets"); + +// 
src/pagination/ListObjectsV2Paginator.ts +var import_core6 = require("@smithy/core"); +var paginateListObjectsV2 = (0, import_core6.createPaginator)(S3Client, ListObjectsV2Command, "ContinuationToken", "NextContinuationToken", "MaxKeys"); + +// src/pagination/ListPartsPaginator.ts +var import_core7 = require("@smithy/core"); +var paginateListParts = (0, import_core7.createPaginator)(S3Client, ListPartsCommand, "PartNumberMarker", "NextPartNumberMarker", "MaxParts"); + +// src/waiters/waitForBucketExists.ts +var import_util_waiter = require("@smithy/util-waiter"); +var checkState = /* @__PURE__ */ __name(async (client, input) => { + let reason; + try { + const result = await client.send(new HeadBucketCommand(input)); + reason = result; + return { state: import_util_waiter.WaiterState.SUCCESS, reason }; + } catch (exception) { + reason = exception; + if (exception.name && exception.name == "NotFound") { + return { state: import_util_waiter.WaiterState.RETRY, reason }; + } + } + return { state: import_util_waiter.WaiterState.RETRY, reason }; +}, "checkState"); +var waitForBucketExists = /* @__PURE__ */ __name(async (params, input) => { + const serviceDefaults = { minDelay: 5, maxDelay: 120 }; + return (0, import_util_waiter.createWaiter)({ ...serviceDefaults, ...params }, input, checkState); +}, "waitForBucketExists"); +var waitUntilBucketExists = /* @__PURE__ */ __name(async (params, input) => { + const serviceDefaults = { minDelay: 5, maxDelay: 120 }; + const result = await (0, import_util_waiter.createWaiter)({ ...serviceDefaults, ...params }, input, checkState); + return (0, import_util_waiter.checkExceptions)(result); +}, "waitUntilBucketExists"); + +// src/waiters/waitForBucketNotExists.ts + +var checkState2 = /* @__PURE__ */ __name(async (client, input) => { + let reason; + try { + const result = await client.send(new HeadBucketCommand(input)); + reason = result; + } catch (exception) { + reason = exception; + if (exception.name && exception.name == "NotFound") { + return { state: import_util_waiter.WaiterState.SUCCESS, reason }; + } + } + return { state: import_util_waiter.WaiterState.RETRY, reason }; +}, "checkState"); +var waitForBucketNotExists = /* @__PURE__ */ __name(async (params, input) => { + const serviceDefaults = { minDelay: 5, maxDelay: 120 }; + return (0, import_util_waiter.createWaiter)({ ...serviceDefaults, ...params }, input, checkState2); +}, "waitForBucketNotExists"); +var waitUntilBucketNotExists = /* @__PURE__ */ __name(async (params, input) => { + const serviceDefaults = { minDelay: 5, maxDelay: 120 }; + const result = await (0, import_util_waiter.createWaiter)({ ...serviceDefaults, ...params }, input, checkState2); + return (0, import_util_waiter.checkExceptions)(result); +}, "waitUntilBucketNotExists"); + +// src/waiters/waitForObjectExists.ts + +var checkState3 = /* @__PURE__ */ __name(async (client, input) => { + let reason; + try { + const result = await client.send(new HeadObjectCommand(input)); + reason = result; + return { state: import_util_waiter.WaiterState.SUCCESS, reason }; + } catch (exception) { + reason = exception; + if (exception.name && exception.name == "NotFound") { + return { state: import_util_waiter.WaiterState.RETRY, reason }; + } + } + return { state: import_util_waiter.WaiterState.RETRY, reason }; +}, "checkState"); +var waitForObjectExists = /* @__PURE__ */ __name(async (params, input) => { + const serviceDefaults = { minDelay: 5, maxDelay: 120 }; + return (0, import_util_waiter.createWaiter)({ ...serviceDefaults, ...params }, input, 
checkState3); +}, "waitForObjectExists"); +var waitUntilObjectExists = /* @__PURE__ */ __name(async (params, input) => { + const serviceDefaults = { minDelay: 5, maxDelay: 120 }; + const result = await (0, import_util_waiter.createWaiter)({ ...serviceDefaults, ...params }, input, checkState3); + return (0, import_util_waiter.checkExceptions)(result); +}, "waitUntilObjectExists"); + +// src/waiters/waitForObjectNotExists.ts + +var checkState4 = /* @__PURE__ */ __name(async (client, input) => { + let reason; + try { + const result = await client.send(new HeadObjectCommand(input)); + reason = result; + } catch (exception) { + reason = exception; + if (exception.name && exception.name == "NotFound") { + return { state: import_util_waiter.WaiterState.SUCCESS, reason }; + } + } + return { state: import_util_waiter.WaiterState.RETRY, reason }; +}, "checkState"); +var waitForObjectNotExists = /* @__PURE__ */ __name(async (params, input) => { + const serviceDefaults = { minDelay: 5, maxDelay: 120 }; + return (0, import_util_waiter.createWaiter)({ ...serviceDefaults, ...params }, input, checkState4); +}, "waitForObjectNotExists"); +var waitUntilObjectNotExists = /* @__PURE__ */ __name(async (params, input) => { + const serviceDefaults = { minDelay: 5, maxDelay: 120 }; + const result = await (0, import_util_waiter.createWaiter)({ ...serviceDefaults, ...params }, input, checkState4); + return (0, import_util_waiter.checkExceptions)(result); +}, "waitUntilObjectNotExists"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + S3ServiceException, + __Client, + S3Client, + S3, + $Command, + AbortMultipartUploadCommand, + CompleteMultipartUploadCommand, + CopyObjectCommand, + CreateBucketCommand, + CreateBucketMetadataTableConfigurationCommand, + CreateMultipartUploadCommand, + CreateSessionCommand, + DeleteBucketAnalyticsConfigurationCommand, + DeleteBucketCommand, + DeleteBucketCorsCommand, + DeleteBucketEncryptionCommand, + DeleteBucketIntelligentTieringConfigurationCommand, + DeleteBucketInventoryConfigurationCommand, + DeleteBucketLifecycleCommand, + DeleteBucketMetadataTableConfigurationCommand, + DeleteBucketMetricsConfigurationCommand, + DeleteBucketOwnershipControlsCommand, + DeleteBucketPolicyCommand, + DeleteBucketReplicationCommand, + DeleteBucketTaggingCommand, + DeleteBucketWebsiteCommand, + DeleteObjectCommand, + DeleteObjectTaggingCommand, + DeleteObjectsCommand, + DeletePublicAccessBlockCommand, + GetBucketAccelerateConfigurationCommand, + GetBucketAclCommand, + GetBucketAnalyticsConfigurationCommand, + GetBucketCorsCommand, + GetBucketEncryptionCommand, + GetBucketIntelligentTieringConfigurationCommand, + GetBucketInventoryConfigurationCommand, + GetBucketLifecycleConfigurationCommand, + GetBucketLocationCommand, + GetBucketLoggingCommand, + GetBucketMetadataTableConfigurationCommand, + GetBucketMetricsConfigurationCommand, + GetBucketNotificationConfigurationCommand, + GetBucketOwnershipControlsCommand, + GetBucketPolicyCommand, + GetBucketPolicyStatusCommand, + GetBucketReplicationCommand, + GetBucketRequestPaymentCommand, + GetBucketTaggingCommand, + GetBucketVersioningCommand, + GetBucketWebsiteCommand, + GetObjectAclCommand, + GetObjectAttributesCommand, + GetObjectCommand, + GetObjectLegalHoldCommand, + GetObjectLockConfigurationCommand, + GetObjectRetentionCommand, + GetObjectTaggingCommand, + GetObjectTorrentCommand, + GetPublicAccessBlockCommand, + HeadBucketCommand, + HeadObjectCommand, + ListBucketAnalyticsConfigurationsCommand, + 
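
The paginators and waiters defined above share one consumption pattern: createPaginator yields an async iterable keyed on ContinuationToken, and the waitUntil* helpers poll HeadObject/HeadBucket with the 5s/120s default delays declared in serviceDefaults. A minimal sketch, assuming a default-configured client and placeholder bucket/key names:

import {
  S3Client,
  paginateListObjectsV2,
  waitUntilObjectExists,
} from "@aws-sdk/client-s3";

const client = new S3Client({});

export async function listAllKeys(bucket: string): Promise<string[]> {
  const keys: string[] = [];
  // The paginator threads NextContinuationToken back in as ContinuationToken.
  for await (const page of paginateListObjectsV2({ client }, { Bucket: bucket })) {
    for (const obj of page.Contents ?? []) {
      if (obj.Key) keys.push(obj.Key);
    }
  }
  return keys;
}

export async function awaitObject(bucket: string, key: string): Promise<void> {
  // Polls HeadObject (checkState3 above); checkExceptions throws if maxWaitTime
  // (in seconds) elapses before the object becomes visible.
  await waitUntilObjectExists({ client, maxWaitTime: 60 }, { Bucket: bucket, Key: key });
}
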
ListBucketIntelligentTieringConfigurationsCommand, + ListBucketInventoryConfigurationsCommand, + ListBucketMetricsConfigurationsCommand, + ListBucketsCommand, + ListDirectoryBucketsCommand, + ListMultipartUploadsCommand, + ListObjectVersionsCommand, + ListObjectsCommand, + ListObjectsV2Command, + ListPartsCommand, + PutBucketAccelerateConfigurationCommand, + PutBucketAclCommand, + PutBucketAnalyticsConfigurationCommand, + PutBucketCorsCommand, + PutBucketEncryptionCommand, + PutBucketIntelligentTieringConfigurationCommand, + PutBucketInventoryConfigurationCommand, + PutBucketLifecycleConfigurationCommand, + PutBucketLoggingCommand, + PutBucketMetricsConfigurationCommand, + PutBucketNotificationConfigurationCommand, + PutBucketOwnershipControlsCommand, + PutBucketPolicyCommand, + PutBucketReplicationCommand, + PutBucketRequestPaymentCommand, + PutBucketTaggingCommand, + PutBucketVersioningCommand, + PutBucketWebsiteCommand, + PutObjectAclCommand, + PutObjectCommand, + PutObjectLegalHoldCommand, + PutObjectLockConfigurationCommand, + PutObjectRetentionCommand, + PutObjectTaggingCommand, + PutPublicAccessBlockCommand, + RestoreObjectCommand, + SelectObjectContentCommand, + UploadPartCommand, + UploadPartCopyCommand, + WriteGetObjectResponseCommand, + paginateListBuckets, + paginateListDirectoryBuckets, + paginateListObjectsV2, + paginateListParts, + waitForBucketExists, + waitUntilBucketExists, + waitForBucketNotExists, + waitUntilBucketNotExists, + waitForObjectExists, + waitUntilObjectExists, + waitForObjectNotExists, + waitUntilObjectNotExists, + RequestCharged, + RequestPayer, + NoSuchUpload, + BucketAccelerateStatus, + Type, + Permission, + OwnerOverride, + ChecksumType, + ServerSideEncryption, + ObjectCannedACL, + ChecksumAlgorithm, + MetadataDirective, + ObjectLockLegalHoldStatus, + ObjectLockMode, + StorageClass, + TaggingDirective, + ObjectNotInActiveTierError, + BucketAlreadyExists, + BucketAlreadyOwnedByYou, + BucketCannedACL, + DataRedundancy, + BucketType, + LocationType, + BucketLocationConstraint, + ObjectOwnership, + SessionMode, + NoSuchBucket, + AnalyticsFilter, + AnalyticsS3ExportFileFormat, + StorageClassAnalysisSchemaVersion, + IntelligentTieringStatus, + IntelligentTieringAccessTier, + InventoryFormat, + InventoryIncludedObjectVersions, + InventoryOptionalField, + InventoryFrequency, + TransitionStorageClass, + ExpirationStatus, + TransitionDefaultMinimumObjectSize, + BucketLogsPermission, + PartitionDateSource, + MetricsFilter, + Event, + FilterRuleName, + DeleteMarkerReplicationStatus, + MetricsStatus, + ReplicationTimeStatus, + ExistingObjectReplicationStatus, + ReplicaModificationsStatus, + SseKmsEncryptedObjectsStatus, + ReplicationRuleStatus, + Payer, + MFADeleteStatus, + BucketVersioningStatus, + Protocol, + ReplicationStatus, + ChecksumMode, + InvalidObjectState, + NoSuchKey, + ObjectAttributes, + ObjectLockEnabled, + ObjectLockRetentionMode, + NotFound, + ArchiveStatus, + EncodingType, + ObjectStorageClass, + OptionalObjectAttributes, + ObjectVersionStorageClass, + CompleteMultipartUploadOutputFilterSensitiveLog, + CompleteMultipartUploadRequestFilterSensitiveLog, + CopyObjectOutputFilterSensitiveLog, + CopyObjectRequestFilterSensitiveLog, + CreateMultipartUploadOutputFilterSensitiveLog, + CreateMultipartUploadRequestFilterSensitiveLog, + SessionCredentialsFilterSensitiveLog, + CreateSessionOutputFilterSensitiveLog, + CreateSessionRequestFilterSensitiveLog, + ServerSideEncryptionByDefaultFilterSensitiveLog, + ServerSideEncryptionRuleFilterSensitiveLog, + 
ServerSideEncryptionConfigurationFilterSensitiveLog, + GetBucketEncryptionOutputFilterSensitiveLog, + SSEKMSFilterSensitiveLog, + InventoryEncryptionFilterSensitiveLog, + InventoryS3BucketDestinationFilterSensitiveLog, + InventoryDestinationFilterSensitiveLog, + InventoryConfigurationFilterSensitiveLog, + GetBucketInventoryConfigurationOutputFilterSensitiveLog, + GetObjectOutputFilterSensitiveLog, + GetObjectRequestFilterSensitiveLog, + GetObjectAttributesRequestFilterSensitiveLog, + GetObjectTorrentOutputFilterSensitiveLog, + HeadObjectOutputFilterSensitiveLog, + HeadObjectRequestFilterSensitiveLog, + ListBucketInventoryConfigurationsOutputFilterSensitiveLog, + ListPartsRequestFilterSensitiveLog, + MFADelete, + EncryptionTypeMismatch, + InvalidRequest, + InvalidWriteOffset, + TooManyParts, + ObjectAlreadyInActiveTierError, + Tier, + ExpressionType, + CompressionType, + FileHeaderInfo, + JSONType, + QuoteFields, + RestoreRequestType, + SelectObjectContentEventStream, + PutBucketEncryptionRequestFilterSensitiveLog, + PutBucketInventoryConfigurationRequestFilterSensitiveLog, + PutObjectOutputFilterSensitiveLog, + PutObjectRequestFilterSensitiveLog, + EncryptionFilterSensitiveLog, + S3LocationFilterSensitiveLog, + OutputLocationFilterSensitiveLog, + RestoreRequestFilterSensitiveLog, + RestoreObjectRequestFilterSensitiveLog, + SelectObjectContentEventStreamFilterSensitiveLog, + SelectObjectContentOutputFilterSensitiveLog, + SelectObjectContentRequestFilterSensitiveLog, + UploadPartOutputFilterSensitiveLog, + UploadPartRequestFilterSensitiveLog, + UploadPartCopyOutputFilterSensitiveLog, + UploadPartCopyRequestFilterSensitiveLog, + WriteGetObjectResponseRequestFilterSensitiveLog +}); + diff --git a/node_modules/@aws-sdk/client-s3/dist-cjs/runtimeConfig.browser.js b/node_modules/@aws-sdk/client-s3/dist-cjs/runtimeConfig.browser.js new file mode 100644 index 00000000..4ed519f0 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-cjs/runtimeConfig.browser.js @@ -0,0 +1,47 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../package.json")); +const sha1_browser_1 = require("@aws-crypto/sha1-browser"); +const sha256_browser_1 = require("@aws-crypto/sha256-browser"); +const util_user_agent_browser_1 = require("@aws-sdk/util-user-agent-browser"); +const config_resolver_1 = require("@smithy/config-resolver"); +const eventstream_serde_browser_1 = require("@smithy/eventstream-serde-browser"); +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const hash_blob_browser_1 = require("@smithy/hash-blob-browser"); +const invalid_dependency_1 = require("@smithy/invalid-dependency"); +const md5_js_1 = require("@smithy/md5-js"); +const util_body_length_browser_1 = require("@smithy/util-body-length-browser"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_browser_1 = require("@smithy/util-defaults-mode-browser"); +const getRuntimeConfig = (config) => { + const defaultsMode = (0, util_defaults_mode_browser_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + return { + ...clientSharedValues, + ...config, + 
runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_browser_1.calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? ((_) => () => Promise.reject(new Error("Credential is missing"))), + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_browser_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + eventStreamSerdeProvider: config?.eventStreamSerdeProvider ?? eventstream_serde_browser_1.eventStreamSerdeProvider, + maxAttempts: config?.maxAttempts ?? util_retry_1.DEFAULT_MAX_ATTEMPTS, + md5: config?.md5 ?? md5_js_1.Md5, + region: config?.region ?? (0, invalid_dependency_1.invalidProvider)("Region is missing"), + requestHandler: fetch_http_handler_1.FetchHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE), + sha1: config?.sha1 ?? sha1_browser_1.Sha1, + sha256: config?.sha256 ?? sha256_browser_1.Sha256, + streamCollector: config?.streamCollector ?? fetch_http_handler_1.streamCollector, + streamHasher: config?.streamHasher ?? hash_blob_browser_1.blobHasher, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/node_modules/@aws-sdk/client-s3/dist-cjs/runtimeConfig.js b/node_modules/@aws-sdk/client-s3/dist-cjs/runtimeConfig.js new file mode 100644 index 00000000..a6a90a4b --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-cjs/runtimeConfig.js @@ -0,0 +1,70 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../package.json")); +const core_1 = require("@aws-sdk/core"); +const credential_provider_node_1 = require("@aws-sdk/credential-provider-node"); +const middleware_bucket_endpoint_1 = require("@aws-sdk/middleware-bucket-endpoint"); +const middleware_flexible_checksums_1 = require("@aws-sdk/middleware-flexible-checksums"); +const middleware_sdk_s3_1 = require("@aws-sdk/middleware-sdk-s3"); +const util_user_agent_node_1 = require("@aws-sdk/util-user-agent-node"); +const config_resolver_1 = require("@smithy/config-resolver"); +const eventstream_serde_node_1 = require("@smithy/eventstream-serde-node"); +const hash_node_1 = require("@smithy/hash-node"); +const hash_stream_node_1 = require("@smithy/hash-stream-node"); +const middleware_retry_1 = require("@smithy/middleware-retry"); +const node_config_provider_1 = require("@smithy/node-config-provider"); +const node_http_handler_1 = require("@smithy/node-http-handler"); +const util_body_length_node_1 = require("@smithy/util-body-length-node"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_node_1 = require("@smithy/util-defaults-mode-node"); +const smithy_client_2 = require("@smithy/smithy-client"); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, 
util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + (0, core_1.emitWarningIfUnsupportedVersion)(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? (0, node_config_provider_1.loadConfig)(core_1.NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? credential_provider_node_1.defaultProvider, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + disableS3ExpressSessionAuth: config?.disableS3ExpressSessionAuth ?? + (0, node_config_provider_1.loadConfig)(middleware_sdk_s3_1.NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_OPTIONS, profileConfig), + eventStreamSerdeProvider: config?.eventStreamSerdeProvider ?? eventstream_serde_node_1.eventStreamSerdeProvider, + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + md5: config?.md5 ?? hash_node_1.Hash.bind(null, "md5"), + region: config?.region ?? + (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, { ...config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestChecksumCalculation: config?.requestChecksumCalculation ?? + (0, node_config_provider_1.loadConfig)(middleware_flexible_checksums_1.NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS, profileConfig), + requestHandler: node_http_handler_1.NodeHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + responseChecksumValidation: config?.responseChecksumValidation ?? + (0, node_config_provider_1.loadConfig)(middleware_flexible_checksums_1.NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS, profileConfig), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }, config), + sha1: config?.sha1 ?? hash_node_1.Hash.bind(null, "sha1"), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + sigv4aSigningRegionSet: config?.sigv4aSigningRegionSet ?? (0, node_config_provider_1.loadConfig)(core_1.NODE_SIGV4A_CONFIG_OPTIONS, profileConfig), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + streamHasher: config?.streamHasher ?? hash_stream_node_1.readableStreamHasher, + useArnRegion: config?.useArnRegion ?? (0, node_config_provider_1.loadConfig)(middleware_bucket_endpoint_1.NODE_USE_ARN_REGION_CONFIG_OPTIONS, profileConfig), + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? 
(0, node_config_provider_1.loadConfig)(util_user_agent_node_1.NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/node_modules/@aws-sdk/client-s3/dist-cjs/runtimeConfig.native.js b/node_modules/@aws-sdk/client-s3/dist-cjs/runtimeConfig.native.js new file mode 100644 index 00000000..34c5f8ec --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-cjs/runtimeConfig.native.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const sha256_js_1 = require("@aws-crypto/sha256-js"); +const runtimeConfig_browser_1 = require("./runtimeConfig.browser"); +const getRuntimeConfig = (config) => { + const browserDefaults = (0, runtimeConfig_browser_1.getRuntimeConfig)(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? sha256_js_1.Sha256, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/node_modules/@aws-sdk/client-s3/dist-cjs/runtimeConfig.shared.js b/node_modules/@aws-sdk/client-s3/dist-cjs/runtimeConfig.shared.js new file mode 100644 index 00000000..2a0891e8 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-cjs/runtimeConfig.shared.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const core_1 = require("@aws-sdk/core"); +const signature_v4_multi_region_1 = require("@aws-sdk/signature-v4-multi-region"); +const smithy_client_1 = require("@smithy/smithy-client"); +const url_parser_1 = require("@smithy/url-parser"); +const util_base64_1 = require("@smithy/util-base64"); +const util_stream_1 = require("@smithy/util-stream"); +const util_utf8_1 = require("@smithy/util-utf8"); +const httpAuthSchemeProvider_1 = require("./auth/httpAuthSchemeProvider"); +const endpointResolver_1 = require("./endpoint/endpointResolver"); +const getRuntimeConfig = (config) => { + return { + apiVersion: "2006-03-01", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + extensions: config?.extensions ?? [], + getAwsChunkedEncodingStream: config?.getAwsChunkedEncodingStream ?? util_stream_1.getAwsChunkedEncodingStream, + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? httpAuthSchemeProvider_1.defaultS3HttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "aws.auth#sigv4a", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4a"), + signer: new core_1.AwsSdkSigV4ASigner(), + }, + ], + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + sdkStreamMixin: config?.sdkStreamMixin ?? util_stream_1.sdkStreamMixin, + serviceId: config?.serviceId ?? "S3", + signerConstructor: config?.signerConstructor ?? signature_v4_multi_region_1.SignatureV4MultiRegion, + signingEscapePath: config?.signingEscapePath ?? false, + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + useArnRegion: config?.useArnRegion ?? false, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
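
Every field these runtimeConfig variants resolve follows the same config?.x ?? default pattern, which makes each one a constructor-level override point. A sketch of overriding two of them; the timeout values are illustrative assumptions, not defaults from this file:

import { S3Client } from "@aws-sdk/client-s3";
import { NodeHttpHandler } from "@smithy/node-http-handler";

// requestHandler and logger are supplied by the caller; every other field falls
// through to the node defaults above (credential chain, retry config, hashes).
const client = new S3Client({
  requestHandler: new NodeHttpHandler({
    connectionTimeout: 5_000, // ms to establish a connection (illustrative)
    socketTimeout: 30_000, // ms of socket inactivity before aborting (illustrative)
  }),
  logger: console,
});
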
util_utf8_1.toUtf8, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/S3.js b/node_modules/@aws-sdk/client-s3/dist-es/S3.js new file mode 100644 index 00000000..2f6dad11 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/S3.js @@ -0,0 +1,203 @@ +import { createAggregatedClient } from "@smithy/smithy-client"; +import { AbortMultipartUploadCommand, } from "./commands/AbortMultipartUploadCommand"; +import { CompleteMultipartUploadCommand, } from "./commands/CompleteMultipartUploadCommand"; +import { CopyObjectCommand } from "./commands/CopyObjectCommand"; +import { CreateBucketCommand, } from "./commands/CreateBucketCommand"; +import { CreateBucketMetadataTableConfigurationCommand, } from "./commands/CreateBucketMetadataTableConfigurationCommand"; +import { CreateMultipartUploadCommand, } from "./commands/CreateMultipartUploadCommand"; +import { CreateSessionCommand, } from "./commands/CreateSessionCommand"; +import { DeleteBucketAnalyticsConfigurationCommand, } from "./commands/DeleteBucketAnalyticsConfigurationCommand"; +import { DeleteBucketCommand, } from "./commands/DeleteBucketCommand"; +import { DeleteBucketCorsCommand, } from "./commands/DeleteBucketCorsCommand"; +import { DeleteBucketEncryptionCommand, } from "./commands/DeleteBucketEncryptionCommand"; +import { DeleteBucketIntelligentTieringConfigurationCommand, } from "./commands/DeleteBucketIntelligentTieringConfigurationCommand"; +import { DeleteBucketInventoryConfigurationCommand, } from "./commands/DeleteBucketInventoryConfigurationCommand"; +import { DeleteBucketLifecycleCommand, } from "./commands/DeleteBucketLifecycleCommand"; +import { DeleteBucketMetadataTableConfigurationCommand, } from "./commands/DeleteBucketMetadataTableConfigurationCommand"; +import { DeleteBucketMetricsConfigurationCommand, } from "./commands/DeleteBucketMetricsConfigurationCommand"; +import { DeleteBucketOwnershipControlsCommand, } from "./commands/DeleteBucketOwnershipControlsCommand"; +import { DeleteBucketPolicyCommand, } from "./commands/DeleteBucketPolicyCommand"; +import { DeleteBucketReplicationCommand, } from "./commands/DeleteBucketReplicationCommand"; +import { DeleteBucketTaggingCommand, } from "./commands/DeleteBucketTaggingCommand"; +import { DeleteBucketWebsiteCommand, } from "./commands/DeleteBucketWebsiteCommand"; +import { DeleteObjectCommand, } from "./commands/DeleteObjectCommand"; +import { DeleteObjectsCommand, } from "./commands/DeleteObjectsCommand"; +import { DeleteObjectTaggingCommand, } from "./commands/DeleteObjectTaggingCommand"; +import { DeletePublicAccessBlockCommand, } from "./commands/DeletePublicAccessBlockCommand"; +import { GetBucketAccelerateConfigurationCommand, } from "./commands/GetBucketAccelerateConfigurationCommand"; +import { GetBucketAclCommand, } from "./commands/GetBucketAclCommand"; +import { GetBucketAnalyticsConfigurationCommand, } from "./commands/GetBucketAnalyticsConfigurationCommand"; +import { GetBucketCorsCommand, } from "./commands/GetBucketCorsCommand"; +import { GetBucketEncryptionCommand, } from "./commands/GetBucketEncryptionCommand"; +import { GetBucketIntelligentTieringConfigurationCommand, } from "./commands/GetBucketIntelligentTieringConfigurationCommand"; +import { GetBucketInventoryConfigurationCommand, } from "./commands/GetBucketInventoryConfigurationCommand"; +import { GetBucketLifecycleConfigurationCommand, } from "./commands/GetBucketLifecycleConfigurationCommand"; +import { GetBucketLocationCommand, } from 
"./commands/GetBucketLocationCommand"; +import { GetBucketLoggingCommand, } from "./commands/GetBucketLoggingCommand"; +import { GetBucketMetadataTableConfigurationCommand, } from "./commands/GetBucketMetadataTableConfigurationCommand"; +import { GetBucketMetricsConfigurationCommand, } from "./commands/GetBucketMetricsConfigurationCommand"; +import { GetBucketNotificationConfigurationCommand, } from "./commands/GetBucketNotificationConfigurationCommand"; +import { GetBucketOwnershipControlsCommand, } from "./commands/GetBucketOwnershipControlsCommand"; +import { GetBucketPolicyCommand, } from "./commands/GetBucketPolicyCommand"; +import { GetBucketPolicyStatusCommand, } from "./commands/GetBucketPolicyStatusCommand"; +import { GetBucketReplicationCommand, } from "./commands/GetBucketReplicationCommand"; +import { GetBucketRequestPaymentCommand, } from "./commands/GetBucketRequestPaymentCommand"; +import { GetBucketTaggingCommand, } from "./commands/GetBucketTaggingCommand"; +import { GetBucketVersioningCommand, } from "./commands/GetBucketVersioningCommand"; +import { GetBucketWebsiteCommand, } from "./commands/GetBucketWebsiteCommand"; +import { GetObjectAclCommand, } from "./commands/GetObjectAclCommand"; +import { GetObjectAttributesCommand, } from "./commands/GetObjectAttributesCommand"; +import { GetObjectCommand } from "./commands/GetObjectCommand"; +import { GetObjectLegalHoldCommand, } from "./commands/GetObjectLegalHoldCommand"; +import { GetObjectLockConfigurationCommand, } from "./commands/GetObjectLockConfigurationCommand"; +import { GetObjectRetentionCommand, } from "./commands/GetObjectRetentionCommand"; +import { GetObjectTaggingCommand, } from "./commands/GetObjectTaggingCommand"; +import { GetObjectTorrentCommand, } from "./commands/GetObjectTorrentCommand"; +import { GetPublicAccessBlockCommand, } from "./commands/GetPublicAccessBlockCommand"; +import { HeadBucketCommand } from "./commands/HeadBucketCommand"; +import { HeadObjectCommand } from "./commands/HeadObjectCommand"; +import { ListBucketAnalyticsConfigurationsCommand, } from "./commands/ListBucketAnalyticsConfigurationsCommand"; +import { ListBucketIntelligentTieringConfigurationsCommand, } from "./commands/ListBucketIntelligentTieringConfigurationsCommand"; +import { ListBucketInventoryConfigurationsCommand, } from "./commands/ListBucketInventoryConfigurationsCommand"; +import { ListBucketMetricsConfigurationsCommand, } from "./commands/ListBucketMetricsConfigurationsCommand"; +import { ListBucketsCommand } from "./commands/ListBucketsCommand"; +import { ListDirectoryBucketsCommand, } from "./commands/ListDirectoryBucketsCommand"; +import { ListMultipartUploadsCommand, } from "./commands/ListMultipartUploadsCommand"; +import { ListObjectsCommand } from "./commands/ListObjectsCommand"; +import { ListObjectsV2Command, } from "./commands/ListObjectsV2Command"; +import { ListObjectVersionsCommand, } from "./commands/ListObjectVersionsCommand"; +import { ListPartsCommand } from "./commands/ListPartsCommand"; +import { PutBucketAccelerateConfigurationCommand, } from "./commands/PutBucketAccelerateConfigurationCommand"; +import { PutBucketAclCommand, } from "./commands/PutBucketAclCommand"; +import { PutBucketAnalyticsConfigurationCommand, } from "./commands/PutBucketAnalyticsConfigurationCommand"; +import { PutBucketCorsCommand, } from "./commands/PutBucketCorsCommand"; +import { PutBucketEncryptionCommand, } from "./commands/PutBucketEncryptionCommand"; +import { PutBucketIntelligentTieringConfigurationCommand, } from 
"./commands/PutBucketIntelligentTieringConfigurationCommand"; +import { PutBucketInventoryConfigurationCommand, } from "./commands/PutBucketInventoryConfigurationCommand"; +import { PutBucketLifecycleConfigurationCommand, } from "./commands/PutBucketLifecycleConfigurationCommand"; +import { PutBucketLoggingCommand, } from "./commands/PutBucketLoggingCommand"; +import { PutBucketMetricsConfigurationCommand, } from "./commands/PutBucketMetricsConfigurationCommand"; +import { PutBucketNotificationConfigurationCommand, } from "./commands/PutBucketNotificationConfigurationCommand"; +import { PutBucketOwnershipControlsCommand, } from "./commands/PutBucketOwnershipControlsCommand"; +import { PutBucketPolicyCommand, } from "./commands/PutBucketPolicyCommand"; +import { PutBucketReplicationCommand, } from "./commands/PutBucketReplicationCommand"; +import { PutBucketRequestPaymentCommand, } from "./commands/PutBucketRequestPaymentCommand"; +import { PutBucketTaggingCommand, } from "./commands/PutBucketTaggingCommand"; +import { PutBucketVersioningCommand, } from "./commands/PutBucketVersioningCommand"; +import { PutBucketWebsiteCommand, } from "./commands/PutBucketWebsiteCommand"; +import { PutObjectAclCommand, } from "./commands/PutObjectAclCommand"; +import { PutObjectCommand } from "./commands/PutObjectCommand"; +import { PutObjectLegalHoldCommand, } from "./commands/PutObjectLegalHoldCommand"; +import { PutObjectLockConfigurationCommand, } from "./commands/PutObjectLockConfigurationCommand"; +import { PutObjectRetentionCommand, } from "./commands/PutObjectRetentionCommand"; +import { PutObjectTaggingCommand, } from "./commands/PutObjectTaggingCommand"; +import { PutPublicAccessBlockCommand, } from "./commands/PutPublicAccessBlockCommand"; +import { RestoreObjectCommand, } from "./commands/RestoreObjectCommand"; +import { SelectObjectContentCommand, } from "./commands/SelectObjectContentCommand"; +import { UploadPartCommand } from "./commands/UploadPartCommand"; +import { UploadPartCopyCommand, } from "./commands/UploadPartCopyCommand"; +import { WriteGetObjectResponseCommand, } from "./commands/WriteGetObjectResponseCommand"; +import { S3Client } from "./S3Client"; +const commands = { + AbortMultipartUploadCommand, + CompleteMultipartUploadCommand, + CopyObjectCommand, + CreateBucketCommand, + CreateBucketMetadataTableConfigurationCommand, + CreateMultipartUploadCommand, + CreateSessionCommand, + DeleteBucketCommand, + DeleteBucketAnalyticsConfigurationCommand, + DeleteBucketCorsCommand, + DeleteBucketEncryptionCommand, + DeleteBucketIntelligentTieringConfigurationCommand, + DeleteBucketInventoryConfigurationCommand, + DeleteBucketLifecycleCommand, + DeleteBucketMetadataTableConfigurationCommand, + DeleteBucketMetricsConfigurationCommand, + DeleteBucketOwnershipControlsCommand, + DeleteBucketPolicyCommand, + DeleteBucketReplicationCommand, + DeleteBucketTaggingCommand, + DeleteBucketWebsiteCommand, + DeleteObjectCommand, + DeleteObjectsCommand, + DeleteObjectTaggingCommand, + DeletePublicAccessBlockCommand, + GetBucketAccelerateConfigurationCommand, + GetBucketAclCommand, + GetBucketAnalyticsConfigurationCommand, + GetBucketCorsCommand, + GetBucketEncryptionCommand, + GetBucketIntelligentTieringConfigurationCommand, + GetBucketInventoryConfigurationCommand, + GetBucketLifecycleConfigurationCommand, + GetBucketLocationCommand, + GetBucketLoggingCommand, + GetBucketMetadataTableConfigurationCommand, + GetBucketMetricsConfigurationCommand, + GetBucketNotificationConfigurationCommand, + 
GetBucketOwnershipControlsCommand, + GetBucketPolicyCommand, + GetBucketPolicyStatusCommand, + GetBucketReplicationCommand, + GetBucketRequestPaymentCommand, + GetBucketTaggingCommand, + GetBucketVersioningCommand, + GetBucketWebsiteCommand, + GetObjectCommand, + GetObjectAclCommand, + GetObjectAttributesCommand, + GetObjectLegalHoldCommand, + GetObjectLockConfigurationCommand, + GetObjectRetentionCommand, + GetObjectTaggingCommand, + GetObjectTorrentCommand, + GetPublicAccessBlockCommand, + HeadBucketCommand, + HeadObjectCommand, + ListBucketAnalyticsConfigurationsCommand, + ListBucketIntelligentTieringConfigurationsCommand, + ListBucketInventoryConfigurationsCommand, + ListBucketMetricsConfigurationsCommand, + ListBucketsCommand, + ListDirectoryBucketsCommand, + ListMultipartUploadsCommand, + ListObjectsCommand, + ListObjectsV2Command, + ListObjectVersionsCommand, + ListPartsCommand, + PutBucketAccelerateConfigurationCommand, + PutBucketAclCommand, + PutBucketAnalyticsConfigurationCommand, + PutBucketCorsCommand, + PutBucketEncryptionCommand, + PutBucketIntelligentTieringConfigurationCommand, + PutBucketInventoryConfigurationCommand, + PutBucketLifecycleConfigurationCommand, + PutBucketLoggingCommand, + PutBucketMetricsConfigurationCommand, + PutBucketNotificationConfigurationCommand, + PutBucketOwnershipControlsCommand, + PutBucketPolicyCommand, + PutBucketReplicationCommand, + PutBucketRequestPaymentCommand, + PutBucketTaggingCommand, + PutBucketVersioningCommand, + PutBucketWebsiteCommand, + PutObjectCommand, + PutObjectAclCommand, + PutObjectLegalHoldCommand, + PutObjectLockConfigurationCommand, + PutObjectRetentionCommand, + PutObjectTaggingCommand, + PutPublicAccessBlockCommand, + RestoreObjectCommand, + SelectObjectContentCommand, + UploadPartCommand, + UploadPartCopyCommand, + WriteGetObjectResponseCommand, +}; +export class S3 extends S3Client { +} +createAggregatedClient(commands, S3); diff --git a/node_modules/@aws-sdk/client-s3/dist-es/S3Client.js b/node_modules/@aws-sdk/client-s3/dist-es/S3Client.js new file mode 100644 index 00000000..4e9ac74d --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/S3Client.js @@ -0,0 +1,62 @@ +import { getAddExpectContinuePlugin } from "@aws-sdk/middleware-expect-continue"; +import { resolveFlexibleChecksumsConfig, } from "@aws-sdk/middleware-flexible-checksums"; +import { getHostHeaderPlugin, resolveHostHeaderConfig, } from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { getRecursionDetectionPlugin } from "@aws-sdk/middleware-recursion-detection"; +import { getRegionRedirectMiddlewarePlugin, getS3ExpressHttpSigningPlugin, getS3ExpressPlugin, getValidateBucketNamePlugin, resolveS3Config, } from "@aws-sdk/middleware-sdk-s3"; +import { getUserAgentPlugin, resolveUserAgentConfig, } from "@aws-sdk/middleware-user-agent"; +import { resolveRegionConfig } from "@smithy/config-resolver"; +import { DefaultIdentityProviderConfig, getHttpAuthSchemeEndpointRuleSetPlugin, getHttpSigningPlugin, } from "@smithy/core"; +import { resolveEventStreamSerdeConfig, } from "@smithy/eventstream-serde-config-resolver"; +import { getContentLengthPlugin } from "@smithy/middleware-content-length"; +import { resolveEndpointConfig } from "@smithy/middleware-endpoint"; +import { getRetryPlugin, resolveRetryConfig } from "@smithy/middleware-retry"; +import { Client as __Client, } from "@smithy/smithy-client"; +import { defaultS3HttpAuthSchemeParametersProvider, resolveHttpAuthSchemeConfig, } from 
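
createAggregatedClient(commands, S3) turns every entry in the commands map into a method on the S3 class, so s3.putObject(input) is wire-equivalent to client.send(new PutObjectCommand(input)). A sketch of the aggregated style against a MinIO endpoint; the URL and credentials are placeholders for a local deployment, not values from this diff:

import { S3 } from "@aws-sdk/client-s3";

const s3 = new S3({
  region: "us-east-1",
  endpoint: "http://localhost:9000", // placeholder MinIO endpoint
  forcePathStyle: true, // MinIO is typically addressed path-style
  credentials: { accessKeyId: "minioadmin", secretAccessKey: "minioadmin" }, // placeholders
});

// Same request pipeline as send(new PutObjectCommand({ ... })).
await s3.putObject({ Bucket: "files", Key: "hello.txt", Body: "hello world" });
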
"./auth/httpAuthSchemeProvider"; +import { CreateSessionCommand, } from "./commands/CreateSessionCommand"; +import { resolveClientEndpointParameters, } from "./endpoint/EndpointParameters"; +import { getRuntimeConfig as __getRuntimeConfig } from "./runtimeConfig"; +import { resolveRuntimeExtensions } from "./runtimeExtensions"; +export { __Client }; +export class S3Client extends __Client { + config; + constructor(...[configuration]) { + const _config_0 = __getRuntimeConfig(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = resolveUserAgentConfig(_config_1); + const _config_3 = resolveFlexibleChecksumsConfig(_config_2); + const _config_4 = resolveRetryConfig(_config_3); + const _config_5 = resolveRegionConfig(_config_4); + const _config_6 = resolveHostHeaderConfig(_config_5); + const _config_7 = resolveEndpointConfig(_config_6); + const _config_8 = resolveEventStreamSerdeConfig(_config_7); + const _config_9 = resolveHttpAuthSchemeConfig(_config_8); + const _config_10 = resolveS3Config(_config_9, { session: [() => this, CreateSessionCommand] }); + const _config_11 = resolveRuntimeExtensions(_config_10, configuration?.extensions || []); + this.config = _config_11; + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + this.middlewareStack.use(getRecursionDetectionPlugin(this.config)); + this.middlewareStack.use(getHttpAuthSchemeEndpointRuleSetPlugin(this.config, { + httpAuthSchemeParametersProvider: defaultS3HttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + "aws.auth#sigv4a": config.credentials, + }), + })); + this.middlewareStack.use(getHttpSigningPlugin(this.config)); + this.middlewareStack.use(getValidateBucketNamePlugin(this.config)); + this.middlewareStack.use(getAddExpectContinuePlugin(this.config)); + this.middlewareStack.use(getRegionRedirectMiddlewarePlugin(this.config)); + this.middlewareStack.use(getS3ExpressPlugin(this.config)); + this.middlewareStack.use(getS3ExpressHttpSigningPlugin(this.config)); + } + destroy() { + super.destroy(); + } +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/auth/httpAuthExtensionConfiguration.js b/node_modules/@aws-sdk/client-s3/dist-es/auth/httpAuthExtensionConfiguration.js new file mode 100644 index 00000000..2ba1d48c --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/auth/httpAuthExtensionConfiguration.js @@ -0,0 +1,38 @@ +export const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; 
+ }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + }, + }; +}; +export const resolveHttpAuthRuntimeConfig = (config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/auth/httpAuthSchemeProvider.js b/node_modules/@aws-sdk/client-s3/dist-es/auth/httpAuthSchemeProvider.js new file mode 100644 index 00000000..ecc5dc15 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/auth/httpAuthSchemeProvider.js @@ -0,0 +1,123 @@ +import { resolveAwsSdkSigV4AConfig, resolveAwsSdkSigV4Config, } from "@aws-sdk/core"; +import { signatureV4CrtContainer } from "@aws-sdk/signature-v4-multi-region"; +import { resolveParams } from "@smithy/middleware-endpoint"; +import { getSmithyContext, normalizeProvider } from "@smithy/util-middleware"; +import { defaultEndpointResolver } from "../endpoint/endpointResolver"; +const createEndpointRuleSetHttpAuthSchemeParametersProvider = (defaultHttpAuthSchemeParametersProvider) => async (config, context, input) => { + if (!input) { + throw new Error(`Could not find \`input\` for \`defaultEndpointRuleSetHttpAuthSchemeParametersProvider\``); + } + const defaultParameters = await defaultHttpAuthSchemeParametersProvider(config, context, input); + const instructionsFn = getSmithyContext(context)?.commandInstance?.constructor + ?.getEndpointParameterInstructions; + if (!instructionsFn) { + throw new Error(`getEndpointParameterInstructions() is not defined on \`${context.commandName}\``); + } + const endpointParameters = await resolveParams(input, { getEndpointParameterInstructions: instructionsFn }, config); + return Object.assign(defaultParameters, endpointParameters); +}; +const _defaultS3HttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: getSmithyContext(context).operation, + region: (await normalizeProvider(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +export const defaultS3HttpAuthSchemeParametersProvider = createEndpointRuleSetHttpAuthSchemeParametersProvider(_defaultS3HttpAuthSchemeParametersProvider); +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "s3", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createAwsAuthSigv4aHttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4a", + signingProperties: { + name: "s3", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +const createEndpointRuleSetHttpAuthSchemeProvider = (defaultEndpointResolver, defaultHttpAuthSchemeResolver, createHttpAuthOptionFunctions) => { + const endpointRuleSetHttpAuthSchemeProvider = (authParameters) => { + const endpoint = defaultEndpointResolver(authParameters); + const authSchemes = endpoint.properties?.authSchemes; + if (!authSchemes) { + return defaultHttpAuthSchemeResolver(authParameters); + } + const options = []; + for (const scheme of authSchemes) { + const { name: resolvedName, properties = {}, ...rest } = scheme; + const name = resolvedName.toLowerCase(); + if (resolvedName !== name) { + console.warn(`HttpAuthScheme has been 
normalized with lowercasing: \`${resolvedName}\` to \`${name}\``); + } + let schemeId; + if (name === "sigv4a") { + schemeId = "aws.auth#sigv4a"; + const sigv4Present = authSchemes.find((s) => { + const name = s.name.toLowerCase(); + return name !== "sigv4a" && name.startsWith("sigv4"); + }); + if (!signatureV4CrtContainer.CrtSignerV4 && sigv4Present) { + continue; + } + } + else if (name.startsWith("sigv4")) { + schemeId = "aws.auth#sigv4"; + } + else { + throw new Error(`Unknown HttpAuthScheme found in \`@smithy.rules#endpointRuleSet\`: \`${name}\``); + } + const createOption = createHttpAuthOptionFunctions[schemeId]; + if (!createOption) { + throw new Error(`Could not find HttpAuthOption create function for \`${schemeId}\``); + } + const option = createOption(authParameters); + option.schemeId = schemeId; + option.signingProperties = { ...(option.signingProperties || {}), ...rest, ...properties }; + options.push(option); + } + return options; + }; + return endpointRuleSetHttpAuthSchemeProvider; +}; +const _defaultS3HttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + options.push(createAwsAuthSigv4aHttpAuthOption(authParameters)); + } + } + return options; +}; +export const defaultS3HttpAuthSchemeProvider = createEndpointRuleSetHttpAuthSchemeProvider(defaultEndpointResolver, _defaultS3HttpAuthSchemeProvider, { + "aws.auth#sigv4": createAwsAuthSigv4HttpAuthOption, + "aws.auth#sigv4a": createAwsAuthSigv4aHttpAuthOption, +}); +export const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = resolveAwsSdkSigV4Config(config); + const config_1 = resolveAwsSdkSigV4AConfig(config_0); + return Object.assign(config_1, { + authSchemePreference: normalizeProvider(config.authSchemePreference ?? 
[]), + }); +}; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/AbortMultipartUploadCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/AbortMultipartUploadCommand.js new file mode 100644 index 00000000..0d499032 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/AbortMultipartUploadCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_AbortMultipartUploadCommand, se_AbortMultipartUploadCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class AbortMultipartUploadCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Key: { type: "contextParams", name: "Key" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "AbortMultipartUpload", {}) + .n("S3Client", "AbortMultipartUploadCommand") + .f(void 0, void 0) + .ser(se_AbortMultipartUploadCommand) + .de(de_AbortMultipartUploadCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/CompleteMultipartUploadCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/CompleteMultipartUploadCommand.js new file mode 100644 index 00000000..33db05ca --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/CompleteMultipartUploadCommand.js @@ -0,0 +1,31 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getSsecPlugin } from "@aws-sdk/middleware-ssec"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { CompleteMultipartUploadOutputFilterSensitiveLog, CompleteMultipartUploadRequestFilterSensitiveLog, } from "../models/models_0"; +import { de_CompleteMultipartUploadCommand, se_CompleteMultipartUploadCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class CompleteMultipartUploadCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Key: { type: "contextParams", name: "Key" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + getSsecPlugin(config), + ]; +}) + .s("AmazonS3", "CompleteMultipartUpload", {}) + .n("S3Client", "CompleteMultipartUploadCommand") + .f(CompleteMultipartUploadRequestFilterSensitiveLog, CompleteMultipartUploadOutputFilterSensitiveLog) + .ser(se_CompleteMultipartUploadCommand) + .de(de_CompleteMultipartUploadCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/CopyObjectCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/CopyObjectCommand.js new file mode 100644 index 00000000..73f6c748 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/CopyObjectCommand.js @@ -0,0 +1,33 @@ +import { 
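
AbortMultipartUpload and CompleteMultipartUpload above are the bookends of the multipart protocol; together with CreateMultipartUpload and UploadPart (also regenerated in this diff), the happy path plus cleanup looks roughly like this single-part sketch. All names are placeholders, and real non-final parts must be at least 5 MiB:

import {
  S3Client,
  AbortMultipartUploadCommand,
  CompleteMultipartUploadCommand,
  CreateMultipartUploadCommand,
  UploadPartCommand,
} from "@aws-sdk/client-s3";

const client = new S3Client({});

export async function uploadSinglePart(bucket: string, key: string, body: Uint8Array) {
  const { UploadId } = await client.send(
    new CreateMultipartUploadCommand({ Bucket: bucket, Key: key })
  );
  try {
    const { ETag } = await client.send(
      new UploadPartCommand({ Bucket: bucket, Key: key, UploadId, PartNumber: 1, Body: body })
    );
    await client.send(
      new CompleteMultipartUploadCommand({
        Bucket: bucket,
        Key: key,
        UploadId,
        MultipartUpload: { Parts: [{ ETag, PartNumber: 1 }] },
      })
    );
  } catch (err) {
    // getThrow200ExceptionsPlugin surfaces errors embedded in 200 responses;
    // abort so the partial upload does not linger (and accrue storage).
    await client.send(new AbortMultipartUploadCommand({ Bucket: bucket, Key: key, UploadId }));
    throw err;
  }
}
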
getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getSsecPlugin } from "@aws-sdk/middleware-ssec"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { CopyObjectOutputFilterSensitiveLog, CopyObjectRequestFilterSensitiveLog, } from "../models/models_0"; +import { de_CopyObjectCommand, se_CopyObjectCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class CopyObjectCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + DisableS3ExpressSessionAuth: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, + Key: { type: "contextParams", name: "Key" }, + CopySource: { type: "contextParams", name: "CopySource" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + getSsecPlugin(config), + ]; +}) + .s("AmazonS3", "CopyObject", {}) + .n("S3Client", "CopyObjectCommand") + .f(CopyObjectRequestFilterSensitiveLog, CopyObjectOutputFilterSensitiveLog) + .ser(se_CopyObjectCommand) + .de(de_CopyObjectCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/CreateBucketCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/CreateBucketCommand.js new file mode 100644 index 00000000..eda058bc --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/CreateBucketCommand.js @@ -0,0 +1,31 @@ +import { getLocationConstraintPlugin } from "@aws-sdk/middleware-location-constraint"; +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_CreateBucketCommand, se_CreateBucketCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class CreateBucketCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + DisableAccessPoints: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + getLocationConstraintPlugin(config), + ]; +}) + .s("AmazonS3", "CreateBucket", {}) + .n("S3Client", "CreateBucketCommand") + .f(void 0, void 0) + .ser(se_CreateBucketCommand) + .de(de_CreateBucketCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/CreateBucketMetadataTableConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/CreateBucketMetadataTableConfigurationCommand.js new file mode 100644 index 00000000..69686878 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/CreateBucketMetadataTableConfigurationCommand.js @@ -0,0 +1,31 @@ +import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; 
+import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_CreateBucketMetadataTableConfigurationCommand, se_CreateBucketMetadataTableConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class CreateBucketMetadataTableConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getFlexibleChecksumsPlugin(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true, + }), + ]; +}) + .s("AmazonS3", "CreateBucketMetadataTableConfiguration", {}) + .n("S3Client", "CreateBucketMetadataTableConfigurationCommand") + .f(void 0, void 0) + .ser(se_CreateBucketMetadataTableConfigurationCommand) + .de(de_CreateBucketMetadataTableConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/CreateMultipartUploadCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/CreateMultipartUploadCommand.js new file mode 100644 index 00000000..a19a555c --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/CreateMultipartUploadCommand.js @@ -0,0 +1,31 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getSsecPlugin } from "@aws-sdk/middleware-ssec"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { CreateMultipartUploadOutputFilterSensitiveLog, CreateMultipartUploadRequestFilterSensitiveLog, } from "../models/models_0"; +import { de_CreateMultipartUploadCommand, se_CreateMultipartUploadCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class CreateMultipartUploadCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Key: { type: "contextParams", name: "Key" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + getSsecPlugin(config), + ]; +}) + .s("AmazonS3", "CreateMultipartUpload", {}) + .n("S3Client", "CreateMultipartUploadCommand") + .f(CreateMultipartUploadRequestFilterSensitiveLog, CreateMultipartUploadOutputFilterSensitiveLog) + .ser(se_CreateMultipartUploadCommand) + .de(de_CreateMultipartUploadCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/CreateSessionCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/CreateSessionCommand.js new file mode 100644 index 00000000..a08acaee --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/CreateSessionCommand.js @@ -0,0 +1,29 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as 
$Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { CreateSessionOutputFilterSensitiveLog, CreateSessionRequestFilterSensitiveLog, } from "../models/models_0"; +import { de_CreateSessionCommand, se_CreateSessionCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class CreateSessionCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + DisableS3ExpressSessionAuth: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "CreateSession", {}) + .n("S3Client", "CreateSessionCommand") + .f(CreateSessionRequestFilterSensitiveLog, CreateSessionOutputFilterSensitiveLog) + .ser(se_CreateSessionCommand) + .de(de_CreateSessionCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketAnalyticsConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketAnalyticsConfigurationCommand.js new file mode 100644 index 00000000..994a47c1 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketAnalyticsConfigurationCommand.js @@ -0,0 +1,26 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteBucketAnalyticsConfigurationCommand, se_DeleteBucketAnalyticsConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class DeleteBucketAnalyticsConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "DeleteBucketAnalyticsConfiguration", {}) + .n("S3Client", "DeleteBucketAnalyticsConfigurationCommand") + .f(void 0, void 0) + .ser(se_DeleteBucketAnalyticsConfigurationCommand) + .de(de_DeleteBucketAnalyticsConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketCommand.js new file mode 100644 index 00000000..965c76e6 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketCommand.js @@ -0,0 +1,26 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteBucketCommand, se_DeleteBucketCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class DeleteBucketCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, 
this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "DeleteBucket", {}) + .n("S3Client", "DeleteBucketCommand") + .f(void 0, void 0) + .ser(se_DeleteBucketCommand) + .de(de_DeleteBucketCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketCorsCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketCorsCommand.js new file mode 100644 index 00000000..c0fe6d3f --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketCorsCommand.js @@ -0,0 +1,26 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteBucketCorsCommand, se_DeleteBucketCorsCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class DeleteBucketCorsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "DeleteBucketCors", {}) + .n("S3Client", "DeleteBucketCorsCommand") + .f(void 0, void 0) + .ser(se_DeleteBucketCorsCommand) + .de(de_DeleteBucketCorsCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketEncryptionCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketEncryptionCommand.js new file mode 100644 index 00000000..517736f2 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketEncryptionCommand.js @@ -0,0 +1,26 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteBucketEncryptionCommand, se_DeleteBucketEncryptionCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class DeleteBucketEncryptionCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "DeleteBucketEncryption", {}) + .n("S3Client", "DeleteBucketEncryptionCommand") + .f(void 0, void 0) + .ser(se_DeleteBucketEncryptionCommand) + .de(de_DeleteBucketEncryptionCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketIntelligentTieringConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketIntelligentTieringConfigurationCommand.js new file mode 100644 index 00000000..dcd45053 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketIntelligentTieringConfigurationCommand.js @@ -0,0 +1,26 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from 
"@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteBucketIntelligentTieringConfigurationCommand, se_DeleteBucketIntelligentTieringConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class DeleteBucketIntelligentTieringConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "DeleteBucketIntelligentTieringConfiguration", {}) + .n("S3Client", "DeleteBucketIntelligentTieringConfigurationCommand") + .f(void 0, void 0) + .ser(se_DeleteBucketIntelligentTieringConfigurationCommand) + .de(de_DeleteBucketIntelligentTieringConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketInventoryConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketInventoryConfigurationCommand.js new file mode 100644 index 00000000..14ab9caa --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketInventoryConfigurationCommand.js @@ -0,0 +1,26 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteBucketInventoryConfigurationCommand, se_DeleteBucketInventoryConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class DeleteBucketInventoryConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "DeleteBucketInventoryConfiguration", {}) + .n("S3Client", "DeleteBucketInventoryConfigurationCommand") + .f(void 0, void 0) + .ser(se_DeleteBucketInventoryConfigurationCommand) + .de(de_DeleteBucketInventoryConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketLifecycleCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketLifecycleCommand.js new file mode 100644 index 00000000..a6ff426a --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketLifecycleCommand.js @@ -0,0 +1,26 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteBucketLifecycleCommand, se_DeleteBucketLifecycleCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class DeleteBucketLifecycleCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + 
getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "DeleteBucketLifecycle", {}) + .n("S3Client", "DeleteBucketLifecycleCommand") + .f(void 0, void 0) + .ser(se_DeleteBucketLifecycleCommand) + .de(de_DeleteBucketLifecycleCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketMetadataTableConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketMetadataTableConfigurationCommand.js new file mode 100644 index 00000000..9a303d28 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketMetadataTableConfigurationCommand.js @@ -0,0 +1,26 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteBucketMetadataTableConfigurationCommand, se_DeleteBucketMetadataTableConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class DeleteBucketMetadataTableConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "DeleteBucketMetadataTableConfiguration", {}) + .n("S3Client", "DeleteBucketMetadataTableConfigurationCommand") + .f(void 0, void 0) + .ser(se_DeleteBucketMetadataTableConfigurationCommand) + .de(de_DeleteBucketMetadataTableConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketMetricsConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketMetricsConfigurationCommand.js new file mode 100644 index 00000000..71a7bacd --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketMetricsConfigurationCommand.js @@ -0,0 +1,26 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteBucketMetricsConfigurationCommand, se_DeleteBucketMetricsConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class DeleteBucketMetricsConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "DeleteBucketMetricsConfiguration", {}) + .n("S3Client", "DeleteBucketMetricsConfigurationCommand") + .f(void 0, void 0) + .ser(se_DeleteBucketMetricsConfigurationCommand) + .de(de_DeleteBucketMetricsConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketOwnershipControlsCommand.js 
b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketOwnershipControlsCommand.js new file mode 100644 index 00000000..f5546479 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketOwnershipControlsCommand.js @@ -0,0 +1,26 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteBucketOwnershipControlsCommand, se_DeleteBucketOwnershipControlsCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class DeleteBucketOwnershipControlsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "DeleteBucketOwnershipControls", {}) + .n("S3Client", "DeleteBucketOwnershipControlsCommand") + .f(void 0, void 0) + .ser(se_DeleteBucketOwnershipControlsCommand) + .de(de_DeleteBucketOwnershipControlsCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketPolicyCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketPolicyCommand.js new file mode 100644 index 00000000..e51dc8e3 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketPolicyCommand.js @@ -0,0 +1,26 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteBucketPolicyCommand, se_DeleteBucketPolicyCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class DeleteBucketPolicyCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "DeleteBucketPolicy", {}) + .n("S3Client", "DeleteBucketPolicyCommand") + .f(void 0, void 0) + .ser(se_DeleteBucketPolicyCommand) + .de(de_DeleteBucketPolicyCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketReplicationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketReplicationCommand.js new file mode 100644 index 00000000..f4d63edc --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketReplicationCommand.js @@ -0,0 +1,26 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteBucketReplicationCommand, se_DeleteBucketReplicationCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class DeleteBucketReplicationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + 
UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "DeleteBucketReplication", {}) + .n("S3Client", "DeleteBucketReplicationCommand") + .f(void 0, void 0) + .ser(se_DeleteBucketReplicationCommand) + .de(de_DeleteBucketReplicationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketTaggingCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketTaggingCommand.js new file mode 100644 index 00000000..5253c7be --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketTaggingCommand.js @@ -0,0 +1,26 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteBucketTaggingCommand, se_DeleteBucketTaggingCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class DeleteBucketTaggingCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "DeleteBucketTagging", {}) + .n("S3Client", "DeleteBucketTaggingCommand") + .f(void 0, void 0) + .ser(se_DeleteBucketTaggingCommand) + .de(de_DeleteBucketTaggingCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketWebsiteCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketWebsiteCommand.js new file mode 100644 index 00000000..fd66125b --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteBucketWebsiteCommand.js @@ -0,0 +1,26 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteBucketWebsiteCommand, se_DeleteBucketWebsiteCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class DeleteBucketWebsiteCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "DeleteBucketWebsite", {}) + .n("S3Client", "DeleteBucketWebsiteCommand") + .f(void 0, void 0) + .ser(se_DeleteBucketWebsiteCommand) + .de(de_DeleteBucketWebsiteCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteObjectCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteObjectCommand.js new file mode 100644 index 00000000..bb577c8c --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteObjectCommand.js @@ -0,0 +1,28 
@@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteObjectCommand, se_DeleteObjectCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class DeleteObjectCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Key: { type: "contextParams", name: "Key" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "DeleteObject", {}) + .n("S3Client", "DeleteObjectCommand") + .f(void 0, void 0) + .ser(se_DeleteObjectCommand) + .de(de_DeleteObjectCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteObjectTaggingCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteObjectTaggingCommand.js new file mode 100644 index 00000000..9a77c3ba --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteObjectTaggingCommand.js @@ -0,0 +1,27 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteObjectTaggingCommand, se_DeleteObjectTaggingCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class DeleteObjectTaggingCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "DeleteObjectTagging", {}) + .n("S3Client", "DeleteObjectTaggingCommand") + .f(void 0, void 0) + .ser(se_DeleteObjectTaggingCommand) + .de(de_DeleteObjectTaggingCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteObjectsCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteObjectsCommand.js new file mode 100644 index 00000000..85579a7f --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeleteObjectsCommand.js @@ -0,0 +1,32 @@ +import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums"; +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteObjectsCommand, se_DeleteObjectsCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class DeleteObjectsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + 
getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getFlexibleChecksumsPlugin(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true, + }), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "DeleteObjects", {}) + .n("S3Client", "DeleteObjectsCommand") + .f(void 0, void 0) + .ser(se_DeleteObjectsCommand) + .de(de_DeleteObjectsCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/DeletePublicAccessBlockCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeletePublicAccessBlockCommand.js new file mode 100644 index 00000000..fc90bff5 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/DeletePublicAccessBlockCommand.js @@ -0,0 +1,26 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeletePublicAccessBlockCommand, se_DeletePublicAccessBlockCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class DeletePublicAccessBlockCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "DeletePublicAccessBlock", {}) + .n("S3Client", "DeletePublicAccessBlockCommand") + .f(void 0, void 0) + .ser(se_DeletePublicAccessBlockCommand) + .de(de_DeletePublicAccessBlockCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketAccelerateConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketAccelerateConfigurationCommand.js new file mode 100644 index 00000000..200179e7 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketAccelerateConfigurationCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetBucketAccelerateConfigurationCommand, se_GetBucketAccelerateConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketAccelerateConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetBucketAccelerateConfiguration", {}) + .n("S3Client", "GetBucketAccelerateConfigurationCommand") + .f(void 0, void 0) + .ser(se_GetBucketAccelerateConfigurationCommand) + .de(de_GetBucketAccelerateConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketAclCommand.js 
b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketAclCommand.js new file mode 100644 index 00000000..3f5e978b --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketAclCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetBucketAclCommand, se_GetBucketAclCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketAclCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetBucketAcl", {}) + .n("S3Client", "GetBucketAclCommand") + .f(void 0, void 0) + .ser(se_GetBucketAclCommand) + .de(de_GetBucketAclCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketAnalyticsConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketAnalyticsConfigurationCommand.js new file mode 100644 index 00000000..27c67c7e --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketAnalyticsConfigurationCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetBucketAnalyticsConfigurationCommand, se_GetBucketAnalyticsConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketAnalyticsConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetBucketAnalyticsConfiguration", {}) + .n("S3Client", "GetBucketAnalyticsConfigurationCommand") + .f(void 0, void 0) + .ser(se_GetBucketAnalyticsConfigurationCommand) + .de(de_GetBucketAnalyticsConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketCorsCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketCorsCommand.js new file mode 100644 index 00000000..e1e65947 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketCorsCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { 
de_GetBucketCorsCommand, se_GetBucketCorsCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketCorsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetBucketCors", {}) + .n("S3Client", "GetBucketCorsCommand") + .f(void 0, void 0) + .ser(se_GetBucketCorsCommand) + .de(de_GetBucketCorsCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketEncryptionCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketEncryptionCommand.js new file mode 100644 index 00000000..82915967 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketEncryptionCommand.js @@ -0,0 +1,29 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { GetBucketEncryptionOutputFilterSensitiveLog, } from "../models/models_0"; +import { de_GetBucketEncryptionCommand, se_GetBucketEncryptionCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketEncryptionCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetBucketEncryption", {}) + .n("S3Client", "GetBucketEncryptionCommand") + .f(void 0, GetBucketEncryptionOutputFilterSensitiveLog) + .ser(se_GetBucketEncryptionCommand) + .de(de_GetBucketEncryptionCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketIntelligentTieringConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketIntelligentTieringConfigurationCommand.js new file mode 100644 index 00000000..b781b1cd --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketIntelligentTieringConfigurationCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetBucketIntelligentTieringConfigurationCommand, se_GetBucketIntelligentTieringConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketIntelligentTieringConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + 
getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetBucketIntelligentTieringConfiguration", {}) + .n("S3Client", "GetBucketIntelligentTieringConfigurationCommand") + .f(void 0, void 0) + .ser(se_GetBucketIntelligentTieringConfigurationCommand) + .de(de_GetBucketIntelligentTieringConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketInventoryConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketInventoryConfigurationCommand.js new file mode 100644 index 00000000..3eb2c8c0 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketInventoryConfigurationCommand.js @@ -0,0 +1,29 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { GetBucketInventoryConfigurationOutputFilterSensitiveLog, } from "../models/models_0"; +import { de_GetBucketInventoryConfigurationCommand, se_GetBucketInventoryConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketInventoryConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetBucketInventoryConfiguration", {}) + .n("S3Client", "GetBucketInventoryConfigurationCommand") + .f(void 0, GetBucketInventoryConfigurationOutputFilterSensitiveLog) + .ser(se_GetBucketInventoryConfigurationCommand) + .de(de_GetBucketInventoryConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketLifecycleConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketLifecycleConfigurationCommand.js new file mode 100644 index 00000000..87775ab7 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketLifecycleConfigurationCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetBucketLifecycleConfigurationCommand, se_GetBucketLifecycleConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketLifecycleConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", 
"GetBucketLifecycleConfiguration", {}) + .n("S3Client", "GetBucketLifecycleConfigurationCommand") + .f(void 0, void 0) + .ser(se_GetBucketLifecycleConfigurationCommand) + .de(de_GetBucketLifecycleConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketLocationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketLocationCommand.js new file mode 100644 index 00000000..1590f767 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketLocationCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetBucketLocationCommand, se_GetBucketLocationCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketLocationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetBucketLocation", {}) + .n("S3Client", "GetBucketLocationCommand") + .f(void 0, void 0) + .ser(se_GetBucketLocationCommand) + .de(de_GetBucketLocationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketLoggingCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketLoggingCommand.js new file mode 100644 index 00000000..124b65b7 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketLoggingCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetBucketLoggingCommand, se_GetBucketLoggingCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketLoggingCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetBucketLogging", {}) + .n("S3Client", "GetBucketLoggingCommand") + .f(void 0, void 0) + .ser(se_GetBucketLoggingCommand) + .de(de_GetBucketLoggingCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketMetadataTableConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketMetadataTableConfigurationCommand.js new file mode 100644 index 00000000..840bcddc --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketMetadataTableConfigurationCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from 
"@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetBucketMetadataTableConfigurationCommand, se_GetBucketMetadataTableConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketMetadataTableConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetBucketMetadataTableConfiguration", {}) + .n("S3Client", "GetBucketMetadataTableConfigurationCommand") + .f(void 0, void 0) + .ser(se_GetBucketMetadataTableConfigurationCommand) + .de(de_GetBucketMetadataTableConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketMetricsConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketMetricsConfigurationCommand.js new file mode 100644 index 00000000..3f80d681 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketMetricsConfigurationCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetBucketMetricsConfigurationCommand, se_GetBucketMetricsConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketMetricsConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetBucketMetricsConfiguration", {}) + .n("S3Client", "GetBucketMetricsConfigurationCommand") + .f(void 0, void 0) + .ser(se_GetBucketMetricsConfigurationCommand) + .de(de_GetBucketMetricsConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketNotificationConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketNotificationConfigurationCommand.js new file mode 100644 index 00000000..51f78e9d --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketNotificationConfigurationCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetBucketNotificationConfigurationCommand, 
se_GetBucketNotificationConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketNotificationConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetBucketNotificationConfiguration", {}) + .n("S3Client", "GetBucketNotificationConfigurationCommand") + .f(void 0, void 0) + .ser(se_GetBucketNotificationConfigurationCommand) + .de(de_GetBucketNotificationConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketOwnershipControlsCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketOwnershipControlsCommand.js new file mode 100644 index 00000000..5d159266 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketOwnershipControlsCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetBucketOwnershipControlsCommand, se_GetBucketOwnershipControlsCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketOwnershipControlsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetBucketOwnershipControls", {}) + .n("S3Client", "GetBucketOwnershipControlsCommand") + .f(void 0, void 0) + .ser(se_GetBucketOwnershipControlsCommand) + .de(de_GetBucketOwnershipControlsCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketPolicyCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketPolicyCommand.js new file mode 100644 index 00000000..e558c81f --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketPolicyCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetBucketPolicyCommand, se_GetBucketPolicyCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketPolicyCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, 
Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetBucketPolicy", {}) + .n("S3Client", "GetBucketPolicyCommand") + .f(void 0, void 0) + .ser(se_GetBucketPolicyCommand) + .de(de_GetBucketPolicyCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketPolicyStatusCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketPolicyStatusCommand.js new file mode 100644 index 00000000..4ac9b175 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketPolicyStatusCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetBucketPolicyStatusCommand, se_GetBucketPolicyStatusCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketPolicyStatusCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetBucketPolicyStatus", {}) + .n("S3Client", "GetBucketPolicyStatusCommand") + .f(void 0, void 0) + .ser(se_GetBucketPolicyStatusCommand) + .de(de_GetBucketPolicyStatusCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketReplicationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketReplicationCommand.js new file mode 100644 index 00000000..0d9e4136 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketReplicationCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetBucketReplicationCommand, se_GetBucketReplicationCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketReplicationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetBucketReplication", {}) + .n("S3Client", "GetBucketReplicationCommand") + .f(void 0, void 0) + .ser(se_GetBucketReplicationCommand) + .de(de_GetBucketReplicationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketRequestPaymentCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketRequestPaymentCommand.js new file mode 100644 index 00000000..2453414a --- /dev/null +++ 
b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketRequestPaymentCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetBucketRequestPaymentCommand, se_GetBucketRequestPaymentCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketRequestPaymentCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetBucketRequestPayment", {}) + .n("S3Client", "GetBucketRequestPaymentCommand") + .f(void 0, void 0) + .ser(se_GetBucketRequestPaymentCommand) + .de(de_GetBucketRequestPaymentCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketTaggingCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketTaggingCommand.js new file mode 100644 index 00000000..6ff7a6ef --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketTaggingCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetBucketTaggingCommand, se_GetBucketTaggingCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketTaggingCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetBucketTagging", {}) + .n("S3Client", "GetBucketTaggingCommand") + .f(void 0, void 0) + .ser(se_GetBucketTaggingCommand) + .de(de_GetBucketTaggingCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketVersioningCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketVersioningCommand.js new file mode 100644 index 00000000..7165718f --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketVersioningCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetBucketVersioningCommand, se_GetBucketVersioningCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketVersioningCommand extends $Command + 
.classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetBucketVersioning", {}) + .n("S3Client", "GetBucketVersioningCommand") + .f(void 0, void 0) + .ser(se_GetBucketVersioningCommand) + .de(de_GetBucketVersioningCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketWebsiteCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketWebsiteCommand.js new file mode 100644 index 00000000..e0c3d4de --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetBucketWebsiteCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetBucketWebsiteCommand, se_GetBucketWebsiteCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetBucketWebsiteCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetBucketWebsite", {}) + .n("S3Client", "GetBucketWebsiteCommand") + .f(void 0, void 0) + .ser(se_GetBucketWebsiteCommand) + .de(de_GetBucketWebsiteCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectAclCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectAclCommand.js new file mode 100644 index 00000000..90e8a04d --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectAclCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetObjectAclCommand, se_GetObjectAclCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetObjectAclCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Key: { type: "contextParams", name: "Key" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetObjectAcl", {}) + .n("S3Client", "GetObjectAclCommand") + .f(void 0, void 0) + .ser(se_GetObjectAclCommand) + .de(de_GetObjectAclCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectAttributesCommand.js 
b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectAttributesCommand.js new file mode 100644 index 00000000..ceef7abd --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectAttributesCommand.js @@ -0,0 +1,30 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getSsecPlugin } from "@aws-sdk/middleware-ssec"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { GetObjectAttributesRequestFilterSensitiveLog, } from "../models/models_0"; +import { de_GetObjectAttributesCommand, se_GetObjectAttributesCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetObjectAttributesCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + getSsecPlugin(config), + ]; +}) + .s("AmazonS3", "GetObjectAttributes", {}) + .n("S3Client", "GetObjectAttributesCommand") + .f(GetObjectAttributesRequestFilterSensitiveLog, void 0) + .ser(se_GetObjectAttributesCommand) + .de(de_GetObjectAttributesCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectCommand.js new file mode 100644 index 00000000..05986cec --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectCommand.js @@ -0,0 +1,37 @@ +import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums"; +import { getS3ExpiresMiddlewarePlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getSsecPlugin } from "@aws-sdk/middleware-ssec"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { GetObjectOutputFilterSensitiveLog, GetObjectRequestFilterSensitiveLog, } from "../models/models_0"; +import { de_GetObjectCommand, se_GetObjectCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetObjectCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Key: { type: "contextParams", name: "Key" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getFlexibleChecksumsPlugin(config, { + requestChecksumRequired: false, + requestValidationModeMember: "ChecksumMode", + responseAlgorithms: ["CRC64NVME", "CRC32", "CRC32C", "SHA256", "SHA1"], + }), + getSsecPlugin(config), + getS3ExpiresMiddlewarePlugin(config), + ]; +}) + .s("AmazonS3", "GetObject", {}) + .n("S3Client", "GetObjectCommand") + .f(GetObjectRequestFilterSensitiveLog, GetObjectOutputFilterSensitiveLog) + .ser(se_GetObjectCommand) + .de(de_GetObjectCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectLegalHoldCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectLegalHoldCommand.js new file 
mode 100644 index 00000000..d790ba7b --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectLegalHoldCommand.js @@ -0,0 +1,27 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetObjectLegalHoldCommand, se_GetObjectLegalHoldCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetObjectLegalHoldCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetObjectLegalHold", {}) + .n("S3Client", "GetObjectLegalHoldCommand") + .f(void 0, void 0) + .ser(se_GetObjectLegalHoldCommand) + .de(de_GetObjectLegalHoldCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectLockConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectLockConfigurationCommand.js new file mode 100644 index 00000000..fdff1650 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectLockConfigurationCommand.js @@ -0,0 +1,27 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetObjectLockConfigurationCommand, se_GetObjectLockConfigurationCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetObjectLockConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetObjectLockConfiguration", {}) + .n("S3Client", "GetObjectLockConfigurationCommand") + .f(void 0, void 0) + .ser(se_GetObjectLockConfigurationCommand) + .de(de_GetObjectLockConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectRetentionCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectRetentionCommand.js new file mode 100644 index 00000000..2d5aa14a --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectRetentionCommand.js @@ -0,0 +1,27 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetObjectRetentionCommand, se_GetObjectRetentionCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetObjectRetentionCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { 
type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetObjectRetention", {}) + .n("S3Client", "GetObjectRetentionCommand") + .f(void 0, void 0) + .ser(se_GetObjectRetentionCommand) + .de(de_GetObjectRetentionCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectTaggingCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectTaggingCommand.js new file mode 100644 index 00000000..3c0fbf7e --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectTaggingCommand.js @@ -0,0 +1,27 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetObjectTaggingCommand, se_GetObjectTaggingCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetObjectTaggingCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetObjectTagging", {}) + .n("S3Client", "GetObjectTaggingCommand") + .f(void 0, void 0) + .ser(se_GetObjectTaggingCommand) + .de(de_GetObjectTaggingCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectTorrentCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectTorrentCommand.js new file mode 100644 index 00000000..77be6e2a --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetObjectTorrentCommand.js @@ -0,0 +1,26 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { GetObjectTorrentOutputFilterSensitiveLog, } from "../models/models_0"; +import { de_GetObjectTorrentCommand, se_GetObjectTorrentCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetObjectTorrentCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "GetObjectTorrent", {}) + .n("S3Client", "GetObjectTorrentCommand") + .f(void 0, GetObjectTorrentOutputFilterSensitiveLog) + .ser(se_GetObjectTorrentCommand) + .de(de_GetObjectTorrentCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/GetPublicAccessBlockCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetPublicAccessBlockCommand.js new file mode 100644 index 00000000..79382d37 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/GetPublicAccessBlockCommand.js @@ -0,0 +1,28 @@ +import { 
getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetPublicAccessBlockCommand, se_GetPublicAccessBlockCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class GetPublicAccessBlockCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "GetPublicAccessBlock", {}) + .n("S3Client", "GetPublicAccessBlockCommand") + .f(void 0, void 0) + .ser(se_GetPublicAccessBlockCommand) + .de(de_GetPublicAccessBlockCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/HeadBucketCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/HeadBucketCommand.js new file mode 100644 index 00000000..2c42d51e --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/HeadBucketCommand.js @@ -0,0 +1,27 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_HeadBucketCommand, se_HeadBucketCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class HeadBucketCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "HeadBucket", {}) + .n("S3Client", "HeadBucketCommand") + .f(void 0, void 0) + .ser(se_HeadBucketCommand) + .de(de_HeadBucketCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/HeadObjectCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/HeadObjectCommand.js new file mode 100644 index 00000000..f28a5cc6 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/HeadObjectCommand.js @@ -0,0 +1,32 @@ +import { getS3ExpiresMiddlewarePlugin, getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getSsecPlugin } from "@aws-sdk/middleware-ssec"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { HeadObjectOutputFilterSensitiveLog, HeadObjectRequestFilterSensitiveLog, } from "../models/models_0"; +import { de_HeadObjectCommand, se_HeadObjectCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class HeadObjectCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Key: { type: "contextParams", name: 
"Key" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + getSsecPlugin(config), + getS3ExpiresMiddlewarePlugin(config), + ]; +}) + .s("AmazonS3", "HeadObject", {}) + .n("S3Client", "HeadObjectCommand") + .f(HeadObjectRequestFilterSensitiveLog, HeadObjectOutputFilterSensitiveLog) + .ser(se_HeadObjectCommand) + .de(de_HeadObjectCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/ListBucketAnalyticsConfigurationsCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListBucketAnalyticsConfigurationsCommand.js new file mode 100644 index 00000000..4c9003db --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListBucketAnalyticsConfigurationsCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListBucketAnalyticsConfigurationsCommand, se_ListBucketAnalyticsConfigurationsCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class ListBucketAnalyticsConfigurationsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "ListBucketAnalyticsConfigurations", {}) + .n("S3Client", "ListBucketAnalyticsConfigurationsCommand") + .f(void 0, void 0) + .ser(se_ListBucketAnalyticsConfigurationsCommand) + .de(de_ListBucketAnalyticsConfigurationsCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/ListBucketIntelligentTieringConfigurationsCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListBucketIntelligentTieringConfigurationsCommand.js new file mode 100644 index 00000000..74f722d2 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListBucketIntelligentTieringConfigurationsCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListBucketIntelligentTieringConfigurationsCommand, se_ListBucketIntelligentTieringConfigurationsCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class ListBucketIntelligentTieringConfigurationsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", 
"ListBucketIntelligentTieringConfigurations", {}) + .n("S3Client", "ListBucketIntelligentTieringConfigurationsCommand") + .f(void 0, void 0) + .ser(se_ListBucketIntelligentTieringConfigurationsCommand) + .de(de_ListBucketIntelligentTieringConfigurationsCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/ListBucketInventoryConfigurationsCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListBucketInventoryConfigurationsCommand.js new file mode 100644 index 00000000..123afdf0 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListBucketInventoryConfigurationsCommand.js @@ -0,0 +1,29 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { ListBucketInventoryConfigurationsOutputFilterSensitiveLog, } from "../models/models_0"; +import { de_ListBucketInventoryConfigurationsCommand, se_ListBucketInventoryConfigurationsCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class ListBucketInventoryConfigurationsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "ListBucketInventoryConfigurations", {}) + .n("S3Client", "ListBucketInventoryConfigurationsCommand") + .f(void 0, ListBucketInventoryConfigurationsOutputFilterSensitiveLog) + .ser(se_ListBucketInventoryConfigurationsCommand) + .de(de_ListBucketInventoryConfigurationsCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/ListBucketMetricsConfigurationsCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListBucketMetricsConfigurationsCommand.js new file mode 100644 index 00000000..d54ebbb7 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListBucketMetricsConfigurationsCommand.js @@ -0,0 +1,27 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListBucketMetricsConfigurationsCommand, se_ListBucketMetricsConfigurationsCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class ListBucketMetricsConfigurationsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "ListBucketMetricsConfigurations", {}) + .n("S3Client", "ListBucketMetricsConfigurationsCommand") + .f(void 0, void 0) + .ser(se_ListBucketMetricsConfigurationsCommand) + .de(de_ListBucketMetricsConfigurationsCommand) + .build() { +} diff --git 
a/node_modules/@aws-sdk/client-s3/dist-es/commands/ListBucketsCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListBucketsCommand.js new file mode 100644 index 00000000..8b4984ee --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListBucketsCommand.js @@ -0,0 +1,24 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListBucketsCommand, se_ListBucketsCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class ListBucketsCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "ListBuckets", {}) + .n("S3Client", "ListBucketsCommand") + .f(void 0, void 0) + .ser(se_ListBucketsCommand) + .de(de_ListBucketsCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/ListDirectoryBucketsCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListDirectoryBucketsCommand.js new file mode 100644 index 00000000..91b088fc --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListDirectoryBucketsCommand.js @@ -0,0 +1,27 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListDirectoryBucketsCommand, se_ListDirectoryBucketsCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class ListDirectoryBucketsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "ListDirectoryBuckets", {}) + .n("S3Client", "ListDirectoryBucketsCommand") + .f(void 0, void 0) + .ser(se_ListDirectoryBucketsCommand) + .de(de_ListDirectoryBucketsCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/ListMultipartUploadsCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListMultipartUploadsCommand.js new file mode 100644 index 00000000..90b30a41 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListMultipartUploadsCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListMultipartUploadsCommand, se_ListMultipartUploadsCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class ListMultipartUploadsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + 
Bucket: { type: "contextParams", name: "Bucket" }, + Prefix: { type: "contextParams", name: "Prefix" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "ListMultipartUploads", {}) + .n("S3Client", "ListMultipartUploadsCommand") + .f(void 0, void 0) + .ser(se_ListMultipartUploadsCommand) + .de(de_ListMultipartUploadsCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/ListObjectVersionsCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListObjectVersionsCommand.js new file mode 100644 index 00000000..89eb239c --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListObjectVersionsCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListObjectVersionsCommand, se_ListObjectVersionsCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class ListObjectVersionsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Prefix: { type: "contextParams", name: "Prefix" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "ListObjectVersions", {}) + .n("S3Client", "ListObjectVersionsCommand") + .f(void 0, void 0) + .ser(se_ListObjectVersionsCommand) + .de(de_ListObjectVersionsCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/ListObjectsCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListObjectsCommand.js new file mode 100644 index 00000000..0e8cffb7 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListObjectsCommand.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListObjectsCommand, se_ListObjectsCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class ListObjectsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Prefix: { type: "contextParams", name: "Prefix" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "ListObjects", {}) + .n("S3Client", "ListObjectsCommand") + .f(void 0, void 0) + .ser(se_ListObjectsCommand) + .de(de_ListObjectsCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/ListObjectsV2Command.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListObjectsV2Command.js new file mode 100644 index 00000000..47efa835 --- 
/dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListObjectsV2Command.js @@ -0,0 +1,28 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListObjectsV2Command, se_ListObjectsV2Command } from "../protocols/Aws_restXml"; +export { $Command }; +export class ListObjectsV2Command extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Prefix: { type: "contextParams", name: "Prefix" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "ListObjectsV2", {}) + .n("S3Client", "ListObjectsV2Command") + .f(void 0, void 0) + .ser(se_ListObjectsV2Command) + .de(de_ListObjectsV2Command) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/ListPartsCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListPartsCommand.js new file mode 100644 index 00000000..af77a5a7 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/ListPartsCommand.js @@ -0,0 +1,31 @@ +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getSsecPlugin } from "@aws-sdk/middleware-ssec"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { ListPartsRequestFilterSensitiveLog } from "../models/models_0"; +import { de_ListPartsCommand, se_ListPartsCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class ListPartsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + Bucket: { type: "contextParams", name: "Bucket" }, + Key: { type: "contextParams", name: "Key" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getThrow200ExceptionsPlugin(config), + getSsecPlugin(config), + ]; +}) + .s("AmazonS3", "ListParts", {}) + .n("S3Client", "ListPartsCommand") + .f(ListPartsRequestFilterSensitiveLog, void 0) + .ser(se_ListPartsCommand) + .de(de_ListPartsCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketAccelerateConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketAccelerateConfigurationCommand.js new file mode 100644 index 00000000..10c07daf --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketAccelerateConfigurationCommand.js @@ -0,0 +1,31 @@ +import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_PutBucketAccelerateConfigurationCommand, se_PutBucketAccelerateConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; 
+export class PutBucketAccelerateConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getFlexibleChecksumsPlugin(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: false, + }), + ]; +}) + .s("AmazonS3", "PutBucketAccelerateConfiguration", {}) + .n("S3Client", "PutBucketAccelerateConfigurationCommand") + .f(void 0, void 0) + .ser(se_PutBucketAccelerateConfigurationCommand) + .de(de_PutBucketAccelerateConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketAclCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketAclCommand.js new file mode 100644 index 00000000..acae3680 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketAclCommand.js @@ -0,0 +1,31 @@ +import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_PutBucketAclCommand, se_PutBucketAclCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class PutBucketAclCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getFlexibleChecksumsPlugin(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true, + }), + ]; +}) + .s("AmazonS3", "PutBucketAcl", {}) + .n("S3Client", "PutBucketAclCommand") + .f(void 0, void 0) + .ser(se_PutBucketAclCommand) + .de(de_PutBucketAclCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketAnalyticsConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketAnalyticsConfigurationCommand.js new file mode 100644 index 00000000..ae4c7466 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketAnalyticsConfigurationCommand.js @@ -0,0 +1,26 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_PutBucketAnalyticsConfigurationCommand, se_PutBucketAnalyticsConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class PutBucketAnalyticsConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, 
this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "PutBucketAnalyticsConfiguration", {}) + .n("S3Client", "PutBucketAnalyticsConfigurationCommand") + .f(void 0, void 0) + .ser(se_PutBucketAnalyticsConfigurationCommand) + .de(de_PutBucketAnalyticsConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketCorsCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketCorsCommand.js new file mode 100644 index 00000000..8527a182 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketCorsCommand.js @@ -0,0 +1,31 @@ +import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_PutBucketCorsCommand, se_PutBucketCorsCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class PutBucketCorsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getFlexibleChecksumsPlugin(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true, + }), + ]; +}) + .s("AmazonS3", "PutBucketCors", {}) + .n("S3Client", "PutBucketCorsCommand") + .f(void 0, void 0) + .ser(se_PutBucketCorsCommand) + .de(de_PutBucketCorsCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketEncryptionCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketEncryptionCommand.js new file mode 100644 index 00000000..c9e0ded9 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketEncryptionCommand.js @@ -0,0 +1,32 @@ +import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { PutBucketEncryptionRequestFilterSensitiveLog } from "../models/models_1"; +import { de_PutBucketEncryptionCommand, se_PutBucketEncryptionCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class PutBucketEncryptionCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getFlexibleChecksumsPlugin(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true, + }), + ]; +}) + .s("AmazonS3", "PutBucketEncryption", {}) + .n("S3Client", "PutBucketEncryptionCommand") + .f(PutBucketEncryptionRequestFilterSensitiveLog, void 0) + 
.ser(se_PutBucketEncryptionCommand) + .de(de_PutBucketEncryptionCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketIntelligentTieringConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketIntelligentTieringConfigurationCommand.js new file mode 100644 index 00000000..3b0e6a6d --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketIntelligentTieringConfigurationCommand.js @@ -0,0 +1,26 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_PutBucketIntelligentTieringConfigurationCommand, se_PutBucketIntelligentTieringConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class PutBucketIntelligentTieringConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "PutBucketIntelligentTieringConfiguration", {}) + .n("S3Client", "PutBucketIntelligentTieringConfigurationCommand") + .f(void 0, void 0) + .ser(se_PutBucketIntelligentTieringConfigurationCommand) + .de(de_PutBucketIntelligentTieringConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketInventoryConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketInventoryConfigurationCommand.js new file mode 100644 index 00000000..2a34e381 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketInventoryConfigurationCommand.js @@ -0,0 +1,27 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { PutBucketInventoryConfigurationRequestFilterSensitiveLog, } from "../models/models_1"; +import { de_PutBucketInventoryConfigurationCommand, se_PutBucketInventoryConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class PutBucketInventoryConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "PutBucketInventoryConfiguration", {}) + .n("S3Client", "PutBucketInventoryConfigurationCommand") + .f(PutBucketInventoryConfigurationRequestFilterSensitiveLog, void 0) + .ser(se_PutBucketInventoryConfigurationCommand) + .de(de_PutBucketInventoryConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketLifecycleConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketLifecycleConfigurationCommand.js new file mode 100644 index 00000000..654731d4 --- /dev/null +++ 
b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketLifecycleConfigurationCommand.js @@ -0,0 +1,33 @@ +import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums"; +import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_PutBucketLifecycleConfigurationCommand, se_PutBucketLifecycleConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class PutBucketLifecycleConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getFlexibleChecksumsPlugin(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true, + }), + getThrow200ExceptionsPlugin(config), + ]; +}) + .s("AmazonS3", "PutBucketLifecycleConfiguration", {}) + .n("S3Client", "PutBucketLifecycleConfigurationCommand") + .f(void 0, void 0) + .ser(se_PutBucketLifecycleConfigurationCommand) + .de(de_PutBucketLifecycleConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketLoggingCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketLoggingCommand.js new file mode 100644 index 00000000..61b082c2 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketLoggingCommand.js @@ -0,0 +1,31 @@ +import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_PutBucketLoggingCommand, se_PutBucketLoggingCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class PutBucketLoggingCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getFlexibleChecksumsPlugin(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true, + }), + ]; +}) + .s("AmazonS3", "PutBucketLogging", {}) + .n("S3Client", "PutBucketLoggingCommand") + .f(void 0, void 0) + .ser(se_PutBucketLoggingCommand) + .de(de_PutBucketLoggingCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketMetricsConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketMetricsConfigurationCommand.js new file mode 100644 index 00000000..816fd30e --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketMetricsConfigurationCommand.js @@ -0,0 +1,26 @@ +import { 
getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_PutBucketMetricsConfigurationCommand, se_PutBucketMetricsConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class PutBucketMetricsConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "PutBucketMetricsConfiguration", {}) + .n("S3Client", "PutBucketMetricsConfigurationCommand") + .f(void 0, void 0) + .ser(se_PutBucketMetricsConfigurationCommand) + .de(de_PutBucketMetricsConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketNotificationConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketNotificationConfigurationCommand.js new file mode 100644 index 00000000..cc4936ec --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketNotificationConfigurationCommand.js @@ -0,0 +1,26 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_PutBucketNotificationConfigurationCommand, se_PutBucketNotificationConfigurationCommand, } from "../protocols/Aws_restXml"; +export { $Command }; +export class PutBucketNotificationConfigurationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AmazonS3", "PutBucketNotificationConfiguration", {}) + .n("S3Client", "PutBucketNotificationConfigurationCommand") + .f(void 0, void 0) + .ser(se_PutBucketNotificationConfigurationCommand) + .de(de_PutBucketNotificationConfigurationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketOwnershipControlsCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketOwnershipControlsCommand.js new file mode 100644 index 00000000..ce225008 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketOwnershipControlsCommand.js @@ -0,0 +1,30 @@ +import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_PutBucketOwnershipControlsCommand, se_PutBucketOwnershipControlsCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class PutBucketOwnershipControlsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { 
type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getFlexibleChecksumsPlugin(config, { + requestChecksumRequired: true, + }), + ]; +}) + .s("AmazonS3", "PutBucketOwnershipControls", {}) + .n("S3Client", "PutBucketOwnershipControlsCommand") + .f(void 0, void 0) + .ser(se_PutBucketOwnershipControlsCommand) + .de(de_PutBucketOwnershipControlsCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketPolicyCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketPolicyCommand.js new file mode 100644 index 00000000..231544fc --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketPolicyCommand.js @@ -0,0 +1,31 @@ +import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_PutBucketPolicyCommand, se_PutBucketPolicyCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class PutBucketPolicyCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getFlexibleChecksumsPlugin(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true, + }), + ]; +}) + .s("AmazonS3", "PutBucketPolicy", {}) + .n("S3Client", "PutBucketPolicyCommand") + .f(void 0, void 0) + .ser(se_PutBucketPolicyCommand) + .de(de_PutBucketPolicyCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketReplicationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketReplicationCommand.js new file mode 100644 index 00000000..d31abfec --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketReplicationCommand.js @@ -0,0 +1,31 @@ +import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_PutBucketReplicationCommand, se_PutBucketReplicationCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class PutBucketReplicationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getFlexibleChecksumsPlugin(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + 
requestChecksumRequired: true, + }), + ]; +}) + .s("AmazonS3", "PutBucketReplication", {}) + .n("S3Client", "PutBucketReplicationCommand") + .f(void 0, void 0) + .ser(se_PutBucketReplicationCommand) + .de(de_PutBucketReplicationCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketRequestPaymentCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketRequestPaymentCommand.js new file mode 100644 index 00000000..c496382e --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketRequestPaymentCommand.js @@ -0,0 +1,31 @@ +import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_PutBucketRequestPaymentCommand, se_PutBucketRequestPaymentCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class PutBucketRequestPaymentCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getFlexibleChecksumsPlugin(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true, + }), + ]; +}) + .s("AmazonS3", "PutBucketRequestPayment", {}) + .n("S3Client", "PutBucketRequestPaymentCommand") + .f(void 0, void 0) + .ser(se_PutBucketRequestPaymentCommand) + .de(de_PutBucketRequestPaymentCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketTaggingCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketTaggingCommand.js new file mode 100644 index 00000000..c4f963d0 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketTaggingCommand.js @@ -0,0 +1,31 @@ +import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums"; +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_PutBucketTaggingCommand, se_PutBucketTaggingCommand } from "../protocols/Aws_restXml"; +export { $Command }; +export class PutBucketTaggingCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true }, + Bucket: { type: "contextParams", name: "Bucket" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + getFlexibleChecksumsPlugin(config, { + requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" }, + requestChecksumRequired: true, + }), + ]; +}) + .s("AmazonS3", "PutBucketTagging", {}) + .n("S3Client", "PutBucketTaggingCommand") + .f(void 0, void 0) + .ser(se_PutBucketTaggingCommand) + .de(de_PutBucketTaggingCommand) + .build() { +} diff --git 
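The bucket-level PUT commands above and below register getFlexibleChecksumsPlugin with requestChecksumRequired: true, meaning the middleware computes the mandatory payload checksum itself rather than expecting the caller to supply one. A minimal sketch under that assumption, with a hypothetical bucket name:

import { S3Client, PutBucketTaggingCommand } from "@aws-sdk/client-s3";

async function tagBucket(): Promise<void> {
  const s3 = new S3Client({ region: "us-east-1" });
  // No checksum header is set explicitly; the flexible-checksums middleware
  // registered by the command adds the required one before signing.
  await s3.send(new PutBucketTaggingCommand({
    Bucket: "example-bucket", // hypothetical
    Tagging: { TagSet: [{ Key: "env", Value: "dev" }] },
  }));
}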
diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketVersioningCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketVersioningCommand.js
new file mode 100644
index 00000000..4e242b3d
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketVersioningCommand.js
@@ -0,0 +1,31 @@
+import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums";
+import { getEndpointPlugin } from "@smithy/middleware-endpoint";
+import { getSerdePlugin } from "@smithy/middleware-serde";
+import { Command as $Command } from "@smithy/smithy-client";
+import { commonParams } from "../endpoint/EndpointParameters";
+import { de_PutBucketVersioningCommand, se_PutBucketVersioningCommand } from "../protocols/Aws_restXml";
+export { $Command };
+export class PutBucketVersioningCommand extends $Command
+    .classBuilder()
+    .ep({
+    ...commonParams,
+    UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true },
+    Bucket: { type: "contextParams", name: "Bucket" },
+})
+    .m(function (Command, cs, config, o) {
+    return [
+        getSerdePlugin(config, this.serialize, this.deserialize),
+        getEndpointPlugin(config, Command.getEndpointParameterInstructions()),
+        getFlexibleChecksumsPlugin(config, {
+            requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" },
+            requestChecksumRequired: true,
+        }),
+    ];
+})
+    .s("AmazonS3", "PutBucketVersioning", {})
+    .n("S3Client", "PutBucketVersioningCommand")
+    .f(void 0, void 0)
+    .ser(se_PutBucketVersioningCommand)
+    .de(de_PutBucketVersioningCommand)
+    .build() {
+}
diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketWebsiteCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketWebsiteCommand.js
new file mode 100644
index 00000000..5e94aa71
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutBucketWebsiteCommand.js
@@ -0,0 +1,31 @@
+import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums";
+import { getEndpointPlugin } from "@smithy/middleware-endpoint";
+import { getSerdePlugin } from "@smithy/middleware-serde";
+import { Command as $Command } from "@smithy/smithy-client";
+import { commonParams } from "../endpoint/EndpointParameters";
+import { de_PutBucketWebsiteCommand, se_PutBucketWebsiteCommand } from "../protocols/Aws_restXml";
+export { $Command };
+export class PutBucketWebsiteCommand extends $Command
+    .classBuilder()
+    .ep({
+    ...commonParams,
+    UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true },
+    Bucket: { type: "contextParams", name: "Bucket" },
+})
+    .m(function (Command, cs, config, o) {
+    return [
+        getSerdePlugin(config, this.serialize, this.deserialize),
+        getEndpointPlugin(config, Command.getEndpointParameterInstructions()),
+        getFlexibleChecksumsPlugin(config, {
+            requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" },
+            requestChecksumRequired: true,
+        }),
+    ];
+})
+    .s("AmazonS3", "PutBucketWebsite", {})
+    .n("S3Client", "PutBucketWebsiteCommand")
+    .f(void 0, void 0)
+    .ser(se_PutBucketWebsiteCommand)
+    .de(de_PutBucketWebsiteCommand)
+    .build() {
+}
diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutObjectAclCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutObjectAclCommand.js
new file mode 100644
index 00000000..90c3ae64
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutObjectAclCommand.js
@@ -0,0 +1,33 @@
+import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums";
+import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3";
+import { getEndpointPlugin } from "@smithy/middleware-endpoint";
+import { getSerdePlugin } from "@smithy/middleware-serde";
+import { Command as $Command } from "@smithy/smithy-client";
+import { commonParams } from "../endpoint/EndpointParameters";
+import { de_PutObjectAclCommand, se_PutObjectAclCommand } from "../protocols/Aws_restXml";
+export { $Command };
+export class PutObjectAclCommand extends $Command
+    .classBuilder()
+    .ep({
+    ...commonParams,
+    Bucket: { type: "contextParams", name: "Bucket" },
+    Key: { type: "contextParams", name: "Key" },
+})
+    .m(function (Command, cs, config, o) {
+    return [
+        getSerdePlugin(config, this.serialize, this.deserialize),
+        getEndpointPlugin(config, Command.getEndpointParameterInstructions()),
+        getFlexibleChecksumsPlugin(config, {
+            requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" },
+            requestChecksumRequired: true,
+        }),
+        getThrow200ExceptionsPlugin(config),
+    ];
+})
+    .s("AmazonS3", "PutObjectAcl", {})
+    .n("S3Client", "PutObjectAclCommand")
+    .f(void 0, void 0)
+    .ser(se_PutObjectAclCommand)
+    .de(de_PutObjectAclCommand)
+    .build() {
+}
diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutObjectCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutObjectCommand.js
new file mode 100644
index 00000000..008299e5
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutObjectCommand.js
@@ -0,0 +1,37 @@
+import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums";
+import { getCheckContentLengthHeaderPlugin, getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3";
+import { getSsecPlugin } from "@aws-sdk/middleware-ssec";
+import { getEndpointPlugin } from "@smithy/middleware-endpoint";
+import { getSerdePlugin } from "@smithy/middleware-serde";
+import { Command as $Command } from "@smithy/smithy-client";
+import { commonParams } from "../endpoint/EndpointParameters";
+import { PutObjectOutputFilterSensitiveLog, PutObjectRequestFilterSensitiveLog, } from "../models/models_1";
+import { de_PutObjectCommand, se_PutObjectCommand } from "../protocols/Aws_restXml";
+export { $Command };
+export class PutObjectCommand extends $Command
+    .classBuilder()
+    .ep({
+    ...commonParams,
+    Bucket: { type: "contextParams", name: "Bucket" },
+    Key: { type: "contextParams", name: "Key" },
+})
+    .m(function (Command, cs, config, o) {
+    return [
+        getSerdePlugin(config, this.serialize, this.deserialize),
+        getEndpointPlugin(config, Command.getEndpointParameterInstructions()),
+        getFlexibleChecksumsPlugin(config, {
+            requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" },
+            requestChecksumRequired: false,
+        }),
+        getCheckContentLengthHeaderPlugin(config),
+        getThrow200ExceptionsPlugin(config),
+        getSsecPlugin(config),
+    ];
+})
+    .s("AmazonS3", "PutObject", {})
+    .n("S3Client", "PutObjectCommand")
+    .f(PutObjectRequestFilterSensitiveLog, PutObjectOutputFilterSensitiveLog)
+    .ser(se_PutObjectCommand)
+    .de(de_PutObjectCommand)
+    .build() {
+}
diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutObjectLegalHoldCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutObjectLegalHoldCommand.js
new file mode 100644
index 00000000..4cfc2af8
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutObjectLegalHoldCommand.js
@@ -0,0 +1,32 @@
+import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums";
+import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3";
+import { getEndpointPlugin } from "@smithy/middleware-endpoint";
+import { getSerdePlugin } from "@smithy/middleware-serde";
+import { Command as $Command } from "@smithy/smithy-client";
+import { commonParams } from "../endpoint/EndpointParameters";
+import { de_PutObjectLegalHoldCommand, se_PutObjectLegalHoldCommand } from "../protocols/Aws_restXml";
+export { $Command };
+export class PutObjectLegalHoldCommand extends $Command
+    .classBuilder()
+    .ep({
+    ...commonParams,
+    Bucket: { type: "contextParams", name: "Bucket" },
+})
+    .m(function (Command, cs, config, o) {
+    return [
+        getSerdePlugin(config, this.serialize, this.deserialize),
+        getEndpointPlugin(config, Command.getEndpointParameterInstructions()),
+        getFlexibleChecksumsPlugin(config, {
+            requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" },
+            requestChecksumRequired: true,
+        }),
+        getThrow200ExceptionsPlugin(config),
+    ];
+})
+    .s("AmazonS3", "PutObjectLegalHold", {})
+    .n("S3Client", "PutObjectLegalHoldCommand")
+    .f(void 0, void 0)
+    .ser(se_PutObjectLegalHoldCommand)
+    .de(de_PutObjectLegalHoldCommand)
+    .build() {
+}
diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutObjectLockConfigurationCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutObjectLockConfigurationCommand.js
new file mode 100644
index 00000000..7f3c2b7f
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutObjectLockConfigurationCommand.js
@@ -0,0 +1,32 @@
+import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums";
+import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3";
+import { getEndpointPlugin } from "@smithy/middleware-endpoint";
+import { getSerdePlugin } from "@smithy/middleware-serde";
+import { Command as $Command } from "@smithy/smithy-client";
+import { commonParams } from "../endpoint/EndpointParameters";
+import { de_PutObjectLockConfigurationCommand, se_PutObjectLockConfigurationCommand } from "../protocols/Aws_restXml";
+export { $Command };
+export class PutObjectLockConfigurationCommand extends $Command
+    .classBuilder()
+    .ep({
+    ...commonParams,
+    Bucket: { type: "contextParams", name: "Bucket" },
+})
+    .m(function (Command, cs, config, o) {
+    return [
+        getSerdePlugin(config, this.serialize, this.deserialize),
+        getEndpointPlugin(config, Command.getEndpointParameterInstructions()),
+        getFlexibleChecksumsPlugin(config, {
+            requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" },
+            requestChecksumRequired: true,
+        }),
+        getThrow200ExceptionsPlugin(config),
+    ];
+})
+    .s("AmazonS3", "PutObjectLockConfiguration", {})
+    .n("S3Client", "PutObjectLockConfigurationCommand")
+    .f(void 0, void 0)
+    .ser(se_PutObjectLockConfigurationCommand)
+    .de(de_PutObjectLockConfigurationCommand)
+    .build() {
+}
diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutObjectRetentionCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutObjectRetentionCommand.js
new file mode 100644
index 00000000..a0425499
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutObjectRetentionCommand.js
@@ -0,0 +1,32 @@
+import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums";
+import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3";
+import { getEndpointPlugin } from "@smithy/middleware-endpoint";
+import { getSerdePlugin } from "@smithy/middleware-serde";
+import { Command as $Command } from "@smithy/smithy-client";
+import { commonParams } from "../endpoint/EndpointParameters";
+import { de_PutObjectRetentionCommand, se_PutObjectRetentionCommand } from "../protocols/Aws_restXml";
+export { $Command };
+export class PutObjectRetentionCommand extends $Command
+    .classBuilder()
+    .ep({
+    ...commonParams,
+    Bucket: { type: "contextParams", name: "Bucket" },
+})
+    .m(function (Command, cs, config, o) {
+    return [
+        getSerdePlugin(config, this.serialize, this.deserialize),
+        getEndpointPlugin(config, Command.getEndpointParameterInstructions()),
+        getFlexibleChecksumsPlugin(config, {
+            requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" },
+            requestChecksumRequired: true,
+        }),
+        getThrow200ExceptionsPlugin(config),
+    ];
+})
+    .s("AmazonS3", "PutObjectRetention", {})
+    .n("S3Client", "PutObjectRetentionCommand")
+    .f(void 0, void 0)
+    .ser(se_PutObjectRetentionCommand)
+    .de(de_PutObjectRetentionCommand)
+    .build() {
+}
diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutObjectTaggingCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutObjectTaggingCommand.js
new file mode 100644
index 00000000..8c92f4fb
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutObjectTaggingCommand.js
@@ -0,0 +1,32 @@
+import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums";
+import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3";
+import { getEndpointPlugin } from "@smithy/middleware-endpoint";
+import { getSerdePlugin } from "@smithy/middleware-serde";
+import { Command as $Command } from "@smithy/smithy-client";
+import { commonParams } from "../endpoint/EndpointParameters";
+import { de_PutObjectTaggingCommand, se_PutObjectTaggingCommand } from "../protocols/Aws_restXml";
+export { $Command };
+export class PutObjectTaggingCommand extends $Command
+    .classBuilder()
+    .ep({
+    ...commonParams,
+    Bucket: { type: "contextParams", name: "Bucket" },
+})
+    .m(function (Command, cs, config, o) {
+    return [
+        getSerdePlugin(config, this.serialize, this.deserialize),
+        getEndpointPlugin(config, Command.getEndpointParameterInstructions()),
+        getFlexibleChecksumsPlugin(config, {
+            requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" },
+            requestChecksumRequired: true,
+        }),
+        getThrow200ExceptionsPlugin(config),
+    ];
+})
+    .s("AmazonS3", "PutObjectTagging", {})
+    .n("S3Client", "PutObjectTaggingCommand")
+    .f(void 0, void 0)
+    .ser(se_PutObjectTaggingCommand)
+    .de(de_PutObjectTaggingCommand)
+    .build() {
+}
diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/PutPublicAccessBlockCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutPublicAccessBlockCommand.js
new file mode 100644
index 00000000..704f6b55
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/PutPublicAccessBlockCommand.js
@@ -0,0 +1,31 @@
+import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums";
+import { getEndpointPlugin } from "@smithy/middleware-endpoint";
+import { getSerdePlugin } from "@smithy/middleware-serde";
+import { Command as $Command } from "@smithy/smithy-client";
+import { commonParams } from "../endpoint/EndpointParameters";
+import { de_PutPublicAccessBlockCommand, se_PutPublicAccessBlockCommand } from "../protocols/Aws_restXml";
+export { $Command };
+export class PutPublicAccessBlockCommand extends $Command
+    .classBuilder()
+    .ep({
+    ...commonParams,
+    UseS3ExpressControlEndpoint: { type: "staticContextParams", value: true },
+    Bucket: { type: "contextParams", name: "Bucket" },
+})
+    .m(function (Command, cs, config, o) {
+    return [
+        getSerdePlugin(config, this.serialize, this.deserialize),
+        getEndpointPlugin(config, Command.getEndpointParameterInstructions()),
+        getFlexibleChecksumsPlugin(config, {
+            requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" },
+            requestChecksumRequired: true,
+        }),
+    ];
+})
+    .s("AmazonS3", "PutPublicAccessBlock", {})
+    .n("S3Client", "PutPublicAccessBlockCommand")
+    .f(void 0, void 0)
+    .ser(se_PutPublicAccessBlockCommand)
+    .de(de_PutPublicAccessBlockCommand)
+    .build() {
+}
diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/RestoreObjectCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/RestoreObjectCommand.js
new file mode 100644
index 00000000..59246157
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/RestoreObjectCommand.js
@@ -0,0 +1,33 @@
+import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums";
+import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3";
+import { getEndpointPlugin } from "@smithy/middleware-endpoint";
+import { getSerdePlugin } from "@smithy/middleware-serde";
+import { Command as $Command } from "@smithy/smithy-client";
+import { commonParams } from "../endpoint/EndpointParameters";
+import { RestoreObjectRequestFilterSensitiveLog } from "../models/models_1";
+import { de_RestoreObjectCommand, se_RestoreObjectCommand } from "../protocols/Aws_restXml";
+export { $Command };
+export class RestoreObjectCommand extends $Command
+    .classBuilder()
+    .ep({
+    ...commonParams,
+    Bucket: { type: "contextParams", name: "Bucket" },
+})
+    .m(function (Command, cs, config, o) {
+    return [
+        getSerdePlugin(config, this.serialize, this.deserialize),
+        getEndpointPlugin(config, Command.getEndpointParameterInstructions()),
+        getFlexibleChecksumsPlugin(config, {
+            requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" },
+            requestChecksumRequired: false,
+        }),
+        getThrow200ExceptionsPlugin(config),
+    ];
+})
+    .s("AmazonS3", "RestoreObject", {})
+    .n("S3Client", "RestoreObjectCommand")
+    .f(RestoreObjectRequestFilterSensitiveLog, void 0)
+    .ser(se_RestoreObjectCommand)
+    .de(de_RestoreObjectCommand)
+    .build() {
+}
diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/SelectObjectContentCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/SelectObjectContentCommand.js
new file mode 100644
index 00000000..0671c54a
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/SelectObjectContentCommand.js
@@ -0,0 +1,34 @@
+import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3";
+import { getSsecPlugin } from "@aws-sdk/middleware-ssec";
+import { getEndpointPlugin } from "@smithy/middleware-endpoint";
+import { getSerdePlugin } from "@smithy/middleware-serde";
+import { Command as $Command } from "@smithy/smithy-client";
+import { commonParams } from "../endpoint/EndpointParameters";
+import { SelectObjectContentOutputFilterSensitiveLog, SelectObjectContentRequestFilterSensitiveLog, } from "../models/models_1";
+import { de_SelectObjectContentCommand, se_SelectObjectContentCommand } from "../protocols/Aws_restXml";
+export { $Command };
+export class SelectObjectContentCommand extends $Command
+    .classBuilder()
+    .ep({
+    ...commonParams,
+    Bucket: { type: "contextParams", name: "Bucket" },
+})
+    .m(function (Command, cs, config, o) {
+    return [
+        getSerdePlugin(config, this.serialize, this.deserialize),
+        getEndpointPlugin(config, Command.getEndpointParameterInstructions()),
+        getThrow200ExceptionsPlugin(config),
+        getSsecPlugin(config),
+    ];
+})
+    .s("AmazonS3", "SelectObjectContent", {
+    eventStream: {
+        output: true,
+    },
+})
+    .n("S3Client", "SelectObjectContentCommand")
+    .f(SelectObjectContentRequestFilterSensitiveLog, SelectObjectContentOutputFilterSensitiveLog)
+    .ser(se_SelectObjectContentCommand)
+    .de(de_SelectObjectContentCommand)
+    .build() {
+}
diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/UploadPartCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/UploadPartCommand.js
new file mode 100644
index 00000000..17728af3
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/UploadPartCommand.js
@@ -0,0 +1,36 @@
+import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums";
+import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3";
+import { getSsecPlugin } from "@aws-sdk/middleware-ssec";
+import { getEndpointPlugin } from "@smithy/middleware-endpoint";
+import { getSerdePlugin } from "@smithy/middleware-serde";
+import { Command as $Command } from "@smithy/smithy-client";
+import { commonParams } from "../endpoint/EndpointParameters";
+import { UploadPartOutputFilterSensitiveLog, UploadPartRequestFilterSensitiveLog, } from "../models/models_1";
+import { de_UploadPartCommand, se_UploadPartCommand } from "../protocols/Aws_restXml";
+export { $Command };
+export class UploadPartCommand extends $Command
+    .classBuilder()
+    .ep({
+    ...commonParams,
+    Bucket: { type: "contextParams", name: "Bucket" },
+    Key: { type: "contextParams", name: "Key" },
+})
+    .m(function (Command, cs, config, o) {
+    return [
+        getSerdePlugin(config, this.serialize, this.deserialize),
+        getEndpointPlugin(config, Command.getEndpointParameterInstructions()),
+        getFlexibleChecksumsPlugin(config, {
+            requestAlgorithmMember: { httpHeader: "x-amz-sdk-checksum-algorithm", name: "ChecksumAlgorithm" },
+            requestChecksumRequired: false,
+        }),
+        getThrow200ExceptionsPlugin(config),
+        getSsecPlugin(config),
+    ];
+})
+    .s("AmazonS3", "UploadPart", {})
+    .n("S3Client", "UploadPartCommand")
+    .f(UploadPartRequestFilterSensitiveLog, UploadPartOutputFilterSensitiveLog)
+    .ser(se_UploadPartCommand)
+    .de(de_UploadPartCommand)
+    .build() {
+}
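UploadPartCommand above is one leg of the three-step multipart flow (create, upload parts, complete). A minimal single-part sketch; bucket, key, and body are hypothetical, and a real upload would stream parts of at least 5 MiB, with only the final part allowed to be smaller:

import {
  S3Client,
  CreateMultipartUploadCommand,
  UploadPartCommand,
  CompleteMultipartUploadCommand,
} from "@aws-sdk/client-s3";

async function multipartUpload(body: string): Promise<void> {
  const s3 = new S3Client({ region: "us-east-1" });
  const Bucket = "example-bucket";   // hypothetical
  const Key = "backups/example.bin"; // hypothetical

  const { UploadId } = await s3.send(new CreateMultipartUploadCommand({ Bucket, Key }));
  const { ETag } = await s3.send(new UploadPartCommand({ Bucket, Key, UploadId, PartNumber: 1, Body: body }));
  // Completion must echo back each uploaded part's ETag and part number.
  await s3.send(new CompleteMultipartUploadCommand({
    Bucket, Key, UploadId,
    MultipartUpload: { Parts: [{ ETag, PartNumber: 1 }] },
  }));
}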
diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/UploadPartCopyCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/UploadPartCopyCommand.js
new file mode 100644
index 00000000..cf5a3cea
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/UploadPartCopyCommand.js
@@ -0,0 +1,31 @@
+import { getThrow200ExceptionsPlugin } from "@aws-sdk/middleware-sdk-s3";
+import { getSsecPlugin } from "@aws-sdk/middleware-ssec";
+import { getEndpointPlugin } from "@smithy/middleware-endpoint";
+import { getSerdePlugin } from "@smithy/middleware-serde";
+import { Command as $Command } from "@smithy/smithy-client";
+import { commonParams } from "../endpoint/EndpointParameters";
+import { UploadPartCopyOutputFilterSensitiveLog, UploadPartCopyRequestFilterSensitiveLog, } from "../models/models_1";
+import { de_UploadPartCopyCommand, se_UploadPartCopyCommand } from "../protocols/Aws_restXml";
+export { $Command };
+export class UploadPartCopyCommand extends $Command
+    .classBuilder()
+    .ep({
+    ...commonParams,
+    DisableS3ExpressSessionAuth: { type: "staticContextParams", value: true },
+    Bucket: { type: "contextParams", name: "Bucket" },
+})
+    .m(function (Command, cs, config, o) {
+    return [
+        getSerdePlugin(config, this.serialize, this.deserialize),
+        getEndpointPlugin(config, Command.getEndpointParameterInstructions()),
+        getThrow200ExceptionsPlugin(config),
+        getSsecPlugin(config),
+    ];
+})
+    .s("AmazonS3", "UploadPartCopy", {})
+    .n("S3Client", "UploadPartCopyCommand")
+    .f(UploadPartCopyRequestFilterSensitiveLog, UploadPartCopyOutputFilterSensitiveLog)
+    .ser(se_UploadPartCopyCommand)
+    .de(de_UploadPartCopyCommand)
+    .build() {
+}
diff --git a/node_modules/@aws-sdk/client-s3/dist-es/commands/WriteGetObjectResponseCommand.js b/node_modules/@aws-sdk/client-s3/dist-es/commands/WriteGetObjectResponseCommand.js
new file mode 100644
index 00000000..f2e18aaf
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-es/commands/WriteGetObjectResponseCommand.js
@@ -0,0 +1,26 @@
+import { getEndpointPlugin } from "@smithy/middleware-endpoint";
+import { getSerdePlugin } from "@smithy/middleware-serde";
+import { Command as $Command } from "@smithy/smithy-client";
+import { commonParams } from "../endpoint/EndpointParameters";
+import { WriteGetObjectResponseRequestFilterSensitiveLog } from "../models/models_1";
+import { de_WriteGetObjectResponseCommand, se_WriteGetObjectResponseCommand } from "../protocols/Aws_restXml";
+export { $Command };
+export class WriteGetObjectResponseCommand extends $Command
+    .classBuilder()
+    .ep({
+    ...commonParams,
+    UseObjectLambdaEndpoint: { type: "staticContextParams", value: true },
+})
+    .m(function (Command, cs, config, o) {
+    return [
+        getSerdePlugin(config, this.serialize, this.deserialize),
+        getEndpointPlugin(config, Command.getEndpointParameterInstructions()),
+    ];
+})
+    .s("AmazonS3", "WriteGetObjectResponse", {})
+    .n("S3Client", "WriteGetObjectResponseCommand")
+    .f(WriteGetObjectResponseRequestFilterSensitiveLog, void 0)
+    .ser(se_WriteGetObjectResponseCommand)
+    .de(de_WriteGetObjectResponseCommand)
+    .build() {
+}
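The commands/index.js barrel that follows re-exports every command module, which is why application code can import any of them from the package root and drive them all through the same client.send() pattern. A small sketch with a hypothetical bucket and prefix:

import { S3Client, HeadBucketCommand, ListObjectsV2Command } from "@aws-sdk/client-s3";

async function listContactKeys(): Promise<string[]> {
  const s3 = new S3Client({ region: "us-east-1" });
  await s3.send(new HeadBucketCommand({ Bucket: "example-bucket" })); // hypothetical bucket
  const { Contents = [] } = await s3.send(new ListObjectsV2Command({
    Bucket: "example-bucket",
    Prefix: "contacts/", // hypothetical prefix
  }));
  return Contents.map((obj) => obj.Key ?? "");
}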
"./DeleteObjectTaggingCommand"; +export * from "./DeleteObjectsCommand"; +export * from "./DeletePublicAccessBlockCommand"; +export * from "./GetBucketAccelerateConfigurationCommand"; +export * from "./GetBucketAclCommand"; +export * from "./GetBucketAnalyticsConfigurationCommand"; +export * from "./GetBucketCorsCommand"; +export * from "./GetBucketEncryptionCommand"; +export * from "./GetBucketIntelligentTieringConfigurationCommand"; +export * from "./GetBucketInventoryConfigurationCommand"; +export * from "./GetBucketLifecycleConfigurationCommand"; +export * from "./GetBucketLocationCommand"; +export * from "./GetBucketLoggingCommand"; +export * from "./GetBucketMetadataTableConfigurationCommand"; +export * from "./GetBucketMetricsConfigurationCommand"; +export * from "./GetBucketNotificationConfigurationCommand"; +export * from "./GetBucketOwnershipControlsCommand"; +export * from "./GetBucketPolicyCommand"; +export * from "./GetBucketPolicyStatusCommand"; +export * from "./GetBucketReplicationCommand"; +export * from "./GetBucketRequestPaymentCommand"; +export * from "./GetBucketTaggingCommand"; +export * from "./GetBucketVersioningCommand"; +export * from "./GetBucketWebsiteCommand"; +export * from "./GetObjectAclCommand"; +export * from "./GetObjectAttributesCommand"; +export * from "./GetObjectCommand"; +export * from "./GetObjectLegalHoldCommand"; +export * from "./GetObjectLockConfigurationCommand"; +export * from "./GetObjectRetentionCommand"; +export * from "./GetObjectTaggingCommand"; +export * from "./GetObjectTorrentCommand"; +export * from "./GetPublicAccessBlockCommand"; +export * from "./HeadBucketCommand"; +export * from "./HeadObjectCommand"; +export * from "./ListBucketAnalyticsConfigurationsCommand"; +export * from "./ListBucketIntelligentTieringConfigurationsCommand"; +export * from "./ListBucketInventoryConfigurationsCommand"; +export * from "./ListBucketMetricsConfigurationsCommand"; +export * from "./ListBucketsCommand"; +export * from "./ListDirectoryBucketsCommand"; +export * from "./ListMultipartUploadsCommand"; +export * from "./ListObjectVersionsCommand"; +export * from "./ListObjectsCommand"; +export * from "./ListObjectsV2Command"; +export * from "./ListPartsCommand"; +export * from "./PutBucketAccelerateConfigurationCommand"; +export * from "./PutBucketAclCommand"; +export * from "./PutBucketAnalyticsConfigurationCommand"; +export * from "./PutBucketCorsCommand"; +export * from "./PutBucketEncryptionCommand"; +export * from "./PutBucketIntelligentTieringConfigurationCommand"; +export * from "./PutBucketInventoryConfigurationCommand"; +export * from "./PutBucketLifecycleConfigurationCommand"; +export * from "./PutBucketLoggingCommand"; +export * from "./PutBucketMetricsConfigurationCommand"; +export * from "./PutBucketNotificationConfigurationCommand"; +export * from "./PutBucketOwnershipControlsCommand"; +export * from "./PutBucketPolicyCommand"; +export * from "./PutBucketReplicationCommand"; +export * from "./PutBucketRequestPaymentCommand"; +export * from "./PutBucketTaggingCommand"; +export * from "./PutBucketVersioningCommand"; +export * from "./PutBucketWebsiteCommand"; +export * from "./PutObjectAclCommand"; +export * from "./PutObjectCommand"; +export * from "./PutObjectLegalHoldCommand"; +export * from "./PutObjectLockConfigurationCommand"; +export * from "./PutObjectRetentionCommand"; +export * from "./PutObjectTaggingCommand"; +export * from "./PutPublicAccessBlockCommand"; +export * from "./RestoreObjectCommand"; +export * from 
"./SelectObjectContentCommand"; +export * from "./UploadPartCommand"; +export * from "./UploadPartCopyCommand"; +export * from "./WriteGetObjectResponseCommand"; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/endpoint/EndpointParameters.js b/node_modules/@aws-sdk/client-s3/dist-es/endpoint/EndpointParameters.js new file mode 100644 index 00000000..3bd0d3a1 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/endpoint/EndpointParameters.js @@ -0,0 +1,23 @@ +export const resolveClientEndpointParameters = (options) => { + return Object.assign(options, { + useFipsEndpoint: options.useFipsEndpoint ?? false, + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + forcePathStyle: options.forcePathStyle ?? false, + useAccelerateEndpoint: options.useAccelerateEndpoint ?? false, + useGlobalEndpoint: options.useGlobalEndpoint ?? false, + disableMultiregionAccessPoints: options.disableMultiregionAccessPoints ?? false, + defaultSigningName: "s3", + }); +}; +export const commonParams = { + ForcePathStyle: { type: "clientContextParams", name: "forcePathStyle" }, + UseArnRegion: { type: "clientContextParams", name: "useArnRegion" }, + DisableMultiRegionAccessPoints: { type: "clientContextParams", name: "disableMultiregionAccessPoints" }, + Accelerate: { type: "clientContextParams", name: "useAccelerateEndpoint" }, + DisableS3ExpressSessionAuth: { type: "clientContextParams", name: "disableS3ExpressSessionAuth" }, + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, +}; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/endpoint/endpointResolver.js b/node_modules/@aws-sdk/client-s3/dist-es/endpoint/endpointResolver.js new file mode 100644 index 00000000..f415db60 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/endpoint/endpointResolver.js @@ -0,0 +1,29 @@ +import { awsEndpointFunctions } from "@aws-sdk/util-endpoints"; +import { customEndpointFunctions, EndpointCache, resolveEndpoint } from "@smithy/util-endpoints"; +import { ruleSet } from "./ruleset"; +const cache = new EndpointCache({ + size: 50, + params: [ + "Accelerate", + "Bucket", + "DisableAccessPoints", + "DisableMultiRegionAccessPoints", + "DisableS3ExpressSessionAuth", + "Endpoint", + "ForcePathStyle", + "Region", + "UseArnRegion", + "UseDualStack", + "UseFIPS", + "UseGlobalEndpoint", + "UseObjectLambdaEndpoint", + "UseS3ExpressControlEndpoint", + ], +}); +export const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => resolveEndpoint(ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +customEndpointFunctions.aws = awsEndpointFunctions; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/endpoint/ruleset.js b/node_modules/@aws-sdk/client-s3/dist-es/endpoint/ruleset.js new file mode 100644 index 00000000..019ffb5e --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/endpoint/ruleset.js @@ -0,0 +1,4 @@ +const cp = "required", cq = "type", cr = "rules", cs = "conditions", ct = "fn", cu = "argv", cv = "ref", cw = "assign", cx = "url", cy = "properties", cz = "backend", cA = "authSchemes", cB = "disableDoubleEncoding", cC = "signingName", cD = "signingRegion", cE = "headers", cF = "signingRegionSet"; +const a = 6, b = false, c = true, d = "isSet", e = 
"booleanEquals", f = "error", g = "aws.partition", h = "stringEquals", i = "getAttr", j = "name", k = "substring", l = "bucketSuffix", m = "parseURL", n = "endpoint", o = "tree", p = "aws.isVirtualHostableS3Bucket", q = "{url#scheme}://{Bucket}.{url#authority}{url#path}", r = "not", s = "accessPointSuffix", t = "{url#scheme}://{url#authority}{url#path}", u = "hardwareType", v = "regionPrefix", w = "bucketAliasSuffix", x = "outpostId", y = "isValidHostLabel", z = "sigv4a", A = "s3-outposts", B = "s3", C = "{url#scheme}://{url#authority}{url#normalizedPath}{Bucket}", D = "https://{Bucket}.s3-accelerate.{partitionResult#dnsSuffix}", E = "https://{Bucket}.s3.{partitionResult#dnsSuffix}", F = "aws.parseArn", G = "bucketArn", H = "arnType", I = "", J = "s3-object-lambda", K = "accesspoint", L = "accessPointName", M = "{url#scheme}://{accessPointName}-{bucketArn#accountId}.{url#authority}{url#path}", N = "mrapPartition", O = "outpostType", P = "arnPrefix", Q = "{url#scheme}://{url#authority}{url#normalizedPath}{uri_encoded_bucket}", R = "https://s3.{partitionResult#dnsSuffix}/{uri_encoded_bucket}", S = "https://s3.{partitionResult#dnsSuffix}", T = { [cp]: false, [cq]: "String" }, U = { [cp]: true, "default": false, [cq]: "Boolean" }, V = { [cp]: false, [cq]: "Boolean" }, W = { [ct]: e, [cu]: [{ [cv]: "Accelerate" }, true] }, X = { [ct]: e, [cu]: [{ [cv]: "UseFIPS" }, true] }, Y = { [ct]: e, [cu]: [{ [cv]: "UseDualStack" }, true] }, Z = { [ct]: d, [cu]: [{ [cv]: "Endpoint" }] }, aa = { [ct]: g, [cu]: [{ [cv]: "Region" }], [cw]: "partitionResult" }, ab = { [ct]: h, [cu]: [{ [ct]: i, [cu]: [{ [cv]: "partitionResult" }, j] }, "aws-cn"] }, ac = { [ct]: d, [cu]: [{ [cv]: "Bucket" }] }, ad = { [cv]: "Bucket" }, ae = { [cs]: [Y], [f]: "S3Express does not support Dual-stack.", [cq]: f }, af = { [cs]: [W], [f]: "S3Express does not support S3 Accelerate.", [cq]: f }, ag = { [cs]: [Z, { [ct]: m, [cu]: [{ [cv]: "Endpoint" }], [cw]: "url" }], [cr]: [{ [cs]: [{ [ct]: d, [cu]: [{ [cv]: "DisableS3ExpressSessionAuth" }] }, { [ct]: e, [cu]: [{ [cv]: "DisableS3ExpressSessionAuth" }, true] }], [cr]: [{ [cs]: [{ [ct]: e, [cu]: [{ [ct]: i, [cu]: [{ [cv]: "url" }, "isIp"] }, true] }], [cr]: [{ [cs]: [{ [ct]: "uriEncode", [cu]: [ad], [cw]: "uri_encoded_bucket" }], [cr]: [{ [n]: { [cx]: "{url#scheme}://{url#authority}/{uri_encoded_bucket}{url#path}", [cy]: { [cz]: "S3Express", [cA]: [{ [cB]: true, [j]: "sigv4", [cC]: "s3express", [cD]: "{Region}" }] }, [cE]: {} }, [cq]: n }], [cq]: o }], [cq]: o }, { [cs]: [{ [ct]: p, [cu]: [ad, false] }], [cr]: [{ [n]: { [cx]: q, [cy]: { [cz]: "S3Express", [cA]: [{ [cB]: true, [j]: "sigv4", [cC]: "s3express", [cD]: "{Region}" }] }, [cE]: {} }, [cq]: n }], [cq]: o }, { [f]: "S3Express bucket name is not a valid virtual hostable name.", [cq]: f }], [cq]: o }, { [cs]: [{ [ct]: e, [cu]: [{ [ct]: i, [cu]: [{ [cv]: "url" }, "isIp"] }, true] }], [cr]: [{ [cs]: [{ [ct]: "uriEncode", [cu]: [ad], [cw]: "uri_encoded_bucket" }], [cr]: [{ [n]: { [cx]: "{url#scheme}://{url#authority}/{uri_encoded_bucket}{url#path}", [cy]: { [cz]: "S3Express", [cA]: [{ [cB]: true, [j]: "sigv4-s3express", [cC]: "s3express", [cD]: "{Region}" }] }, [cE]: {} }, [cq]: n }], [cq]: o }], [cq]: o }, { [cs]: [{ [ct]: p, [cu]: [ad, false] }], [cr]: [{ [n]: { [cx]: q, [cy]: { [cz]: "S3Express", [cA]: [{ [cB]: true, [j]: "sigv4-s3express", [cC]: "s3express", [cD]: "{Region}" }] }, [cE]: {} }, [cq]: n }], [cq]: o }, { [f]: "S3Express bucket name is not a valid virtual hostable name.", [cq]: f }], [cq]: o }, ah = { [ct]: m, [cu]: [{ 
[cv]: "Endpoint" }], [cw]: "url" }, ai = { [ct]: e, [cu]: [{ [ct]: i, [cu]: [{ [cv]: "url" }, "isIp"] }, true] }, aj = { [cv]: "url" }, ak = { [ct]: "uriEncode", [cu]: [ad], [cw]: "uri_encoded_bucket" }, al = { [cz]: "S3Express", [cA]: [{ [cB]: true, [j]: "sigv4", [cC]: "s3express", [cD]: "{Region}" }] }, am = {}, an = { [ct]: p, [cu]: [ad, false] }, ao = { [f]: "S3Express bucket name is not a valid virtual hostable name.", [cq]: f }, ap = { [ct]: d, [cu]: [{ [cv]: "UseS3ExpressControlEndpoint" }] }, aq = { [ct]: e, [cu]: [{ [cv]: "UseS3ExpressControlEndpoint" }, true] }, ar = { [ct]: r, [cu]: [Z] }, as = { [f]: "Unrecognized S3Express bucket name format.", [cq]: f }, at = { [ct]: r, [cu]: [ac] }, au = { [cv]: u }, av = { [cs]: [ar], [f]: "Expected a endpoint to be specified but no endpoint was found", [cq]: f }, aw = { [cA]: [{ [cB]: true, [j]: z, [cC]: A, [cF]: ["*"] }, { [cB]: true, [j]: "sigv4", [cC]: A, [cD]: "{Region}" }] }, ax = { [ct]: e, [cu]: [{ [cv]: "ForcePathStyle" }, false] }, ay = { [cv]: "ForcePathStyle" }, az = { [ct]: e, [cu]: [{ [cv]: "Accelerate" }, false] }, aA = { [ct]: h, [cu]: [{ [cv]: "Region" }, "aws-global"] }, aB = { [cA]: [{ [cB]: true, [j]: "sigv4", [cC]: B, [cD]: "us-east-1" }] }, aC = { [ct]: r, [cu]: [aA] }, aD = { [ct]: e, [cu]: [{ [cv]: "UseGlobalEndpoint" }, true] }, aE = { [cx]: "https://{Bucket}.s3-fips.dualstack.{Region}.{partitionResult#dnsSuffix}", [cy]: { [cA]: [{ [cB]: true, [j]: "sigv4", [cC]: B, [cD]: "{Region}" }] }, [cE]: {} }, aF = { [cA]: [{ [cB]: true, [j]: "sigv4", [cC]: B, [cD]: "{Region}" }] }, aG = { [ct]: e, [cu]: [{ [cv]: "UseGlobalEndpoint" }, false] }, aH = { [ct]: e, [cu]: [{ [cv]: "UseDualStack" }, false] }, aI = { [cx]: "https://{Bucket}.s3-fips.{Region}.{partitionResult#dnsSuffix}", [cy]: aF, [cE]: {} }, aJ = { [ct]: e, [cu]: [{ [cv]: "UseFIPS" }, false] }, aK = { [cx]: "https://{Bucket}.s3-accelerate.dualstack.{partitionResult#dnsSuffix}", [cy]: aF, [cE]: {} }, aL = { [cx]: "https://{Bucket}.s3.dualstack.{Region}.{partitionResult#dnsSuffix}", [cy]: aF, [cE]: {} }, aM = { [ct]: e, [cu]: [{ [ct]: i, [cu]: [aj, "isIp"] }, false] }, aN = { [cx]: C, [cy]: aF, [cE]: {} }, aO = { [cx]: q, [cy]: aF, [cE]: {} }, aP = { [n]: aO, [cq]: n }, aQ = { [cx]: D, [cy]: aF, [cE]: {} }, aR = { [cx]: "https://{Bucket}.s3.{Region}.{partitionResult#dnsSuffix}", [cy]: aF, [cE]: {} }, aS = { [f]: "Invalid region: region was not a valid DNS name.", [cq]: f }, aT = { [cv]: G }, aU = { [cv]: H }, aV = { [ct]: i, [cu]: [aT, "service"] }, aW = { [cv]: L }, aX = { [cs]: [Y], [f]: "S3 Object Lambda does not support Dual-stack", [cq]: f }, aY = { [cs]: [W], [f]: "S3 Object Lambda does not support S3 Accelerate", [cq]: f }, aZ = { [cs]: [{ [ct]: d, [cu]: [{ [cv]: "DisableAccessPoints" }] }, { [ct]: e, [cu]: [{ [cv]: "DisableAccessPoints" }, true] }], [f]: "Access points are not supported for this operation", [cq]: f }, ba = { [cs]: [{ [ct]: d, [cu]: [{ [cv]: "UseArnRegion" }] }, { [ct]: e, [cu]: [{ [cv]: "UseArnRegion" }, false] }, { [ct]: r, [cu]: [{ [ct]: h, [cu]: [{ [ct]: i, [cu]: [aT, "region"] }, "{Region}"] }] }], [f]: "Invalid configuration: region from ARN `{bucketArn#region}` does not match client region `{Region}` and UseArnRegion is `false`", [cq]: f }, bb = { [ct]: i, [cu]: [{ [cv]: "bucketPartition" }, j] }, bc = { [ct]: i, [cu]: [aT, "accountId"] }, bd = { [cA]: [{ [cB]: true, [j]: "sigv4", [cC]: J, [cD]: "{bucketArn#region}" }] }, be = { [f]: "Invalid ARN: The access point name may only contain a-z, A-Z, 0-9 and `-`. 
Found: `{accessPointName}`", [cq]: f }, bf = { [f]: "Invalid ARN: The account id may only contain a-z, A-Z, 0-9 and `-`. Found: `{bucketArn#accountId}`", [cq]: f }, bg = { [f]: "Invalid region in ARN: `{bucketArn#region}` (invalid DNS name)", [cq]: f }, bh = { [f]: "Client was configured for partition `{partitionResult#name}` but ARN (`{Bucket}`) has `{bucketPartition#name}`", [cq]: f }, bi = { [f]: "Invalid ARN: The ARN may only contain a single resource component after `accesspoint`.", [cq]: f }, bj = { [f]: "Invalid ARN: Expected a resource of the format `accesspoint:` but no name was provided", [cq]: f }, bk = { [cA]: [{ [cB]: true, [j]: "sigv4", [cC]: B, [cD]: "{bucketArn#region}" }] }, bl = { [cA]: [{ [cB]: true, [j]: z, [cC]: A, [cF]: ["*"] }, { [cB]: true, [j]: "sigv4", [cC]: A, [cD]: "{bucketArn#region}" }] }, bm = { [ct]: F, [cu]: [ad] }, bn = { [cx]: "https://s3-fips.dualstack.{Region}.{partitionResult#dnsSuffix}/{uri_encoded_bucket}", [cy]: aF, [cE]: {} }, bo = { [cx]: "https://s3-fips.{Region}.{partitionResult#dnsSuffix}/{uri_encoded_bucket}", [cy]: aF, [cE]: {} }, bp = { [cx]: "https://s3.dualstack.{Region}.{partitionResult#dnsSuffix}/{uri_encoded_bucket}", [cy]: aF, [cE]: {} }, bq = { [cx]: Q, [cy]: aF, [cE]: {} }, br = { [cx]: "https://s3.{Region}.{partitionResult#dnsSuffix}/{uri_encoded_bucket}", [cy]: aF, [cE]: {} }, bs = { [cv]: "UseObjectLambdaEndpoint" }, bt = { [cA]: [{ [cB]: true, [j]: "sigv4", [cC]: J, [cD]: "{Region}" }] }, bu = { [cx]: "https://s3-fips.dualstack.{Region}.{partitionResult#dnsSuffix}", [cy]: aF, [cE]: {} }, bv = { [cx]: "https://s3-fips.{Region}.{partitionResult#dnsSuffix}", [cy]: aF, [cE]: {} }, bw = { [cx]: "https://s3.dualstack.{Region}.{partitionResult#dnsSuffix}", [cy]: aF, [cE]: {} }, bx = { [cx]: t, [cy]: aF, [cE]: {} }, by = { [cx]: "https://s3.{Region}.{partitionResult#dnsSuffix}", [cy]: aF, [cE]: {} }, bz = [{ [cv]: "Region" }], bA = [{ [cv]: "Endpoint" }], bB = [ad], bC = [Y], bD = [W], bE = [Z, ah], bF = [{ [ct]: d, [cu]: [{ [cv]: "DisableS3ExpressSessionAuth" }] }, { [ct]: e, [cu]: [{ [cv]: "DisableS3ExpressSessionAuth" }, true] }], bG = [ak], bH = [an], bI = [aa], bJ = [X], bK = [{ [ct]: k, [cu]: [ad, 6, 14, true], [cw]: "s3expressAvailabilityZoneId" }, { [ct]: k, [cu]: [ad, 14, 16, true], [cw]: "s3expressAvailabilityZoneDelim" }, { [ct]: h, [cu]: [{ [cv]: "s3expressAvailabilityZoneDelim" }, "--"] }], bL = [{ [cs]: [X], [n]: { [cx]: "https://{Bucket}.s3express-fips-{s3expressAvailabilityZoneId}.{Region}.{partitionResult#dnsSuffix}", [cy]: al, [cE]: {} }, [cq]: n }, { [n]: { [cx]: "https://{Bucket}.s3express-{s3expressAvailabilityZoneId}.{Region}.{partitionResult#dnsSuffix}", [cy]: al, [cE]: {} }, [cq]: n }], bM = [{ [ct]: k, [cu]: [ad, 6, 15, true], [cw]: "s3expressAvailabilityZoneId" }, { [ct]: k, [cu]: [ad, 15, 17, true], [cw]: "s3expressAvailabilityZoneDelim" }, { [ct]: h, [cu]: [{ [cv]: "s3expressAvailabilityZoneDelim" }, "--"] }], bN = [{ [ct]: k, [cu]: [ad, 6, 19, true], [cw]: "s3expressAvailabilityZoneId" }, { [ct]: k, [cu]: [ad, 19, 21, true], [cw]: "s3expressAvailabilityZoneDelim" }, { [ct]: h, [cu]: [{ [cv]: "s3expressAvailabilityZoneDelim" }, "--"] }], bO = [{ [ct]: k, [cu]: [ad, 6, 20, true], [cw]: "s3expressAvailabilityZoneId" }, { [ct]: k, [cu]: [ad, 20, 22, true], [cw]: "s3expressAvailabilityZoneDelim" }, { [ct]: h, [cu]: [{ [cv]: "s3expressAvailabilityZoneDelim" }, "--"] }], bP = [{ [ct]: k, [cu]: [ad, 6, 26, true], [cw]: "s3expressAvailabilityZoneId" }, { [ct]: k, [cu]: [ad, 26, 28, true], [cw]: 
"s3expressAvailabilityZoneDelim" }, { [ct]: h, [cu]: [{ [cv]: "s3expressAvailabilityZoneDelim" }, "--"] }], bQ = [{ [cs]: [X], [n]: { [cx]: "https://{Bucket}.s3express-fips-{s3expressAvailabilityZoneId}.{Region}.{partitionResult#dnsSuffix}", [cy]: { [cz]: "S3Express", [cA]: [{ [cB]: true, [j]: "sigv4-s3express", [cC]: "s3express", [cD]: "{Region}" }] }, [cE]: {} }, [cq]: n }, { [n]: { [cx]: "https://{Bucket}.s3express-{s3expressAvailabilityZoneId}.{Region}.{partitionResult#dnsSuffix}", [cy]: { [cz]: "S3Express", [cA]: [{ [cB]: true, [j]: "sigv4-s3express", [cC]: "s3express", [cD]: "{Region}" }] }, [cE]: {} }, [cq]: n }], bR = [ad, 0, 7, true], bS = [{ [ct]: k, [cu]: [ad, 7, 15, true], [cw]: "s3expressAvailabilityZoneId" }, { [ct]: k, [cu]: [ad, 15, 17, true], [cw]: "s3expressAvailabilityZoneDelim" }, { [ct]: h, [cu]: [{ [cv]: "s3expressAvailabilityZoneDelim" }, "--"] }], bT = [{ [ct]: k, [cu]: [ad, 7, 16, true], [cw]: "s3expressAvailabilityZoneId" }, { [ct]: k, [cu]: [ad, 16, 18, true], [cw]: "s3expressAvailabilityZoneDelim" }, { [ct]: h, [cu]: [{ [cv]: "s3expressAvailabilityZoneDelim" }, "--"] }], bU = [{ [ct]: k, [cu]: [ad, 7, 20, true], [cw]: "s3expressAvailabilityZoneId" }, { [ct]: k, [cu]: [ad, 20, 22, true], [cw]: "s3expressAvailabilityZoneDelim" }, { [ct]: h, [cu]: [{ [cv]: "s3expressAvailabilityZoneDelim" }, "--"] }], bV = [{ [ct]: k, [cu]: [ad, 7, 21, true], [cw]: "s3expressAvailabilityZoneId" }, { [ct]: k, [cu]: [ad, 21, 23, true], [cw]: "s3expressAvailabilityZoneDelim" }, { [ct]: h, [cu]: [{ [cv]: "s3expressAvailabilityZoneDelim" }, "--"] }], bW = [{ [ct]: k, [cu]: [ad, 7, 27, true], [cw]: "s3expressAvailabilityZoneId" }, { [ct]: k, [cu]: [ad, 27, 29, true], [cw]: "s3expressAvailabilityZoneDelim" }, { [ct]: h, [cu]: [{ [cv]: "s3expressAvailabilityZoneDelim" }, "--"] }], bX = [ac], bY = [{ [ct]: y, [cu]: [{ [cv]: x }, false] }], bZ = [{ [ct]: h, [cu]: [{ [cv]: v }, "beta"] }], ca = ["*"], cb = [{ [ct]: y, [cu]: [{ [cv]: "Region" }, false] }], cc = [{ [ct]: h, [cu]: [{ [cv]: "Region" }, "us-east-1"] }], cd = [{ [ct]: h, [cu]: [aU, K] }], ce = [{ [ct]: i, [cu]: [aT, "resourceId[1]"], [cw]: L }, { [ct]: r, [cu]: [{ [ct]: h, [cu]: [aW, I] }] }], cf = [aT, "resourceId[1]"], cg = [{ [ct]: r, [cu]: [{ [ct]: h, [cu]: [{ [ct]: i, [cu]: [aT, "region"] }, I] }] }], ch = [{ [ct]: r, [cu]: [{ [ct]: d, [cu]: [{ [ct]: i, [cu]: [aT, "resourceId[2]"] }] }] }], ci = [aT, "resourceId[2]"], cj = [{ [ct]: g, [cu]: [{ [ct]: i, [cu]: [aT, "region"] }], [cw]: "bucketPartition" }], ck = [{ [ct]: h, [cu]: [bb, { [ct]: i, [cu]: [{ [cv]: "partitionResult" }, j] }] }], cl = [{ [ct]: y, [cu]: [{ [ct]: i, [cu]: [aT, "region"] }, true] }], cm = [{ [ct]: y, [cu]: [bc, false] }], cn = [{ [ct]: y, [cu]: [aW, false] }], co = [{ [ct]: y, [cu]: [{ [cv]: "Region" }, true] }]; +const _data = { version: "1.0", parameters: { Bucket: T, Region: T, UseFIPS: U, UseDualStack: U, Endpoint: T, ForcePathStyle: U, Accelerate: U, UseGlobalEndpoint: U, UseObjectLambdaEndpoint: V, Key: T, Prefix: T, CopySource: T, DisableAccessPoints: V, DisableMultiRegionAccessPoints: U, UseArnRegion: V, UseS3ExpressControlEndpoint: V, DisableS3ExpressSessionAuth: V }, [cr]: [{ [cs]: [{ [ct]: d, [cu]: bz }], [cr]: [{ [cs]: [W, X], error: "Accelerate cannot be used with FIPS", [cq]: f }, { [cs]: [Y, Z], error: "Cannot set dual-stack in combination with a custom endpoint.", [cq]: f }, { [cs]: [Z, X], error: "A custom endpoint cannot be combined with FIPS", [cq]: f }, { [cs]: [Z, W], error: "A custom endpoint cannot be combined with S3 Accelerate", 
[cq]: f }, { [cs]: [X, aa, ab], error: "Partition does not support FIPS", [cq]: f }, { [cs]: [ac, { [ct]: k, [cu]: [ad, 0, a, c], [cw]: l }, { [ct]: h, [cu]: [{ [cv]: l }, "--x-s3"] }], [cr]: [ae, af, ag, { [cs]: [ap, aq], [cr]: [{ [cs]: bI, [cr]: [{ [cs]: [ak, ar], [cr]: [{ [cs]: bJ, endpoint: { [cx]: "https://s3express-control-fips.{Region}.{partitionResult#dnsSuffix}/{uri_encoded_bucket}", [cy]: al, [cE]: am }, [cq]: n }, { endpoint: { [cx]: "https://s3express-control.{Region}.{partitionResult#dnsSuffix}/{uri_encoded_bucket}", [cy]: al, [cE]: am }, [cq]: n }], [cq]: o }], [cq]: o }], [cq]: o }, { [cs]: bH, [cr]: [{ [cs]: bI, [cr]: [{ [cs]: bF, [cr]: [{ [cs]: bK, [cr]: bL, [cq]: o }, { [cs]: bM, [cr]: bL, [cq]: o }, { [cs]: bN, [cr]: bL, [cq]: o }, { [cs]: bO, [cr]: bL, [cq]: o }, { [cs]: bP, [cr]: bL, [cq]: o }, as], [cq]: o }, { [cs]: bK, [cr]: bQ, [cq]: o }, { [cs]: bM, [cr]: bQ, [cq]: o }, { [cs]: bN, [cr]: bQ, [cq]: o }, { [cs]: bO, [cr]: bQ, [cq]: o }, { [cs]: bP, [cr]: bQ, [cq]: o }, as], [cq]: o }], [cq]: o }, ao], [cq]: o }, { [cs]: [ac, { [ct]: k, [cu]: bR, [cw]: s }, { [ct]: h, [cu]: [{ [cv]: s }, "--xa-s3"] }], [cr]: [ae, af, ag, { [cs]: bH, [cr]: [{ [cs]: bI, [cr]: [{ [cs]: bF, [cr]: [{ [cs]: bS, [cr]: bL, [cq]: o }, { [cs]: bT, [cr]: bL, [cq]: o }, { [cs]: bU, [cr]: bL, [cq]: o }, { [cs]: bV, [cr]: bL, [cq]: o }, { [cs]: bW, [cr]: bL, [cq]: o }, as], [cq]: o }, { [cs]: bS, [cr]: bQ, [cq]: o }, { [cs]: bT, [cr]: bQ, [cq]: o }, { [cs]: bU, [cr]: bQ, [cq]: o }, { [cs]: bV, [cr]: bQ, [cq]: o }, { [cs]: bW, [cr]: bQ, [cq]: o }, as], [cq]: o }], [cq]: o }, ao], [cq]: o }, { [cs]: [at, ap, aq], [cr]: [{ [cs]: bI, [cr]: [{ [cs]: bE, endpoint: { [cx]: t, [cy]: al, [cE]: am }, [cq]: n }, { [cs]: bJ, endpoint: { [cx]: "https://s3express-control-fips.{Region}.{partitionResult#dnsSuffix}", [cy]: al, [cE]: am }, [cq]: n }, { endpoint: { [cx]: "https://s3express-control.{Region}.{partitionResult#dnsSuffix}", [cy]: al, [cE]: am }, [cq]: n }], [cq]: o }], [cq]: o }, { [cs]: [ac, { [ct]: k, [cu]: [ad, 49, 50, c], [cw]: u }, { [ct]: k, [cu]: [ad, 8, 12, c], [cw]: v }, { [ct]: k, [cu]: bR, [cw]: w }, { [ct]: k, [cu]: [ad, 32, 49, c], [cw]: x }, { [ct]: g, [cu]: bz, [cw]: "regionPartition" }, { [ct]: h, [cu]: [{ [cv]: w }, "--op-s3"] }], [cr]: [{ [cs]: bY, [cr]: [{ [cs]: [{ [ct]: h, [cu]: [au, "e"] }], [cr]: [{ [cs]: bZ, [cr]: [av, { [cs]: bE, endpoint: { [cx]: "https://{Bucket}.ec2.{url#authority}", [cy]: aw, [cE]: am }, [cq]: n }], [cq]: o }, { endpoint: { [cx]: "https://{Bucket}.ec2.s3-outposts.{Region}.{regionPartition#dnsSuffix}", [cy]: aw, [cE]: am }, [cq]: n }], [cq]: o }, { [cs]: [{ [ct]: h, [cu]: [au, "o"] }], [cr]: [{ [cs]: bZ, [cr]: [av, { [cs]: bE, endpoint: { [cx]: "https://{Bucket}.op-{outpostId}.{url#authority}", [cy]: aw, [cE]: am }, [cq]: n }], [cq]: o }, { endpoint: { [cx]: "https://{Bucket}.op-{outpostId}.s3-outposts.{Region}.{regionPartition#dnsSuffix}", [cy]: aw, [cE]: am }, [cq]: n }], [cq]: o }, { error: "Unrecognized hardware type: \"Expected hardware type o or e but got {hardwareType}\"", [cq]: f }], [cq]: o }, { error: "Invalid ARN: The outpost Id must only contain a-z, A-Z, 0-9 and `-`.", [cq]: f }], [cq]: o }, { [cs]: bX, [cr]: [{ [cs]: [Z, { [ct]: r, [cu]: [{ [ct]: d, [cu]: [{ [ct]: m, [cu]: bA }] }] }], error: "Custom endpoint `{Endpoint}` was not a valid URI", [cq]: f }, { [cs]: [ax, an], [cr]: [{ [cs]: bI, [cr]: [{ [cs]: cb, [cr]: [{ [cs]: [W, ab], error: "S3 Accelerate cannot be used in this region", [cq]: f }, { [cs]: [Y, X, az, ar, aA], endpoint: { [cx]: 
"https://{Bucket}.s3-fips.dualstack.us-east-1.{partitionResult#dnsSuffix}", [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [Y, X, az, ar, aC, aD], [cr]: [{ endpoint: aE, [cq]: n }], [cq]: o }, { [cs]: [Y, X, az, ar, aC, aG], endpoint: aE, [cq]: n }, { [cs]: [aH, X, az, ar, aA], endpoint: { [cx]: "https://{Bucket}.s3-fips.us-east-1.{partitionResult#dnsSuffix}", [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aH, X, az, ar, aC, aD], [cr]: [{ endpoint: aI, [cq]: n }], [cq]: o }, { [cs]: [aH, X, az, ar, aC, aG], endpoint: aI, [cq]: n }, { [cs]: [Y, aJ, W, ar, aA], endpoint: { [cx]: "https://{Bucket}.s3-accelerate.dualstack.us-east-1.{partitionResult#dnsSuffix}", [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [Y, aJ, W, ar, aC, aD], [cr]: [{ endpoint: aK, [cq]: n }], [cq]: o }, { [cs]: [Y, aJ, W, ar, aC, aG], endpoint: aK, [cq]: n }, { [cs]: [Y, aJ, az, ar, aA], endpoint: { [cx]: "https://{Bucket}.s3.dualstack.us-east-1.{partitionResult#dnsSuffix}", [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [Y, aJ, az, ar, aC, aD], [cr]: [{ endpoint: aL, [cq]: n }], [cq]: o }, { [cs]: [Y, aJ, az, ar, aC, aG], endpoint: aL, [cq]: n }, { [cs]: [aH, aJ, az, Z, ah, ai, aA], endpoint: { [cx]: C, [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aH, aJ, az, Z, ah, aM, aA], endpoint: { [cx]: q, [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aH, aJ, az, Z, ah, ai, aC, aD], [cr]: [{ [cs]: cc, endpoint: aN, [cq]: n }, { endpoint: aN, [cq]: n }], [cq]: o }, { [cs]: [aH, aJ, az, Z, ah, aM, aC, aD], [cr]: [{ [cs]: cc, endpoint: aO, [cq]: n }, aP], [cq]: o }, { [cs]: [aH, aJ, az, Z, ah, ai, aC, aG], endpoint: aN, [cq]: n }, { [cs]: [aH, aJ, az, Z, ah, aM, aC, aG], endpoint: aO, [cq]: n }, { [cs]: [aH, aJ, W, ar, aA], endpoint: { [cx]: D, [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aH, aJ, W, ar, aC, aD], [cr]: [{ [cs]: cc, endpoint: aQ, [cq]: n }, { endpoint: aQ, [cq]: n }], [cq]: o }, { [cs]: [aH, aJ, W, ar, aC, aG], endpoint: aQ, [cq]: n }, { [cs]: [aH, aJ, az, ar, aA], endpoint: { [cx]: E, [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aH, aJ, az, ar, aC, aD], [cr]: [{ [cs]: cc, endpoint: { [cx]: E, [cy]: aF, [cE]: am }, [cq]: n }, { endpoint: aR, [cq]: n }], [cq]: o }, { [cs]: [aH, aJ, az, ar, aC, aG], endpoint: aR, [cq]: n }], [cq]: o }, aS], [cq]: o }], [cq]: o }, { [cs]: [Z, ah, { [ct]: h, [cu]: [{ [ct]: i, [cu]: [aj, "scheme"] }, "http"] }, { [ct]: p, [cu]: [ad, c] }, ax, aJ, aH, az], [cr]: [{ [cs]: bI, [cr]: [{ [cs]: cb, [cr]: [aP], [cq]: o }, aS], [cq]: o }], [cq]: o }, { [cs]: [ax, { [ct]: F, [cu]: bB, [cw]: G }], [cr]: [{ [cs]: [{ [ct]: i, [cu]: [aT, "resourceId[0]"], [cw]: H }, { [ct]: r, [cu]: [{ [ct]: h, [cu]: [aU, I] }] }], [cr]: [{ [cs]: [{ [ct]: h, [cu]: [aV, J] }], [cr]: [{ [cs]: cd, [cr]: [{ [cs]: ce, [cr]: [aX, aY, { [cs]: cg, [cr]: [aZ, { [cs]: ch, [cr]: [ba, { [cs]: cj, [cr]: [{ [cs]: bI, [cr]: [{ [cs]: ck, [cr]: [{ [cs]: cl, [cr]: [{ [cs]: [{ [ct]: h, [cu]: [bc, I] }], error: "Invalid ARN: Missing account id", [cq]: f }, { [cs]: cm, [cr]: [{ [cs]: cn, [cr]: [{ [cs]: bE, endpoint: { [cx]: M, [cy]: bd, [cE]: am }, [cq]: n }, { [cs]: bJ, endpoint: { [cx]: "https://{accessPointName}-{bucketArn#accountId}.s3-object-lambda-fips.{bucketArn#region}.{bucketPartition#dnsSuffix}", [cy]: bd, [cE]: am }, [cq]: n }, { endpoint: { [cx]: "https://{accessPointName}-{bucketArn#accountId}.s3-object-lambda.{bucketArn#region}.{bucketPartition#dnsSuffix}", [cy]: bd, [cE]: am }, [cq]: n }], [cq]: o }, be], [cq]: o }, bf], [cq]: o }, bg], [cq]: o }, bh], [cq]: o }], [cq]: o }], [cq]: o }, bi], [cq]: o }, { error: "Invalid ARN: bucket ARN is missing a region", 
[cq]: f }], [cq]: o }, bj], [cq]: o }, { error: "Invalid ARN: Object Lambda ARNs only support `accesspoint` arn types, but found: `{arnType}`", [cq]: f }], [cq]: o }, { [cs]: cd, [cr]: [{ [cs]: ce, [cr]: [{ [cs]: cg, [cr]: [{ [cs]: cd, [cr]: [{ [cs]: cg, [cr]: [aZ, { [cs]: ch, [cr]: [ba, { [cs]: cj, [cr]: [{ [cs]: bI, [cr]: [{ [cs]: [{ [ct]: h, [cu]: [bb, "{partitionResult#name}"] }], [cr]: [{ [cs]: cl, [cr]: [{ [cs]: [{ [ct]: h, [cu]: [aV, B] }], [cr]: [{ [cs]: cm, [cr]: [{ [cs]: cn, [cr]: [{ [cs]: bD, error: "Access Points do not support S3 Accelerate", [cq]: f }, { [cs]: [X, Y], endpoint: { [cx]: "https://{accessPointName}-{bucketArn#accountId}.s3-accesspoint-fips.dualstack.{bucketArn#region}.{bucketPartition#dnsSuffix}", [cy]: bk, [cE]: am }, [cq]: n }, { [cs]: [X, aH], endpoint: { [cx]: "https://{accessPointName}-{bucketArn#accountId}.s3-accesspoint-fips.{bucketArn#region}.{bucketPartition#dnsSuffix}", [cy]: bk, [cE]: am }, [cq]: n }, { [cs]: [aJ, Y], endpoint: { [cx]: "https://{accessPointName}-{bucketArn#accountId}.s3-accesspoint.dualstack.{bucketArn#region}.{bucketPartition#dnsSuffix}", [cy]: bk, [cE]: am }, [cq]: n }, { [cs]: [aJ, aH, Z, ah], endpoint: { [cx]: M, [cy]: bk, [cE]: am }, [cq]: n }, { [cs]: [aJ, aH], endpoint: { [cx]: "https://{accessPointName}-{bucketArn#accountId}.s3-accesspoint.{bucketArn#region}.{bucketPartition#dnsSuffix}", [cy]: bk, [cE]: am }, [cq]: n }], [cq]: o }, be], [cq]: o }, bf], [cq]: o }, { error: "Invalid ARN: The ARN was not for the S3 service, found: {bucketArn#service}", [cq]: f }], [cq]: o }, bg], [cq]: o }, bh], [cq]: o }], [cq]: o }], [cq]: o }, bi], [cq]: o }], [cq]: o }], [cq]: o }, { [cs]: [{ [ct]: y, [cu]: [aW, c] }], [cr]: [{ [cs]: bC, error: "S3 MRAP does not support dual-stack", [cq]: f }, { [cs]: bJ, error: "S3 MRAP does not support FIPS", [cq]: f }, { [cs]: bD, error: "S3 MRAP does not support S3 Accelerate", [cq]: f }, { [cs]: [{ [ct]: e, [cu]: [{ [cv]: "DisableMultiRegionAccessPoints" }, c] }], error: "Invalid configuration: Multi-Region Access Point ARNs are disabled.", [cq]: f }, { [cs]: [{ [ct]: g, [cu]: bz, [cw]: N }], [cr]: [{ [cs]: [{ [ct]: h, [cu]: [{ [ct]: i, [cu]: [{ [cv]: N }, j] }, { [ct]: i, [cu]: [aT, "partition"] }] }], [cr]: [{ endpoint: { [cx]: "https://{accessPointName}.accesspoint.s3-global.{mrapPartition#dnsSuffix}", [cy]: { [cA]: [{ [cB]: c, name: z, [cC]: B, [cF]: ca }] }, [cE]: am }, [cq]: n }], [cq]: o }, { error: "Client was configured for partition `{mrapPartition#name}` but bucket referred to partition `{bucketArn#partition}`", [cq]: f }], [cq]: o }], [cq]: o }, { error: "Invalid Access Point Name", [cq]: f }], [cq]: o }, bj], [cq]: o }, { [cs]: [{ [ct]: h, [cu]: [aV, A] }], [cr]: [{ [cs]: bC, error: "S3 Outposts does not support Dual-stack", [cq]: f }, { [cs]: bJ, error: "S3 Outposts does not support FIPS", [cq]: f }, { [cs]: bD, error: "S3 Outposts does not support S3 Accelerate", [cq]: f }, { [cs]: [{ [ct]: d, [cu]: [{ [ct]: i, [cu]: [aT, "resourceId[4]"] }] }], error: "Invalid Arn: Outpost Access Point ARN contains sub resources", [cq]: f }, { [cs]: [{ [ct]: i, [cu]: cf, [cw]: x }], [cr]: [{ [cs]: bY, [cr]: [ba, { [cs]: cj, [cr]: [{ [cs]: bI, [cr]: [{ [cs]: ck, [cr]: [{ [cs]: cl, [cr]: [{ [cs]: cm, [cr]: [{ [cs]: [{ [ct]: i, [cu]: ci, [cw]: O }], [cr]: [{ [cs]: [{ [ct]: i, [cu]: [aT, "resourceId[3]"], [cw]: L }], [cr]: [{ [cs]: [{ [ct]: h, [cu]: [{ [cv]: O }, K] }], [cr]: [{ [cs]: bE, endpoint: { [cx]: "https://{accessPointName}-{bucketArn#accountId}.{outpostId}.{url#authority}", [cy]: bl, [cE]: am }, 
[cq]: n }, { endpoint: { [cx]: "https://{accessPointName}-{bucketArn#accountId}.{outpostId}.s3-outposts.{bucketArn#region}.{bucketPartition#dnsSuffix}", [cy]: bl, [cE]: am }, [cq]: n }], [cq]: o }, { error: "Expected an outpost type `accesspoint`, found {outpostType}", [cq]: f }], [cq]: o }, { error: "Invalid ARN: expected an access point name", [cq]: f }], [cq]: o }, { error: "Invalid ARN: Expected a 4-component resource", [cq]: f }], [cq]: o }, bf], [cq]: o }, bg], [cq]: o }, bh], [cq]: o }], [cq]: o }], [cq]: o }, { error: "Invalid ARN: The outpost Id may only contain a-z, A-Z, 0-9 and `-`. Found: `{outpostId}`", [cq]: f }], [cq]: o }, { error: "Invalid ARN: The Outpost Id was not set", [cq]: f }], [cq]: o }, { error: "Invalid ARN: Unrecognized format: {Bucket} (type: {arnType})", [cq]: f }], [cq]: o }, { error: "Invalid ARN: No ARN type specified", [cq]: f }], [cq]: o }, { [cs]: [{ [ct]: k, [cu]: [ad, 0, 4, b], [cw]: P }, { [ct]: h, [cu]: [{ [cv]: P }, "arn:"] }, { [ct]: r, [cu]: [{ [ct]: d, [cu]: [bm] }] }], error: "Invalid ARN: `{Bucket}` was not a valid ARN", [cq]: f }, { [cs]: [{ [ct]: e, [cu]: [ay, c] }, bm], error: "Path-style addressing cannot be used with ARN buckets", [cq]: f }, { [cs]: bG, [cr]: [{ [cs]: bI, [cr]: [{ [cs]: [az], [cr]: [{ [cs]: [Y, ar, X, aA], endpoint: { [cx]: "https://s3-fips.dualstack.us-east-1.{partitionResult#dnsSuffix}/{uri_encoded_bucket}", [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [Y, ar, X, aC, aD], [cr]: [{ endpoint: bn, [cq]: n }], [cq]: o }, { [cs]: [Y, ar, X, aC, aG], endpoint: bn, [cq]: n }, { [cs]: [aH, ar, X, aA], endpoint: { [cx]: "https://s3-fips.us-east-1.{partitionResult#dnsSuffix}/{uri_encoded_bucket}", [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aH, ar, X, aC, aD], [cr]: [{ endpoint: bo, [cq]: n }], [cq]: o }, { [cs]: [aH, ar, X, aC, aG], endpoint: bo, [cq]: n }, { [cs]: [Y, ar, aJ, aA], endpoint: { [cx]: "https://s3.dualstack.us-east-1.{partitionResult#dnsSuffix}/{uri_encoded_bucket}", [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [Y, ar, aJ, aC, aD], [cr]: [{ endpoint: bp, [cq]: n }], [cq]: o }, { [cs]: [Y, ar, aJ, aC, aG], endpoint: bp, [cq]: n }, { [cs]: [aH, Z, ah, aJ, aA], endpoint: { [cx]: Q, [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aH, Z, ah, aJ, aC, aD], [cr]: [{ [cs]: cc, endpoint: bq, [cq]: n }, { endpoint: bq, [cq]: n }], [cq]: o }, { [cs]: [aH, Z, ah, aJ, aC, aG], endpoint: bq, [cq]: n }, { [cs]: [aH, ar, aJ, aA], endpoint: { [cx]: R, [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aH, ar, aJ, aC, aD], [cr]: [{ [cs]: cc, endpoint: { [cx]: R, [cy]: aF, [cE]: am }, [cq]: n }, { endpoint: br, [cq]: n }], [cq]: o }, { [cs]: [aH, ar, aJ, aC, aG], endpoint: br, [cq]: n }], [cq]: o }, { error: "Path-style addressing cannot be used with S3 Accelerate", [cq]: f }], [cq]: o }], [cq]: o }], [cq]: o }, { [cs]: [{ [ct]: d, [cu]: [bs] }, { [ct]: e, [cu]: [bs, c] }], [cr]: [{ [cs]: bI, [cr]: [{ [cs]: co, [cr]: [aX, aY, { [cs]: bE, endpoint: { [cx]: t, [cy]: bt, [cE]: am }, [cq]: n }, { [cs]: bJ, endpoint: { [cx]: "https://s3-object-lambda-fips.{Region}.{partitionResult#dnsSuffix}", [cy]: bt, [cE]: am }, [cq]: n }, { endpoint: { [cx]: "https://s3-object-lambda.{Region}.{partitionResult#dnsSuffix}", [cy]: bt, [cE]: am }, [cq]: n }], [cq]: o }, aS], [cq]: o }], [cq]: o }, { [cs]: [at], [cr]: [{ [cs]: bI, [cr]: [{ [cs]: co, [cr]: [{ [cs]: [X, Y, ar, aA], endpoint: { [cx]: "https://s3-fips.dualstack.us-east-1.{partitionResult#dnsSuffix}", [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [X, Y, ar, aC, aD], [cr]: [{ endpoint: bu, [cq]: n }], [cq]: o }, { [cs]: [X, Y, 
ar, aC, aG], endpoint: bu, [cq]: n }, { [cs]: [X, aH, ar, aA], endpoint: { [cx]: "https://s3-fips.us-east-1.{partitionResult#dnsSuffix}", [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [X, aH, ar, aC, aD], [cr]: [{ endpoint: bv, [cq]: n }], [cq]: o }, { [cs]: [X, aH, ar, aC, aG], endpoint: bv, [cq]: n }, { [cs]: [aJ, Y, ar, aA], endpoint: { [cx]: "https://s3.dualstack.us-east-1.{partitionResult#dnsSuffix}", [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aJ, Y, ar, aC, aD], [cr]: [{ endpoint: bw, [cq]: n }], [cq]: o }, { [cs]: [aJ, Y, ar, aC, aG], endpoint: bw, [cq]: n }, { [cs]: [aJ, aH, Z, ah, aA], endpoint: { [cx]: t, [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aJ, aH, Z, ah, aC, aD], [cr]: [{ [cs]: cc, endpoint: bx, [cq]: n }, { endpoint: bx, [cq]: n }], [cq]: o }, { [cs]: [aJ, aH, Z, ah, aC, aG], endpoint: bx, [cq]: n }, { [cs]: [aJ, aH, ar, aA], endpoint: { [cx]: S, [cy]: aB, [cE]: am }, [cq]: n }, { [cs]: [aJ, aH, ar, aC, aD], [cr]: [{ [cs]: cc, endpoint: { [cx]: S, [cy]: aF, [cE]: am }, [cq]: n }, { endpoint: by, [cq]: n }], [cq]: o }, { [cs]: [aJ, aH, ar, aC, aG], endpoint: by, [cq]: n }], [cq]: o }, aS], [cq]: o }], [cq]: o }], [cq]: o }, { error: "A region must be set when sending requests to S3.", [cq]: f }] }; +export const ruleSet = _data; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/extensionConfiguration.js b/node_modules/@aws-sdk/client-s3/dist-es/extensionConfiguration.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/extensionConfiguration.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/index.js b/node_modules/@aws-sdk/client-s3/dist-es/index.js new file mode 100644 index 00000000..fffe2db6 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/index.js @@ -0,0 +1,7 @@ +export * from "./S3Client"; +export * from "./S3"; +export * from "./commands"; +export * from "./pagination"; +export * from "./waiters"; +export * from "./models"; +export { S3ServiceException } from "./models/S3ServiceException"; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/models/S3ServiceException.js b/node_modules/@aws-sdk/client-s3/dist-es/models/S3ServiceException.js new file mode 100644 index 00000000..cd434cda --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/models/S3ServiceException.js @@ -0,0 +1,8 @@ +import { ServiceException as __ServiceException, } from "@smithy/smithy-client"; +export { __ServiceException }; +export class S3ServiceException extends __ServiceException { + constructor(options) { + super(options); + Object.setPrototypeOf(this, S3ServiceException.prototype); + } +} diff --git a/node_modules/@aws-sdk/client-s3/dist-es/models/index.js b/node_modules/@aws-sdk/client-s3/dist-es/models/index.js new file mode 100644 index 00000000..ae1cfffa --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/models/index.js @@ -0,0 +1,2 @@ +export * from "./models_0"; +export * from "./models_1"; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/models/models_0.js b/node_modules/@aws-sdk/client-s3/dist-es/models/models_0.js new file mode 100644 index 00000000..53f4222a --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/models/models_0.js @@ -0,0 +1,592 @@ +import { SENSITIVE_STRING } from "@smithy/smithy-client"; +import { S3ServiceException as __BaseException } from "./S3ServiceException"; +export const RequestCharged = { + requester: "requester", +}; +export const RequestPayer = { + requester: "requester", +}; +export class NoSuchUpload extends __BaseException { + name 
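The generated ruleSet above is the decision tree the S3 client walks to resolve an endpoint: it branches on FIPS, dual-stack, S3 Accelerate, path-style addressing, and access-point/Outposts ARNs, and falls through to the error "A region must be set when sending requests to S3." For the MinIO migration this diff supports, the branches that matter are the custom-endpoint ones. A minimal sketch of a client configuration that takes them follows; the endpoint URL and credentials are placeholders, not values from this repo:

    import { S3Client } from "@aws-sdk/client-s3";

    // Placeholder endpoint and credentials, for illustration only.
    const s3 = new S3Client({
      region: "us-east-1",               // the ruleset rejects requests with no region at all
      endpoint: "http://localhost:9000", // a custom endpoint takes the non-AWS branches above
      forcePathStyle: true,              // MinIO serves buckets as /<bucket>/<key>, not virtual-host style
      credentials: { accessKeyId: "minio", secretAccessKey: "minio-secret" },
    });

Note that forcePathStyle is the client setting behind the ruleset's "Path-style addressing cannot be used with S3 Accelerate" and "Path-style addressing cannot be used with ARN buckets" errors: path-style requests are rejected only in those two combinations.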
= "NoSuchUpload"; + $fault = "client"; + constructor(opts) { + super({ + name: "NoSuchUpload", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, NoSuchUpload.prototype); + } +} +export const BucketAccelerateStatus = { + Enabled: "Enabled", + Suspended: "Suspended", +}; +export const Type = { + AmazonCustomerByEmail: "AmazonCustomerByEmail", + CanonicalUser: "CanonicalUser", + Group: "Group", +}; +export const Permission = { + FULL_CONTROL: "FULL_CONTROL", + READ: "READ", + READ_ACP: "READ_ACP", + WRITE: "WRITE", + WRITE_ACP: "WRITE_ACP", +}; +export const OwnerOverride = { + Destination: "Destination", +}; +export const ChecksumType = { + COMPOSITE: "COMPOSITE", + FULL_OBJECT: "FULL_OBJECT", +}; +export const ServerSideEncryption = { + AES256: "AES256", + aws_kms: "aws:kms", + aws_kms_dsse: "aws:kms:dsse", +}; +export const ObjectCannedACL = { + authenticated_read: "authenticated-read", + aws_exec_read: "aws-exec-read", + bucket_owner_full_control: "bucket-owner-full-control", + bucket_owner_read: "bucket-owner-read", + private: "private", + public_read: "public-read", + public_read_write: "public-read-write", +}; +export const ChecksumAlgorithm = { + CRC32: "CRC32", + CRC32C: "CRC32C", + CRC64NVME: "CRC64NVME", + SHA1: "SHA1", + SHA256: "SHA256", +}; +export const MetadataDirective = { + COPY: "COPY", + REPLACE: "REPLACE", +}; +export const ObjectLockLegalHoldStatus = { + OFF: "OFF", + ON: "ON", +}; +export const ObjectLockMode = { + COMPLIANCE: "COMPLIANCE", + GOVERNANCE: "GOVERNANCE", +}; +export const StorageClass = { + DEEP_ARCHIVE: "DEEP_ARCHIVE", + EXPRESS_ONEZONE: "EXPRESS_ONEZONE", + GLACIER: "GLACIER", + GLACIER_IR: "GLACIER_IR", + INTELLIGENT_TIERING: "INTELLIGENT_TIERING", + ONEZONE_IA: "ONEZONE_IA", + OUTPOSTS: "OUTPOSTS", + REDUCED_REDUNDANCY: "REDUCED_REDUNDANCY", + SNOW: "SNOW", + STANDARD: "STANDARD", + STANDARD_IA: "STANDARD_IA", +}; +export const TaggingDirective = { + COPY: "COPY", + REPLACE: "REPLACE", +}; +export class ObjectNotInActiveTierError extends __BaseException { + name = "ObjectNotInActiveTierError"; + $fault = "client"; + constructor(opts) { + super({ + name: "ObjectNotInActiveTierError", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ObjectNotInActiveTierError.prototype); + } +} +export class BucketAlreadyExists extends __BaseException { + name = "BucketAlreadyExists"; + $fault = "client"; + constructor(opts) { + super({ + name: "BucketAlreadyExists", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, BucketAlreadyExists.prototype); + } +} +export class BucketAlreadyOwnedByYou extends __BaseException { + name = "BucketAlreadyOwnedByYou"; + $fault = "client"; + constructor(opts) { + super({ + name: "BucketAlreadyOwnedByYou", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, BucketAlreadyOwnedByYou.prototype); + } +} +export const BucketCannedACL = { + authenticated_read: "authenticated-read", + private: "private", + public_read: "public-read", + public_read_write: "public-read-write", +}; +export const DataRedundancy = { + SingleAvailabilityZone: "SingleAvailabilityZone", + SingleLocalZone: "SingleLocalZone", +}; +export const BucketType = { + Directory: "Directory", +}; +export const LocationType = { + AvailabilityZone: "AvailabilityZone", + LocalZone: "LocalZone", +}; +export const BucketLocationConstraint = { + EU: "EU", + af_south_1: "af-south-1", + ap_east_1: "ap-east-1", + ap_northeast_1: "ap-northeast-1", + ap_northeast_2: "ap-northeast-2", + ap_northeast_3: "ap-northeast-3", + 
ap_south_1: "ap-south-1", + ap_south_2: "ap-south-2", + ap_southeast_1: "ap-southeast-1", + ap_southeast_2: "ap-southeast-2", + ap_southeast_3: "ap-southeast-3", + ap_southeast_4: "ap-southeast-4", + ap_southeast_5: "ap-southeast-5", + ca_central_1: "ca-central-1", + cn_north_1: "cn-north-1", + cn_northwest_1: "cn-northwest-1", + eu_central_1: "eu-central-1", + eu_central_2: "eu-central-2", + eu_north_1: "eu-north-1", + eu_south_1: "eu-south-1", + eu_south_2: "eu-south-2", + eu_west_1: "eu-west-1", + eu_west_2: "eu-west-2", + eu_west_3: "eu-west-3", + il_central_1: "il-central-1", + me_central_1: "me-central-1", + me_south_1: "me-south-1", + sa_east_1: "sa-east-1", + us_east_2: "us-east-2", + us_gov_east_1: "us-gov-east-1", + us_gov_west_1: "us-gov-west-1", + us_west_1: "us-west-1", + us_west_2: "us-west-2", +}; +export const ObjectOwnership = { + BucketOwnerEnforced: "BucketOwnerEnforced", + BucketOwnerPreferred: "BucketOwnerPreferred", + ObjectWriter: "ObjectWriter", +}; +export const SessionMode = { + ReadOnly: "ReadOnly", + ReadWrite: "ReadWrite", +}; +export class NoSuchBucket extends __BaseException { + name = "NoSuchBucket"; + $fault = "client"; + constructor(opts) { + super({ + name: "NoSuchBucket", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, NoSuchBucket.prototype); + } +} +export var AnalyticsFilter; +(function (AnalyticsFilter) { + AnalyticsFilter.visit = (value, visitor) => { + if (value.Prefix !== undefined) + return visitor.Prefix(value.Prefix); + if (value.Tag !== undefined) + return visitor.Tag(value.Tag); + if (value.And !== undefined) + return visitor.And(value.And); + return visitor._(value.$unknown[0], value.$unknown[1]); + }; +})(AnalyticsFilter || (AnalyticsFilter = {})); +export const AnalyticsS3ExportFileFormat = { + CSV: "CSV", +}; +export const StorageClassAnalysisSchemaVersion = { + V_1: "V_1", +}; +export const IntelligentTieringStatus = { + Disabled: "Disabled", + Enabled: "Enabled", +}; +export const IntelligentTieringAccessTier = { + ARCHIVE_ACCESS: "ARCHIVE_ACCESS", + DEEP_ARCHIVE_ACCESS: "DEEP_ARCHIVE_ACCESS", +}; +export const InventoryFormat = { + CSV: "CSV", + ORC: "ORC", + Parquet: "Parquet", +}; +export const InventoryIncludedObjectVersions = { + All: "All", + Current: "Current", +}; +export const InventoryOptionalField = { + BucketKeyStatus: "BucketKeyStatus", + ChecksumAlgorithm: "ChecksumAlgorithm", + ETag: "ETag", + EncryptionStatus: "EncryptionStatus", + IntelligentTieringAccessTier: "IntelligentTieringAccessTier", + IsMultipartUploaded: "IsMultipartUploaded", + LastModifiedDate: "LastModifiedDate", + ObjectAccessControlList: "ObjectAccessControlList", + ObjectLockLegalHoldStatus: "ObjectLockLegalHoldStatus", + ObjectLockMode: "ObjectLockMode", + ObjectLockRetainUntilDate: "ObjectLockRetainUntilDate", + ObjectOwner: "ObjectOwner", + ReplicationStatus: "ReplicationStatus", + Size: "Size", + StorageClass: "StorageClass", +}; +export const InventoryFrequency = { + Daily: "Daily", + Weekly: "Weekly", +}; +export const TransitionStorageClass = { + DEEP_ARCHIVE: "DEEP_ARCHIVE", + GLACIER: "GLACIER", + GLACIER_IR: "GLACIER_IR", + INTELLIGENT_TIERING: "INTELLIGENT_TIERING", + ONEZONE_IA: "ONEZONE_IA", + STANDARD_IA: "STANDARD_IA", +}; +export const ExpirationStatus = { + Disabled: "Disabled", + Enabled: "Enabled", +}; +export const TransitionDefaultMinimumObjectSize = { + all_storage_classes_128K: "all_storage_classes_128K", + varies_by_storage_class: "varies_by_storage_class", +}; +export const BucketLogsPermission = { + 
FULL_CONTROL: "FULL_CONTROL", + READ: "READ", + WRITE: "WRITE", +}; +export const PartitionDateSource = { + DeliveryTime: "DeliveryTime", + EventTime: "EventTime", +}; +export var MetricsFilter; +(function (MetricsFilter) { + MetricsFilter.visit = (value, visitor) => { + if (value.Prefix !== undefined) + return visitor.Prefix(value.Prefix); + if (value.Tag !== undefined) + return visitor.Tag(value.Tag); + if (value.AccessPointArn !== undefined) + return visitor.AccessPointArn(value.AccessPointArn); + if (value.And !== undefined) + return visitor.And(value.And); + return visitor._(value.$unknown[0], value.$unknown[1]); + }; +})(MetricsFilter || (MetricsFilter = {})); +export const Event = { + s3_IntelligentTiering: "s3:IntelligentTiering", + s3_LifecycleExpiration_: "s3:LifecycleExpiration:*", + s3_LifecycleExpiration_Delete: "s3:LifecycleExpiration:Delete", + s3_LifecycleExpiration_DeleteMarkerCreated: "s3:LifecycleExpiration:DeleteMarkerCreated", + s3_LifecycleTransition: "s3:LifecycleTransition", + s3_ObjectAcl_Put: "s3:ObjectAcl:Put", + s3_ObjectCreated_: "s3:ObjectCreated:*", + s3_ObjectCreated_CompleteMultipartUpload: "s3:ObjectCreated:CompleteMultipartUpload", + s3_ObjectCreated_Copy: "s3:ObjectCreated:Copy", + s3_ObjectCreated_Post: "s3:ObjectCreated:Post", + s3_ObjectCreated_Put: "s3:ObjectCreated:Put", + s3_ObjectRemoved_: "s3:ObjectRemoved:*", + s3_ObjectRemoved_Delete: "s3:ObjectRemoved:Delete", + s3_ObjectRemoved_DeleteMarkerCreated: "s3:ObjectRemoved:DeleteMarkerCreated", + s3_ObjectRestore_: "s3:ObjectRestore:*", + s3_ObjectRestore_Completed: "s3:ObjectRestore:Completed", + s3_ObjectRestore_Delete: "s3:ObjectRestore:Delete", + s3_ObjectRestore_Post: "s3:ObjectRestore:Post", + s3_ObjectTagging_: "s3:ObjectTagging:*", + s3_ObjectTagging_Delete: "s3:ObjectTagging:Delete", + s3_ObjectTagging_Put: "s3:ObjectTagging:Put", + s3_ReducedRedundancyLostObject: "s3:ReducedRedundancyLostObject", + s3_Replication_: "s3:Replication:*", + s3_Replication_OperationFailedReplication: "s3:Replication:OperationFailedReplication", + s3_Replication_OperationMissedThreshold: "s3:Replication:OperationMissedThreshold", + s3_Replication_OperationNotTracked: "s3:Replication:OperationNotTracked", + s3_Replication_OperationReplicatedAfterThreshold: "s3:Replication:OperationReplicatedAfterThreshold", +}; +export const FilterRuleName = { + prefix: "prefix", + suffix: "suffix", +}; +export const DeleteMarkerReplicationStatus = { + Disabled: "Disabled", + Enabled: "Enabled", +}; +export const MetricsStatus = { + Disabled: "Disabled", + Enabled: "Enabled", +}; +export const ReplicationTimeStatus = { + Disabled: "Disabled", + Enabled: "Enabled", +}; +export const ExistingObjectReplicationStatus = { + Disabled: "Disabled", + Enabled: "Enabled", +}; +export const ReplicaModificationsStatus = { + Disabled: "Disabled", + Enabled: "Enabled", +}; +export const SseKmsEncryptedObjectsStatus = { + Disabled: "Disabled", + Enabled: "Enabled", +}; +export const ReplicationRuleStatus = { + Disabled: "Disabled", + Enabled: "Enabled", +}; +export const Payer = { + BucketOwner: "BucketOwner", + Requester: "Requester", +}; +export const MFADeleteStatus = { + Disabled: "Disabled", + Enabled: "Enabled", +}; +export const BucketVersioningStatus = { + Enabled: "Enabled", + Suspended: "Suspended", +}; +export const Protocol = { + http: "http", + https: "https", +}; +export const ReplicationStatus = { + COMPLETE: "COMPLETE", + COMPLETED: "COMPLETED", + FAILED: "FAILED", + PENDING: "PENDING", + REPLICA: "REPLICA", +}; +export const 
ChecksumMode = { + ENABLED: "ENABLED", +}; +export class InvalidObjectState extends __BaseException { + name = "InvalidObjectState"; + $fault = "client"; + StorageClass; + AccessTier; + constructor(opts) { + super({ + name: "InvalidObjectState", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidObjectState.prototype); + this.StorageClass = opts.StorageClass; + this.AccessTier = opts.AccessTier; + } +} +export class NoSuchKey extends __BaseException { + name = "NoSuchKey"; + $fault = "client"; + constructor(opts) { + super({ + name: "NoSuchKey", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, NoSuchKey.prototype); + } +} +export const ObjectAttributes = { + CHECKSUM: "Checksum", + ETAG: "ETag", + OBJECT_PARTS: "ObjectParts", + OBJECT_SIZE: "ObjectSize", + STORAGE_CLASS: "StorageClass", +}; +export const ObjectLockEnabled = { + Enabled: "Enabled", +}; +export const ObjectLockRetentionMode = { + COMPLIANCE: "COMPLIANCE", + GOVERNANCE: "GOVERNANCE", +}; +export class NotFound extends __BaseException { + name = "NotFound"; + $fault = "client"; + constructor(opts) { + super({ + name: "NotFound", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, NotFound.prototype); + } +} +export const ArchiveStatus = { + ARCHIVE_ACCESS: "ARCHIVE_ACCESS", + DEEP_ARCHIVE_ACCESS: "DEEP_ARCHIVE_ACCESS", +}; +export const EncodingType = { + url: "url", +}; +export const ObjectStorageClass = { + DEEP_ARCHIVE: "DEEP_ARCHIVE", + EXPRESS_ONEZONE: "EXPRESS_ONEZONE", + GLACIER: "GLACIER", + GLACIER_IR: "GLACIER_IR", + INTELLIGENT_TIERING: "INTELLIGENT_TIERING", + ONEZONE_IA: "ONEZONE_IA", + OUTPOSTS: "OUTPOSTS", + REDUCED_REDUNDANCY: "REDUCED_REDUNDANCY", + SNOW: "SNOW", + STANDARD: "STANDARD", + STANDARD_IA: "STANDARD_IA", +}; +export const OptionalObjectAttributes = { + RESTORE_STATUS: "RestoreStatus", +}; +export const ObjectVersionStorageClass = { + STANDARD: "STANDARD", +}; +export const CompleteMultipartUploadOutputFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSEKMSKeyId && { SSEKMSKeyId: SENSITIVE_STRING }), +}); +export const CompleteMultipartUploadRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSECustomerKey && { SSECustomerKey: SENSITIVE_STRING }), +}); +export const CopyObjectOutputFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSEKMSKeyId && { SSEKMSKeyId: SENSITIVE_STRING }), + ...(obj.SSEKMSEncryptionContext && { SSEKMSEncryptionContext: SENSITIVE_STRING }), +}); +export const CopyObjectRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSECustomerKey && { SSECustomerKey: SENSITIVE_STRING }), + ...(obj.SSEKMSKeyId && { SSEKMSKeyId: SENSITIVE_STRING }), + ...(obj.SSEKMSEncryptionContext && { SSEKMSEncryptionContext: SENSITIVE_STRING }), + ...(obj.CopySourceSSECustomerKey && { CopySourceSSECustomerKey: SENSITIVE_STRING }), +}); +export const CreateMultipartUploadOutputFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSEKMSKeyId && { SSEKMSKeyId: SENSITIVE_STRING }), + ...(obj.SSEKMSEncryptionContext && { SSEKMSEncryptionContext: SENSITIVE_STRING }), +}); +export const CreateMultipartUploadRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSECustomerKey && { SSECustomerKey: SENSITIVE_STRING }), + ...(obj.SSEKMSKeyId && { SSEKMSKeyId: SENSITIVE_STRING }), + ...(obj.SSEKMSEncryptionContext && { SSEKMSEncryptionContext: SENSITIVE_STRING }), +}); +export const SessionCredentialsFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SecretAccessKey && { SecretAccessKey: SENSITIVE_STRING }), + 
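The exception classes in models_0.js (NoSuchUpload, NoSuchBucket, NoSuchKey, NotFound, InvalidObjectState, and the rest) all call Object.setPrototypeOf in their constructors, which is what keeps instanceof checks working after transpilation. A usage sketch, assuming an async context and a placeholder bucket and key:

    import { S3Client, GetObjectCommand, NoSuchKey } from "@aws-sdk/client-s3";

    const s3 = new S3Client({ region: "us-east-1" });
    try {
      await s3.send(new GetObjectCommand({ Bucket: "my-bucket", Key: "missing.txt" }));
    } catch (err) {
      if (err instanceof NoSuchKey) {
        // The setPrototypeOf call in the constructor above makes this check reliable.
        console.warn("object does not exist");
      } else {
        throw err;
      }
    }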
...(obj.SessionToken && { SessionToken: SENSITIVE_STRING }), +}); +export const CreateSessionOutputFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSEKMSKeyId && { SSEKMSKeyId: SENSITIVE_STRING }), + ...(obj.SSEKMSEncryptionContext && { SSEKMSEncryptionContext: SENSITIVE_STRING }), + ...(obj.Credentials && { Credentials: SessionCredentialsFilterSensitiveLog(obj.Credentials) }), +}); +export const CreateSessionRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSEKMSKeyId && { SSEKMSKeyId: SENSITIVE_STRING }), + ...(obj.SSEKMSEncryptionContext && { SSEKMSEncryptionContext: SENSITIVE_STRING }), +}); +export const ServerSideEncryptionByDefaultFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.KMSMasterKeyID && { KMSMasterKeyID: SENSITIVE_STRING }), +}); +export const ServerSideEncryptionRuleFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.ApplyServerSideEncryptionByDefault && { + ApplyServerSideEncryptionByDefault: ServerSideEncryptionByDefaultFilterSensitiveLog(obj.ApplyServerSideEncryptionByDefault), + }), +}); +export const ServerSideEncryptionConfigurationFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Rules && { Rules: obj.Rules.map((item) => ServerSideEncryptionRuleFilterSensitiveLog(item)) }), +}); +export const GetBucketEncryptionOutputFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.ServerSideEncryptionConfiguration && { + ServerSideEncryptionConfiguration: ServerSideEncryptionConfigurationFilterSensitiveLog(obj.ServerSideEncryptionConfiguration), + }), +}); +export const SSEKMSFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.KeyId && { KeyId: SENSITIVE_STRING }), +}); +export const InventoryEncryptionFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSEKMS && { SSEKMS: SSEKMSFilterSensitiveLog(obj.SSEKMS) }), +}); +export const InventoryS3BucketDestinationFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Encryption && { Encryption: InventoryEncryptionFilterSensitiveLog(obj.Encryption) }), +}); +export const InventoryDestinationFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.S3BucketDestination && { + S3BucketDestination: InventoryS3BucketDestinationFilterSensitiveLog(obj.S3BucketDestination), + }), +}); +export const InventoryConfigurationFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Destination && { Destination: InventoryDestinationFilterSensitiveLog(obj.Destination) }), +}); +export const GetBucketInventoryConfigurationOutputFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.InventoryConfiguration && { + InventoryConfiguration: InventoryConfigurationFilterSensitiveLog(obj.InventoryConfiguration), + }), +}); +export const GetObjectOutputFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSEKMSKeyId && { SSEKMSKeyId: SENSITIVE_STRING }), +}); +export const GetObjectRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSECustomerKey && { SSECustomerKey: SENSITIVE_STRING }), +}); +export const GetObjectAttributesRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSECustomerKey && { SSECustomerKey: SENSITIVE_STRING }), +}); +export const GetObjectTorrentOutputFilterSensitiveLog = (obj) => ({ + ...obj, +}); +export const HeadObjectOutputFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSEKMSKeyId && { SSEKMSKeyId: SENSITIVE_STRING }), +}); +export const HeadObjectRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSECustomerKey && { SSECustomerKey: SENSITIVE_STRING }), +}); +export const ListBucketInventoryConfigurationsOutputFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.InventoryConfigurationList 
&& { + InventoryConfigurationList: obj.InventoryConfigurationList.map((item) => InventoryConfigurationFilterSensitiveLog(item)), + }), +}); +export const ListPartsRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSECustomerKey && { SSECustomerKey: SENSITIVE_STRING }), +}); diff --git a/node_modules/@aws-sdk/client-s3/dist-es/models/models_1.js b/node_modules/@aws-sdk/client-s3/dist-es/models/models_1.js new file mode 100644 index 00000000..c374364c --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/models/models_1.js @@ -0,0 +1,198 @@ +import { SENSITIVE_STRING } from "@smithy/smithy-client"; +import { InventoryConfigurationFilterSensitiveLog, ServerSideEncryptionConfigurationFilterSensitiveLog, } from "./models_0"; +import { S3ServiceException as __BaseException } from "./S3ServiceException"; +export const MFADelete = { + Disabled: "Disabled", + Enabled: "Enabled", +}; +export class EncryptionTypeMismatch extends __BaseException { + name = "EncryptionTypeMismatch"; + $fault = "client"; + constructor(opts) { + super({ + name: "EncryptionTypeMismatch", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, EncryptionTypeMismatch.prototype); + } +} +export class InvalidRequest extends __BaseException { + name = "InvalidRequest"; + $fault = "client"; + constructor(opts) { + super({ + name: "InvalidRequest", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidRequest.prototype); + } +} +export class InvalidWriteOffset extends __BaseException { + name = "InvalidWriteOffset"; + $fault = "client"; + constructor(opts) { + super({ + name: "InvalidWriteOffset", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidWriteOffset.prototype); + } +} +export class TooManyParts extends __BaseException { + name = "TooManyParts"; + $fault = "client"; + constructor(opts) { + super({ + name: "TooManyParts", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TooManyParts.prototype); + } +} +export class ObjectAlreadyInActiveTierError extends __BaseException { + name = "ObjectAlreadyInActiveTierError"; + $fault = "client"; + constructor(opts) { + super({ + name: "ObjectAlreadyInActiveTierError", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ObjectAlreadyInActiveTierError.prototype); + } +} +export const Tier = { + Bulk: "Bulk", + Expedited: "Expedited", + Standard: "Standard", +}; +export const ExpressionType = { + SQL: "SQL", +}; +export const CompressionType = { + BZIP2: "BZIP2", + GZIP: "GZIP", + NONE: "NONE", +}; +export const FileHeaderInfo = { + IGNORE: "IGNORE", + NONE: "NONE", + USE: "USE", +}; +export const JSONType = { + DOCUMENT: "DOCUMENT", + LINES: "LINES", +}; +export const QuoteFields = { + ALWAYS: "ALWAYS", + ASNEEDED: "ASNEEDED", +}; +export const RestoreRequestType = { + SELECT: "SELECT", +}; +export var SelectObjectContentEventStream; +(function (SelectObjectContentEventStream) { + SelectObjectContentEventStream.visit = (value, visitor) => { + if (value.Records !== undefined) + return visitor.Records(value.Records); + if (value.Stats !== undefined) + return visitor.Stats(value.Stats); + if (value.Progress !== undefined) + return visitor.Progress(value.Progress); + if (value.Cont !== undefined) + return visitor.Cont(value.Cont); + if (value.End !== undefined) + return visitor.End(value.End); + return visitor._(value.$unknown[0], value.$unknown[1]); + }; +})(SelectObjectContentEventStream || (SelectObjectContentEventStream = {})); +export const 
PutBucketEncryptionRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.ServerSideEncryptionConfiguration && { + ServerSideEncryptionConfiguration: ServerSideEncryptionConfigurationFilterSensitiveLog(obj.ServerSideEncryptionConfiguration), + }), +}); +export const PutBucketInventoryConfigurationRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.InventoryConfiguration && { + InventoryConfiguration: InventoryConfigurationFilterSensitiveLog(obj.InventoryConfiguration), + }), +}); +export const PutObjectOutputFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSEKMSKeyId && { SSEKMSKeyId: SENSITIVE_STRING }), + ...(obj.SSEKMSEncryptionContext && { SSEKMSEncryptionContext: SENSITIVE_STRING }), +}); +export const PutObjectRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSECustomerKey && { SSECustomerKey: SENSITIVE_STRING }), + ...(obj.SSEKMSKeyId && { SSEKMSKeyId: SENSITIVE_STRING }), + ...(obj.SSEKMSEncryptionContext && { SSEKMSEncryptionContext: SENSITIVE_STRING }), +}); +export const EncryptionFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.KMSKeyId && { KMSKeyId: SENSITIVE_STRING }), +}); +export const S3LocationFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Encryption && { Encryption: EncryptionFilterSensitiveLog(obj.Encryption) }), +}); +export const OutputLocationFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.S3 && { S3: S3LocationFilterSensitiveLog(obj.S3) }), +}); +export const RestoreRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.OutputLocation && { OutputLocation: OutputLocationFilterSensitiveLog(obj.OutputLocation) }), +}); +export const RestoreObjectRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.RestoreRequest && { RestoreRequest: RestoreRequestFilterSensitiveLog(obj.RestoreRequest) }), +}); +export const SelectObjectContentEventStreamFilterSensitiveLog = (obj) => { + if (obj.Records !== undefined) + return { Records: obj.Records }; + if (obj.Stats !== undefined) + return { Stats: obj.Stats }; + if (obj.Progress !== undefined) + return { Progress: obj.Progress }; + if (obj.Cont !== undefined) + return { Cont: obj.Cont }; + if (obj.End !== undefined) + return { End: obj.End }; + if (obj.$unknown !== undefined) + return { [obj.$unknown[0]]: "UNKNOWN" }; +}; +export const SelectObjectContentOutputFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Payload && { Payload: "STREAMING_CONTENT" }), +}); +export const SelectObjectContentRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSECustomerKey && { SSECustomerKey: SENSITIVE_STRING }), +}); +export const UploadPartOutputFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSEKMSKeyId && { SSEKMSKeyId: SENSITIVE_STRING }), +}); +export const UploadPartRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSECustomerKey && { SSECustomerKey: SENSITIVE_STRING }), +}); +export const UploadPartCopyOutputFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSEKMSKeyId && { SSEKMSKeyId: SENSITIVE_STRING }), +}); +export const UploadPartCopyRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSECustomerKey && { SSECustomerKey: SENSITIVE_STRING }), + ...(obj.CopySourceSSECustomerKey && { CopySourceSSECustomerKey: SENSITIVE_STRING }), +}); +export const WriteGetObjectResponseRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SSEKMSKeyId && { SSEKMSKeyId: SENSITIVE_STRING }), +}); diff --git a/node_modules/@aws-sdk/client-s3/dist-es/pagination/Interfaces.js b/node_modules/@aws-sdk/client-s3/dist-es/pagination/Interfaces.js new file mode 100644 
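Every *FilterSensitiveLog helper in models_0.js and models_1.js shallow-copies its input and overwrites customer key material (SSECustomerKey, SSEKMSKeyId, SSEKMSEncryptionContext, SecretAccessKey, and so on) with the SENSITIVE_STRING constant from @smithy/smithy-client, so the SDK's request/response logging never prints it. A small sketch with placeholder values:

    import { GetObjectRequestFilterSensitiveLog } from "@aws-sdk/client-s3";

    const redacted = GetObjectRequestFilterSensitiveLog({
      Bucket: "my-bucket",
      Key: "file.bin",
      SSECustomerKey: "base64-key-material", // swapped for SENSITIVE_STRING
    });
    console.log(redacted.SSECustomerKey);    // "***SensitiveInformation***"

The input object itself is never mutated; each helper spreads into a fresh object, so redaction only affects what gets logged.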
index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/pagination/Interfaces.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/pagination/ListBucketsPaginator.js b/node_modules/@aws-sdk/client-s3/dist-es/pagination/ListBucketsPaginator.js new file mode 100644 index 00000000..83d33059 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/pagination/ListBucketsPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListBucketsCommand } from "../commands/ListBucketsCommand"; +import { S3Client } from "../S3Client"; +export const paginateListBuckets = createPaginator(S3Client, ListBucketsCommand, "ContinuationToken", "ContinuationToken", "MaxBuckets"); diff --git a/node_modules/@aws-sdk/client-s3/dist-es/pagination/ListDirectoryBucketsPaginator.js b/node_modules/@aws-sdk/client-s3/dist-es/pagination/ListDirectoryBucketsPaginator.js new file mode 100644 index 00000000..e01aa45f --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/pagination/ListDirectoryBucketsPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListDirectoryBucketsCommand, } from "../commands/ListDirectoryBucketsCommand"; +import { S3Client } from "../S3Client"; +export const paginateListDirectoryBuckets = createPaginator(S3Client, ListDirectoryBucketsCommand, "ContinuationToken", "ContinuationToken", "MaxDirectoryBuckets"); diff --git a/node_modules/@aws-sdk/client-s3/dist-es/pagination/ListObjectsV2Paginator.js b/node_modules/@aws-sdk/client-s3/dist-es/pagination/ListObjectsV2Paginator.js new file mode 100644 index 00000000..dfabccc4 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/pagination/ListObjectsV2Paginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListObjectsV2Command, } from "../commands/ListObjectsV2Command"; +import { S3Client } from "../S3Client"; +export const paginateListObjectsV2 = createPaginator(S3Client, ListObjectsV2Command, "ContinuationToken", "NextContinuationToken", "MaxKeys"); diff --git a/node_modules/@aws-sdk/client-s3/dist-es/pagination/ListPartsPaginator.js b/node_modules/@aws-sdk/client-s3/dist-es/pagination/ListPartsPaginator.js new file mode 100644 index 00000000..0c1e60af --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/pagination/ListPartsPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListPartsCommand } from "../commands/ListPartsCommand"; +import { S3Client } from "../S3Client"; +export const paginateListParts = createPaginator(S3Client, ListPartsCommand, "PartNumberMarker", "NextPartNumberMarker", "MaxParts"); diff --git a/node_modules/@aws-sdk/client-s3/dist-es/pagination/index.js b/node_modules/@aws-sdk/client-s3/dist-es/pagination/index.js new file mode 100644 index 00000000..9438ebe6 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/pagination/index.js @@ -0,0 +1,5 @@ +export * from "./Interfaces"; +export * from "./ListBucketsPaginator"; +export * from "./ListDirectoryBucketsPaginator"; +export * from "./ListObjectsV2Paginator"; +export * from "./ListPartsPaginator"; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/protocols/Aws_restXml.js b/node_modules/@aws-sdk/client-s3/dist-es/protocols/Aws_restXml.js new file mode 100644 index 00000000..06e0f70a --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/protocols/Aws_restXml.js @@ -0,0 +1,7698 @@ +import { loadRestXmlErrorCode, parseXmlBody as parseBody, parseXmlErrorBody as 
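The paginator modules above each bind a List* command to createPaginator with its token pair (ContinuationToken to NextContinuationToken for ListObjectsV2, PartNumberMarker to NextPartNumberMarker for ListParts) and a page-size key, producing an async iterator of result pages. A usage sketch, assuming an async context and a placeholder bucket:

    import { S3Client, paginateListObjectsV2 } from "@aws-sdk/client-s3";

    const client = new S3Client({ region: "us-east-1" });
    for await (const page of paginateListObjectsV2({ client }, { Bucket: "my-bucket" })) {
      for (const obj of page.Contents ?? []) {
        console.log(obj.Key, obj.Size);
      }
    }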
parseErrorBody } from "@aws-sdk/core"; +import { XmlNode as __XmlNode, XmlText as __XmlText } from "@aws-sdk/xml-builder"; +import { requestBuilder as rb } from "@smithy/core"; +import { isValidHostname as __isValidHostname, } from "@smithy/protocol-http"; +import { collectBody, dateToUtcString as __dateToUtcString, decorateServiceException as __decorateServiceException, expectNonNull as __expectNonNull, expectObject as __expectObject, expectString as __expectString, expectUnion as __expectUnion, getArrayIfSingleItem as __getArrayIfSingleItem, isSerializableHeaderValue, map, parseBoolean as __parseBoolean, parseRfc3339DateTimeWithOffset as __parseRfc3339DateTimeWithOffset, parseRfc7231DateTime as __parseRfc7231DateTime, quoteHeader as __quoteHeader, serializeDateTime as __serializeDateTime, strictParseInt32 as __strictParseInt32, strictParseLong as __strictParseLong, withBaseException, } from "@smithy/smithy-client"; +import { AnalyticsFilter, BucketAlreadyExists, BucketAlreadyOwnedByYou, InvalidObjectState, MetricsFilter, NoSuchBucket, NoSuchKey, NoSuchUpload, NotFound, ObjectNotInActiveTierError, } from "../models/models_0"; +import { EncryptionTypeMismatch, InvalidRequest, InvalidWriteOffset, ObjectAlreadyInActiveTierError, TooManyParts, } from "../models/models_1"; +import { S3ServiceException as __BaseException } from "../models/S3ServiceException"; +export const se_AbortMultipartUploadCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + [_xaimit]: [() => isSerializableHeaderValue(input[_IMIT]), () => __dateToUtcString(input[_IMIT]).toString()], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_xi]: [, "AbortMultipartUpload"], + [_uI]: [, __expectNonNull(input[_UI], `UploadId`)], + }); + let body; + b.m("DELETE").h(headers).q(query).b(body); + return b.build(); +}; +export const se_CompleteMultipartUploadCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_xacc]: input[_CCRC], + [_xacc_]: input[_CCRCC], + [_xacc__]: input[_CCRCNVME], + [_xacs]: input[_CSHA], + [_xacs_]: input[_CSHAh], + [_xact]: input[_CT], + [_xamos]: [() => isSerializableHeaderValue(input[_MOS]), () => input[_MOS].toString()], + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + [_im]: input[_IM], + [_inm]: input[_INM], + [_xasseca]: input[_SSECA], + [_xasseck]: input[_SSECK], + [_xasseckm]: input[_SSECKMD], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_uI]: [, __expectNonNull(input[_UI], `UploadId`)], + }); + let body; + let contents; + if (input.MultipartUpload !== undefined) { + contents = se_CompletedMultipartUpload(input.MultipartUpload, context); + contents = contents.n("CompleteMultipartUpload"); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("POST").h(headers).q(query).b(body); + return b.build(); +}; +export const se_CopyObjectCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + ...(input.Metadata !== undefined && + Object.keys(input.Metadata).reduce((acc, suffix) => { + acc[`x-amz-meta-${suffix.toLowerCase()}`] = 
input.Metadata[suffix]; + return acc; + }, {})), + [_xaa]: input[_ACL], + [_cc]: input[_CC], + [_xaca]: input[_CA], + [_cd]: input[_CD], + [_ce]: input[_CE], + [_cl]: input[_CL], + [_ct]: input[_CTo], + [_xacs__]: input[_CS], + [_xacsim]: input[_CSIM], + [_xacsims]: [() => isSerializableHeaderValue(input[_CSIMS]), () => __dateToUtcString(input[_CSIMS]).toString()], + [_xacsinm]: input[_CSINM], + [_xacsius]: [() => isSerializableHeaderValue(input[_CSIUS]), () => __dateToUtcString(input[_CSIUS]).toString()], + [_e]: [() => isSerializableHeaderValue(input[_E]), () => __dateToUtcString(input[_E]).toString()], + [_xagfc]: input[_GFC], + [_xagr]: input[_GR], + [_xagra]: input[_GRACP], + [_xagwa]: input[_GWACP], + [_xamd]: input[_MD], + [_xatd]: input[_TD], + [_xasse]: input[_SSE], + [_xasc]: input[_SC], + [_xawrl]: input[_WRL], + [_xasseca]: input[_SSECA], + [_xasseck]: input[_SSECK], + [_xasseckm]: input[_SSECKMD], + [_xasseakki]: input[_SSEKMSKI], + [_xassec]: input[_SSEKMSEC], + [_xassebke]: [() => isSerializableHeaderValue(input[_BKE]), () => input[_BKE].toString()], + [_xacssseca]: input[_CSSSECA], + [_xacssseck]: input[_CSSSECK], + [_xacssseckm]: input[_CSSSECKMD], + [_xarp]: input[_RP], + [_xat]: input[_T], + [_xaolm]: input[_OLM], + [_xaolrud]: [() => isSerializableHeaderValue(input[_OLRUD]), () => __serializeDateTime(input[_OLRUD]).toString()], + [_xaollh]: input[_OLLHS], + [_xaebo]: input[_EBO], + [_xasebo]: input[_ESBO], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_xi]: [, "CopyObject"], + }); + let body; + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_CreateBucketCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_xaa]: input[_ACL], + [_xagfc]: input[_GFC], + [_xagr]: input[_GR], + [_xagra]: input[_GRACP], + [_xagw]: input[_GW], + [_xagwa]: input[_GWACP], + [_xabole]: [() => isSerializableHeaderValue(input[_OLEFB]), () => input[_OLEFB].toString()], + [_xaoo]: input[_OO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + let body; + let contents; + if (input.CreateBucketConfiguration !== undefined) { + contents = se_CreateBucketConfiguration(input.CreateBucketConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).b(body); + return b.build(); +}; +export const se_CreateBucketMetadataTableConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_mT]: [, ""], + }); + let body; + let contents; + if (input.MetadataTableConfiguration !== undefined) { + contents = se_MetadataTableConfiguration(input.MetadataTableConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("POST").h(headers).q(query).b(body); + return b.build(); +}; +export const se_CreateMultipartUploadCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + ...(input.Metadata !== undefined && + 
Object.keys(input.Metadata).reduce((acc, suffix) => { + acc[`x-amz-meta-${suffix.toLowerCase()}`] = input.Metadata[suffix]; + return acc; + }, {})), + [_xaa]: input[_ACL], + [_cc]: input[_CC], + [_cd]: input[_CD], + [_ce]: input[_CE], + [_cl]: input[_CL], + [_ct]: input[_CTo], + [_e]: [() => isSerializableHeaderValue(input[_E]), () => __dateToUtcString(input[_E]).toString()], + [_xagfc]: input[_GFC], + [_xagr]: input[_GR], + [_xagra]: input[_GRACP], + [_xagwa]: input[_GWACP], + [_xasse]: input[_SSE], + [_xasc]: input[_SC], + [_xawrl]: input[_WRL], + [_xasseca]: input[_SSECA], + [_xasseck]: input[_SSECK], + [_xasseckm]: input[_SSECKMD], + [_xasseakki]: input[_SSEKMSKI], + [_xassec]: input[_SSEKMSEC], + [_xassebke]: [() => isSerializableHeaderValue(input[_BKE]), () => input[_BKE].toString()], + [_xarp]: input[_RP], + [_xat]: input[_T], + [_xaolm]: input[_OLM], + [_xaolrud]: [() => isSerializableHeaderValue(input[_OLRUD]), () => __serializeDateTime(input[_OLRUD]).toString()], + [_xaollh]: input[_OLLHS], + [_xaebo]: input[_EBO], + [_xaca]: input[_CA], + [_xact]: input[_CT], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_u]: [, ""], + }); + let body; + b.m("POST").h(headers).q(query).b(body); + return b.build(); +}; +export const se_CreateSessionCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xacsm]: input[_SM], + [_xasse]: input[_SSE], + [_xasseakki]: input[_SSEKMSKI], + [_xassec]: input[_SSEKMSEC], + [_xassebke]: [() => isSerializableHeaderValue(input[_BKE]), () => input[_BKE].toString()], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_s]: [, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_DeleteBucketCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + let body; + b.m("DELETE").h(headers).b(body); + return b.build(); +}; +export const se_DeleteBucketAnalyticsConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_a]: [, ""], + [_i]: [, __expectNonNull(input[_I], `Id`)], + }); + let body; + b.m("DELETE").h(headers).q(query).b(body); + return b.build(); +}; +export const se_DeleteBucketCorsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_c]: [, ""], + }); + let body; + b.m("DELETE").h(headers).q(query).b(body); + return b.build(); +}; +export const se_DeleteBucketEncryptionCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_en]: [, ""], + }); + let body; + b.m("DELETE").h(headers).q(query).b(body); + return b.build(); +}; +export const se_DeleteBucketIntelligentTieringConfigurationCommand = async (input, context) => { + const b = 
rb(input, context); + const headers = {}; + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_it]: [, ""], + [_i]: [, __expectNonNull(input[_I], `Id`)], + }); + let body; + b.m("DELETE").h(headers).q(query).b(body); + return b.build(); +}; +export const se_DeleteBucketInventoryConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_in]: [, ""], + [_i]: [, __expectNonNull(input[_I], `Id`)], + }); + let body; + b.m("DELETE").h(headers).q(query).b(body); + return b.build(); +}; +export const se_DeleteBucketLifecycleCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_l]: [, ""], + }); + let body; + b.m("DELETE").h(headers).q(query).b(body); + return b.build(); +}; +export const se_DeleteBucketMetadataTableConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_mT]: [, ""], + }); + let body; + b.m("DELETE").h(headers).q(query).b(body); + return b.build(); +}; +export const se_DeleteBucketMetricsConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_m]: [, ""], + [_i]: [, __expectNonNull(input[_I], `Id`)], + }); + let body; + b.m("DELETE").h(headers).q(query).b(body); + return b.build(); +}; +export const se_DeleteBucketOwnershipControlsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_oC]: [, ""], + }); + let body; + b.m("DELETE").h(headers).q(query).b(body); + return b.build(); +}; +export const se_DeleteBucketPolicyCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_p]: [, ""], + }); + let body; + b.m("DELETE").h(headers).q(query).b(body); + return b.build(); +}; +export const se_DeleteBucketReplicationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_r]: [, ""], + }); + let body; + b.m("DELETE").h(headers).q(query).b(body); + return b.build(); +}; +export const se_DeleteBucketTaggingCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_t]: [, ""], + }); + let body; + b.m("DELETE").h(headers).q(query).b(body); + return b.build(); +}; +export const 
se_DeleteBucketWebsiteCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_w]: [, ""], + }); + let body; + b.m("DELETE").h(headers).q(query).b(body); + return b.build(); +}; +export const se_DeleteObjectCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xam]: input[_MFA], + [_xarp]: input[_RP], + [_xabgr]: [() => isSerializableHeaderValue(input[_BGR]), () => input[_BGR].toString()], + [_xaebo]: input[_EBO], + [_im]: input[_IM], + [_xaimlmt]: [() => isSerializableHeaderValue(input[_IMLMT]), () => __dateToUtcString(input[_IMLMT]).toString()], + [_xaims]: [() => isSerializableHeaderValue(input[_IMS]), () => input[_IMS].toString()], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_xi]: [, "DeleteObject"], + [_vI]: [, input[_VI]], + }); + let body; + b.m("DELETE").h(headers).q(query).b(body); + return b.build(); +}; +export const se_DeleteObjectsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_xam]: input[_MFA], + [_xarp]: input[_RP], + [_xabgr]: [() => isSerializableHeaderValue(input[_BGR]), () => input[_BGR].toString()], + [_xaebo]: input[_EBO], + [_xasca]: input[_CA], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_d]: [, ""], + }); + let body; + let contents; + if (input.Delete !== undefined) { + contents = se_Delete(input.Delete, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("POST").h(headers).q(query).b(body); + return b.build(); +}; +export const se_DeleteObjectTaggingCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_t]: [, ""], + [_vI]: [, input[_VI]], + }); + let body; + b.m("DELETE").h(headers).q(query).b(body); + return b.build(); +}; +export const se_DeletePublicAccessBlockCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_pAB]: [, ""], + }); + let body; + b.m("DELETE").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketAccelerateConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + [_xarp]: input[_RP], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_ac]: [, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketAclCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_acl]: 
[, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketAnalyticsConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_a]: [, ""], + [_xi]: [, "GetBucketAnalyticsConfiguration"], + [_i]: [, __expectNonNull(input[_I], `Id`)], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketCorsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_c]: [, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketEncryptionCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_en]: [, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketIntelligentTieringConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = {}; + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_it]: [, ""], + [_xi]: [, "GetBucketIntelligentTieringConfiguration"], + [_i]: [, __expectNonNull(input[_I], `Id`)], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketInventoryConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_in]: [, ""], + [_xi]: [, "GetBucketInventoryConfiguration"], + [_i]: [, __expectNonNull(input[_I], `Id`)], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketLifecycleConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_l]: [, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketLocationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_lo]: [, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketLoggingCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_log]: [, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketMetadataTableConfigurationCommand = async (input, context) => { + const b = rb(input, 
context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_mT]: [, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketMetricsConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_m]: [, ""], + [_xi]: [, "GetBucketMetricsConfiguration"], + [_i]: [, __expectNonNull(input[_I], `Id`)], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketNotificationConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_n]: [, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketOwnershipControlsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_oC]: [, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketPolicyCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_p]: [, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketPolicyStatusCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_pS]: [, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketReplicationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_r]: [, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketRequestPaymentCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_rP]: [, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketTaggingCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_t]: [, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketVersioningCommand = async (input, context) => { + 
const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_v]: [, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetBucketWebsiteCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_w]: [, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetObjectCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_im]: input[_IM], + [_ims]: [() => isSerializableHeaderValue(input[_IMSf]), () => __dateToUtcString(input[_IMSf]).toString()], + [_inm]: input[_INM], + [_ius]: [() => isSerializableHeaderValue(input[_IUS]), () => __dateToUtcString(input[_IUS]).toString()], + [_ra]: input[_R], + [_xasseca]: input[_SSECA], + [_xasseck]: input[_SSECK], + [_xasseckm]: input[_SSECKMD], + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + [_xacm]: input[_CM], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_xi]: [, "GetObject"], + [_rcc]: [, input[_RCC]], + [_rcd]: [, input[_RCD]], + [_rce]: [, input[_RCE]], + [_rcl]: [, input[_RCL]], + [_rct]: [, input[_RCT]], + [_re]: [() => input.ResponseExpires !== void 0, () => __dateToUtcString(input[_RE]).toString()], + [_vI]: [, input[_VI]], + [_pN]: [() => input.PartNumber !== void 0, () => input[_PN].toString()], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetObjectAclCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_acl]: [, ""], + [_vI]: [, input[_VI]], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetObjectAttributesCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xamp]: [() => isSerializableHeaderValue(input[_MP]), () => input[_MP].toString()], + [_xapnm]: input[_PNM], + [_xasseca]: input[_SSECA], + [_xasseck]: input[_SSECK], + [_xasseckm]: input[_SSECKMD], + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + [_xaoa]: [() => isSerializableHeaderValue(input[_OA]), () => (input[_OA] || []).map(__quoteHeader).join(", ")], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_at]: [, ""], + [_vI]: [, input[_VI]], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetObjectLegalHoldCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_lh]: [, ""], + [_vI]: [, input[_VI]], + 
}); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetObjectLockConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_ol]: [, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetObjectRetentionCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_ret]: [, ""], + [_vI]: [, input[_VI]], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetObjectTaggingCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + [_xarp]: input[_RP], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_t]: [, ""], + [_vI]: [, input[_VI]], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetObjectTorrentCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_to]: [, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_GetPublicAccessBlockCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_pAB]: [, ""], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_HeadBucketCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + let body; + b.m("HEAD").h(headers).b(body); + return b.build(); +}; +export const se_HeadObjectCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_im]: input[_IM], + [_ims]: [() => isSerializableHeaderValue(input[_IMSf]), () => __dateToUtcString(input[_IMSf]).toString()], + [_inm]: input[_INM], + [_ius]: [() => isSerializableHeaderValue(input[_IUS]), () => __dateToUtcString(input[_IUS]).toString()], + [_ra]: input[_R], + [_xasseca]: input[_SSECA], + [_xasseck]: input[_SSECK], + [_xasseckm]: input[_SSECKMD], + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + [_xacm]: input[_CM], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_rcc]: [, input[_RCC]], + [_rcd]: [, input[_RCD]], + [_rce]: [, input[_RCE]], + [_rcl]: [, input[_RCL]], + [_rct]: [, input[_RCT]], + [_re]: [() => input.ResponseExpires !== void 0, () => 
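+ // Value half of a [guard, value] thunk pair: only emitted when ResponseExpires is set, and formatted as an RFC 7231 HTTP date (so it appears).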
__dateToUtcString(input[_RE]).toString()], + [_vI]: [, input[_VI]], + [_pN]: [() => input.PartNumber !== void 0, () => input[_PN].toString()], + }); + let body; + b.m("HEAD").h(headers).q(query).b(body); + return b.build(); +}; +export const se_ListBucketAnalyticsConfigurationsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_a]: [, ""], + [_xi]: [, "ListBucketAnalyticsConfigurations"], + [_ct_]: [, input[_CTon]], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_ListBucketIntelligentTieringConfigurationsCommand = async (input, context) => { + const b = rb(input, context); + const headers = {}; + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_it]: [, ""], + [_xi]: [, "ListBucketIntelligentTieringConfigurations"], + [_ct_]: [, input[_CTon]], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_ListBucketInventoryConfigurationsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_in]: [, ""], + [_xi]: [, "ListBucketInventoryConfigurations"], + [_ct_]: [, input[_CTon]], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_ListBucketMetricsConfigurationsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_m]: [, ""], + [_xi]: [, "ListBucketMetricsConfigurations"], + [_ct_]: [, input[_CTon]], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_ListBucketsCommand = async (input, context) => { + const b = rb(input, context); + const headers = {}; + b.bp("/"); + const query = map({ + [_xi]: [, "ListBuckets"], + [_mb]: [() => input.MaxBuckets !== void 0, () => input[_MB].toString()], + [_ct_]: [, input[_CTon]], + [_pr]: [, input[_P]], + [_br]: [, input[_BR]], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_ListDirectoryBucketsCommand = async (input, context) => { + const b = rb(input, context); + const headers = {}; + b.bp("/"); + const query = map({ + [_xi]: [, "ListDirectoryBuckets"], + [_ct_]: [, input[_CTon]], + [_mdb]: [() => input.MaxDirectoryBuckets !== void 0, () => input[_MDB].toString()], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_ListMultipartUploadsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + [_xarp]: input[_RP], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_u]: [, ""], + [_de]: [, input[_D]], + [_et]: [, input[_ET]], + [_km]: [, input[_KM]], + [_mu]: [() => input.MaxUploads !== void 0, () => input[_MU].toString()], + [_pr]: [, input[_P]], + [_uim]: [, input[_UIM]], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_ListObjectsCommand = async 
(input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + [_xaooa]: [() => isSerializableHeaderValue(input[_OOA]), () => (input[_OOA] || []).map(__quoteHeader).join(", ")], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_de]: [, input[_D]], + [_et]: [, input[_ET]], + [_ma]: [, input[_M]], + [_mk]: [() => input.MaxKeys !== void 0, () => input[_MK].toString()], + [_pr]: [, input[_P]], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_ListObjectsV2Command = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + [_xaooa]: [() => isSerializableHeaderValue(input[_OOA]), () => (input[_OOA] || []).map(__quoteHeader).join(", ")], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_lt]: [, "2"], + [_de]: [, input[_D]], + [_et]: [, input[_ET]], + [_mk]: [() => input.MaxKeys !== void 0, () => input[_MK].toString()], + [_pr]: [, input[_P]], + [_ct_]: [, input[_CTon]], + [_fo]: [() => input.FetchOwner !== void 0, () => input[_FO].toString()], + [_sa]: [, input[_SA]], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_ListObjectVersionsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xaebo]: input[_EBO], + [_xarp]: input[_RP], + [_xaooa]: [() => isSerializableHeaderValue(input[_OOA]), () => (input[_OOA] || []).map(__quoteHeader).join(", ")], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_ver]: [, ""], + [_de]: [, input[_D]], + [_et]: [, input[_ET]], + [_km]: [, input[_KM]], + [_mk]: [() => input.MaxKeys !== void 0, () => input[_MK].toString()], + [_pr]: [, input[_P]], + [_vim]: [, input[_VIM]], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_ListPartsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + [_xasseca]: input[_SSECA], + [_xasseck]: input[_SSECK], + [_xasseckm]: input[_SSECKMD], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_xi]: [, "ListParts"], + [_mp]: [() => input.MaxParts !== void 0, () => input[_MP].toString()], + [_pnm]: [, input[_PNM]], + [_uI]: [, __expectNonNull(input[_UI], `UploadId`)], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutBucketAccelerateConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_xaebo]: input[_EBO], + [_xasca]: input[_CA], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_ac]: [, ""], + }); + let body; + let contents; + if (input.AccelerateConfiguration !== undefined) { + contents = se_AccelerateConfiguration(input.AccelerateConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return 
b.build(); +}; +export const se_PutBucketAclCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_xaa]: input[_ACL], + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xagfc]: input[_GFC], + [_xagr]: input[_GR], + [_xagra]: input[_GRACP], + [_xagw]: input[_GW], + [_xagwa]: input[_GWACP], + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_acl]: [, ""], + }); + let body; + let contents; + if (input.AccessControlPolicy !== undefined) { + contents = se_AccessControlPolicy(input.AccessControlPolicy, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutBucketAnalyticsConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_a]: [, ""], + [_i]: [, __expectNonNull(input[_I], `Id`)], + }); + let body; + let contents; + if (input.AnalyticsConfiguration !== undefined) { + contents = se_AnalyticsConfiguration(input.AnalyticsConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutBucketCorsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_c]: [, ""], + }); + let body; + let contents; + if (input.CORSConfiguration !== undefined) { + contents = se_CORSConfiguration(input.CORSConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutBucketEncryptionCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_en]: [, ""], + }); + let body; + let contents; + if (input.ServerSideEncryptionConfiguration !== undefined) { + contents = se_ServerSideEncryptionConfiguration(input.ServerSideEncryptionConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutBucketIntelligentTieringConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = { + "content-type": "application/xml", + }; + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_it]: [, ""], + [_i]: [, __expectNonNull(input[_I], `Id`)], + }); + let body; + let contents; + if (input.IntelligentTieringConfiguration !== undefined) { + contents = 
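+ // _ve is presumably the '<?xml version="1.0" encoding="UTF-8"?>' prolog; the serialized node gets the S3 xmlns before being appended to the body.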
se_IntelligentTieringConfiguration(input.IntelligentTieringConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutBucketInventoryConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_in]: [, ""], + [_i]: [, __expectNonNull(input[_I], `Id`)], + }); + let body; + let contents; + if (input.InventoryConfiguration !== undefined) { + contents = se_InventoryConfiguration(input.InventoryConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutBucketLifecycleConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_xasca]: input[_CA], + [_xaebo]: input[_EBO], + [_xatdmos]: input[_TDMOS], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_l]: [, ""], + }); + let body; + let contents; + if (input.LifecycleConfiguration !== undefined) { + contents = se_BucketLifecycleConfiguration(input.LifecycleConfiguration, context); + contents = contents.n("LifecycleConfiguration"); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutBucketLoggingCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_log]: [, ""], + }); + let body; + let contents; + if (input.BucketLoggingStatus !== undefined) { + contents = se_BucketLoggingStatus(input.BucketLoggingStatus, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutBucketMetricsConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_m]: [, ""], + [_i]: [, __expectNonNull(input[_I], `Id`)], + }); + let body; + let contents; + if (input.MetricsConfiguration !== undefined) { + contents = se_MetricsConfiguration(input.MetricsConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutBucketNotificationConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_xaebo]: input[_EBO], + [_xasdv]: [() => 
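+ // Guard thunk: the skip-destination-validation header is emitted only when the flag is actually present.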
isSerializableHeaderValue(input[_SDV]), () => input[_SDV].toString()], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_n]: [, ""], + }); + let body; + let contents; + if (input.NotificationConfiguration !== undefined) { + contents = se_NotificationConfiguration(input.NotificationConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutBucketOwnershipControlsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_oC]: [, ""], + }); + let body; + let contents; + if (input.OwnershipControls !== undefined) { + contents = se_OwnershipControls(input.OwnershipControls, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutBucketPolicyCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "text/plain", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xacrsba]: [() => isSerializableHeaderValue(input[_CRSBA]), () => input[_CRSBA].toString()], + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_p]: [, ""], + }); + let body; + let contents; + if (input.Policy !== undefined) { + contents = input.Policy; + body = contents; + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutBucketReplicationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xabolt]: input[_To], + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_r]: [, ""], + }); + let body; + let contents; + if (input.ReplicationConfiguration !== undefined) { + contents = se_ReplicationConfiguration(input.ReplicationConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutBucketRequestPaymentCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_rP]: [, ""], + }); + let body; + let contents; + if (input.RequestPaymentConfiguration !== undefined) { + contents = se_RequestPaymentConfiguration(input.RequestPaymentConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutBucketTaggingCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, 
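+ // map() appears to run every header entry through isSerializableHeaderValue, dropping values that are undefined or otherwise unserializable.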
{ + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_t]: [, ""], + }); + let body; + let contents; + if (input.Tagging !== undefined) { + contents = se_Tagging(input.Tagging, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutBucketVersioningCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xam]: input[_MFA], + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_v]: [, ""], + }); + let body; + let contents; + if (input.VersioningConfiguration !== undefined) { + contents = se_VersioningConfiguration(input.VersioningConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutBucketWebsiteCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_w]: [, ""], + }); + let body; + let contents; + if (input.WebsiteConfiguration !== undefined) { + contents = se_WebsiteConfiguration(input.WebsiteConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutObjectCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + ...(input.Metadata !== undefined && + Object.keys(input.Metadata).reduce((acc, suffix) => { + acc[`x-amz-meta-${suffix.toLowerCase()}`] = input.Metadata[suffix]; + return acc; + }, {})), + [_ct]: input[_CTo] || "application/octet-stream", + [_xaa]: input[_ACL], + [_cc]: input[_CC], + [_cd]: input[_CD], + [_ce]: input[_CE], + [_cl]: input[_CL], + [_cl_]: [() => isSerializableHeaderValue(input[_CLo]), () => input[_CLo].toString()], + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xacc]: input[_CCRC], + [_xacc_]: input[_CCRCC], + [_xacc__]: input[_CCRCNVME], + [_xacs]: input[_CSHA], + [_xacs_]: input[_CSHAh], + [_e]: [() => isSerializableHeaderValue(input[_E]), () => __dateToUtcString(input[_E]).toString()], + [_im]: input[_IM], + [_inm]: input[_INM], + [_xagfc]: input[_GFC], + [_xagr]: input[_GR], + [_xagra]: input[_GRACP], + [_xagwa]: input[_GWACP], + [_xawob]: [() => isSerializableHeaderValue(input[_WOB]), () => input[_WOB].toString()], + [_xasse]: input[_SSE], + [_xasc]: input[_SC], + [_xawrl]: input[_WRL], + [_xasseca]: input[_SSECA], + [_xasseck]: input[_SSECK], + [_xasseckm]: input[_SSECKMD], + [_xasseakki]: input[_SSEKMSKI], + [_xassec]: input[_SSEKMSEC], + [_xassebke]: [() => isSerializableHeaderValue(input[_BKE]), () => input[_BKE].toString()], + [_xarp]: input[_RP], + [_xat]: input[_T], + [_xaolm]: input[_OLM], + [_xaolrud]: [() => isSerializableHeaderValue(input[_OLRUD]), 
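+ // Value thunk: the object-lock retain-until date is serialized via __serializeDateTime (apparently ISO-8601, unlike the RFC 7231 dates above).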
() => __serializeDateTime(input[_OLRUD]).toString()], + [_xaollh]: input[_OLLHS], + [_xaebo]: input[_EBO], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_xi]: [, "PutObject"], + }); + let body; + let contents; + if (input.Body !== undefined) { + contents = input.Body; + body = contents; + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutObjectAclCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_xaa]: input[_ACL], + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xagfc]: input[_GFC], + [_xagr]: input[_GR], + [_xagra]: input[_GRACP], + [_xagw]: input[_GW], + [_xagwa]: input[_GWACP], + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_acl]: [, ""], + [_vI]: [, input[_VI]], + }); + let body; + let contents; + if (input.AccessControlPolicy !== undefined) { + contents = se_AccessControlPolicy(input.AccessControlPolicy, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutObjectLegalHoldCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_xarp]: input[_RP], + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_lh]: [, ""], + [_vI]: [, input[_VI]], + }); + let body; + let contents; + if (input.LegalHold !== undefined) { + contents = se_ObjectLockLegalHold(input.LegalHold, context); + contents = contents.n("LegalHold"); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutObjectLockConfigurationCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_xarp]: input[_RP], + [_xabolt]: input[_To], + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_ol]: [, ""], + }); + let body; + let contents; + if (input.ObjectLockConfiguration !== undefined) { + contents = se_ObjectLockConfiguration(input.ObjectLockConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutObjectRetentionCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_xarp]: input[_RP], + [_xabgr]: [() => isSerializableHeaderValue(input[_BGR]), () => input[_BGR].toString()], + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", 
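+ // The trailing flag presumably marks "{Key+}" as a greedy label, preserving "/" characters inside the object key.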
true); + const query = map({ + [_ret]: [, ""], + [_vI]: [, input[_VI]], + }); + let body; + let contents; + if (input.Retention !== undefined) { + contents = se_ObjectLockRetention(input.Retention, context); + contents = contents.n("Retention"); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutObjectTaggingCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO], + [_xarp]: input[_RP], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_t]: [, ""], + [_vI]: [, input[_VI]], + }); + let body; + let contents; + if (input.Tagging !== undefined) { + contents = se_Tagging(input.Tagging, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_PutPublicAccessBlockCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO], + }); + b.bp("/"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + const query = map({ + [_pAB]: [, ""], + }); + let body; + let contents; + if (input.PublicAccessBlockConfiguration !== undefined) { + contents = se_PublicAccessBlockConfiguration(input.PublicAccessBlockConfiguration, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_RestoreObjectCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_xarp]: input[_RP], + [_xasca]: input[_CA], + [_xaebo]: input[_EBO], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_res]: [, ""], + [_vI]: [, input[_VI]], + }); + let body; + let contents; + if (input.RestoreRequest !== undefined) { + contents = se_RestoreRequest(input.RestoreRequest, context); + body = _ve; + contents.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + body += contents.toString(); + } + b.m("POST").h(headers).q(query).b(body); + return b.build(); +}; +export const se_SelectObjectContentCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/xml", + [_xasseca]: input[_SSECA], + [_xasseck]: input[_SSECK], + [_xasseckm]: input[_SSECKMD], + [_xaebo]: input[_EBO], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_se]: [, ""], + [_st]: [, "2"], + }); + let body; + body = _ve; + const bn = new __XmlNode(_SOCR); + bn.a("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"); + bn.cc(input, _Ex); + bn.cc(input, _ETx); + if (input[_IS] != null) { + bn.c(se_InputSerialization(input[_IS], context).n(_IS)); + } + if (input[_OS] != null) { + 
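+ // Append OutputSerialization as a child node of the SelectObjectContentRequest XML.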
bn.c(se_OutputSerialization(input[_OS], context).n(_OS)); + } + if (input[_RPe] != null) { + bn.c(se_RequestProgress(input[_RPe], context).n(_RPe)); + } + if (input[_SR] != null) { + bn.c(se_ScanRange(input[_SR], context).n(_SR)); + } + body += bn.toString(); + b.m("POST").h(headers).q(query).b(body); + return b.build(); +}; +export const se_UploadPartCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "content-type": "application/octet-stream", + [_cl_]: [() => isSerializableHeaderValue(input[_CLo]), () => input[_CLo].toString()], + [_cm]: input[_CMD], + [_xasca]: input[_CA], + [_xacc]: input[_CCRC], + [_xacc_]: input[_CCRCC], + [_xacc__]: input[_CCRCNVME], + [_xacs]: input[_CSHA], + [_xacs_]: input[_CSHAh], + [_xasseca]: input[_SSECA], + [_xasseck]: input[_SSECK], + [_xasseckm]: input[_SSECKMD], + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_xi]: [, "UploadPart"], + [_pN]: [__expectNonNull(input.PartNumber, `PartNumber`) != null, () => input[_PN].toString()], + [_uI]: [, __expectNonNull(input[_UI], `UploadId`)], + }); + let body; + let contents; + if (input.Body !== undefined) { + contents = input.Body; + body = contents; + } + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_UploadPartCopyCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xacs__]: input[_CS], + [_xacsim]: input[_CSIM], + [_xacsims]: [() => isSerializableHeaderValue(input[_CSIMS]), () => __dateToUtcString(input[_CSIMS]).toString()], + [_xacsinm]: input[_CSINM], + [_xacsius]: [() => isSerializableHeaderValue(input[_CSIUS]), () => __dateToUtcString(input[_CSIUS]).toString()], + [_xacsr]: input[_CSR], + [_xasseca]: input[_SSECA], + [_xasseck]: input[_SSECK], + [_xasseckm]: input[_SSECKMD], + [_xacssseca]: input[_CSSSECA], + [_xacssseck]: input[_CSSSECK], + [_xacssseckm]: input[_CSSSECKMD], + [_xarp]: input[_RP], + [_xaebo]: input[_EBO], + [_xasebo]: input[_ESBO], + }); + b.bp("/{Key+}"); + b.p("Bucket", () => input.Bucket, "{Bucket}", false); + b.p("Key", () => input.Key, "{Key+}", true); + const query = map({ + [_xi]: [, "UploadPartCopy"], + [_pN]: [__expectNonNull(input.PartNumber, `PartNumber`) != null, () => input[_PN].toString()], + [_uI]: [, __expectNonNull(input[_UI], `UploadId`)], + }); + let body; + b.m("PUT").h(headers).q(query).b(body); + return b.build(); +}; +export const se_WriteGetObjectResponseCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + "x-amz-content-sha256": "UNSIGNED-PAYLOAD", + ...(input.Metadata !== undefined && + Object.keys(input.Metadata).reduce((acc, suffix) => { + acc[`x-amz-meta-${suffix.toLowerCase()}`] = input.Metadata[suffix]; + return acc; + }, {})), + "content-type": "application/octet-stream", + [_xarr]: input[_RR], + [_xart]: input[_RT], + [_xafs]: [() => isSerializableHeaderValue(input[_SCt]), () => input[_SCt].toString()], + [_xafec]: input[_EC], + [_xafem]: input[_EM], + [_xafhar]: input[_AR], + [_xafhcc]: input[_CC], + [_xafhcd]: input[_CD], + [_xafhce]: input[_CE], + [_xafhcl]: input[_CL], + [_cl_]: [() => isSerializableHeaderValue(input[_CLo]), () => input[_CLo].toString()], + [_xafhcr]: input[_CR], + [_xafhct]: input[_CTo], + [_xafhxacc]: input[_CCRC], + [_xafhxacc_]: 
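+ // The x-amz-fwd-header-* entries echo the original object's response headers back through the Object Lambda endpoint (so the key names suggest).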
input[_CCRCC], + [_xafhxacc__]: input[_CCRCNVME], + [_xafhxacs]: input[_CSHA], + [_xafhxacs_]: input[_CSHAh], + [_xafhxadm]: [() => isSerializableHeaderValue(input[_DM]), () => input[_DM].toString()], + [_xafhe]: input[_ETa], + [_xafhe_]: [() => isSerializableHeaderValue(input[_E]), () => __dateToUtcString(input[_E]).toString()], + [_xafhxae]: input[_Exp], + [_xafhlm]: [() => isSerializableHeaderValue(input[_LM]), () => __dateToUtcString(input[_LM]).toString()], + [_xafhxamm]: [() => isSerializableHeaderValue(input[_MM]), () => input[_MM].toString()], + [_xafhxaolm]: input[_OLM], + [_xafhxaollh]: input[_OLLHS], + [_xafhxaolrud]: [ + () => isSerializableHeaderValue(input[_OLRUD]), + () => __serializeDateTime(input[_OLRUD]).toString(), + ], + [_xafhxampc]: [() => isSerializableHeaderValue(input[_PC]), () => input[_PC].toString()], + [_xafhxars]: input[_RS], + [_xafhxarc]: input[_RC], + [_xafhxar]: input[_Re], + [_xafhxasse]: input[_SSE], + [_xafhxasseca]: input[_SSECA], + [_xafhxasseakki]: input[_SSEKMSKI], + [_xafhxasseckm]: input[_SSECKMD], + [_xafhxasc]: input[_SC], + [_xafhxatc]: [() => isSerializableHeaderValue(input[_TC]), () => input[_TC].toString()], + [_xafhxavi]: input[_VI], + [_xafhxassebke]: [() => isSerializableHeaderValue(input[_BKE]), () => input[_BKE].toString()], + }); + b.bp("/WriteGetObjectResponse"); + let body; + let contents; + if (input.Body !== undefined) { + contents = input.Body; + body = contents; + } + let { hostname: resolvedHostname } = await context.endpoint(); + if (context.disableHostPrefix !== true) { + resolvedHostname = "{RequestRoute}." + resolvedHostname; + if (input.RequestRoute === undefined) { + throw new Error("Empty value provided for input host prefix: RequestRoute."); + } + resolvedHostname = resolvedHostname.replace("{RequestRoute}", input.RequestRoute); + if (!__isValidHostname(resolvedHostname)) { + throw new Error("ValidationError: prefixed hostname must be hostname compatible."); + } + } + b.hn(resolvedHostname); + b.m("POST").h(headers).b(body); + return b.build(); +}; +export const de_AbortMultipartUploadCommand = async (output, context) => { + if (output.statusCode !== 204 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_RC]: [, output.headers[_xarc]], + }); + await collectBody(output.body, context); + return contents; +}; +export const de_CompleteMultipartUploadCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_Exp]: [, output.headers[_xae]], + [_SSE]: [, output.headers[_xasse]], + [_VI]: [, output.headers[_xavi]], + [_SSEKMSKI]: [, output.headers[_xasseakki]], + [_BKE]: [() => void 0 !== output.headers[_xassebke], () => __parseBoolean(output.headers[_xassebke])], + [_RC]: [, output.headers[_xarc]], + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data[_B] != null) { + contents[_B] = __expectString(data[_B]); + } + if (data[_CCRC] != null) { + contents[_CCRC] = __expectString(data[_CCRC]); + } + if (data[_CCRCC] != null) { + contents[_CCRCC] = __expectString(data[_CCRCC]); + } + if (data[_CCRCNVME] != null) { + contents[_CCRCNVME] = __expectString(data[_CCRCNVME]); + } + if (data[_CSHA] != null) { + contents[_CSHA] = __expectString(data[_CSHA]); + } + if (data[_CSHAh] != null) { + contents[_CSHAh] = 
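+ // __expectString validates that the parsed XML value is a string before it is assigned.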
__expectString(data[_CSHAh]); + } + if (data[_CT] != null) { + contents[_CT] = __expectString(data[_CT]); + } + if (data[_ETa] != null) { + contents[_ETa] = __expectString(data[_ETa]); + } + if (data[_K] != null) { + contents[_K] = __expectString(data[_K]); + } + if (data[_L] != null) { + contents[_L] = __expectString(data[_L]); + } + return contents; +}; +export const de_CopyObjectCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_Exp]: [, output.headers[_xae]], + [_CSVI]: [, output.headers[_xacsvi]], + [_VI]: [, output.headers[_xavi]], + [_SSE]: [, output.headers[_xasse]], + [_SSECA]: [, output.headers[_xasseca]], + [_SSECKMD]: [, output.headers[_xasseckm]], + [_SSEKMSKI]: [, output.headers[_xasseakki]], + [_SSEKMSEC]: [, output.headers[_xassec]], + [_BKE]: [() => void 0 !== output.headers[_xassebke], () => __parseBoolean(output.headers[_xassebke])], + [_RC]: [, output.headers[_xarc]], + }); + const data = __expectObject(await parseBody(output.body, context)); + contents.CopyObjectResult = de_CopyObjectResult(data, context); + return contents; +}; +export const de_CreateBucketCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_L]: [, output.headers[_lo]], + }); + await collectBody(output.body, context); + return contents; +}; +export const de_CreateBucketMetadataTableConfigurationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_CreateMultipartUploadCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_AD]: [ + () => void 0 !== output.headers[_xaad], + () => __expectNonNull(__parseRfc7231DateTime(output.headers[_xaad])), + ], + [_ARI]: [, output.headers[_xaari]], + [_SSE]: [, output.headers[_xasse]], + [_SSECA]: [, output.headers[_xasseca]], + [_SSECKMD]: [, output.headers[_xasseckm]], + [_SSEKMSKI]: [, output.headers[_xasseakki]], + [_SSEKMSEC]: [, output.headers[_xassec]], + [_BKE]: [() => void 0 !== output.headers[_xassebke], () => __parseBoolean(output.headers[_xassebke])], + [_RC]: [, output.headers[_xarc]], + [_CA]: [, output.headers[_xaca]], + [_CT]: [, output.headers[_xact]], + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data[_B] != null) { + contents[_B] = __expectString(data[_B]); + } + if (data[_K] != null) { + contents[_K] = __expectString(data[_K]); + } + if (data[_UI] != null) { + contents[_UI] = __expectString(data[_UI]); + } + return contents; +}; +export const de_CreateSessionCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_SSE]: [, output.headers[_xasse]], + [_SSEKMSKI]: [, output.headers[_xasseakki]], + [_SSEKMSEC]: [, output.headers[_xassec]], + [_BKE]: [() => void 0 !== output.headers[_xassebke], 
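+ // Parse the bucket-key-enabled header into a boolean only when the header is present.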
() => __parseBoolean(output.headers[_xassebke])], + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data[_C] != null) { + contents[_C] = de_SessionCredentials(data[_C], context); + } + return contents; +}; +export const de_DeleteBucketCommand = async (output, context) => { + if (output.statusCode !== 204 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_DeleteBucketAnalyticsConfigurationCommand = async (output, context) => { + if (output.statusCode !== 204 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_DeleteBucketCorsCommand = async (output, context) => { + if (output.statusCode !== 204 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_DeleteBucketEncryptionCommand = async (output, context) => { + if (output.statusCode !== 204 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_DeleteBucketIntelligentTieringConfigurationCommand = async (output, context) => { + if (output.statusCode !== 204 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_DeleteBucketInventoryConfigurationCommand = async (output, context) => { + if (output.statusCode !== 204 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_DeleteBucketLifecycleCommand = async (output, context) => { + if (output.statusCode !== 204 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_DeleteBucketMetadataTableConfigurationCommand = async (output, context) => { + if (output.statusCode !== 204 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_DeleteBucketMetricsConfigurationCommand = async (output, context) => { + if (output.statusCode !== 204 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_DeleteBucketOwnershipControlsCommand = async (output, context) => { + if (output.statusCode !== 204 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await 
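+ // Drain the (empty) response body so the underlying connection can be reused.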
collectBody(output.body, context); + return contents; +}; +export const de_DeleteBucketPolicyCommand = async (output, context) => { + if (output.statusCode !== 204 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_DeleteBucketReplicationCommand = async (output, context) => { + if (output.statusCode !== 204 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_DeleteBucketTaggingCommand = async (output, context) => { + if (output.statusCode !== 204 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_DeleteBucketWebsiteCommand = async (output, context) => { + if (output.statusCode !== 204 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_DeleteObjectCommand = async (output, context) => { + if (output.statusCode !== 204 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_DM]: [() => void 0 !== output.headers[_xadm], () => __parseBoolean(output.headers[_xadm])], + [_VI]: [, output.headers[_xavi]], + [_RC]: [, output.headers[_xarc]], + }); + await collectBody(output.body, context); + return contents; +}; +export const de_DeleteObjectsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_RC]: [, output.headers[_xarc]], + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data.Deleted === "") { + contents[_De] = []; + } + else if (data[_De] != null) { + contents[_De] = de_DeletedObjects(__getArrayIfSingleItem(data[_De]), context); + } + if (data.Error === "") { + contents[_Err] = []; + } + else if (data[_Er] != null) { + contents[_Err] = de_Errors(__getArrayIfSingleItem(data[_Er]), context); + } + return contents; +}; +export const de_DeleteObjectTaggingCommand = async (output, context) => { + if (output.statusCode !== 204 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_VI]: [, output.headers[_xavi]], + }); + await collectBody(output.body, context); + return contents; +}; +export const de_DeletePublicAccessBlockCommand = async (output, context) => { + if (output.statusCode !== 204 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_GetBucketAccelerateConfigurationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_RC]: [, 
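+ // map() tuples are [guard, value]; the leading hole seems to mean "include whenever the value is defined".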
output.headers[_xarc]], + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data[_S] != null) { + contents[_S] = __expectString(data[_S]); + } + return contents; +}; +export const de_GetBucketAclCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data.AccessControlList === "") { + contents[_Gr] = []; + } + else if (data[_ACLc] != null && data[_ACLc][_G] != null) { + contents[_Gr] = de_Grants(__getArrayIfSingleItem(data[_ACLc][_G]), context); + } + if (data[_O] != null) { + contents[_O] = de_Owner(data[_O], context); + } + return contents; +}; +export const de_GetBucketAnalyticsConfigurationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectObject(await parseBody(output.body, context)); + contents.AnalyticsConfiguration = de_AnalyticsConfiguration(data, context); + return contents; +}; +export const de_GetBucketCorsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data.CORSRule === "") { + contents[_CORSRu] = []; + } + else if (data[_CORSR] != null) { + contents[_CORSRu] = de_CORSRules(__getArrayIfSingleItem(data[_CORSR]), context); + } + return contents; +}; +export const de_GetBucketEncryptionCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectObject(await parseBody(output.body, context)); + contents.ServerSideEncryptionConfiguration = de_ServerSideEncryptionConfiguration(data, context); + return contents; +}; +export const de_GetBucketIntelligentTieringConfigurationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectObject(await parseBody(output.body, context)); + contents.IntelligentTieringConfiguration = de_IntelligentTieringConfiguration(data, context); + return contents; +}; +export const de_GetBucketInventoryConfigurationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectObject(await parseBody(output.body, context)); + contents.InventoryConfiguration = de_InventoryConfiguration(data, context); + return contents; +}; +export const de_GetBucketLifecycleConfigurationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_TDMOS]: [, output.headers[_xatdmos]], + }); + const data = 
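+ // A body is mandatory here: __expectNonNull throws if the XML payload parses to nothing.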
__expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data.Rule === "") { + contents[_Rul] = []; + } + else if (data[_Ru] != null) { + contents[_Rul] = de_LifecycleRules(__getArrayIfSingleItem(data[_Ru]), context); + } + return contents; +}; +export const de_GetBucketLocationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data[_LC] != null) { + contents[_LC] = __expectString(data[_LC]); + } + return contents; +}; +export const de_GetBucketLoggingCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data[_LE] != null) { + contents[_LE] = de_LoggingEnabled(data[_LE], context); + } + return contents; +}; +export const de_GetBucketMetadataTableConfigurationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectObject(await parseBody(output.body, context)); + contents.GetBucketMetadataTableConfigurationResult = de_GetBucketMetadataTableConfigurationResult(data, context); + return contents; +}; +export const de_GetBucketMetricsConfigurationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectObject(await parseBody(output.body, context)); + contents.MetricsConfiguration = de_MetricsConfiguration(data, context); + return contents; +}; +export const de_GetBucketNotificationConfigurationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data[_EBC] != null) { + contents[_EBC] = de_EventBridgeConfiguration(data[_EBC], context); + } + if (data.CloudFunctionConfiguration === "") { + contents[_LFC] = []; + } + else if (data[_CFC] != null) { + contents[_LFC] = de_LambdaFunctionConfigurationList(__getArrayIfSingleItem(data[_CFC]), context); + } + if (data.QueueConfiguration === "") { + contents[_QCu] = []; + } + else if (data[_QC] != null) { + contents[_QCu] = de_QueueConfigurationList(__getArrayIfSingleItem(data[_QC]), context); + } + if (data.TopicConfiguration === "") { + contents[_TCop] = []; + } + else if (data[_TCo] != null) { + contents[_TCop] = de_TopicConfigurationList(__getArrayIfSingleItem(data[_TCo]), context); + } + return contents; +}; +export const de_GetBucketOwnershipControlsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectObject(await parseBody(output.body, context)); + 
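+ // parseBody parses the XML payload; the de_* helper then maps it onto the modeled OwnershipControls output shape.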
contents.OwnershipControls = de_OwnershipControls(data, context); + return contents; +}; +export const de_GetBucketPolicyCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = await collectBodyString(output.body, context); + contents.Policy = __expectString(data); + return contents; +}; +export const de_GetBucketPolicyStatusCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectObject(await parseBody(output.body, context)); + contents.PolicyStatus = de_PolicyStatus(data, context); + return contents; +}; +export const de_GetBucketReplicationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectObject(await parseBody(output.body, context)); + contents.ReplicationConfiguration = de_ReplicationConfiguration(data, context); + return contents; +}; +export const de_GetBucketRequestPaymentCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data[_Pa] != null) { + contents[_Pa] = __expectString(data[_Pa]); + } + return contents; +}; +export const de_GetBucketTaggingCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data.TagSet === "") { + contents[_TS] = []; + } + else if (data[_TS] != null && data[_TS][_Ta] != null) { + contents[_TS] = de_TagSet(__getArrayIfSingleItem(data[_TS][_Ta]), context); + } + return contents; +}; +export const de_GetBucketVersioningCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data[_MDf] != null) { + contents[_MFAD] = __expectString(data[_MDf]); + } + if (data[_S] != null) { + contents[_S] = __expectString(data[_S]); + } + return contents; +}; +export const de_GetBucketWebsiteCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data[_ED] != null) { + contents[_ED] = de_ErrorDocument(data[_ED], context); + } + if (data[_ID] != null) { + contents[_ID] = de_IndexDocument(data[_ID], context); + } + if (data[_RART] != null) { + contents[_RART] = de_RedirectAllRequestsTo(data[_RART], context); + } + if (data.RoutingRules === "") { + contents[_RRo] = []; + } + 
else if (data[_RRo] != null && data[_RRo][_RRou] != null) { + contents[_RRo] = de_RoutingRules(__getArrayIfSingleItem(data[_RRo][_RRou]), context); + } + return contents; +}; +export const de_GetObjectCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_DM]: [() => void 0 !== output.headers[_xadm], () => __parseBoolean(output.headers[_xadm])], + [_AR]: [, output.headers[_ar]], + [_Exp]: [, output.headers[_xae]], + [_Re]: [, output.headers[_xar]], + [_LM]: [() => void 0 !== output.headers[_lm], () => __expectNonNull(__parseRfc7231DateTime(output.headers[_lm]))], + [_CLo]: [() => void 0 !== output.headers[_cl_], () => __strictParseLong(output.headers[_cl_])], + [_ETa]: [, output.headers[_eta]], + [_CCRC]: [, output.headers[_xacc]], + [_CCRCC]: [, output.headers[_xacc_]], + [_CCRCNVME]: [, output.headers[_xacc__]], + [_CSHA]: [, output.headers[_xacs]], + [_CSHAh]: [, output.headers[_xacs_]], + [_CT]: [, output.headers[_xact]], + [_MM]: [() => void 0 !== output.headers[_xamm], () => __strictParseInt32(output.headers[_xamm])], + [_VI]: [, output.headers[_xavi]], + [_CC]: [, output.headers[_cc]], + [_CD]: [, output.headers[_cd]], + [_CE]: [, output.headers[_ce]], + [_CL]: [, output.headers[_cl]], + [_CR]: [, output.headers[_cr]], + [_CTo]: [, output.headers[_ct]], + [_E]: [() => void 0 !== output.headers[_e], () => __expectNonNull(__parseRfc7231DateTime(output.headers[_e]))], + [_ES]: [, output.headers[_ex]], + [_WRL]: [, output.headers[_xawrl]], + [_SSE]: [, output.headers[_xasse]], + [_SSECA]: [, output.headers[_xasseca]], + [_SSECKMD]: [, output.headers[_xasseckm]], + [_SSEKMSKI]: [, output.headers[_xasseakki]], + [_BKE]: [() => void 0 !== output.headers[_xassebke], () => __parseBoolean(output.headers[_xassebke])], + [_SC]: [, output.headers[_xasc]], + [_RC]: [, output.headers[_xarc]], + [_RS]: [, output.headers[_xars]], + [_PC]: [() => void 0 !== output.headers[_xampc], () => __strictParseInt32(output.headers[_xampc])], + [_TC]: [() => void 0 !== output.headers[_xatc], () => __strictParseInt32(output.headers[_xatc])], + [_OLM]: [, output.headers[_xaolm]], + [_OLRUD]: [ + () => void 0 !== output.headers[_xaolrud], + () => __expectNonNull(__parseRfc3339DateTimeWithOffset(output.headers[_xaolrud])), + ], + [_OLLHS]: [, output.headers[_xaollh]], + Metadata: [ + , + Object.keys(output.headers) + .filter((header) => header.startsWith("x-amz-meta-")) + .reduce((acc, header) => { + acc[header.substring(11)] = output.headers[header]; + return acc; + }, {}), + ], + }); + const data = output.body; + context.sdkStreamMixin(data); + contents.Body = data; + return contents; +}; +export const de_GetObjectAclCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_RC]: [, output.headers[_xarc]], + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data.AccessControlList === "") { + contents[_Gr] = []; + } + else if (data[_ACLc] != null && data[_ACLc][_G] != null) { + contents[_Gr] = de_Grants(__getArrayIfSingleItem(data[_ACLc][_G]), context); + } + if (data[_O] != null) { + contents[_O] = de_Owner(data[_O], context); + } + return contents; +}; +export const de_GetObjectAttributesCommand = async (output, context) => { + if 
(output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_DM]: [() => void 0 !== output.headers[_xadm], () => __parseBoolean(output.headers[_xadm])], + [_LM]: [() => void 0 !== output.headers[_lm], () => __expectNonNull(__parseRfc7231DateTime(output.headers[_lm]))], + [_VI]: [, output.headers[_xavi]], + [_RC]: [, output.headers[_xarc]], + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data[_Ch] != null) { + contents[_Ch] = de_Checksum(data[_Ch], context); + } + if (data[_ETa] != null) { + contents[_ETa] = __expectString(data[_ETa]); + } + if (data[_OP] != null) { + contents[_OP] = de_GetObjectAttributesParts(data[_OP], context); + } + if (data[_OSb] != null) { + contents[_OSb] = __strictParseLong(data[_OSb]); + } + if (data[_SC] != null) { + contents[_SC] = __expectString(data[_SC]); + } + return contents; +}; +export const de_GetObjectLegalHoldCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectObject(await parseBody(output.body, context)); + contents.LegalHold = de_ObjectLockLegalHold(data, context); + return contents; +}; +export const de_GetObjectLockConfigurationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectObject(await parseBody(output.body, context)); + contents.ObjectLockConfiguration = de_ObjectLockConfiguration(data, context); + return contents; +}; +export const de_GetObjectRetentionCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectObject(await parseBody(output.body, context)); + contents.Retention = de_ObjectLockRetention(data, context); + return contents; +}; +export const de_GetObjectTaggingCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_VI]: [, output.headers[_xavi]], + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data.TagSet === "") { + contents[_TS] = []; + } + else if (data[_TS] != null && data[_TS][_Ta] != null) { + contents[_TS] = de_TagSet(__getArrayIfSingleItem(data[_TS][_Ta]), context); + } + return contents; +}; +export const de_GetObjectTorrentCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_RC]: [, output.headers[_xarc]], + }); + const data = output.body; + context.sdkStreamMixin(data); + contents.Body = data; + return contents; +}; +export const de_GetPublicAccessBlockCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = 
__expectObject(await parseBody(output.body, context)); + contents.PublicAccessBlockConfiguration = de_PublicAccessBlockConfiguration(data, context); + return contents; +}; +export const de_HeadBucketCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_BLT]: [, output.headers[_xablt]], + [_BLN]: [, output.headers[_xabln]], + [_BR]: [, output.headers[_xabr]], + [_APA]: [() => void 0 !== output.headers[_xaapa], () => __parseBoolean(output.headers[_xaapa])], + }); + await collectBody(output.body, context); + return contents; +}; +export const de_HeadObjectCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_DM]: [() => void 0 !== output.headers[_xadm], () => __parseBoolean(output.headers[_xadm])], + [_AR]: [, output.headers[_ar]], + [_Exp]: [, output.headers[_xae]], + [_Re]: [, output.headers[_xar]], + [_AS]: [, output.headers[_xaas]], + [_LM]: [() => void 0 !== output.headers[_lm], () => __expectNonNull(__parseRfc7231DateTime(output.headers[_lm]))], + [_CLo]: [() => void 0 !== output.headers[_cl_], () => __strictParseLong(output.headers[_cl_])], + [_CCRC]: [, output.headers[_xacc]], + [_CCRCC]: [, output.headers[_xacc_]], + [_CCRCNVME]: [, output.headers[_xacc__]], + [_CSHA]: [, output.headers[_xacs]], + [_CSHAh]: [, output.headers[_xacs_]], + [_CT]: [, output.headers[_xact]], + [_ETa]: [, output.headers[_eta]], + [_MM]: [() => void 0 !== output.headers[_xamm], () => __strictParseInt32(output.headers[_xamm])], + [_VI]: [, output.headers[_xavi]], + [_CC]: [, output.headers[_cc]], + [_CD]: [, output.headers[_cd]], + [_CE]: [, output.headers[_ce]], + [_CL]: [, output.headers[_cl]], + [_CTo]: [, output.headers[_ct]], + [_CR]: [, output.headers[_cr]], + [_E]: [() => void 0 !== output.headers[_e], () => __expectNonNull(__parseRfc7231DateTime(output.headers[_e]))], + [_ES]: [, output.headers[_ex]], + [_WRL]: [, output.headers[_xawrl]], + [_SSE]: [, output.headers[_xasse]], + [_SSECA]: [, output.headers[_xasseca]], + [_SSECKMD]: [, output.headers[_xasseckm]], + [_SSEKMSKI]: [, output.headers[_xasseakki]], + [_BKE]: [() => void 0 !== output.headers[_xassebke], () => __parseBoolean(output.headers[_xassebke])], + [_SC]: [, output.headers[_xasc]], + [_RC]: [, output.headers[_xarc]], + [_RS]: [, output.headers[_xars]], + [_PC]: [() => void 0 !== output.headers[_xampc], () => __strictParseInt32(output.headers[_xampc])], + [_OLM]: [, output.headers[_xaolm]], + [_OLRUD]: [ + () => void 0 !== output.headers[_xaolrud], + () => __expectNonNull(__parseRfc3339DateTimeWithOffset(output.headers[_xaolrud])), + ], + [_OLLHS]: [, output.headers[_xaollh]], + Metadata: [ + , + Object.keys(output.headers) + .filter((header) => header.startsWith("x-amz-meta-")) + .reduce((acc, header) => { + acc[header.substring(11)] = output.headers[header]; + return acc; + }, {}), + ], + }); + await collectBody(output.body, context); + return contents; +}; +export const de_ListBucketAnalyticsConfigurationsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), 
"body"); + if (data.AnalyticsConfiguration === "") { + contents[_ACLn] = []; + } + else if (data[_AC] != null) { + contents[_ACLn] = de_AnalyticsConfigurationList(__getArrayIfSingleItem(data[_AC]), context); + } + if (data[_CTon] != null) { + contents[_CTon] = __expectString(data[_CTon]); + } + if (data[_IT] != null) { + contents[_IT] = __parseBoolean(data[_IT]); + } + if (data[_NCT] != null) { + contents[_NCT] = __expectString(data[_NCT]); + } + return contents; +}; +export const de_ListBucketIntelligentTieringConfigurationsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data[_CTon] != null) { + contents[_CTon] = __expectString(data[_CTon]); + } + if (data.IntelligentTieringConfiguration === "") { + contents[_ITCL] = []; + } + else if (data[_ITC] != null) { + contents[_ITCL] = de_IntelligentTieringConfigurationList(__getArrayIfSingleItem(data[_ITC]), context); + } + if (data[_IT] != null) { + contents[_IT] = __parseBoolean(data[_IT]); + } + if (data[_NCT] != null) { + contents[_NCT] = __expectString(data[_NCT]); + } + return contents; +}; +export const de_ListBucketInventoryConfigurationsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data[_CTon] != null) { + contents[_CTon] = __expectString(data[_CTon]); + } + if (data.InventoryConfiguration === "") { + contents[_ICL] = []; + } + else if (data[_IC] != null) { + contents[_ICL] = de_InventoryConfigurationList(__getArrayIfSingleItem(data[_IC]), context); + } + if (data[_IT] != null) { + contents[_IT] = __parseBoolean(data[_IT]); + } + if (data[_NCT] != null) { + contents[_NCT] = __expectString(data[_NCT]); + } + return contents; +}; +export const de_ListBucketMetricsConfigurationsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data[_CTon] != null) { + contents[_CTon] = __expectString(data[_CTon]); + } + if (data[_IT] != null) { + contents[_IT] = __parseBoolean(data[_IT]); + } + if (data.MetricsConfiguration === "") { + contents[_MCL] = []; + } + else if (data[_MC] != null) { + contents[_MCL] = de_MetricsConfigurationList(__getArrayIfSingleItem(data[_MC]), context); + } + if (data[_NCT] != null) { + contents[_NCT] = __expectString(data[_NCT]); + } + return contents; +}; +export const de_ListBucketsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data.Buckets === "") { + contents[_Bu] = []; + } + else if (data[_Bu] != null && data[_Bu][_B] != null) { + contents[_Bu] = de_Buckets(__getArrayIfSingleItem(data[_Bu][_B]), context); + } + if (data[_CTon] != null) { + 
contents[_CTon] = __expectString(data[_CTon]); + } + if (data[_O] != null) { + contents[_O] = de_Owner(data[_O], context); + } + if (data[_P] != null) { + contents[_P] = __expectString(data[_P]); + } + return contents; +}; +export const de_ListDirectoryBucketsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data.Buckets === "") { + contents[_Bu] = []; + } + else if (data[_Bu] != null && data[_Bu][_B] != null) { + contents[_Bu] = de_Buckets(__getArrayIfSingleItem(data[_Bu][_B]), context); + } + if (data[_CTon] != null) { + contents[_CTon] = __expectString(data[_CTon]); + } + return contents; +}; +export const de_ListMultipartUploadsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_RC]: [, output.headers[_xarc]], + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data[_B] != null) { + contents[_B] = __expectString(data[_B]); + } + if (data.CommonPrefixes === "") { + contents[_CP] = []; + } + else if (data[_CP] != null) { + contents[_CP] = de_CommonPrefixList(__getArrayIfSingleItem(data[_CP]), context); + } + if (data[_D] != null) { + contents[_D] = __expectString(data[_D]); + } + if (data[_ET] != null) { + contents[_ET] = __expectString(data[_ET]); + } + if (data[_IT] != null) { + contents[_IT] = __parseBoolean(data[_IT]); + } + if (data[_KM] != null) { + contents[_KM] = __expectString(data[_KM]); + } + if (data[_MU] != null) { + contents[_MU] = __strictParseInt32(data[_MU]); + } + if (data[_NKM] != null) { + contents[_NKM] = __expectString(data[_NKM]); + } + if (data[_NUIM] != null) { + contents[_NUIM] = __expectString(data[_NUIM]); + } + if (data[_P] != null) { + contents[_P] = __expectString(data[_P]); + } + if (data[_UIM] != null) { + contents[_UIM] = __expectString(data[_UIM]); + } + if (data.Upload === "") { + contents[_Up] = []; + } + else if (data[_U] != null) { + contents[_Up] = de_MultipartUploadList(__getArrayIfSingleItem(data[_U]), context); + } + return contents; +}; +export const de_ListObjectsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_RC]: [, output.headers[_xarc]], + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data.CommonPrefixes === "") { + contents[_CP] = []; + } + else if (data[_CP] != null) { + contents[_CP] = de_CommonPrefixList(__getArrayIfSingleItem(data[_CP]), context); + } + if (data.Contents === "") { + contents[_Co] = []; + } + else if (data[_Co] != null) { + contents[_Co] = de_ObjectList(__getArrayIfSingleItem(data[_Co]), context); + } + if (data[_D] != null) { + contents[_D] = __expectString(data[_D]); + } + if (data[_ET] != null) { + contents[_ET] = __expectString(data[_ET]); + } + if (data[_IT] != null) { + contents[_IT] = __parseBoolean(data[_IT]); + } + if (data[_M] != null) { + contents[_M] = __expectString(data[_M]); + } + if (data[_MK] != null) { + contents[_MK] = __strictParseInt32(data[_MK]); + } + if (data[_N] != null) { + 
contents[_N] = __expectString(data[_N]); + } + if (data[_NM] != null) { + contents[_NM] = __expectString(data[_NM]); + } + if (data[_P] != null) { + contents[_P] = __expectString(data[_P]); + } + return contents; +}; +export const de_ListObjectsV2Command = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_RC]: [, output.headers[_xarc]], + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data.CommonPrefixes === "") { + contents[_CP] = []; + } + else if (data[_CP] != null) { + contents[_CP] = de_CommonPrefixList(__getArrayIfSingleItem(data[_CP]), context); + } + if (data.Contents === "") { + contents[_Co] = []; + } + else if (data[_Co] != null) { + contents[_Co] = de_ObjectList(__getArrayIfSingleItem(data[_Co]), context); + } + if (data[_CTon] != null) { + contents[_CTon] = __expectString(data[_CTon]); + } + if (data[_D] != null) { + contents[_D] = __expectString(data[_D]); + } + if (data[_ET] != null) { + contents[_ET] = __expectString(data[_ET]); + } + if (data[_IT] != null) { + contents[_IT] = __parseBoolean(data[_IT]); + } + if (data[_KC] != null) { + contents[_KC] = __strictParseInt32(data[_KC]); + } + if (data[_MK] != null) { + contents[_MK] = __strictParseInt32(data[_MK]); + } + if (data[_N] != null) { + contents[_N] = __expectString(data[_N]); + } + if (data[_NCT] != null) { + contents[_NCT] = __expectString(data[_NCT]); + } + if (data[_P] != null) { + contents[_P] = __expectString(data[_P]); + } + if (data[_SA] != null) { + contents[_SA] = __expectString(data[_SA]); + } + return contents; +}; +export const de_ListObjectVersionsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_RC]: [, output.headers[_xarc]], + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data.CommonPrefixes === "") { + contents[_CP] = []; + } + else if (data[_CP] != null) { + contents[_CP] = de_CommonPrefixList(__getArrayIfSingleItem(data[_CP]), context); + } + if (data.DeleteMarker === "") { + contents[_DMe] = []; + } + else if (data[_DM] != null) { + contents[_DMe] = de_DeleteMarkers(__getArrayIfSingleItem(data[_DM]), context); + } + if (data[_D] != null) { + contents[_D] = __expectString(data[_D]); + } + if (data[_ET] != null) { + contents[_ET] = __expectString(data[_ET]); + } + if (data[_IT] != null) { + contents[_IT] = __parseBoolean(data[_IT]); + } + if (data[_KM] != null) { + contents[_KM] = __expectString(data[_KM]); + } + if (data[_MK] != null) { + contents[_MK] = __strictParseInt32(data[_MK]); + } + if (data[_N] != null) { + contents[_N] = __expectString(data[_N]); + } + if (data[_NKM] != null) { + contents[_NKM] = __expectString(data[_NKM]); + } + if (data[_NVIM] != null) { + contents[_NVIM] = __expectString(data[_NVIM]); + } + if (data[_P] != null) { + contents[_P] = __expectString(data[_P]); + } + if (data[_VIM] != null) { + contents[_VIM] = __expectString(data[_VIM]); + } + if (data.Version === "") { + contents[_Ve] = []; + } + else if (data[_V] != null) { + contents[_Ve] = de_ObjectVersionList(__getArrayIfSingleItem(data[_V]), context); + } + return contents; +}; +export const de_ListPartsCommand = async (output, context) => { + if (output.statusCode !== 200 && 
output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_AD]: [ + () => void 0 !== output.headers[_xaad], + () => __expectNonNull(__parseRfc7231DateTime(output.headers[_xaad])), + ], + [_ARI]: [, output.headers[_xaari]], + [_RC]: [, output.headers[_xarc]], + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data[_B] != null) { + contents[_B] = __expectString(data[_B]); + } + if (data[_CA] != null) { + contents[_CA] = __expectString(data[_CA]); + } + if (data[_CT] != null) { + contents[_CT] = __expectString(data[_CT]); + } + if (data[_In] != null) { + contents[_In] = de_Initiator(data[_In], context); + } + if (data[_IT] != null) { + contents[_IT] = __parseBoolean(data[_IT]); + } + if (data[_K] != null) { + contents[_K] = __expectString(data[_K]); + } + if (data[_MP] != null) { + contents[_MP] = __strictParseInt32(data[_MP]); + } + if (data[_NPNM] != null) { + contents[_NPNM] = __expectString(data[_NPNM]); + } + if (data[_O] != null) { + contents[_O] = de_Owner(data[_O], context); + } + if (data[_PNM] != null) { + contents[_PNM] = __expectString(data[_PNM]); + } + if (data.Part === "") { + contents[_Part] = []; + } + else if (data[_Par] != null) { + contents[_Part] = de_Parts(__getArrayIfSingleItem(data[_Par]), context); + } + if (data[_SC] != null) { + contents[_SC] = __expectString(data[_SC]); + } + if (data[_UI] != null) { + contents[_UI] = __expectString(data[_UI]); + } + return contents; +}; +export const de_PutBucketAccelerateConfigurationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutBucketAclCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutBucketAnalyticsConfigurationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutBucketCorsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutBucketEncryptionCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutBucketIntelligentTieringConfigurationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const 
de_PutBucketInventoryConfigurationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutBucketLifecycleConfigurationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_TDMOS]: [, output.headers[_xatdmos]], + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutBucketLoggingCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutBucketMetricsConfigurationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutBucketNotificationConfigurationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutBucketOwnershipControlsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutBucketPolicyCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutBucketReplicationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutBucketRequestPaymentCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutBucketTaggingCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutBucketVersioningCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: 
deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutBucketWebsiteCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutObjectCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_Exp]: [, output.headers[_xae]], + [_ETa]: [, output.headers[_eta]], + [_CCRC]: [, output.headers[_xacc]], + [_CCRCC]: [, output.headers[_xacc_]], + [_CCRCNVME]: [, output.headers[_xacc__]], + [_CSHA]: [, output.headers[_xacs]], + [_CSHAh]: [, output.headers[_xacs_]], + [_CT]: [, output.headers[_xact]], + [_SSE]: [, output.headers[_xasse]], + [_VI]: [, output.headers[_xavi]], + [_SSECA]: [, output.headers[_xasseca]], + [_SSECKMD]: [, output.headers[_xasseckm]], + [_SSEKMSKI]: [, output.headers[_xasseakki]], + [_SSEKMSEC]: [, output.headers[_xassec]], + [_BKE]: [() => void 0 !== output.headers[_xassebke], () => __parseBoolean(output.headers[_xassebke])], + [_Si]: [() => void 0 !== output.headers[_xaos], () => __strictParseLong(output.headers[_xaos])], + [_RC]: [, output.headers[_xarc]], + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutObjectAclCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_RC]: [, output.headers[_xarc]], + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutObjectLegalHoldCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_RC]: [, output.headers[_xarc]], + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutObjectLockConfigurationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_RC]: [, output.headers[_xarc]], + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutObjectRetentionCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_RC]: [, output.headers[_xarc]], + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutObjectTaggingCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_VI]: [, output.headers[_xavi]], + }); + await collectBody(output.body, context); + return contents; +}; +export const de_PutPublicAccessBlockCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: 
deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +export const de_RestoreObjectCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_RC]: [, output.headers[_xarc]], + [_ROP]: [, output.headers[_xarop]], + }); + await collectBody(output.body, context); + return contents; +}; +export const de_SelectObjectContentCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = output.body; + contents.Payload = de_SelectObjectContentEventStream(data, context); + return contents; +}; +export const de_UploadPartCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_SSE]: [, output.headers[_xasse]], + [_ETa]: [, output.headers[_eta]], + [_CCRC]: [, output.headers[_xacc]], + [_CCRCC]: [, output.headers[_xacc_]], + [_CCRCNVME]: [, output.headers[_xacc__]], + [_CSHA]: [, output.headers[_xacs]], + [_CSHAh]: [, output.headers[_xacs_]], + [_SSECA]: [, output.headers[_xasseca]], + [_SSECKMD]: [, output.headers[_xasseckm]], + [_SSEKMSKI]: [, output.headers[_xasseakki]], + [_BKE]: [() => void 0 !== output.headers[_xassebke], () => __parseBoolean(output.headers[_xassebke])], + [_RC]: [, output.headers[_xarc]], + }); + await collectBody(output.body, context); + return contents; +}; +export const de_UploadPartCopyCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + [_CSVI]: [, output.headers[_xacsvi]], + [_SSE]: [, output.headers[_xasse]], + [_SSECA]: [, output.headers[_xasseca]], + [_SSECKMD]: [, output.headers[_xasseckm]], + [_SSEKMSKI]: [, output.headers[_xasseakki]], + [_BKE]: [() => void 0 !== output.headers[_xassebke], () => __parseBoolean(output.headers[_xassebke])], + [_RC]: [, output.headers[_xarc]], + }); + const data = __expectObject(await parseBody(output.body, context)); + contents.CopyPartResult = de_CopyPartResult(data, context); + return contents; +}; +export const de_WriteGetObjectResponseCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +const de_CommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadRestXmlErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "NoSuchUpload": + case "com.amazonaws.s3#NoSuchUpload": + throw await de_NoSuchUploadRes(parsedOutput, context); + case "ObjectNotInActiveTierError": + case "com.amazonaws.s3#ObjectNotInActiveTierError": + throw await de_ObjectNotInActiveTierErrorRes(parsedOutput, context); + case "BucketAlreadyExists": + case "com.amazonaws.s3#BucketAlreadyExists": + throw await de_BucketAlreadyExistsRes(parsedOutput, context); + case "BucketAlreadyOwnedByYou": + case 
"com.amazonaws.s3#BucketAlreadyOwnedByYou": + throw await de_BucketAlreadyOwnedByYouRes(parsedOutput, context); + case "NoSuchBucket": + case "com.amazonaws.s3#NoSuchBucket": + throw await de_NoSuchBucketRes(parsedOutput, context); + case "InvalidObjectState": + case "com.amazonaws.s3#InvalidObjectState": + throw await de_InvalidObjectStateRes(parsedOutput, context); + case "NoSuchKey": + case "com.amazonaws.s3#NoSuchKey": + throw await de_NoSuchKeyRes(parsedOutput, context); + case "NotFound": + case "com.amazonaws.s3#NotFound": + throw await de_NotFoundRes(parsedOutput, context); + case "EncryptionTypeMismatch": + case "com.amazonaws.s3#EncryptionTypeMismatch": + throw await de_EncryptionTypeMismatchRes(parsedOutput, context); + case "InvalidRequest": + case "com.amazonaws.s3#InvalidRequest": + throw await de_InvalidRequestRes(parsedOutput, context); + case "InvalidWriteOffset": + case "com.amazonaws.s3#InvalidWriteOffset": + throw await de_InvalidWriteOffsetRes(parsedOutput, context); + case "TooManyParts": + case "com.amazonaws.s3#TooManyParts": + throw await de_TooManyPartsRes(parsedOutput, context); + case "ObjectAlreadyInActiveTierError": + case "com.amazonaws.s3#ObjectAlreadyInActiveTierError": + throw await de_ObjectAlreadyInActiveTierErrorRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode, + }); + } +}; +const throwDefaultError = withBaseException(__BaseException); +const de_BucketAlreadyExistsRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const exception = new BucketAlreadyExists({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_BucketAlreadyOwnedByYouRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const exception = new BucketAlreadyOwnedByYou({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_EncryptionTypeMismatchRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const exception = new EncryptionTypeMismatch({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InvalidObjectStateRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + if (data[_AT] != null) { + contents[_AT] = __expectString(data[_AT]); + } + if (data[_SC] != null) { + contents[_SC] = __expectString(data[_SC]); + } + const exception = new InvalidObjectState({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InvalidRequestRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const exception = new InvalidRequest({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InvalidWriteOffsetRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const exception = new InvalidWriteOffset({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; 
+const de_NoSuchBucketRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const exception = new NoSuchBucket({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_NoSuchKeyRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const exception = new NoSuchKey({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_NoSuchUploadRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const exception = new NoSuchUpload({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_NotFoundRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const exception = new NotFound({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_ObjectAlreadyInActiveTierErrorRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const exception = new ObjectAlreadyInActiveTierError({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_ObjectNotInActiveTierErrorRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const exception = new ObjectNotInActiveTierError({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_TooManyPartsRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const exception = new TooManyParts({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_SelectObjectContentEventStream = (output, context) => { + return context.eventStreamMarshaller.deserialize(output, async (event) => { + if (event["Records"] != null) { + return { + Records: await de_RecordsEvent_event(event["Records"], context), + }; + } + if (event["Stats"] != null) { + return { + Stats: await de_StatsEvent_event(event["Stats"], context), + }; + } + if (event["Progress"] != null) { + return { + Progress: await de_ProgressEvent_event(event["Progress"], context), + }; + } + if (event["Cont"] != null) { + return { + Cont: await de_ContinuationEvent_event(event["Cont"], context), + }; + } + if (event["End"] != null) { + return { + End: await de_EndEvent_event(event["End"], context), + }; + } + return { $unknown: output }; + }); +}; +const de_ContinuationEvent_event = async (output, context) => { + const contents = {}; + const data = await parseBody(output.body, context); + Object.assign(contents, de_ContinuationEvent(data, context)); + return contents; +}; +const de_EndEvent_event = async (output, context) => { + const contents = {}; + const data = await parseBody(output.body, context); + Object.assign(contents, de_EndEvent(data, context)); + return contents; +}; +const de_ProgressEvent_event = async (output, context) => { + const contents = {}; + const data = await parseBody(output.body, context); + contents.Details = 
de_Progress(data, context); + return contents; +}; +const de_RecordsEvent_event = async (output, context) => { + const contents = {}; + contents.Payload = output.body; + return contents; +}; +const de_StatsEvent_event = async (output, context) => { + const contents = {}; + const data = await parseBody(output.body, context); + contents.Details = de_Stats(data, context); + return contents; +}; +const se_AbortIncompleteMultipartUpload = (input, context) => { + const bn = new __XmlNode(_AIMU); + if (input[_DAI] != null) { + bn.c(__XmlNode.of(_DAI, String(input[_DAI])).n(_DAI)); + } + return bn; +}; +const se_AccelerateConfiguration = (input, context) => { + const bn = new __XmlNode(_ACc); + if (input[_S] != null) { + bn.c(__XmlNode.of(_BAS, input[_S]).n(_S)); + } + return bn; +}; +const se_AccessControlPolicy = (input, context) => { + const bn = new __XmlNode(_ACP); + bn.lc(input, "Grants", "AccessControlList", () => se_Grants(input[_Gr], context)); + if (input[_O] != null) { + bn.c(se_Owner(input[_O], context).n(_O)); + } + return bn; +}; +const se_AccessControlTranslation = (input, context) => { + const bn = new __XmlNode(_ACT); + if (input[_O] != null) { + bn.c(__XmlNode.of(_OOw, input[_O]).n(_O)); + } + return bn; +}; +const se_AllowedHeaders = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + const n = __XmlNode.of(_AH, entry); + return n.n(_me); + }); +}; +const se_AllowedMethods = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + const n = __XmlNode.of(_AM, entry); + return n.n(_me); + }); +}; +const se_AllowedOrigins = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + const n = __XmlNode.of(_AO, entry); + return n.n(_me); + }); +}; +const se_AnalyticsAndOperator = (input, context) => { + const bn = new __XmlNode(_AAO); + bn.cc(input, _P); + bn.l(input, "Tags", "Tag", () => se_TagSet(input[_Tag], context)); + return bn; +}; +const se_AnalyticsConfiguration = (input, context) => { + const bn = new __XmlNode(_AC); + if (input[_I] != null) { + bn.c(__XmlNode.of(_AI, input[_I]).n(_I)); + } + if (input[_F] != null) { + bn.c(se_AnalyticsFilter(input[_F], context).n(_F)); + } + if (input[_SCA] != null) { + bn.c(se_StorageClassAnalysis(input[_SCA], context).n(_SCA)); + } + return bn; +}; +const se_AnalyticsExportDestination = (input, context) => { + const bn = new __XmlNode(_AED); + if (input[_SBD] != null) { + bn.c(se_AnalyticsS3BucketDestination(input[_SBD], context).n(_SBD)); + } + return bn; +}; +const se_AnalyticsFilter = (input, context) => { + const bn = new __XmlNode(_AF); + AnalyticsFilter.visit(input, { + Prefix: (value) => { + if (input[_P] != null) { + bn.c(__XmlNode.of(_P, value).n(_P)); + } + }, + Tag: (value) => { + if (input[_Ta] != null) { + bn.c(se_Tag(value, context).n(_Ta)); + } + }, + And: (value) => { + if (input[_A] != null) { + bn.c(se_AnalyticsAndOperator(value, context).n(_A)); + } + }, + _: (name, value) => { + if (!(value instanceof __XmlNode || value instanceof __XmlText)) { + throw new Error("Unable to serialize unknown union members in XML."); + } + bn.c(new __XmlNode(name).c(value)); + }, + }); + return bn; +}; +const se_AnalyticsS3BucketDestination = (input, context) => { + const bn = new __XmlNode(_ASBD); + if (input[_Fo] != null) { + bn.c(__XmlNode.of(_ASEFF, input[_Fo]).n(_Fo)); + } + if (input[_BAI] != null) { + bn.c(__XmlNode.of(_AIc, input[_BAI]).n(_BAI)); + } + if (input[_B] != null) { + bn.c(__XmlNode.of(_BN, input[_B]).n(_B)); + } + 
bn.cc(input, _P); + return bn; +}; +const se_BucketInfo = (input, context) => { + const bn = new __XmlNode(_BI); + bn.cc(input, _DR); + if (input[_Ty] != null) { + bn.c(__XmlNode.of(_BT, input[_Ty]).n(_Ty)); + } + return bn; +}; +const se_BucketLifecycleConfiguration = (input, context) => { + const bn = new __XmlNode(_BLC); + bn.l(input, "Rules", "Rule", () => se_LifecycleRules(input[_Rul], context)); + return bn; +}; +const se_BucketLoggingStatus = (input, context) => { + const bn = new __XmlNode(_BLS); + if (input[_LE] != null) { + bn.c(se_LoggingEnabled(input[_LE], context).n(_LE)); + } + return bn; +}; +const se_CompletedMultipartUpload = (input, context) => { + const bn = new __XmlNode(_CMU); + bn.l(input, "Parts", "Part", () => se_CompletedPartList(input[_Part], context)); + return bn; +}; +const se_CompletedPart = (input, context) => { + const bn = new __XmlNode(_CPo); + bn.cc(input, _ETa); + bn.cc(input, _CCRC); + bn.cc(input, _CCRCC); + bn.cc(input, _CCRCNVME); + bn.cc(input, _CSHA); + bn.cc(input, _CSHAh); + if (input[_PN] != null) { + bn.c(__XmlNode.of(_PN, String(input[_PN])).n(_PN)); + } + return bn; +}; +const se_CompletedPartList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + const n = se_CompletedPart(entry, context); + return n.n(_me); + }); +}; +const se_Condition = (input, context) => { + const bn = new __XmlNode(_Con); + bn.cc(input, _HECRE); + bn.cc(input, _KPE); + return bn; +}; +const se_CORSConfiguration = (input, context) => { + const bn = new __XmlNode(_CORSC); + bn.l(input, "CORSRules", "CORSRule", () => se_CORSRules(input[_CORSRu], context)); + return bn; +}; +const se_CORSRule = (input, context) => { + const bn = new __XmlNode(_CORSR); + bn.cc(input, _ID_); + bn.l(input, "AllowedHeaders", "AllowedHeader", () => se_AllowedHeaders(input[_AHl], context)); + bn.l(input, "AllowedMethods", "AllowedMethod", () => se_AllowedMethods(input[_AMl], context)); + bn.l(input, "AllowedOrigins", "AllowedOrigin", () => se_AllowedOrigins(input[_AOl], context)); + bn.l(input, "ExposeHeaders", "ExposeHeader", () => se_ExposeHeaders(input[_EH], context)); + if (input[_MAS] != null) { + bn.c(__XmlNode.of(_MAS, String(input[_MAS])).n(_MAS)); + } + return bn; +}; +const se_CORSRules = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + const n = se_CORSRule(entry, context); + return n.n(_me); + }); +}; +const se_CreateBucketConfiguration = (input, context) => { + const bn = new __XmlNode(_CBC); + if (input[_LC] != null) { + bn.c(__XmlNode.of(_BLCu, input[_LC]).n(_LC)); + } + if (input[_L] != null) { + bn.c(se_LocationInfo(input[_L], context).n(_L)); + } + if (input[_B] != null) { + bn.c(se_BucketInfo(input[_B], context).n(_B)); + } + return bn; +}; +const se_CSVInput = (input, context) => { + const bn = new __XmlNode(_CSVIn); + bn.cc(input, _FHI); + bn.cc(input, _Com); + bn.cc(input, _QEC); + bn.cc(input, _RD); + bn.cc(input, _FD); + bn.cc(input, _QCuo); + if (input[_AQRD] != null) { + bn.c(__XmlNode.of(_AQRD, String(input[_AQRD])).n(_AQRD)); + } + return bn; +}; +const se_CSVOutput = (input, context) => { + const bn = new __XmlNode(_CSVO); + bn.cc(input, _QF); + bn.cc(input, _QEC); + bn.cc(input, _RD); + bn.cc(input, _FD); + bn.cc(input, _QCuo); + return bn; +}; +const se_DefaultRetention = (input, context) => { + const bn = new __XmlNode(_DRe); + if (input[_Mo] != null) { + bn.c(__XmlNode.of(_OLRM, input[_Mo]).n(_Mo)); + } + if (input[_Da] != null) { + bn.c(__XmlNode.of(_Da, String(input[_Da])).n(_Da)); + } 
+ if (input[_Y] != null) { + bn.c(__XmlNode.of(_Y, String(input[_Y])).n(_Y)); + } + return bn; +}; +const se_Delete = (input, context) => { + const bn = new __XmlNode(_Del); + bn.l(input, "Objects", "Object", () => se_ObjectIdentifierList(input[_Ob], context)); + if (input[_Q] != null) { + bn.c(__XmlNode.of(_Q, String(input[_Q])).n(_Q)); + } + return bn; +}; +const se_DeleteMarkerReplication = (input, context) => { + const bn = new __XmlNode(_DMR); + if (input[_S] != null) { + bn.c(__XmlNode.of(_DMRS, input[_S]).n(_S)); + } + return bn; +}; +const se_Destination = (input, context) => { + const bn = new __XmlNode(_Des); + if (input[_B] != null) { + bn.c(__XmlNode.of(_BN, input[_B]).n(_B)); + } + if (input[_Ac] != null) { + bn.c(__XmlNode.of(_AIc, input[_Ac]).n(_Ac)); + } + bn.cc(input, _SC); + if (input[_ACT] != null) { + bn.c(se_AccessControlTranslation(input[_ACT], context).n(_ACT)); + } + if (input[_ECn] != null) { + bn.c(se_EncryptionConfiguration(input[_ECn], context).n(_ECn)); + } + if (input[_RTe] != null) { + bn.c(se_ReplicationTime(input[_RTe], context).n(_RTe)); + } + if (input[_Me] != null) { + bn.c(se_Metrics(input[_Me], context).n(_Me)); + } + return bn; +}; +const se_Encryption = (input, context) => { + const bn = new __XmlNode(_En); + if (input[_ETn] != null) { + bn.c(__XmlNode.of(_SSE, input[_ETn]).n(_ETn)); + } + if (input[_KMSKI] != null) { + bn.c(__XmlNode.of(_SSEKMSKI, input[_KMSKI]).n(_KMSKI)); + } + bn.cc(input, _KMSC); + return bn; +}; +const se_EncryptionConfiguration = (input, context) => { + const bn = new __XmlNode(_ECn); + bn.cc(input, _RKKID); + return bn; +}; +const se_ErrorDocument = (input, context) => { + const bn = new __XmlNode(_ED); + if (input[_K] != null) { + bn.c(__XmlNode.of(_OK, input[_K]).n(_K)); + } + return bn; +}; +const se_EventBridgeConfiguration = (input, context) => { + const bn = new __XmlNode(_EBC); + return bn; +}; +const se_EventList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + const n = __XmlNode.of(_Ev, entry); + return n.n(_me); + }); +}; +const se_ExistingObjectReplication = (input, context) => { + const bn = new __XmlNode(_EOR); + if (input[_S] != null) { + bn.c(__XmlNode.of(_EORS, input[_S]).n(_S)); + } + return bn; +}; +const se_ExposeHeaders = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + const n = __XmlNode.of(_EHx, entry); + return n.n(_me); + }); +}; +const se_FilterRule = (input, context) => { + const bn = new __XmlNode(_FR); + if (input[_N] != null) { + bn.c(__XmlNode.of(_FRN, input[_N]).n(_N)); + } + if (input[_Va] != null) { + bn.c(__XmlNode.of(_FRV, input[_Va]).n(_Va)); + } + return bn; +}; +const se_FilterRuleList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + const n = se_FilterRule(entry, context); + return n.n(_me); + }); +}; +const se_GlacierJobParameters = (input, context) => { + const bn = new __XmlNode(_GJP); + bn.cc(input, _Ti); + return bn; +}; +const se_Grant = (input, context) => { + const bn = new __XmlNode(_G); + if (input[_Gra] != null) { + const n = se_Grantee(input[_Gra], context).n(_Gra); + n.a("xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance"); + bn.c(n); + } + bn.cc(input, _Pe); + return bn; +}; +const se_Grantee = (input, context) => { + const bn = new __XmlNode(_Gra); + bn.cc(input, _DN); + bn.cc(input, _EA); + bn.cc(input, _ID_); + bn.cc(input, _URI); + bn.a("xsi:type", input[_Ty]); + return bn; +}; +const se_Grants = (input, context) => { + return input + .filter((e) 
=> e != null) + .map((entry) => { + const n = se_Grant(entry, context); + return n.n(_G); + }); +}; +const se_IndexDocument = (input, context) => { + const bn = new __XmlNode(_ID); + bn.cc(input, _Su); + return bn; +}; +const se_InputSerialization = (input, context) => { + const bn = new __XmlNode(_IS); + if (input[_CSV] != null) { + bn.c(se_CSVInput(input[_CSV], context).n(_CSV)); + } + bn.cc(input, _CTom); + if (input[_JSON] != null) { + bn.c(se_JSONInput(input[_JSON], context).n(_JSON)); + } + if (input[_Parq] != null) { + bn.c(se_ParquetInput(input[_Parq], context).n(_Parq)); + } + return bn; +}; +const se_IntelligentTieringAndOperator = (input, context) => { + const bn = new __XmlNode(_ITAO); + bn.cc(input, _P); + bn.l(input, "Tags", "Tag", () => se_TagSet(input[_Tag], context)); + return bn; +}; +const se_IntelligentTieringConfiguration = (input, context) => { + const bn = new __XmlNode(_ITC); + if (input[_I] != null) { + bn.c(__XmlNode.of(_ITI, input[_I]).n(_I)); + } + if (input[_F] != null) { + bn.c(se_IntelligentTieringFilter(input[_F], context).n(_F)); + } + if (input[_S] != null) { + bn.c(__XmlNode.of(_ITS, input[_S]).n(_S)); + } + bn.l(input, "Tierings", "Tiering", () => se_TieringList(input[_Tie], context)); + return bn; +}; +const se_IntelligentTieringFilter = (input, context) => { + const bn = new __XmlNode(_ITF); + bn.cc(input, _P); + if (input[_Ta] != null) { + bn.c(se_Tag(input[_Ta], context).n(_Ta)); + } + if (input[_A] != null) { + bn.c(se_IntelligentTieringAndOperator(input[_A], context).n(_A)); + } + return bn; +}; +const se_InventoryConfiguration = (input, context) => { + const bn = new __XmlNode(_IC); + if (input[_Des] != null) { + bn.c(se_InventoryDestination(input[_Des], context).n(_Des)); + } + if (input[_IE] != null) { + bn.c(__XmlNode.of(_IE, String(input[_IE])).n(_IE)); + } + if (input[_F] != null) { + bn.c(se_InventoryFilter(input[_F], context).n(_F)); + } + if (input[_I] != null) { + bn.c(__XmlNode.of(_II, input[_I]).n(_I)); + } + if (input[_IOV] != null) { + bn.c(__XmlNode.of(_IIOV, input[_IOV]).n(_IOV)); + } + bn.lc(input, "OptionalFields", "OptionalFields", () => se_InventoryOptionalFields(input[_OF], context)); + if (input[_Sc] != null) { + bn.c(se_InventorySchedule(input[_Sc], context).n(_Sc)); + } + return bn; +}; +const se_InventoryDestination = (input, context) => { + const bn = new __XmlNode(_IDn); + if (input[_SBD] != null) { + bn.c(se_InventoryS3BucketDestination(input[_SBD], context).n(_SBD)); + } + return bn; +}; +const se_InventoryEncryption = (input, context) => { + const bn = new __XmlNode(_IEn); + if (input[_SSES] != null) { + bn.c(se_SSES3(input[_SSES], context).n(_SS)); + } + if (input[_SSEKMS] != null) { + bn.c(se_SSEKMS(input[_SSEKMS], context).n(_SK)); + } + return bn; +}; +const se_InventoryFilter = (input, context) => { + const bn = new __XmlNode(_IF); + bn.cc(input, _P); + return bn; +}; +const se_InventoryOptionalFields = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + const n = __XmlNode.of(_IOF, entry); + return n.n(_Fi); + }); +}; +const se_InventoryS3BucketDestination = (input, context) => { + const bn = new __XmlNode(_ISBD); + bn.cc(input, _AIc); + if (input[_B] != null) { + bn.c(__XmlNode.of(_BN, input[_B]).n(_B)); + } + if (input[_Fo] != null) { + bn.c(__XmlNode.of(_IFn, input[_Fo]).n(_Fo)); + } + bn.cc(input, _P); + if (input[_En] != null) { + bn.c(se_InventoryEncryption(input[_En], context).n(_En)); + } + return bn; +}; +const se_InventorySchedule = (input, context) => { + const bn = 
[ Elided: the serializers se_InventorySchedule through se_NotificationConfiguration. This stretch includes the one union serializer in the file, se_MetricsFilter, which dispatches through MetricsFilter.visit with a fallback handler that rejects unknown members; the visitor shape is sketched just below. ]
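se_MetricsFilter cannot simply probe fields the way the other serializers do: MetricsFilter is a smithy union, so exactly one member is set and serialization dispatches through a visitor. A sketch of that dispatch follows, with the union trimmed to three of the real members for brevity (the full union also has AccessPointArn); the type and function names are local illustrations, not the SDK's exports.

    // Sketch of smithy-style union dispatch (trimmed member set).
    type Tag = { Key: string; Value: string };
    type MetricsFilter =
        | { Prefix: string }
        | { Tag: Tag }
        | { And: { Prefix?: string; Tags?: Tag[] } }
        | { $unknown: [string, unknown] };

    interface MetricsFilterVisitor<R> {
        Prefix: (value: string) => R;
        Tag: (value: Tag) => R;
        And: (value: { Prefix?: string; Tags?: Tag[] }) => R;
        _: (name: string, value: unknown) => R;
    }

    function visit<R>(u: MetricsFilter, v: MetricsFilterVisitor<R>): R {
        if ("Prefix" in u) return v.Prefix(u.Prefix);
        if ("Tag" in u) return v.Tag(u.Tag);
        if ("And" in u) return v.And(u.And);
        const [name, value] = u.$unknown;
        return v._(name, value);
    }

    // A filter serializes as exactly one child element:
    const filterXml = visit<string>({ Prefix: "logs/" }, {
        Prefix: (p) => `<Prefix>${p}</Prefix>`,
        Tag: (t) => `<Tag><Key>${t.Key}</Key><Value>${t.Value}</Value></Tag>`,
        And: () => `<And>...</And>`,
        // Mirrors the generated fallback: unknown members cannot be serialized.
        _: (name) => { throw new Error(`Unable to serialize unknown union member ${name}`); },
    });
    console.log(filterXml); // <Prefix>logs/</Prefix>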
[ Elided: the serializers se_NotificationConfigurationFilter through se_ServerSideEncryptionConfiguration. Worth noting in this stretch: se_ObjectIdentifier encodes LastModifiedTime with __dateToUtcString, while the Date members of se_LifecycleExpiration, se_ObjectLockRetention, and se_Transition go through __serializeDateTime; the two timestamp formats are sketched just below. ]
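Two timestamp encodings appear in the elided serializers, and the distinction is easy to miss. Assuming the usual smithy conventions, __serializeDateTime emits RFC 3339 date-time strings and __dateToUtcString emits the RFC 7231 HTTP-date form. The helpers below are local re-implementations sketching that assumed behavior, not the actual @smithy helpers.

    // Local sketches of the two timestamp encoders (assumed behavior).
    const serializeDateTime = (d: Date): string =>
        d.toISOString().replace(/\.\d{3}Z$/, "Z"); // RFC 3339, seconds precision

    const dateToUtcString = (d: Date): string =>
        d.toUTCString(); // RFC 7231 HTTP-date

    const when = new Date(Date.UTC(2024, 4, 1)); // 2024-05-01T00:00:00Z
    console.log(serializeDateTime(when)); // "2024-05-01T00:00:00Z"
    console.log(dateToUtcString(when));   // "Wed, 01 May 2024 00:00:00 GMT"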
[ Elided: the remaining serializers, se_ServerSideEncryptionRule through se_WebsiteConfiguration, followed by the first of the response deserializers, de_AbortIncompleteMultipartUpload through de_AnalyticsAndOperator. The de_* helpers undo the quirks of generic XML-to-object parsing, in particular single-element lists and empty wrappers, as sketched just below. ]
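The de_* side repeatedly guards with checks like output.Tag === "" and __getArrayIfSingleItem before mapping a list (see de_AnalyticsAndOperator above). The reason: a generic XML-to-object parse is lossy about lists. An empty wrapper parses to an empty string, a single element parses to a bare object, and only multiple elements parse to an array. Below is a sketch of the normalization; the parsed shapes are hypothetical examples, and getArrayIfSingleItem is a re-implementation of the guard, not the SDK import.

    // Re-implementation of the __getArrayIfSingleItem guard (illustrative).
    const getArrayIfSingleItem = <T>(value: T | T[]): T[] =>
        Array.isArray(value) ? value : [value];

    type ParsedTag = { Key: string; Value: string };

    // One <Tag> element parses to a bare object, not a one-element array:
    const one: { Tag: ParsedTag | ParsedTag[] } = {
        Tag: { Key: "env", Value: "prod" },
    };
    // Several <Tag> elements parse to an array:
    const many: { Tag: ParsedTag | ParsedTag[] } = {
        Tag: [{ Key: "a", Value: "1" }, { Key: "b", Value: "2" }],
    };

    console.log(getArrayIfSingleItem(one.Tag).length);  // 1
    console.log(getArrayIfSingleItem(many.Tag).length); // 2
    // An empty wrapper parses to "", which is why the generated code checks
    // output.Tag === "" first and short-circuits to [].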
[ Elided: the deserializers de_AnalyticsConfiguration through de_ExistingObjectReplication, including the union deserializer de_AnalyticsFilter, which probes the known members and parks anything unrecognized under $unknown; sketched just below. ]
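de_AnalyticsFilter (elided above) shows the receiving end of a union: probe the known members in order and, if none matches, return the first entry under $unknown so a member added by a newer service version is surfaced instead of crashing an older client. A minimal sketch, with the member set trimmed and the function name a local illustration:

    // Sketch of union deserialization with an $unknown fallback, mirroring
    // de_AnalyticsFilter above (member set trimmed for brevity).
    type AnalyticsFilter =
        | { Prefix: string }
        | { Tag: { Key: string; Value: string } }
        | { $unknown: [string, unknown] };

    function deAnalyticsFilter(output: Record<string, unknown>): AnalyticsFilter {
        if (output.Prefix != null) return { Prefix: String(output.Prefix) };
        if (output.Tag != null) {
            return { Tag: output.Tag as { Key: string; Value: string } };
        }
        return { $unknown: Object.entries(output)[0] as [string, unknown] };
    }

    console.log(deAnalyticsFilter({ Prefix: "reports/" }));
    // { Prefix: 'reports/' }
    console.log(deAnalyticsFilter({ BrandNewMember: { X: 1 } }));
    // { $unknown: [ 'BrandNewMember', { X: 1 } ] }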
[ Elided: the deserializers de_ExposeHeaders through de_MultipartUploadList. Every scalar in this stretch is funneled through a strict parser (__expectString, __strictParseInt32/__strictParseLong, __parseBoolean, __parseRfc3339DateTimeWithOffset) rather than trusted raw; the contract is sketched just below. ]
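None of the deserializers trust raw XML text: every number goes through __strictParseInt32 or __strictParseLong and every flag through __parseBoolean, so a malformed or truncated body fails loudly instead of yielding NaN or a truthy non-empty string. The functions below are local sketches of that assumed contract, not the actual @smithy/smithy-client helpers.

    // Local sketches of the strict parsers (assumed contract: reject anything
    // that is not exactly the expected lexical form).
    const strictParseInt32 = (value: string): number => {
        if (!/^-?\d+$/.test(value)) {
            throw new TypeError(`Expected an integer, got "${value}"`);
        }
        const n = Number(value);
        if (n < -(2 ** 31) || n > 2 ** 31 - 1) {
            throw new RangeError(`Value out of int32 range: ${value}`);
        }
        return n;
    };

    const parseBoolean = (value: string): boolean => {
        if (value === "true") return true;
        if (value === "false") return false;
        throw new TypeError(`Expected "true" or "false", got "${value}"`);
    };

    console.log(strictParseInt32("30")); // 30
    console.log(parseBoolean("false"));  // false
    // strictParseInt32("30 days") and parseBoolean("TRUE") both throw.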
[ Elided: the deserializers de_NoncurrentVersionExpiration through de_S3TablesDestinationResult. The generated stream continues below with de_ServerSideEncryptionByDefault. ]
+const de_ServerSideEncryptionByDefault = (output, context) => {
const contents = {}; + if (output[_SSEA] != null) { + contents[_SSEA] = __expectString(output[_SSEA]); + } + if (output[_KMSMKID] != null) { + contents[_KMSMKID] = __expectString(output[_KMSMKID]); + } + return contents; +}; +const de_ServerSideEncryptionConfiguration = (output, context) => { + const contents = {}; + if (output.Rule === "") { + contents[_Rul] = []; + } + else if (output[_Ru] != null) { + contents[_Rul] = de_ServerSideEncryptionRules(__getArrayIfSingleItem(output[_Ru]), context); + } + return contents; +}; +const de_ServerSideEncryptionRule = (output, context) => { + const contents = {}; + if (output[_ASSEBD] != null) { + contents[_ASSEBD] = de_ServerSideEncryptionByDefault(output[_ASSEBD], context); + } + if (output[_BKE] != null) { + contents[_BKE] = __parseBoolean(output[_BKE]); + } + return contents; +}; +const de_ServerSideEncryptionRules = (output, context) => { + return (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ServerSideEncryptionRule(entry, context); + }); +}; +const de_SessionCredentials = (output, context) => { + const contents = {}; + if (output[_AKI] != null) { + contents[_AKI] = __expectString(output[_AKI]); + } + if (output[_SAK] != null) { + contents[_SAK] = __expectString(output[_SAK]); + } + if (output[_ST] != null) { + contents[_ST] = __expectString(output[_ST]); + } + if (output[_Exp] != null) { + contents[_Exp] = __expectNonNull(__parseRfc3339DateTimeWithOffset(output[_Exp])); + } + return contents; +}; +const de_SimplePrefix = (output, context) => { + const contents = {}; + return contents; +}; +const de_SourceSelectionCriteria = (output, context) => { + const contents = {}; + if (output[_SKEO] != null) { + contents[_SKEO] = de_SseKmsEncryptedObjects(output[_SKEO], context); + } + if (output[_RM] != null) { + contents[_RM] = de_ReplicaModifications(output[_RM], context); + } + return contents; +}; +const de_SSEKMS = (output, context) => { + const contents = {}; + if (output[_KI] != null) { + contents[_KI] = __expectString(output[_KI]); + } + return contents; +}; +const de_SseKmsEncryptedObjects = (output, context) => { + const contents = {}; + if (output[_S] != null) { + contents[_S] = __expectString(output[_S]); + } + return contents; +}; +const de_SSES3 = (output, context) => { + const contents = {}; + return contents; +}; +const de_Stats = (output, context) => { + const contents = {}; + if (output[_BS] != null) { + contents[_BS] = __strictParseLong(output[_BS]); + } + if (output[_BP] != null) { + contents[_BP] = __strictParseLong(output[_BP]); + } + if (output[_BRy] != null) { + contents[_BRy] = __strictParseLong(output[_BRy]); + } + return contents; +}; +const de_StorageClassAnalysis = (output, context) => { + const contents = {}; + if (output[_DE] != null) { + contents[_DE] = de_StorageClassAnalysisDataExport(output[_DE], context); + } + return contents; +}; +const de_StorageClassAnalysisDataExport = (output, context) => { + const contents = {}; + if (output[_OSV] != null) { + contents[_OSV] = __expectString(output[_OSV]); + } + if (output[_Des] != null) { + contents[_Des] = de_AnalyticsExportDestination(output[_Des], context); + } + return contents; +}; +const de_Tag = (output, context) => { + const contents = {}; + if (output[_K] != null) { + contents[_K] = __expectString(output[_K]); + } + if (output[_Va] != null) { + contents[_Va] = __expectString(output[_Va]); + } + return contents; +}; +const de_TagSet = (output, context) => { + return (output || []) + .filter((e) => e != null) + .map((entry) => { + return 
de_Tag(entry, context); + }); +}; +const de_TargetGrant = (output, context) => { + const contents = {}; + if (output[_Gra] != null) { + contents[_Gra] = de_Grantee(output[_Gra], context); + } + if (output[_Pe] != null) { + contents[_Pe] = __expectString(output[_Pe]); + } + return contents; +}; +const de_TargetGrants = (output, context) => { + return (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_TargetGrant(entry, context); + }); +}; +const de_TargetObjectKeyFormat = (output, context) => { + const contents = {}; + if (output[_SPi] != null) { + contents[_SPi] = de_SimplePrefix(output[_SPi], context); + } + if (output[_PP] != null) { + contents[_PP] = de_PartitionedPrefix(output[_PP], context); + } + return contents; +}; +const de_Tiering = (output, context) => { + const contents = {}; + if (output[_Da] != null) { + contents[_Da] = __strictParseInt32(output[_Da]); + } + if (output[_AT] != null) { + contents[_AT] = __expectString(output[_AT]); + } + return contents; +}; +const de_TieringList = (output, context) => { + return (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_Tiering(entry, context); + }); +}; +const de_TopicConfiguration = (output, context) => { + const contents = {}; + if (output[_I] != null) { + contents[_I] = __expectString(output[_I]); + } + if (output[_Top] != null) { + contents[_TA] = __expectString(output[_Top]); + } + if (output.Event === "") { + contents[_Eve] = []; + } + else if (output[_Ev] != null) { + contents[_Eve] = de_EventList(__getArrayIfSingleItem(output[_Ev]), context); + } + if (output[_F] != null) { + contents[_F] = de_NotificationConfigurationFilter(output[_F], context); + } + return contents; +}; +const de_TopicConfigurationList = (output, context) => { + return (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_TopicConfiguration(entry, context); + }); +}; +const de_Transition = (output, context) => { + const contents = {}; + if (output[_Dat] != null) { + contents[_Dat] = __expectNonNull(__parseRfc3339DateTimeWithOffset(output[_Dat])); + } + if (output[_Da] != null) { + contents[_Da] = __strictParseInt32(output[_Da]); + } + if (output[_SC] != null) { + contents[_SC] = __expectString(output[_SC]); + } + return contents; +}; +const de_TransitionList = (output, context) => { + return (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_Transition(entry, context); + }); +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); +const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); +const _A = "And"; +const _AAO = "AnalyticsAndOperator"; +const _AC = "AnalyticsConfiguration"; +const _ACL = "ACL"; +const _ACLc = "AccessControlList"; +const _ACLn = "AnalyticsConfigurationList"; +const _ACP = "AccessControlPolicy"; +const _ACT = "AccessControlTranslation"; +const _ACc = "AccelerateConfiguration"; +const _AD = "AbortDate"; +const _AED = "AnalyticsExportDestination"; +const _AF = "AnalyticsFilter"; +const _AH = "AllowedHeader"; +const _AHl = "AllowedHeaders"; +const _AI = "AnalyticsId"; +const _AIMU = "AbortIncompleteMultipartUpload"; +const _AIc = "AccountId"; +const _AKI = "AccessKeyId"; +const _AM = "AllowedMethod"; +const _AMl = "AllowedMethods"; +const _AO = "AllowedOrigin"; +const _AOl = "AllowedOrigins"; +const _APA = "AccessPointAlias"; +const _APAc = "AccessPointArn"; +const _AQRD = "AllowQuotedRecordDelimiter"; +const _AR = "AcceptRanges"; +const _ARI = "AbortRuleId"; +const _AS = "ArchiveStatus"; +const _ASBD = "AnalyticsS3BucketDestination"; +const _ASEFF = "AnalyticsS3ExportFileFormat"; +const _ASSEBD = "ApplyServerSideEncryptionByDefault"; +const _AT = "AccessTier"; +const _Ac = "Account"; +const _B = "Bucket"; +const _BAI = "BucketAccountId"; +const _BAS = "BucketAccelerateStatus"; +const _BGR = "BypassGovernanceRetention"; +const _BI = "BucketInfo"; +const _BKE = "BucketKeyEnabled"; +const _BLC = "BucketLifecycleConfiguration"; +const _BLCu = "BucketLocationConstraint"; +const _BLN = "BucketLocationName"; +const _BLP = "BucketLogsPermission"; +const _BLS = "BucketLoggingStatus"; +const _BLT = "BucketLocationType"; +const _BN = "BucketName"; +const _BP = "BytesProcessed"; +const _BPA = "BlockPublicAcls"; +const _BPP = "BlockPublicPolicy"; +const _BR = "BucketRegion"; +const _BRy = "BytesReturned"; +const _BS = "BytesScanned"; +const _BT = "BucketType"; +const _BVS = "BucketVersioningStatus"; +const _Bu = "Buckets"; +const _C = "Credentials"; +const _CA = "ChecksumAlgorithm"; +const _CACL = "CannedACL"; +const _CBC = "CreateBucketConfiguration"; +const _CC = "CacheControl"; +const _CCRC = "ChecksumCRC32"; +const _CCRCC = "ChecksumCRC32C"; +const _CCRCNVME = "ChecksumCRC64NVME"; +const _CD = "ContentDisposition"; +const _CDr = "CreationDate"; +const _CE = "ContentEncoding"; +const _CF = "CloudFunction"; +const _CFC = "CloudFunctionConfiguration"; +const _CL = "ContentLanguage"; +const _CLo = "ContentLength"; +const _CM = "ChecksumMode"; +const _CMD = "ContentMD5"; +const _CMU = "CompletedMultipartUpload"; +const _CORSC = "CORSConfiguration"; +const _CORSR = "CORSRule"; +const _CORSRu = "CORSRules"; +const _CP = "CommonPrefixes"; +const _CPo = "CompletedPart"; +const _CR = "ContentRange"; +const _CRSBA = "ConfirmRemoveSelfBucketAccess"; +const _CS = "CopySource"; +const _CSHA = "ChecksumSHA1"; +const _CSHAh = "ChecksumSHA256"; +const _CSIM = "CopySourceIfMatch"; +const _CSIMS = "CopySourceIfModifiedSince"; +const _CSINM = "CopySourceIfNoneMatch"; +const _CSIUS = "CopySourceIfUnmodifiedSince"; +const _CSR = "CopySourceRange"; +const _CSSSECA = "CopySourceSSECustomerAlgorithm"; +const _CSSSECK = "CopySourceSSECustomerKey"; +const _CSSSECKMD = "CopySourceSSECustomerKeyMD5"; +const _CSV = "CSV"; +const _CSVI = "CopySourceVersionId"; +const _CSVIn = "CSVInput"; +const _CSVO = "CSVOutput"; +const _CT = 
"ChecksumType"; +const _CTo = "ContentType"; +const _CTom = "CompressionType"; +const _CTon = "ContinuationToken"; +const _Ch = "Checksum"; +const _Co = "Contents"; +const _Cod = "Code"; +const _Com = "Comments"; +const _Con = "Condition"; +const _D = "Delimiter"; +const _DAI = "DaysAfterInitiation"; +const _DE = "DataExport"; +const _DM = "DeleteMarker"; +const _DMR = "DeleteMarkerReplication"; +const _DMRS = "DeleteMarkerReplicationStatus"; +const _DMVI = "DeleteMarkerVersionId"; +const _DMe = "DeleteMarkers"; +const _DN = "DisplayName"; +const _DR = "DataRedundancy"; +const _DRe = "DefaultRetention"; +const _Da = "Days"; +const _Dat = "Date"; +const _De = "Deleted"; +const _Del = "Delete"; +const _Des = "Destination"; +const _Desc = "Description"; +const _E = "Expires"; +const _EA = "EmailAddress"; +const _EBC = "EventBridgeConfiguration"; +const _EBO = "ExpectedBucketOwner"; +const _EC = "ErrorCode"; +const _ECn = "EncryptionConfiguration"; +const _ED = "ErrorDocument"; +const _EH = "ExposeHeaders"; +const _EHx = "ExposeHeader"; +const _EM = "ErrorMessage"; +const _EODM = "ExpiredObjectDeleteMarker"; +const _EOR = "ExistingObjectReplication"; +const _EORS = "ExistingObjectReplicationStatus"; +const _ERP = "EnableRequestProgress"; +const _ES = "ExpiresString"; +const _ESBO = "ExpectedSourceBucketOwner"; +const _ESx = "ExpirationStatus"; +const _ET = "EncodingType"; +const _ETa = "ETag"; +const _ETn = "EncryptionType"; +const _ETv = "EventThreshold"; +const _ETx = "ExpressionType"; +const _En = "Encryption"; +const _Ena = "Enabled"; +const _End = "End"; +const _Er = "Error"; +const _Err = "Errors"; +const _Ev = "Event"; +const _Eve = "Events"; +const _Ex = "Expression"; +const _Exp = "Expiration"; +const _F = "Filter"; +const _FD = "FieldDelimiter"; +const _FHI = "FileHeaderInfo"; +const _FO = "FetchOwner"; +const _FR = "FilterRule"; +const _FRN = "FilterRuleName"; +const _FRV = "FilterRuleValue"; +const _FRi = "FilterRules"; +const _Fi = "Field"; +const _Fo = "Format"; +const _Fr = "Frequency"; +const _G = "Grant"; +const _GFC = "GrantFullControl"; +const _GJP = "GlacierJobParameters"; +const _GR = "GrantRead"; +const _GRACP = "GrantReadACP"; +const _GW = "GrantWrite"; +const _GWACP = "GrantWriteACP"; +const _Gr = "Grants"; +const _Gra = "Grantee"; +const _HECRE = "HttpErrorCodeReturnedEquals"; +const _HN = "HostName"; +const _HRC = "HttpRedirectCode"; +const _I = "Id"; +const _IC = "InventoryConfiguration"; +const _ICL = "InventoryConfigurationList"; +const _ID = "IndexDocument"; +const _ID_ = "ID"; +const _IDn = "InventoryDestination"; +const _IE = "IsEnabled"; +const _IEn = "InventoryEncryption"; +const _IF = "InventoryFilter"; +const _IFn = "InventoryFormat"; +const _IFnv = "InventoryFrequency"; +const _II = "InventoryId"; +const _IIOV = "InventoryIncludedObjectVersions"; +const _IL = "IsLatest"; +const _IM = "IfMatch"; +const _IMIT = "IfMatchInitiatedTime"; +const _IMLMT = "IfMatchLastModifiedTime"; +const _IMS = "IfMatchSize"; +const _IMSf = "IfModifiedSince"; +const _INM = "IfNoneMatch"; +const _IOF = "InventoryOptionalField"; +const _IOV = "IncludedObjectVersions"; +const _IP = "IsPublic"; +const _IPA = "IgnorePublicAcls"; +const _IRIP = "IsRestoreInProgress"; +const _IS = "InputSerialization"; +const _ISBD = "InventoryS3BucketDestination"; +const _ISn = "InventorySchedule"; +const _IT = "IsTruncated"; +const _ITAO = "IntelligentTieringAndOperator"; +const _ITAT = "IntelligentTieringAccessTier"; +const _ITC = "IntelligentTieringConfiguration"; +const _ITCL = 
"IntelligentTieringConfigurationList"; +const _ITD = "IntelligentTieringDays"; +const _ITF = "IntelligentTieringFilter"; +const _ITI = "IntelligentTieringId"; +const _ITS = "IntelligentTieringStatus"; +const _IUS = "IfUnmodifiedSince"; +const _In = "Initiator"; +const _Ini = "Initiated"; +const _JSON = "JSON"; +const _JSONI = "JSONInput"; +const _JSONO = "JSONOutput"; +const _JSONT = "JSONType"; +const _K = "Key"; +const _KC = "KeyCount"; +const _KI = "KeyId"; +const _KM = "KeyMarker"; +const _KMSC = "KMSContext"; +const _KMSKI = "KMSKeyId"; +const _KMSMKID = "KMSMasterKeyID"; +const _KPE = "KeyPrefixEquals"; +const _L = "Location"; +const _LC = "LocationConstraint"; +const _LE = "LoggingEnabled"; +const _LEi = "LifecycleExpiration"; +const _LFA = "LambdaFunctionArn"; +const _LFC = "LambdaFunctionConfigurations"; +const _LFCa = "LambdaFunctionConfiguration"; +const _LI = "LocationInfo"; +const _LM = "LastModified"; +const _LMT = "LastModifiedTime"; +const _LNAS = "LocationNameAsString"; +const _LP = "LocationPrefix"; +const _LR = "LifecycleRule"; +const _LRAO = "LifecycleRuleAndOperator"; +const _LRF = "LifecycleRuleFilter"; +const _LT = "LocationType"; +const _M = "Marker"; +const _MAO = "MetricsAndOperator"; +const _MAS = "MaxAgeSeconds"; +const _MB = "MaxBuckets"; +const _MC = "MetricsConfiguration"; +const _MCL = "MetricsConfigurationList"; +const _MD = "MetadataDirective"; +const _MDB = "MaxDirectoryBuckets"; +const _MDf = "MfaDelete"; +const _ME = "MetadataEntry"; +const _MF = "MetricsFilter"; +const _MFA = "MFA"; +const _MFAD = "MFADelete"; +const _MI = "MetricsId"; +const _MK = "MaxKeys"; +const _MKe = "MetadataKey"; +const _MM = "MissingMeta"; +const _MOS = "MpuObjectSize"; +const _MP = "MaxParts"; +const _MS = "MetricsStatus"; +const _MTC = "MetadataTableConfiguration"; +const _MTCR = "MetadataTableConfigurationResult"; +const _MU = "MaxUploads"; +const _MV = "MetadataValue"; +const _Me = "Metrics"; +const _Mes = "Message"; +const _Mi = "Minutes"; +const _Mo = "Mode"; +const _N = "Name"; +const _NC = "NotificationConfiguration"; +const _NCF = "NotificationConfigurationFilter"; +const _NCT = "NextContinuationToken"; +const _ND = "NoncurrentDays"; +const _NI = "NotificationId"; +const _NKM = "NextKeyMarker"; +const _NM = "NextMarker"; +const _NNV = "NewerNoncurrentVersions"; +const _NPNM = "NextPartNumberMarker"; +const _NUIM = "NextUploadIdMarker"; +const _NVE = "NoncurrentVersionExpiration"; +const _NVIM = "NextVersionIdMarker"; +const _NVT = "NoncurrentVersionTransitions"; +const _NVTo = "NoncurrentVersionTransition"; +const _O = "Owner"; +const _OA = "ObjectAttributes"; +const _OC = "OwnershipControls"; +const _OCACL = "ObjectCannedACL"; +const _OCR = "OwnershipControlsRule"; +const _OF = "OptionalFields"; +const _OI = "ObjectIdentifier"; +const _OK = "ObjectKey"; +const _OL = "OutputLocation"; +const _OLC = "ObjectLockConfiguration"; +const _OLE = "ObjectLockEnabled"; +const _OLEFB = "ObjectLockEnabledForBucket"; +const _OLLH = "ObjectLockLegalHold"; +const _OLLHS = "ObjectLockLegalHoldStatus"; +const _OLM = "ObjectLockMode"; +const _OLR = "ObjectLockRetention"; +const _OLRM = "ObjectLockRetentionMode"; +const _OLRUD = "ObjectLockRetainUntilDate"; +const _OLRb = "ObjectLockRule"; +const _OO = "ObjectOwnership"; +const _OOA = "OptionalObjectAttributes"; +const _OOw = "OwnerOverride"; +const _OP = "ObjectParts"; +const _OS = "OutputSerialization"; +const _OSGT = "ObjectSizeGreaterThan"; +const _OSGTB = "ObjectSizeGreaterThanBytes"; +const _OSLT = "ObjectSizeLessThan"; +const 
_OSLTB = "ObjectSizeLessThanBytes"; +const _OSV = "OutputSchemaVersion"; +const _OSb = "ObjectSize"; +const _OVI = "ObjectVersionId"; +const _Ob = "Objects"; +const _P = "Prefix"; +const _PABC = "PublicAccessBlockConfiguration"; +const _PC = "PartsCount"; +const _PDS = "PartitionDateSource"; +const _PI = "ParquetInput"; +const _PN = "PartNumber"; +const _PNM = "PartNumberMarker"; +const _PP = "PartitionedPrefix"; +const _Pa = "Payer"; +const _Par = "Part"; +const _Parq = "Parquet"; +const _Part = "Parts"; +const _Pe = "Permission"; +const _Pr = "Protocol"; +const _Pri = "Priority"; +const _Q = "Quiet"; +const _QA = "QueueArn"; +const _QC = "QueueConfiguration"; +const _QCu = "QueueConfigurations"; +const _QCuo = "QuoteCharacter"; +const _QEC = "QuoteEscapeCharacter"; +const _QF = "QuoteFields"; +const _Qu = "Queue"; +const _R = "Range"; +const _RART = "RedirectAllRequestsTo"; +const _RC = "RequestCharged"; +const _RCC = "ResponseCacheControl"; +const _RCD = "ResponseContentDisposition"; +const _RCE = "ResponseContentEncoding"; +const _RCL = "ResponseContentLanguage"; +const _RCT = "ResponseContentType"; +const _RCe = "ReplicationConfiguration"; +const _RD = "RecordDelimiter"; +const _RE = "ResponseExpires"; +const _RED = "RestoreExpiryDate"; +const _RKKID = "ReplicaKmsKeyID"; +const _RKPW = "ReplaceKeyPrefixWith"; +const _RKW = "ReplaceKeyWith"; +const _RM = "ReplicaModifications"; +const _RMS = "ReplicaModificationsStatus"; +const _ROP = "RestoreOutputPath"; +const _RP = "RequestPayer"; +const _RPB = "RestrictPublicBuckets"; +const _RPC = "RequestPaymentConfiguration"; +const _RPe = "RequestProgress"; +const _RR = "RequestRoute"; +const _RRAO = "ReplicationRuleAndOperator"; +const _RRF = "ReplicationRuleFilter"; +const _RRS = "ReplicationRuleStatus"; +const _RRT = "RestoreRequestType"; +const _RRe = "ReplicationRule"; +const _RRes = "RestoreRequest"; +const _RRo = "RoutingRules"; +const _RRou = "RoutingRule"; +const _RS = "ReplicationStatus"; +const _RSe = "RestoreStatus"; +const _RT = "RequestToken"; +const _RTS = "ReplicationTimeStatus"; +const _RTV = "ReplicationTimeValue"; +const _RTe = "ReplicationTime"; +const _RUD = "RetainUntilDate"; +const _Re = "Restore"; +const _Red = "Redirect"; +const _Ro = "Role"; +const _Ru = "Rule"; +const _Rul = "Rules"; +const _S = "Status"; +const _SA = "StartAfter"; +const _SAK = "SecretAccessKey"; +const _SBD = "S3BucketDestination"; +const _SC = "StorageClass"; +const _SCA = "StorageClassAnalysis"; +const _SCADE = "StorageClassAnalysisDataExport"; +const _SCASV = "StorageClassAnalysisSchemaVersion"; +const _SCt = "StatusCode"; +const _SDV = "SkipDestinationValidation"; +const _SK = "SSE-KMS"; +const _SKEO = "SseKmsEncryptedObjects"; +const _SKEOS = "SseKmsEncryptedObjectsStatus"; +const _SKF = "S3KeyFilter"; +const _SKe = "S3Key"; +const _SL = "S3Location"; +const _SM = "SessionMode"; +const _SOCR = "SelectObjectContentRequest"; +const _SP = "SelectParameters"; +const _SPi = "SimplePrefix"; +const _SR = "ScanRange"; +const _SS = "SSE-S3"; +const _SSC = "SourceSelectionCriteria"; +const _SSE = "ServerSideEncryption"; +const _SSEA = "SSEAlgorithm"; +const _SSEBD = "ServerSideEncryptionByDefault"; +const _SSEC = "ServerSideEncryptionConfiguration"; +const _SSECA = "SSECustomerAlgorithm"; +const _SSECK = "SSECustomerKey"; +const _SSECKMD = "SSECustomerKeyMD5"; +const _SSEKMS = "SSEKMS"; +const _SSEKMSEC = "SSEKMSEncryptionContext"; +const _SSEKMSKI = "SSEKMSKeyId"; +const _SSER = "ServerSideEncryptionRule"; +const _SSES = "SSES3"; +const _ST = 
"SessionToken"; +const _STBA = "S3TablesBucketArn"; +const _STD = "S3TablesDestination"; +const _STDR = "S3TablesDestinationResult"; +const _STN = "S3TablesName"; +const _S_ = "S3"; +const _Sc = "Schedule"; +const _Se = "Setting"; +const _Si = "Size"; +const _St = "Start"; +const _Su = "Suffix"; +const _T = "Tagging"; +const _TA = "TopicArn"; +const _TAa = "TableArn"; +const _TB = "TargetBucket"; +const _TBA = "TableBucketArn"; +const _TC = "TagCount"; +const _TCo = "TopicConfiguration"; +const _TCop = "TopicConfigurations"; +const _TD = "TaggingDirective"; +const _TDMOS = "TransitionDefaultMinimumObjectSize"; +const _TG = "TargetGrants"; +const _TGa = "TargetGrant"; +const _TN = "TableName"; +const _TNa = "TableNamespace"; +const _TOKF = "TargetObjectKeyFormat"; +const _TP = "TargetPrefix"; +const _TPC = "TotalPartsCount"; +const _TS = "TagSet"; +const _TSC = "TransitionStorageClass"; +const _Ta = "Tag"; +const _Tag = "Tags"; +const _Ti = "Tier"; +const _Tie = "Tierings"; +const _Tier = "Tiering"; +const _Tim = "Time"; +const _To = "Token"; +const _Top = "Topic"; +const _Tr = "Transitions"; +const _Tra = "Transition"; +const _Ty = "Type"; +const _U = "Upload"; +const _UI = "UploadId"; +const _UIM = "UploadIdMarker"; +const _UM = "UserMetadata"; +const _URI = "URI"; +const _Up = "Uploads"; +const _V = "Version"; +const _VC = "VersionCount"; +const _VCe = "VersioningConfiguration"; +const _VI = "VersionId"; +const _VIM = "VersionIdMarker"; +const _Va = "Value"; +const _Ve = "Versions"; +const _WC = "WebsiteConfiguration"; +const _WOB = "WriteOffsetBytes"; +const _WRL = "WebsiteRedirectLocation"; +const _Y = "Years"; +const _a = "analytics"; +const _ac = "accelerate"; +const _acl = "acl"; +const _ar = "accept-ranges"; +const _at = "attributes"; +const _br = "bucket-region"; +const _c = "cors"; +const _cc = "cache-control"; +const _cd = "content-disposition"; +const _ce = "content-encoding"; +const _cl = "content-language"; +const _cl_ = "content-length"; +const _cm = "content-md5"; +const _cr = "content-range"; +const _ct = "content-type"; +const _ct_ = "continuation-token"; +const _d = "delete"; +const _de = "delimiter"; +const _e = "expires"; +const _en = "encryption"; +const _et = "encoding-type"; +const _eta = "etag"; +const _ex = "expiresstring"; +const _fo = "fetch-owner"; +const _i = "id"; +const _im = "if-match"; +const _ims = "if-modified-since"; +const _in = "inventory"; +const _inm = "if-none-match"; +const _it = "intelligent-tiering"; +const _ius = "if-unmodified-since"; +const _km = "key-marker"; +const _l = "lifecycle"; +const _lh = "legal-hold"; +const _lm = "last-modified"; +const _lo = "location"; +const _log = "logging"; +const _lt = "list-type"; +const _m = "metrics"; +const _mT = "metadataTable"; +const _ma = "marker"; +const _mb = "max-buckets"; +const _mdb = "max-directory-buckets"; +const _me = "member"; +const _mk = "max-keys"; +const _mp = "max-parts"; +const _mu = "max-uploads"; +const _n = "notification"; +const _oC = "ownershipControls"; +const _ol = "object-lock"; +const _p = "policy"; +const _pAB = "publicAccessBlock"; +const _pN = "partNumber"; +const _pS = "policyStatus"; +const _pnm = "part-number-marker"; +const _pr = "prefix"; +const _r = "replication"; +const _rP = "requestPayment"; +const _ra = "range"; +const _rcc = "response-cache-control"; +const _rcd = "response-content-disposition"; +const _rce = "response-content-encoding"; +const _rcl = "response-content-language"; +const _rct = "response-content-type"; +const _re = "response-expires"; +const _res 
= "restore"; +const _ret = "retention"; +const _s = "session"; +const _sa = "start-after"; +const _se = "select"; +const _st = "select-type"; +const _t = "tagging"; +const _to = "torrent"; +const _u = "uploads"; +const _uI = "uploadId"; +const _uim = "upload-id-marker"; +const _v = "versioning"; +const _vI = "versionId"; +const _ve = ''; +const _ver = "versions"; +const _vim = "version-id-marker"; +const _w = "website"; +const _x = "xsi:type"; +const _xaa = "x-amz-acl"; +const _xaad = "x-amz-abort-date"; +const _xaapa = "x-amz-access-point-alias"; +const _xaari = "x-amz-abort-rule-id"; +const _xaas = "x-amz-archive-status"; +const _xabgr = "x-amz-bypass-governance-retention"; +const _xabln = "x-amz-bucket-location-name"; +const _xablt = "x-amz-bucket-location-type"; +const _xabole = "x-amz-bucket-object-lock-enabled"; +const _xabolt = "x-amz-bucket-object-lock-token"; +const _xabr = "x-amz-bucket-region"; +const _xaca = "x-amz-checksum-algorithm"; +const _xacc = "x-amz-checksum-crc32"; +const _xacc_ = "x-amz-checksum-crc32c"; +const _xacc__ = "x-amz-checksum-crc64nvme"; +const _xacm = "x-amz-checksum-mode"; +const _xacrsba = "x-amz-confirm-remove-self-bucket-access"; +const _xacs = "x-amz-checksum-sha1"; +const _xacs_ = "x-amz-checksum-sha256"; +const _xacs__ = "x-amz-copy-source"; +const _xacsim = "x-amz-copy-source-if-match"; +const _xacsims = "x-amz-copy-source-if-modified-since"; +const _xacsinm = "x-amz-copy-source-if-none-match"; +const _xacsius = "x-amz-copy-source-if-unmodified-since"; +const _xacsm = "x-amz-create-session-mode"; +const _xacsr = "x-amz-copy-source-range"; +const _xacssseca = "x-amz-copy-source-server-side-encryption-customer-algorithm"; +const _xacssseck = "x-amz-copy-source-server-side-encryption-customer-key"; +const _xacssseckm = "x-amz-copy-source-server-side-encryption-customer-key-md5"; +const _xacsvi = "x-amz-copy-source-version-id"; +const _xact = "x-amz-checksum-type"; +const _xadm = "x-amz-delete-marker"; +const _xae = "x-amz-expiration"; +const _xaebo = "x-amz-expected-bucket-owner"; +const _xafec = "x-amz-fwd-error-code"; +const _xafem = "x-amz-fwd-error-message"; +const _xafhar = "x-amz-fwd-header-accept-ranges"; +const _xafhcc = "x-amz-fwd-header-cache-control"; +const _xafhcd = "x-amz-fwd-header-content-disposition"; +const _xafhce = "x-amz-fwd-header-content-encoding"; +const _xafhcl = "x-amz-fwd-header-content-language"; +const _xafhcr = "x-amz-fwd-header-content-range"; +const _xafhct = "x-amz-fwd-header-content-type"; +const _xafhe = "x-amz-fwd-header-etag"; +const _xafhe_ = "x-amz-fwd-header-expires"; +const _xafhlm = "x-amz-fwd-header-last-modified"; +const _xafhxacc = "x-amz-fwd-header-x-amz-checksum-crc32"; +const _xafhxacc_ = "x-amz-fwd-header-x-amz-checksum-crc32c"; +const _xafhxacc__ = "x-amz-fwd-header-x-amz-checksum-crc64nvme"; +const _xafhxacs = "x-amz-fwd-header-x-amz-checksum-sha1"; +const _xafhxacs_ = "x-amz-fwd-header-x-amz-checksum-sha256"; +const _xafhxadm = "x-amz-fwd-header-x-amz-delete-marker"; +const _xafhxae = "x-amz-fwd-header-x-amz-expiration"; +const _xafhxamm = "x-amz-fwd-header-x-amz-missing-meta"; +const _xafhxampc = "x-amz-fwd-header-x-amz-mp-parts-count"; +const _xafhxaollh = "x-amz-fwd-header-x-amz-object-lock-legal-hold"; +const _xafhxaolm = "x-amz-fwd-header-x-amz-object-lock-mode"; +const _xafhxaolrud = "x-amz-fwd-header-x-amz-object-lock-retain-until-date"; +const _xafhxar = "x-amz-fwd-header-x-amz-restore"; +const _xafhxarc = "x-amz-fwd-header-x-amz-request-charged"; +const _xafhxars = 
"x-amz-fwd-header-x-amz-replication-status"; +const _xafhxasc = "x-amz-fwd-header-x-amz-storage-class"; +const _xafhxasse = "x-amz-fwd-header-x-amz-server-side-encryption"; +const _xafhxasseakki = "x-amz-fwd-header-x-amz-server-side-encryption-aws-kms-key-id"; +const _xafhxassebke = "x-amz-fwd-header-x-amz-server-side-encryption-bucket-key-enabled"; +const _xafhxasseca = "x-amz-fwd-header-x-amz-server-side-encryption-customer-algorithm"; +const _xafhxasseckm = "x-amz-fwd-header-x-amz-server-side-encryption-customer-key-md5"; +const _xafhxatc = "x-amz-fwd-header-x-amz-tagging-count"; +const _xafhxavi = "x-amz-fwd-header-x-amz-version-id"; +const _xafs = "x-amz-fwd-status"; +const _xagfc = "x-amz-grant-full-control"; +const _xagr = "x-amz-grant-read"; +const _xagra = "x-amz-grant-read-acp"; +const _xagw = "x-amz-grant-write"; +const _xagwa = "x-amz-grant-write-acp"; +const _xaimit = "x-amz-if-match-initiated-time"; +const _xaimlmt = "x-amz-if-match-last-modified-time"; +const _xaims = "x-amz-if-match-size"; +const _xam = "x-amz-mfa"; +const _xamd = "x-amz-metadata-directive"; +const _xamm = "x-amz-missing-meta"; +const _xamos = "x-amz-mp-object-size"; +const _xamp = "x-amz-max-parts"; +const _xampc = "x-amz-mp-parts-count"; +const _xaoa = "x-amz-object-attributes"; +const _xaollh = "x-amz-object-lock-legal-hold"; +const _xaolm = "x-amz-object-lock-mode"; +const _xaolrud = "x-amz-object-lock-retain-until-date"; +const _xaoo = "x-amz-object-ownership"; +const _xaooa = "x-amz-optional-object-attributes"; +const _xaos = "x-amz-object-size"; +const _xapnm = "x-amz-part-number-marker"; +const _xar = "x-amz-restore"; +const _xarc = "x-amz-request-charged"; +const _xarop = "x-amz-restore-output-path"; +const _xarp = "x-amz-request-payer"; +const _xarr = "x-amz-request-route"; +const _xars = "x-amz-replication-status"; +const _xart = "x-amz-request-token"; +const _xasc = "x-amz-storage-class"; +const _xasca = "x-amz-sdk-checksum-algorithm"; +const _xasdv = "x-amz-skip-destination-validation"; +const _xasebo = "x-amz-source-expected-bucket-owner"; +const _xasse = "x-amz-server-side-encryption"; +const _xasseakki = "x-amz-server-side-encryption-aws-kms-key-id"; +const _xassebke = "x-amz-server-side-encryption-bucket-key-enabled"; +const _xassec = "x-amz-server-side-encryption-context"; +const _xasseca = "x-amz-server-side-encryption-customer-algorithm"; +const _xasseck = "x-amz-server-side-encryption-customer-key"; +const _xasseckm = "x-amz-server-side-encryption-customer-key-md5"; +const _xat = "x-amz-tagging"; +const _xatc = "x-amz-tagging-count"; +const _xatd = "x-amz-tagging-directive"; +const _xatdmos = "x-amz-transition-default-minimum-object-size"; +const _xavi = "x-amz-version-id"; +const _xawob = "x-amz-write-offset-bytes"; +const _xawrl = "x-amz-website-redirect-location"; +const _xi = "x-id"; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/runtimeConfig.browser.js b/node_modules/@aws-sdk/client-s3/dist-es/runtimeConfig.browser.js new file mode 100644 index 00000000..93c7d48c --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/runtimeConfig.browser.js @@ -0,0 +1,42 @@ +import packageInfo from "../package.json"; +import { Sha1 } from "@aws-crypto/sha1-browser"; +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-browser"; +import { DEFAULT_USE_DUALSTACK_ENDPOINT, DEFAULT_USE_FIPS_ENDPOINT } from "@smithy/config-resolver"; +import { eventStreamSerdeProvider } from "@smithy/eventstream-serde-browser"; 
+import { FetchHttpHandler as RequestHandler, streamCollector } from "@smithy/fetch-http-handler"; +import { blobHasher as streamHasher } from "@smithy/hash-blob-browser"; +import { invalidProvider } from "@smithy/invalid-dependency"; +import { Md5 } from "@smithy/md5-js"; +import { calculateBodyLength } from "@smithy/util-body-length-browser"; +import { DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-browser"; +export const getRuntimeConfig = (config) => { + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? ((_) => () => Promise.reject(new Error("Credential is missing"))), + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + eventStreamSerdeProvider: config?.eventStreamSerdeProvider ?? eventStreamSerdeProvider, + maxAttempts: config?.maxAttempts ?? DEFAULT_MAX_ATTEMPTS, + md5: config?.md5 ?? Md5, + region: config?.region ?? invalidProvider("Region is missing"), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE), + sha1: config?.sha1 ?? Sha1, + sha256: config?.sha256 ?? Sha256, + streamCollector: config?.streamCollector ?? streamCollector, + streamHasher: config?.streamHasher ?? streamHasher, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? 
(() => Promise.resolve(DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/runtimeConfig.js b/node_modules/@aws-sdk/client-s3/dist-es/runtimeConfig.js new file mode 100644 index 00000000..5f523e1d --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/runtimeConfig.js @@ -0,0 +1,65 @@ +import packageInfo from "../package.json"; +import { NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, NODE_SIGV4A_CONFIG_OPTIONS, emitWarningIfUnsupportedVersion as awsCheckVersion, } from "@aws-sdk/core"; +import { defaultProvider as credentialDefaultProvider } from "@aws-sdk/credential-provider-node"; +import { NODE_USE_ARN_REGION_CONFIG_OPTIONS } from "@aws-sdk/middleware-bucket-endpoint"; +import { NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS, NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS, } from "@aws-sdk/middleware-flexible-checksums"; +import { NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_OPTIONS } from "@aws-sdk/middleware-sdk-s3"; +import { NODE_APP_ID_CONFIG_OPTIONS, createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-node"; +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS, NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, } from "@smithy/config-resolver"; +import { eventStreamSerdeProvider } from "@smithy/eventstream-serde-node"; +import { Hash } from "@smithy/hash-node"; +import { readableStreamHasher as streamHasher } from "@smithy/hash-stream-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS, NODE_RETRY_MODE_CONFIG_OPTIONS } from "@smithy/middleware-retry"; +import { loadConfig as loadNodeConfig } from "@smithy/node-config-provider"; +import { NodeHttpHandler as RequestHandler, streamCollector } from "@smithy/node-http-handler"; +import { calculateBodyLength } from "@smithy/util-body-length-node"; +import { DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-node"; +import { emitWarningIfUnsupportedVersion } from "@smithy/smithy-client"; +export const getRuntimeConfig = (config) => { + emitWarningIfUnsupportedVersion(process.version); + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + awsCheckVersion(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? loadNodeConfig(NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? credentialDefaultProvider, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + disableS3ExpressSessionAuth: config?.disableS3ExpressSessionAuth ?? + loadNodeConfig(NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_OPTIONS, profileConfig), + eventStreamSerdeProvider: config?.eventStreamSerdeProvider ?? eventStreamSerdeProvider, + maxAttempts: config?.maxAttempts ?? loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + md5: config?.md5 ?? Hash.bind(null, "md5"), + region: config?.region ?? 
+ loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, { ...NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestChecksumCalculation: config?.requestChecksumCalculation ?? + loadNodeConfig(NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS, profileConfig), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + responseChecksumValidation: config?.responseChecksumValidation ?? + loadNodeConfig(NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS, profileConfig), + retryMode: config?.retryMode ?? + loadNodeConfig({ + ...NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE, + }, config), + sha1: config?.sha1 ?? Hash.bind(null, "sha1"), + sha256: config?.sha256 ?? Hash.bind(null, "sha256"), + sigv4aSigningRegionSet: config?.sigv4aSigningRegionSet ?? loadNodeConfig(NODE_SIGV4A_CONFIG_OPTIONS, profileConfig), + streamCollector: config?.streamCollector ?? streamCollector, + streamHasher: config?.streamHasher ?? streamHasher, + useArnRegion: config?.useArnRegion ?? loadNodeConfig(NODE_USE_ARN_REGION_CONFIG_OPTIONS, profileConfig), + useDualstackEndpoint: config?.useDualstackEndpoint ?? loadNodeConfig(NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? loadNodeConfig(NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? loadNodeConfig(NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/runtimeConfig.native.js b/node_modules/@aws-sdk/client-s3/dist-es/runtimeConfig.native.js new file mode 100644 index 00000000..0b546952 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/runtimeConfig.native.js @@ -0,0 +1,11 @@ +import { Sha256 } from "@aws-crypto/sha256-js"; +import { getRuntimeConfig as getBrowserRuntimeConfig } from "./runtimeConfig.browser"; +export const getRuntimeConfig = (config) => { + const browserDefaults = getBrowserRuntimeConfig(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? Sha256, + }; +}; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/runtimeConfig.shared.js b/node_modules/@aws-sdk/client-s3/dist-es/runtimeConfig.shared.js new file mode 100644 index 00000000..118b805f --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/runtimeConfig.shared.js @@ -0,0 +1,42 @@ +import { AwsSdkSigV4ASigner, AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { SignatureV4MultiRegion } from "@aws-sdk/signature-v4-multi-region"; +import { NoOpLogger } from "@smithy/smithy-client"; +import { parseUrl } from "@smithy/url-parser"; +import { fromBase64, toBase64 } from "@smithy/util-base64"; +import { getAwsChunkedEncodingStream, sdkStreamMixin } from "@smithy/util-stream"; +import { fromUtf8, toUtf8 } from "@smithy/util-utf8"; +import { defaultS3HttpAuthSchemeProvider } from "./auth/httpAuthSchemeProvider"; +import { defaultEndpointResolver } from "./endpoint/endpointResolver"; +export const getRuntimeConfig = (config) => { + return { + apiVersion: "2006-03-01", + base64Decoder: config?.base64Decoder ?? fromBase64, + base64Encoder: config?.base64Encoder ?? toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? defaultEndpointResolver, + extensions: config?.extensions ?? [], + getAwsChunkedEncodingStream: config?.getAwsChunkedEncodingStream ?? 
getAwsChunkedEncodingStream, + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? defaultS3HttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new AwsSdkSigV4Signer(), + }, + { + schemeId: "aws.auth#sigv4a", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4a"), + signer: new AwsSdkSigV4ASigner(), + }, + ], + logger: config?.logger ?? new NoOpLogger(), + sdkStreamMixin: config?.sdkStreamMixin ?? sdkStreamMixin, + serviceId: config?.serviceId ?? "S3", + signerConstructor: config?.signerConstructor ?? SignatureV4MultiRegion, + signingEscapePath: config?.signingEscapePath ?? false, + urlParser: config?.urlParser ?? parseUrl, + useArnRegion: config?.useArnRegion ?? false, + utf8Decoder: config?.utf8Decoder ?? fromUtf8, + utf8Encoder: config?.utf8Encoder ?? toUtf8, + }; +}; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/runtimeExtensions.js b/node_modules/@aws-sdk/client-s3/dist-es/runtimeExtensions.js new file mode 100644 index 00000000..5b296950 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/runtimeExtensions.js @@ -0,0 +1,9 @@ +import { getAwsRegionExtensionConfiguration, resolveAwsRegionExtensionConfiguration, } from "@aws-sdk/region-config-resolver"; +import { getHttpHandlerExtensionConfiguration, resolveHttpHandlerRuntimeConfig } from "@smithy/protocol-http"; +import { getDefaultExtensionConfiguration, resolveDefaultRuntimeConfig } from "@smithy/smithy-client"; +import { getHttpAuthExtensionConfiguration, resolveHttpAuthRuntimeConfig } from "./auth/httpAuthExtensionConfiguration"; +export const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign(getAwsRegionExtensionConfiguration(runtimeConfig), getDefaultExtensionConfiguration(runtimeConfig), getHttpHandlerExtensionConfiguration(runtimeConfig), getHttpAuthExtensionConfiguration(runtimeConfig)); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign(runtimeConfig, resolveAwsRegionExtensionConfiguration(extensionConfiguration), resolveDefaultRuntimeConfig(extensionConfiguration), resolveHttpHandlerRuntimeConfig(extensionConfiguration), resolveHttpAuthRuntimeConfig(extensionConfiguration)); +}; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/waiters/index.js b/node_modules/@aws-sdk/client-s3/dist-es/waiters/index.js new file mode 100644 index 00000000..a139674b --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/waiters/index.js @@ -0,0 +1,4 @@ +export * from "./waitForBucketExists"; +export * from "./waitForBucketNotExists"; +export * from "./waitForObjectExists"; +export * from "./waitForObjectNotExists"; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/waiters/waitForBucketExists.js b/node_modules/@aws-sdk/client-s3/dist-es/waiters/waitForBucketExists.js new file mode 100644 index 00000000..0b10d582 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/waiters/waitForBucketExists.js @@ -0,0 +1,26 @@ +import { checkExceptions, createWaiter, WaiterState } from "@smithy/util-waiter"; +import { HeadBucketCommand } from "../commands/HeadBucketCommand"; +const checkState = async (client, input) => { + let reason; + try { + const result = await client.send(new HeadBucketCommand(input)); + reason = result; + return { state: WaiterState.SUCCESS, reason }; + } + catch (exception) { + reason = exception; + if (exception.name && exception.name == 
"NotFound") { + return { state: WaiterState.RETRY, reason }; + } + } + return { state: WaiterState.RETRY, reason }; +}; +export const waitForBucketExists = async (params, input) => { + const serviceDefaults = { minDelay: 5, maxDelay: 120 }; + return createWaiter({ ...serviceDefaults, ...params }, input, checkState); +}; +export const waitUntilBucketExists = async (params, input) => { + const serviceDefaults = { minDelay: 5, maxDelay: 120 }; + const result = await createWaiter({ ...serviceDefaults, ...params }, input, checkState); + return checkExceptions(result); +}; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/waiters/waitForBucketNotExists.js b/node_modules/@aws-sdk/client-s3/dist-es/waiters/waitForBucketNotExists.js new file mode 100644 index 00000000..382e0b72 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/waiters/waitForBucketNotExists.js @@ -0,0 +1,25 @@ +import { checkExceptions, createWaiter, WaiterState } from "@smithy/util-waiter"; +import { HeadBucketCommand } from "../commands/HeadBucketCommand"; +const checkState = async (client, input) => { + let reason; + try { + const result = await client.send(new HeadBucketCommand(input)); + reason = result; + } + catch (exception) { + reason = exception; + if (exception.name && exception.name == "NotFound") { + return { state: WaiterState.SUCCESS, reason }; + } + } + return { state: WaiterState.RETRY, reason }; +}; +export const waitForBucketNotExists = async (params, input) => { + const serviceDefaults = { minDelay: 5, maxDelay: 120 }; + return createWaiter({ ...serviceDefaults, ...params }, input, checkState); +}; +export const waitUntilBucketNotExists = async (params, input) => { + const serviceDefaults = { minDelay: 5, maxDelay: 120 }; + const result = await createWaiter({ ...serviceDefaults, ...params }, input, checkState); + return checkExceptions(result); +}; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/waiters/waitForObjectExists.js b/node_modules/@aws-sdk/client-s3/dist-es/waiters/waitForObjectExists.js new file mode 100644 index 00000000..2401020e --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/waiters/waitForObjectExists.js @@ -0,0 +1,26 @@ +import { checkExceptions, createWaiter, WaiterState } from "@smithy/util-waiter"; +import { HeadObjectCommand } from "../commands/HeadObjectCommand"; +const checkState = async (client, input) => { + let reason; + try { + const result = await client.send(new HeadObjectCommand(input)); + reason = result; + return { state: WaiterState.SUCCESS, reason }; + } + catch (exception) { + reason = exception; + if (exception.name && exception.name == "NotFound") { + return { state: WaiterState.RETRY, reason }; + } + } + return { state: WaiterState.RETRY, reason }; +}; +export const waitForObjectExists = async (params, input) => { + const serviceDefaults = { minDelay: 5, maxDelay: 120 }; + return createWaiter({ ...serviceDefaults, ...params }, input, checkState); +}; +export const waitUntilObjectExists = async (params, input) => { + const serviceDefaults = { minDelay: 5, maxDelay: 120 }; + const result = await createWaiter({ ...serviceDefaults, ...params }, input, checkState); + return checkExceptions(result); +}; diff --git a/node_modules/@aws-sdk/client-s3/dist-es/waiters/waitForObjectNotExists.js b/node_modules/@aws-sdk/client-s3/dist-es/waiters/waitForObjectNotExists.js new file mode 100644 index 00000000..cb415f1a --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-es/waiters/waitForObjectNotExists.js @@ -0,0 +1,25 @@ +import { checkExceptions, 
createWaiter, WaiterState } from "@smithy/util-waiter"; +import { HeadObjectCommand } from "../commands/HeadObjectCommand"; +const checkState = async (client, input) => { + let reason; + try { + const result = await client.send(new HeadObjectCommand(input)); + reason = result; + } + catch (exception) { + reason = exception; + if (exception.name && exception.name == "NotFound") { + return { state: WaiterState.SUCCESS, reason }; + } + } + return { state: WaiterState.RETRY, reason }; +}; +export const waitForObjectNotExists = async (params, input) => { + const serviceDefaults = { minDelay: 5, maxDelay: 120 }; + return createWaiter({ ...serviceDefaults, ...params }, input, checkState); +}; +export const waitUntilObjectNotExists = async (params, input) => { + const serviceDefaults = { minDelay: 5, maxDelay: 120 }; + const result = await createWaiter({ ...serviceDefaults, ...params }, input, checkState); + return checkExceptions(result); +}; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/S3.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/S3.d.ts new file mode 100644 index 00000000..1bb46ea6 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/S3.d.ts @@ -0,0 +1,698 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { AbortMultipartUploadCommandInput, AbortMultipartUploadCommandOutput } from "./commands/AbortMultipartUploadCommand"; +import { CompleteMultipartUploadCommandInput, CompleteMultipartUploadCommandOutput } from "./commands/CompleteMultipartUploadCommand"; +import { CopyObjectCommandInput, CopyObjectCommandOutput } from "./commands/CopyObjectCommand"; +import { CreateBucketCommandInput, CreateBucketCommandOutput } from "./commands/CreateBucketCommand"; +import { CreateBucketMetadataTableConfigurationCommandInput, CreateBucketMetadataTableConfigurationCommandOutput } from "./commands/CreateBucketMetadataTableConfigurationCommand"; +import { CreateMultipartUploadCommandInput, CreateMultipartUploadCommandOutput } from "./commands/CreateMultipartUploadCommand"; +import { CreateSessionCommandInput, CreateSessionCommandOutput } from "./commands/CreateSessionCommand"; +import { DeleteBucketAnalyticsConfigurationCommandInput, DeleteBucketAnalyticsConfigurationCommandOutput } from "./commands/DeleteBucketAnalyticsConfigurationCommand"; +import { DeleteBucketCommandInput, DeleteBucketCommandOutput } from "./commands/DeleteBucketCommand"; +import { DeleteBucketCorsCommandInput, DeleteBucketCorsCommandOutput } from "./commands/DeleteBucketCorsCommand"; +import { DeleteBucketEncryptionCommandInput, DeleteBucketEncryptionCommandOutput } from "./commands/DeleteBucketEncryptionCommand"; +import { DeleteBucketIntelligentTieringConfigurationCommandInput, DeleteBucketIntelligentTieringConfigurationCommandOutput } from "./commands/DeleteBucketIntelligentTieringConfigurationCommand"; +import { DeleteBucketInventoryConfigurationCommandInput, DeleteBucketInventoryConfigurationCommandOutput } from "./commands/DeleteBucketInventoryConfigurationCommand"; +import { DeleteBucketLifecycleCommandInput, DeleteBucketLifecycleCommandOutput } from "./commands/DeleteBucketLifecycleCommand"; +import { DeleteBucketMetadataTableConfigurationCommandInput, DeleteBucketMetadataTableConfigurationCommandOutput } from "./commands/DeleteBucketMetadataTableConfigurationCommand"; +import { DeleteBucketMetricsConfigurationCommandInput, DeleteBucketMetricsConfigurationCommandOutput } from "./commands/DeleteBucketMetricsConfigurationCommand"; +import { 
DeleteBucketOwnershipControlsCommandInput, DeleteBucketOwnershipControlsCommandOutput } from "./commands/DeleteBucketOwnershipControlsCommand"; +import { DeleteBucketPolicyCommandInput, DeleteBucketPolicyCommandOutput } from "./commands/DeleteBucketPolicyCommand"; +import { DeleteBucketReplicationCommandInput, DeleteBucketReplicationCommandOutput } from "./commands/DeleteBucketReplicationCommand"; +import { DeleteBucketTaggingCommandInput, DeleteBucketTaggingCommandOutput } from "./commands/DeleteBucketTaggingCommand"; +import { DeleteBucketWebsiteCommandInput, DeleteBucketWebsiteCommandOutput } from "./commands/DeleteBucketWebsiteCommand"; +import { DeleteObjectCommandInput, DeleteObjectCommandOutput } from "./commands/DeleteObjectCommand"; +import { DeleteObjectsCommandInput, DeleteObjectsCommandOutput } from "./commands/DeleteObjectsCommand"; +import { DeleteObjectTaggingCommandInput, DeleteObjectTaggingCommandOutput } from "./commands/DeleteObjectTaggingCommand"; +import { DeletePublicAccessBlockCommandInput, DeletePublicAccessBlockCommandOutput } from "./commands/DeletePublicAccessBlockCommand"; +import { GetBucketAccelerateConfigurationCommandInput, GetBucketAccelerateConfigurationCommandOutput } from "./commands/GetBucketAccelerateConfigurationCommand"; +import { GetBucketAclCommandInput, GetBucketAclCommandOutput } from "./commands/GetBucketAclCommand"; +import { GetBucketAnalyticsConfigurationCommandInput, GetBucketAnalyticsConfigurationCommandOutput } from "./commands/GetBucketAnalyticsConfigurationCommand"; +import { GetBucketCorsCommandInput, GetBucketCorsCommandOutput } from "./commands/GetBucketCorsCommand"; +import { GetBucketEncryptionCommandInput, GetBucketEncryptionCommandOutput } from "./commands/GetBucketEncryptionCommand"; +import { GetBucketIntelligentTieringConfigurationCommandInput, GetBucketIntelligentTieringConfigurationCommandOutput } from "./commands/GetBucketIntelligentTieringConfigurationCommand"; +import { GetBucketInventoryConfigurationCommandInput, GetBucketInventoryConfigurationCommandOutput } from "./commands/GetBucketInventoryConfigurationCommand"; +import { GetBucketLifecycleConfigurationCommandInput, GetBucketLifecycleConfigurationCommandOutput } from "./commands/GetBucketLifecycleConfigurationCommand"; +import { GetBucketLocationCommandInput, GetBucketLocationCommandOutput } from "./commands/GetBucketLocationCommand"; +import { GetBucketLoggingCommandInput, GetBucketLoggingCommandOutput } from "./commands/GetBucketLoggingCommand"; +import { GetBucketMetadataTableConfigurationCommandInput, GetBucketMetadataTableConfigurationCommandOutput } from "./commands/GetBucketMetadataTableConfigurationCommand"; +import { GetBucketMetricsConfigurationCommandInput, GetBucketMetricsConfigurationCommandOutput } from "./commands/GetBucketMetricsConfigurationCommand"; +import { GetBucketNotificationConfigurationCommandInput, GetBucketNotificationConfigurationCommandOutput } from "./commands/GetBucketNotificationConfigurationCommand"; +import { GetBucketOwnershipControlsCommandInput, GetBucketOwnershipControlsCommandOutput } from "./commands/GetBucketOwnershipControlsCommand"; +import { GetBucketPolicyCommandInput, GetBucketPolicyCommandOutput } from "./commands/GetBucketPolicyCommand"; +import { GetBucketPolicyStatusCommandInput, GetBucketPolicyStatusCommandOutput } from "./commands/GetBucketPolicyStatusCommand"; +import { GetBucketReplicationCommandInput, GetBucketReplicationCommandOutput } from "./commands/GetBucketReplicationCommand"; +import { 
GetBucketRequestPaymentCommandInput, GetBucketRequestPaymentCommandOutput } from "./commands/GetBucketRequestPaymentCommand"; +import { GetBucketTaggingCommandInput, GetBucketTaggingCommandOutput } from "./commands/GetBucketTaggingCommand"; +import { GetBucketVersioningCommandInput, GetBucketVersioningCommandOutput } from "./commands/GetBucketVersioningCommand"; +import { GetBucketWebsiteCommandInput, GetBucketWebsiteCommandOutput } from "./commands/GetBucketWebsiteCommand"; +import { GetObjectAclCommandInput, GetObjectAclCommandOutput } from "./commands/GetObjectAclCommand"; +import { GetObjectAttributesCommandInput, GetObjectAttributesCommandOutput } from "./commands/GetObjectAttributesCommand"; +import { GetObjectCommandInput, GetObjectCommandOutput } from "./commands/GetObjectCommand"; +import { GetObjectLegalHoldCommandInput, GetObjectLegalHoldCommandOutput } from "./commands/GetObjectLegalHoldCommand"; +import { GetObjectLockConfigurationCommandInput, GetObjectLockConfigurationCommandOutput } from "./commands/GetObjectLockConfigurationCommand"; +import { GetObjectRetentionCommandInput, GetObjectRetentionCommandOutput } from "./commands/GetObjectRetentionCommand"; +import { GetObjectTaggingCommandInput, GetObjectTaggingCommandOutput } from "./commands/GetObjectTaggingCommand"; +import { GetObjectTorrentCommandInput, GetObjectTorrentCommandOutput } from "./commands/GetObjectTorrentCommand"; +import { GetPublicAccessBlockCommandInput, GetPublicAccessBlockCommandOutput } from "./commands/GetPublicAccessBlockCommand"; +import { HeadBucketCommandInput, HeadBucketCommandOutput } from "./commands/HeadBucketCommand"; +import { HeadObjectCommandInput, HeadObjectCommandOutput } from "./commands/HeadObjectCommand"; +import { ListBucketAnalyticsConfigurationsCommandInput, ListBucketAnalyticsConfigurationsCommandOutput } from "./commands/ListBucketAnalyticsConfigurationsCommand"; +import { ListBucketIntelligentTieringConfigurationsCommandInput, ListBucketIntelligentTieringConfigurationsCommandOutput } from "./commands/ListBucketIntelligentTieringConfigurationsCommand"; +import { ListBucketInventoryConfigurationsCommandInput, ListBucketInventoryConfigurationsCommandOutput } from "./commands/ListBucketInventoryConfigurationsCommand"; +import { ListBucketMetricsConfigurationsCommandInput, ListBucketMetricsConfigurationsCommandOutput } from "./commands/ListBucketMetricsConfigurationsCommand"; +import { ListBucketsCommandInput, ListBucketsCommandOutput } from "./commands/ListBucketsCommand"; +import { ListDirectoryBucketsCommandInput, ListDirectoryBucketsCommandOutput } from "./commands/ListDirectoryBucketsCommand"; +import { ListMultipartUploadsCommandInput, ListMultipartUploadsCommandOutput } from "./commands/ListMultipartUploadsCommand"; +import { ListObjectsCommandInput, ListObjectsCommandOutput } from "./commands/ListObjectsCommand"; +import { ListObjectsV2CommandInput, ListObjectsV2CommandOutput } from "./commands/ListObjectsV2Command"; +import { ListObjectVersionsCommandInput, ListObjectVersionsCommandOutput } from "./commands/ListObjectVersionsCommand"; +import { ListPartsCommandInput, ListPartsCommandOutput } from "./commands/ListPartsCommand"; +import { PutBucketAccelerateConfigurationCommandInput, PutBucketAccelerateConfigurationCommandOutput } from "./commands/PutBucketAccelerateConfigurationCommand"; +import { PutBucketAclCommandInput, PutBucketAclCommandOutput } from "./commands/PutBucketAclCommand"; +import { PutBucketAnalyticsConfigurationCommandInput, 
PutBucketAnalyticsConfigurationCommandOutput } from "./commands/PutBucketAnalyticsConfigurationCommand"; +import { PutBucketCorsCommandInput, PutBucketCorsCommandOutput } from "./commands/PutBucketCorsCommand"; +import { PutBucketEncryptionCommandInput, PutBucketEncryptionCommandOutput } from "./commands/PutBucketEncryptionCommand"; +import { PutBucketIntelligentTieringConfigurationCommandInput, PutBucketIntelligentTieringConfigurationCommandOutput } from "./commands/PutBucketIntelligentTieringConfigurationCommand"; +import { PutBucketInventoryConfigurationCommandInput, PutBucketInventoryConfigurationCommandOutput } from "./commands/PutBucketInventoryConfigurationCommand"; +import { PutBucketLifecycleConfigurationCommandInput, PutBucketLifecycleConfigurationCommandOutput } from "./commands/PutBucketLifecycleConfigurationCommand"; +import { PutBucketLoggingCommandInput, PutBucketLoggingCommandOutput } from "./commands/PutBucketLoggingCommand"; +import { PutBucketMetricsConfigurationCommandInput, PutBucketMetricsConfigurationCommandOutput } from "./commands/PutBucketMetricsConfigurationCommand"; +import { PutBucketNotificationConfigurationCommandInput, PutBucketNotificationConfigurationCommandOutput } from "./commands/PutBucketNotificationConfigurationCommand"; +import { PutBucketOwnershipControlsCommandInput, PutBucketOwnershipControlsCommandOutput } from "./commands/PutBucketOwnershipControlsCommand"; +import { PutBucketPolicyCommandInput, PutBucketPolicyCommandOutput } from "./commands/PutBucketPolicyCommand"; +import { PutBucketReplicationCommandInput, PutBucketReplicationCommandOutput } from "./commands/PutBucketReplicationCommand"; +import { PutBucketRequestPaymentCommandInput, PutBucketRequestPaymentCommandOutput } from "./commands/PutBucketRequestPaymentCommand"; +import { PutBucketTaggingCommandInput, PutBucketTaggingCommandOutput } from "./commands/PutBucketTaggingCommand"; +import { PutBucketVersioningCommandInput, PutBucketVersioningCommandOutput } from "./commands/PutBucketVersioningCommand"; +import { PutBucketWebsiteCommandInput, PutBucketWebsiteCommandOutput } from "./commands/PutBucketWebsiteCommand"; +import { PutObjectAclCommandInput, PutObjectAclCommandOutput } from "./commands/PutObjectAclCommand"; +import { PutObjectCommandInput, PutObjectCommandOutput } from "./commands/PutObjectCommand"; +import { PutObjectLegalHoldCommandInput, PutObjectLegalHoldCommandOutput } from "./commands/PutObjectLegalHoldCommand"; +import { PutObjectLockConfigurationCommandInput, PutObjectLockConfigurationCommandOutput } from "./commands/PutObjectLockConfigurationCommand"; +import { PutObjectRetentionCommandInput, PutObjectRetentionCommandOutput } from "./commands/PutObjectRetentionCommand"; +import { PutObjectTaggingCommandInput, PutObjectTaggingCommandOutput } from "./commands/PutObjectTaggingCommand"; +import { PutPublicAccessBlockCommandInput, PutPublicAccessBlockCommandOutput } from "./commands/PutPublicAccessBlockCommand"; +import { RestoreObjectCommandInput, RestoreObjectCommandOutput } from "./commands/RestoreObjectCommand"; +import { SelectObjectContentCommandInput, SelectObjectContentCommandOutput } from "./commands/SelectObjectContentCommand"; +import { UploadPartCommandInput, UploadPartCommandOutput } from "./commands/UploadPartCommand"; +import { UploadPartCopyCommandInput, UploadPartCopyCommandOutput } from "./commands/UploadPartCopyCommand"; +import { WriteGetObjectResponseCommandInput, WriteGetObjectResponseCommandOutput } from "./commands/WriteGetObjectResponseCommand"; 
+import { S3Client } from "./S3Client";
+export interface S3 {
+    /**
+     * @see {@link AbortMultipartUploadCommand}
+     */
+    abortMultipartUpload(args: AbortMultipartUploadCommandInput, options?: __HttpHandlerOptions): Promise<AbortMultipartUploadCommandOutput>;
+    abortMultipartUpload(args: AbortMultipartUploadCommandInput, cb: (err: any, data?: AbortMultipartUploadCommandOutput) => void): void;
+    abortMultipartUpload(args: AbortMultipartUploadCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: AbortMultipartUploadCommandOutput) => void): void;
+    /**
+     * @see {@link CompleteMultipartUploadCommand}
+     */
+    completeMultipartUpload(args: CompleteMultipartUploadCommandInput, options?: __HttpHandlerOptions): Promise<CompleteMultipartUploadCommandOutput>;
+    completeMultipartUpload(args: CompleteMultipartUploadCommandInput, cb: (err: any, data?: CompleteMultipartUploadCommandOutput) => void): void;
+    completeMultipartUpload(args: CompleteMultipartUploadCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: CompleteMultipartUploadCommandOutput) => void): void;
+    /**
+     * @see {@link CopyObjectCommand}
+     */
+    copyObject(args: CopyObjectCommandInput, options?: __HttpHandlerOptions): Promise<CopyObjectCommandOutput>;
+    copyObject(args: CopyObjectCommandInput, cb: (err: any, data?: CopyObjectCommandOutput) => void): void;
+    copyObject(args: CopyObjectCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: CopyObjectCommandOutput) => void): void;
+    /**
+     * @see {@link CreateBucketCommand}
+     */
+    createBucket(args: CreateBucketCommandInput, options?: __HttpHandlerOptions): Promise<CreateBucketCommandOutput>;
+    createBucket(args: CreateBucketCommandInput, cb: (err: any, data?: CreateBucketCommandOutput) => void): void;
+    createBucket(args: CreateBucketCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: CreateBucketCommandOutput) => void): void;
+    /**
+     * @see {@link CreateBucketMetadataTableConfigurationCommand}
+     */
+    createBucketMetadataTableConfiguration(args: CreateBucketMetadataTableConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<CreateBucketMetadataTableConfigurationCommandOutput>;
+    createBucketMetadataTableConfiguration(args: CreateBucketMetadataTableConfigurationCommandInput, cb: (err: any, data?: CreateBucketMetadataTableConfigurationCommandOutput) => void): void;
+    createBucketMetadataTableConfiguration(args: CreateBucketMetadataTableConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: CreateBucketMetadataTableConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link CreateMultipartUploadCommand}
+     */
+    createMultipartUpload(args: CreateMultipartUploadCommandInput, options?: __HttpHandlerOptions): Promise<CreateMultipartUploadCommandOutput>;
+    createMultipartUpload(args: CreateMultipartUploadCommandInput, cb: (err: any, data?: CreateMultipartUploadCommandOutput) => void): void;
+    createMultipartUpload(args: CreateMultipartUploadCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: CreateMultipartUploadCommandOutput) => void): void;
+    /**
+     * @see {@link CreateSessionCommand}
+     */
+    createSession(args: CreateSessionCommandInput, options?: __HttpHandlerOptions): Promise<CreateSessionCommandOutput>;
+    createSession(args: CreateSessionCommandInput, cb: (err: any, data?: CreateSessionCommandOutput) => void): void;
+    createSession(args: CreateSessionCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: CreateSessionCommandOutput) => void): void;
+    /**
+     * @see {@link DeleteBucketCommand}
+     */
+    deleteBucket(args: DeleteBucketCommandInput, options?: __HttpHandlerOptions): Promise<DeleteBucketCommandOutput>;
+    deleteBucket(args: DeleteBucketCommandInput, cb: (err: any, data?: DeleteBucketCommandOutput) => void): void;
+    deleteBucket(args: DeleteBucketCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteBucketCommandOutput) => void): void;
+    /**
+     * @see {@link DeleteBucketAnalyticsConfigurationCommand}
+     */
+    deleteBucketAnalyticsConfiguration(args: DeleteBucketAnalyticsConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<DeleteBucketAnalyticsConfigurationCommandOutput>;
+    deleteBucketAnalyticsConfiguration(args: DeleteBucketAnalyticsConfigurationCommandInput, cb: (err: any, data?: DeleteBucketAnalyticsConfigurationCommandOutput) => void): void;
+    deleteBucketAnalyticsConfiguration(args: DeleteBucketAnalyticsConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteBucketAnalyticsConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link DeleteBucketCorsCommand}
+     */
+    deleteBucketCors(args: DeleteBucketCorsCommandInput, options?: __HttpHandlerOptions): Promise<DeleteBucketCorsCommandOutput>;
+    deleteBucketCors(args: DeleteBucketCorsCommandInput, cb: (err: any, data?: DeleteBucketCorsCommandOutput) => void): void;
+    deleteBucketCors(args: DeleteBucketCorsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteBucketCorsCommandOutput) => void): void;
+    /**
+     * @see {@link DeleteBucketEncryptionCommand}
+     */
+    deleteBucketEncryption(args: DeleteBucketEncryptionCommandInput, options?: __HttpHandlerOptions): Promise<DeleteBucketEncryptionCommandOutput>;
+    deleteBucketEncryption(args: DeleteBucketEncryptionCommandInput, cb: (err: any, data?: DeleteBucketEncryptionCommandOutput) => void): void;
+    deleteBucketEncryption(args: DeleteBucketEncryptionCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteBucketEncryptionCommandOutput) => void): void;
+    /**
+     * @see {@link DeleteBucketIntelligentTieringConfigurationCommand}
+     */
+    deleteBucketIntelligentTieringConfiguration(args: DeleteBucketIntelligentTieringConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<DeleteBucketIntelligentTieringConfigurationCommandOutput>;
+    deleteBucketIntelligentTieringConfiguration(args: DeleteBucketIntelligentTieringConfigurationCommandInput, cb: (err: any, data?: DeleteBucketIntelligentTieringConfigurationCommandOutput) => void): void;
+    deleteBucketIntelligentTieringConfiguration(args: DeleteBucketIntelligentTieringConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteBucketIntelligentTieringConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link DeleteBucketInventoryConfigurationCommand}
+     */
+    deleteBucketInventoryConfiguration(args: DeleteBucketInventoryConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<DeleteBucketInventoryConfigurationCommandOutput>;
+    deleteBucketInventoryConfiguration(args: DeleteBucketInventoryConfigurationCommandInput, cb: (err: any, data?: DeleteBucketInventoryConfigurationCommandOutput) => void): void;
+    deleteBucketInventoryConfiguration(args: DeleteBucketInventoryConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteBucketInventoryConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link DeleteBucketLifecycleCommand}
+     */
+    deleteBucketLifecycle(args: DeleteBucketLifecycleCommandInput, options?: __HttpHandlerOptions): Promise<DeleteBucketLifecycleCommandOutput>;
+    deleteBucketLifecycle(args: DeleteBucketLifecycleCommandInput, cb: (err: any, data?: DeleteBucketLifecycleCommandOutput) => void): void;
+    deleteBucketLifecycle(args: DeleteBucketLifecycleCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteBucketLifecycleCommandOutput) => void): void;
+    /**
+     * @see {@link DeleteBucketMetadataTableConfigurationCommand}
+     */
+    deleteBucketMetadataTableConfiguration(args: DeleteBucketMetadataTableConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<DeleteBucketMetadataTableConfigurationCommandOutput>;
+    deleteBucketMetadataTableConfiguration(args: DeleteBucketMetadataTableConfigurationCommandInput, cb: (err: any, data?: DeleteBucketMetadataTableConfigurationCommandOutput) => void): void;
+    deleteBucketMetadataTableConfiguration(args: DeleteBucketMetadataTableConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteBucketMetadataTableConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link DeleteBucketMetricsConfigurationCommand}
+     */
+    deleteBucketMetricsConfiguration(args: DeleteBucketMetricsConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<DeleteBucketMetricsConfigurationCommandOutput>;
+    deleteBucketMetricsConfiguration(args: DeleteBucketMetricsConfigurationCommandInput, cb: (err: any, data?: DeleteBucketMetricsConfigurationCommandOutput) => void): void;
+    deleteBucketMetricsConfiguration(args: DeleteBucketMetricsConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteBucketMetricsConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link DeleteBucketOwnershipControlsCommand}
+     */
+    deleteBucketOwnershipControls(args: DeleteBucketOwnershipControlsCommandInput, options?: __HttpHandlerOptions): Promise<DeleteBucketOwnershipControlsCommandOutput>;
+    deleteBucketOwnershipControls(args: DeleteBucketOwnershipControlsCommandInput, cb: (err: any, data?: DeleteBucketOwnershipControlsCommandOutput) => void): void;
+    deleteBucketOwnershipControls(args: DeleteBucketOwnershipControlsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteBucketOwnershipControlsCommandOutput) => void): void;
+    /**
+     * @see {@link DeleteBucketPolicyCommand}
+     */
+    deleteBucketPolicy(args: DeleteBucketPolicyCommandInput, options?: __HttpHandlerOptions): Promise<DeleteBucketPolicyCommandOutput>;
+    deleteBucketPolicy(args: DeleteBucketPolicyCommandInput, cb: (err: any, data?: DeleteBucketPolicyCommandOutput) => void): void;
+    deleteBucketPolicy(args: DeleteBucketPolicyCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteBucketPolicyCommandOutput) => void): void;
+    /**
+     * @see {@link DeleteBucketReplicationCommand}
+     */
+    deleteBucketReplication(args: DeleteBucketReplicationCommandInput, options?: __HttpHandlerOptions): Promise<DeleteBucketReplicationCommandOutput>;
+    deleteBucketReplication(args: DeleteBucketReplicationCommandInput, cb: (err: any, data?: DeleteBucketReplicationCommandOutput) => void): void;
+    deleteBucketReplication(args: DeleteBucketReplicationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteBucketReplicationCommandOutput) => void): void;
+    /**
+     * @see {@link DeleteBucketTaggingCommand}
+     */
+    deleteBucketTagging(args: DeleteBucketTaggingCommandInput, options?: __HttpHandlerOptions): Promise<DeleteBucketTaggingCommandOutput>;
+    deleteBucketTagging(args: DeleteBucketTaggingCommandInput, cb: (err: any, data?: DeleteBucketTaggingCommandOutput) => void): void;
+    deleteBucketTagging(args: DeleteBucketTaggingCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteBucketTaggingCommandOutput) => void): void;
+    /**
+     * @see {@link DeleteBucketWebsiteCommand}
+     */
+    deleteBucketWebsite(args: DeleteBucketWebsiteCommandInput, options?: __HttpHandlerOptions): Promise<DeleteBucketWebsiteCommandOutput>;
+    deleteBucketWebsite(args: DeleteBucketWebsiteCommandInput, cb: (err: any, data?: DeleteBucketWebsiteCommandOutput) => void): void;
+    deleteBucketWebsite(args: DeleteBucketWebsiteCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteBucketWebsiteCommandOutput) => void): void;
+    /**
+     * @see {@link DeleteObjectCommand}
+     */
+    deleteObject(args: DeleteObjectCommandInput, options?: __HttpHandlerOptions): Promise<DeleteObjectCommandOutput>;
+    deleteObject(args: DeleteObjectCommandInput, cb: (err: any, data?: DeleteObjectCommandOutput) => void): void;
+    deleteObject(args: DeleteObjectCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteObjectCommandOutput) => void): void;
+    /**
+     * @see {@link DeleteObjectsCommand}
+     */
+    deleteObjects(args: DeleteObjectsCommandInput, options?: __HttpHandlerOptions): Promise<DeleteObjectsCommandOutput>;
+    deleteObjects(args: DeleteObjectsCommandInput, cb: (err: any, data?: DeleteObjectsCommandOutput) => void): void;
+    deleteObjects(args: DeleteObjectsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteObjectsCommandOutput) => void): void;
+    /**
+     * @see {@link DeleteObjectTaggingCommand}
+     */
+    deleteObjectTagging(args: DeleteObjectTaggingCommandInput, options?: __HttpHandlerOptions): Promise<DeleteObjectTaggingCommandOutput>;
+    deleteObjectTagging(args: DeleteObjectTaggingCommandInput, cb: (err: any, data?: DeleteObjectTaggingCommandOutput) => void): void;
+    deleteObjectTagging(args: DeleteObjectTaggingCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteObjectTaggingCommandOutput) => void): void;
+    /**
+     * @see {@link DeletePublicAccessBlockCommand}
+     */
+    deletePublicAccessBlock(args: DeletePublicAccessBlockCommandInput, options?: __HttpHandlerOptions): Promise<DeletePublicAccessBlockCommandOutput>;
+    deletePublicAccessBlock(args: DeletePublicAccessBlockCommandInput, cb: (err: any, data?: DeletePublicAccessBlockCommandOutput) => void): void;
+    deletePublicAccessBlock(args: DeletePublicAccessBlockCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeletePublicAccessBlockCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketAccelerateConfigurationCommand}
+     */
+    getBucketAccelerateConfiguration(args: GetBucketAccelerateConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketAccelerateConfigurationCommandOutput>;
+    getBucketAccelerateConfiguration(args: GetBucketAccelerateConfigurationCommandInput, cb: (err: any, data?: GetBucketAccelerateConfigurationCommandOutput) => void): void;
+    getBucketAccelerateConfiguration(args: GetBucketAccelerateConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketAccelerateConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketAclCommand}
+     */
+    getBucketAcl(args: GetBucketAclCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketAclCommandOutput>;
+    getBucketAcl(args: GetBucketAclCommandInput, cb: (err: any, data?: GetBucketAclCommandOutput) => void): void;
+    getBucketAcl(args: GetBucketAclCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketAclCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketAnalyticsConfigurationCommand}
+     */
+    getBucketAnalyticsConfiguration(args: GetBucketAnalyticsConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketAnalyticsConfigurationCommandOutput>;
+    getBucketAnalyticsConfiguration(args: GetBucketAnalyticsConfigurationCommandInput, cb: (err: any, data?: GetBucketAnalyticsConfigurationCommandOutput) => void): void;
+    getBucketAnalyticsConfiguration(args: GetBucketAnalyticsConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketAnalyticsConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketCorsCommand}
+     */
+    getBucketCors(args: GetBucketCorsCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketCorsCommandOutput>;
+    getBucketCors(args: GetBucketCorsCommandInput, cb: (err: any, data?: GetBucketCorsCommandOutput) => void): void;
+    getBucketCors(args: GetBucketCorsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketCorsCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketEncryptionCommand}
+     */
+    getBucketEncryption(args: GetBucketEncryptionCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketEncryptionCommandOutput>;
+    getBucketEncryption(args: GetBucketEncryptionCommandInput, cb: (err: any, data?: GetBucketEncryptionCommandOutput) => void): void;
+    getBucketEncryption(args: GetBucketEncryptionCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketEncryptionCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketIntelligentTieringConfigurationCommand}
+     */
+    getBucketIntelligentTieringConfiguration(args: GetBucketIntelligentTieringConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketIntelligentTieringConfigurationCommandOutput>;
+    getBucketIntelligentTieringConfiguration(args: GetBucketIntelligentTieringConfigurationCommandInput, cb: (err: any, data?: GetBucketIntelligentTieringConfigurationCommandOutput) => void): void;
+    getBucketIntelligentTieringConfiguration(args: GetBucketIntelligentTieringConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketIntelligentTieringConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketInventoryConfigurationCommand}
+     */
+    getBucketInventoryConfiguration(args: GetBucketInventoryConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketInventoryConfigurationCommandOutput>;
+    getBucketInventoryConfiguration(args: GetBucketInventoryConfigurationCommandInput, cb: (err: any, data?: GetBucketInventoryConfigurationCommandOutput) => void): void;
+    getBucketInventoryConfiguration(args: GetBucketInventoryConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketInventoryConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketLifecycleConfigurationCommand}
+     */
+    getBucketLifecycleConfiguration(args: GetBucketLifecycleConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketLifecycleConfigurationCommandOutput>;
+    getBucketLifecycleConfiguration(args: GetBucketLifecycleConfigurationCommandInput, cb: (err: any, data?: GetBucketLifecycleConfigurationCommandOutput) => void): void;
+    getBucketLifecycleConfiguration(args: GetBucketLifecycleConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketLifecycleConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketLocationCommand}
+     */
+    getBucketLocation(args: GetBucketLocationCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketLocationCommandOutput>;
+    getBucketLocation(args: GetBucketLocationCommandInput, cb: (err: any, data?: GetBucketLocationCommandOutput) => void): void;
+    getBucketLocation(args: GetBucketLocationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketLocationCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketLoggingCommand}
+     */
+    getBucketLogging(args: GetBucketLoggingCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketLoggingCommandOutput>;
+    getBucketLogging(args: GetBucketLoggingCommandInput, cb: (err: any, data?: GetBucketLoggingCommandOutput) => void): void;
+    getBucketLogging(args: GetBucketLoggingCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketLoggingCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketMetadataTableConfigurationCommand}
+     */
+    getBucketMetadataTableConfiguration(args: GetBucketMetadataTableConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketMetadataTableConfigurationCommandOutput>;
+    getBucketMetadataTableConfiguration(args: GetBucketMetadataTableConfigurationCommandInput, cb: (err: any, data?: GetBucketMetadataTableConfigurationCommandOutput) => void): void;
+    getBucketMetadataTableConfiguration(args: GetBucketMetadataTableConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketMetadataTableConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketMetricsConfigurationCommand}
+     */
+    getBucketMetricsConfiguration(args: GetBucketMetricsConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketMetricsConfigurationCommandOutput>;
+    getBucketMetricsConfiguration(args: GetBucketMetricsConfigurationCommandInput, cb: (err: any, data?: GetBucketMetricsConfigurationCommandOutput) => void): void;
+    getBucketMetricsConfiguration(args: GetBucketMetricsConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketMetricsConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketNotificationConfigurationCommand}
+     */
+    getBucketNotificationConfiguration(args: GetBucketNotificationConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketNotificationConfigurationCommandOutput>;
+    getBucketNotificationConfiguration(args: GetBucketNotificationConfigurationCommandInput, cb: (err: any, data?: GetBucketNotificationConfigurationCommandOutput) => void): void;
+    getBucketNotificationConfiguration(args: GetBucketNotificationConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketNotificationConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketOwnershipControlsCommand}
+     */
+    getBucketOwnershipControls(args: GetBucketOwnershipControlsCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketOwnershipControlsCommandOutput>;
+    getBucketOwnershipControls(args: GetBucketOwnershipControlsCommandInput, cb: (err: any, data?: GetBucketOwnershipControlsCommandOutput) => void): void;
+    getBucketOwnershipControls(args: GetBucketOwnershipControlsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketOwnershipControlsCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketPolicyCommand}
+     */
+    getBucketPolicy(args: GetBucketPolicyCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketPolicyCommandOutput>;
+    getBucketPolicy(args: GetBucketPolicyCommandInput, cb: (err: any, data?: GetBucketPolicyCommandOutput) => void): void;
+    getBucketPolicy(args: GetBucketPolicyCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketPolicyCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketPolicyStatusCommand}
+     */
+    getBucketPolicyStatus(args: GetBucketPolicyStatusCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketPolicyStatusCommandOutput>;
+    getBucketPolicyStatus(args: GetBucketPolicyStatusCommandInput, cb: (err: any, data?: GetBucketPolicyStatusCommandOutput) => void): void;
+    getBucketPolicyStatus(args: GetBucketPolicyStatusCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketPolicyStatusCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketReplicationCommand}
+     */
+    getBucketReplication(args: GetBucketReplicationCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketReplicationCommandOutput>;
+    getBucketReplication(args: GetBucketReplicationCommandInput, cb: (err: any, data?: GetBucketReplicationCommandOutput) => void): void;
+    getBucketReplication(args: GetBucketReplicationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketReplicationCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketRequestPaymentCommand}
+     */
+    getBucketRequestPayment(args: GetBucketRequestPaymentCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketRequestPaymentCommandOutput>;
+    getBucketRequestPayment(args: GetBucketRequestPaymentCommandInput, cb: (err: any, data?: GetBucketRequestPaymentCommandOutput) => void): void;
+    getBucketRequestPayment(args: GetBucketRequestPaymentCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketRequestPaymentCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketTaggingCommand}
+     */
+    getBucketTagging(args: GetBucketTaggingCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketTaggingCommandOutput>;
+    getBucketTagging(args: GetBucketTaggingCommandInput, cb: (err: any, data?: GetBucketTaggingCommandOutput) => void): void;
+    getBucketTagging(args: GetBucketTaggingCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketTaggingCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketVersioningCommand}
+     */
+    getBucketVersioning(args: GetBucketVersioningCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketVersioningCommandOutput>;
+    getBucketVersioning(args: GetBucketVersioningCommandInput, cb: (err: any, data?: GetBucketVersioningCommandOutput) => void): void;
+    getBucketVersioning(args: GetBucketVersioningCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketVersioningCommandOutput) => void): void;
+    /**
+     * @see {@link GetBucketWebsiteCommand}
+     */
+    getBucketWebsite(args: GetBucketWebsiteCommandInput, options?: __HttpHandlerOptions): Promise<GetBucketWebsiteCommandOutput>;
+    getBucketWebsite(args: GetBucketWebsiteCommandInput, cb: (err: any, data?: GetBucketWebsiteCommandOutput) => void): void;
+    getBucketWebsite(args: GetBucketWebsiteCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetBucketWebsiteCommandOutput) => void): void;
+    /**
+     * @see {@link GetObjectCommand}
+     */
+    getObject(args: GetObjectCommandInput, options?: __HttpHandlerOptions): Promise<GetObjectCommandOutput>;
+    getObject(args: GetObjectCommandInput, cb: (err: any, data?: GetObjectCommandOutput) => void): void;
+    getObject(args: GetObjectCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetObjectCommandOutput) => void): void;
+    /**
+     * @see {@link GetObjectAclCommand}
+     */
+    getObjectAcl(args: GetObjectAclCommandInput, options?: __HttpHandlerOptions): Promise<GetObjectAclCommandOutput>;
+    getObjectAcl(args: GetObjectAclCommandInput, cb: (err: any, data?: GetObjectAclCommandOutput) => void): void;
+    getObjectAcl(args: GetObjectAclCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetObjectAclCommandOutput) => void): void;
+    /**
+     * @see {@link GetObjectAttributesCommand}
+     */
+    getObjectAttributes(args: GetObjectAttributesCommandInput, options?: __HttpHandlerOptions): Promise<GetObjectAttributesCommandOutput>;
+    getObjectAttributes(args: GetObjectAttributesCommandInput, cb: (err: any, data?: GetObjectAttributesCommandOutput) => void): void;
+    getObjectAttributes(args: GetObjectAttributesCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetObjectAttributesCommandOutput) => void): void;
+    /**
+     * @see {@link GetObjectLegalHoldCommand}
+     */
+    getObjectLegalHold(args: GetObjectLegalHoldCommandInput, options?: __HttpHandlerOptions): Promise<GetObjectLegalHoldCommandOutput>;
+    getObjectLegalHold(args: GetObjectLegalHoldCommandInput, cb: (err: any, data?: GetObjectLegalHoldCommandOutput) => void): void;
+    getObjectLegalHold(args: GetObjectLegalHoldCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetObjectLegalHoldCommandOutput) => void): void;
+    /**
+     * @see {@link GetObjectLockConfigurationCommand}
+     */
+    getObjectLockConfiguration(args: GetObjectLockConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<GetObjectLockConfigurationCommandOutput>;
+    getObjectLockConfiguration(args: GetObjectLockConfigurationCommandInput, cb: (err: any, data?: GetObjectLockConfigurationCommandOutput) => void): void;
+    getObjectLockConfiguration(args: GetObjectLockConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetObjectLockConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link GetObjectRetentionCommand}
+     */
+    getObjectRetention(args: GetObjectRetentionCommandInput, options?: __HttpHandlerOptions): Promise<GetObjectRetentionCommandOutput>;
+    getObjectRetention(args: GetObjectRetentionCommandInput, cb: (err: any, data?: GetObjectRetentionCommandOutput) => void): void;
+    getObjectRetention(args: GetObjectRetentionCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetObjectRetentionCommandOutput) => void): void;
+    /**
+     * @see {@link GetObjectTaggingCommand}
+     */
+    getObjectTagging(args: GetObjectTaggingCommandInput, options?: __HttpHandlerOptions): Promise<GetObjectTaggingCommandOutput>;
+    getObjectTagging(args: GetObjectTaggingCommandInput, cb: (err: any, data?: GetObjectTaggingCommandOutput) => void): void;
+    getObjectTagging(args: GetObjectTaggingCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetObjectTaggingCommandOutput) => void): void;
+    /**
+     * @see {@link GetObjectTorrentCommand}
+     */
+    getObjectTorrent(args: GetObjectTorrentCommandInput, options?: __HttpHandlerOptions): Promise<GetObjectTorrentCommandOutput>;
+    getObjectTorrent(args: GetObjectTorrentCommandInput, cb: (err: any, data?: GetObjectTorrentCommandOutput) => void): void;
+    getObjectTorrent(args: GetObjectTorrentCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetObjectTorrentCommandOutput) => void): void;
+    /**
+     * @see {@link GetPublicAccessBlockCommand}
+     */
+    getPublicAccessBlock(args: GetPublicAccessBlockCommandInput, options?: __HttpHandlerOptions): Promise<GetPublicAccessBlockCommandOutput>;
+    getPublicAccessBlock(args: GetPublicAccessBlockCommandInput, cb: (err: any, data?: GetPublicAccessBlockCommandOutput) => void): void;
+    getPublicAccessBlock(args: GetPublicAccessBlockCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetPublicAccessBlockCommandOutput) => void): void;
+    /**
+     * @see {@link HeadBucketCommand}
+     */
+    headBucket(args: HeadBucketCommandInput, options?: __HttpHandlerOptions): Promise<HeadBucketCommandOutput>;
+    headBucket(args: HeadBucketCommandInput, cb: (err: any, data?: HeadBucketCommandOutput) => void): void;
+    headBucket(args: HeadBucketCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: HeadBucketCommandOutput) => void): void;
+    /**
+     * @see {@link HeadObjectCommand}
+     */
+    headObject(args: HeadObjectCommandInput, options?: __HttpHandlerOptions): Promise<HeadObjectCommandOutput>;
+    headObject(args: HeadObjectCommandInput, cb: (err: any, data?: HeadObjectCommandOutput) => void): void;
+    headObject(args: HeadObjectCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: HeadObjectCommandOutput) => void): void;
+    /**
+     * @see {@link ListBucketAnalyticsConfigurationsCommand}
+     */
+    listBucketAnalyticsConfigurations(args: ListBucketAnalyticsConfigurationsCommandInput, options?: __HttpHandlerOptions): Promise<ListBucketAnalyticsConfigurationsCommandOutput>;
+    listBucketAnalyticsConfigurations(args: ListBucketAnalyticsConfigurationsCommandInput, cb: (err: any, data?: ListBucketAnalyticsConfigurationsCommandOutput) => void): void;
+    listBucketAnalyticsConfigurations(args: ListBucketAnalyticsConfigurationsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListBucketAnalyticsConfigurationsCommandOutput) => void): void;
+    /**
+     * @see {@link ListBucketIntelligentTieringConfigurationsCommand}
+     */
+    listBucketIntelligentTieringConfigurations(args: ListBucketIntelligentTieringConfigurationsCommandInput, options?: __HttpHandlerOptions): Promise<ListBucketIntelligentTieringConfigurationsCommandOutput>;
+    listBucketIntelligentTieringConfigurations(args: ListBucketIntelligentTieringConfigurationsCommandInput, cb: (err: any, data?: ListBucketIntelligentTieringConfigurationsCommandOutput) => void): void;
+    listBucketIntelligentTieringConfigurations(args: ListBucketIntelligentTieringConfigurationsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListBucketIntelligentTieringConfigurationsCommandOutput) => void): void;
+    /**
+     * @see {@link ListBucketInventoryConfigurationsCommand}
+     */
+    listBucketInventoryConfigurations(args: ListBucketInventoryConfigurationsCommandInput, options?: __HttpHandlerOptions): Promise<ListBucketInventoryConfigurationsCommandOutput>;
+    listBucketInventoryConfigurations(args: ListBucketInventoryConfigurationsCommandInput, cb: (err: any, data?: ListBucketInventoryConfigurationsCommandOutput) => void): void;
+    listBucketInventoryConfigurations(args: ListBucketInventoryConfigurationsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListBucketInventoryConfigurationsCommandOutput) => void): void;
+    /**
+     * @see {@link ListBucketMetricsConfigurationsCommand}
+     */
+    listBucketMetricsConfigurations(args: ListBucketMetricsConfigurationsCommandInput, options?: __HttpHandlerOptions): Promise<ListBucketMetricsConfigurationsCommandOutput>;
+    listBucketMetricsConfigurations(args: ListBucketMetricsConfigurationsCommandInput, cb: (err: any, data?: ListBucketMetricsConfigurationsCommandOutput) => void): void;
+    listBucketMetricsConfigurations(args: ListBucketMetricsConfigurationsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListBucketMetricsConfigurationsCommandOutput) => void): void;
+    /**
+     * @see {@link ListBucketsCommand}
+     */
+    listBuckets(): Promise<ListBucketsCommandOutput>;
+    listBuckets(args: ListBucketsCommandInput, options?: __HttpHandlerOptions): Promise<ListBucketsCommandOutput>;
+    listBuckets(args: ListBucketsCommandInput, cb: (err: any, data?: ListBucketsCommandOutput) => void): void;
+    listBuckets(args: ListBucketsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListBucketsCommandOutput) => void): void;
+    /**
+     * @see {@link ListDirectoryBucketsCommand}
+     */
+    listDirectoryBuckets(): Promise<ListDirectoryBucketsCommandOutput>;
+    listDirectoryBuckets(args: ListDirectoryBucketsCommandInput, options?: __HttpHandlerOptions): Promise<ListDirectoryBucketsCommandOutput>;
+    listDirectoryBuckets(args: ListDirectoryBucketsCommandInput, cb: (err: any, data?: ListDirectoryBucketsCommandOutput) => void): void;
+    listDirectoryBuckets(args: ListDirectoryBucketsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListDirectoryBucketsCommandOutput) => void): void;
+    /**
+     * @see {@link ListMultipartUploadsCommand}
+     */
+    listMultipartUploads(args: ListMultipartUploadsCommandInput, options?: __HttpHandlerOptions): Promise<ListMultipartUploadsCommandOutput>;
+    listMultipartUploads(args: ListMultipartUploadsCommandInput, cb: (err: any, data?: ListMultipartUploadsCommandOutput) => void): void;
+    listMultipartUploads(args: ListMultipartUploadsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListMultipartUploadsCommandOutput) => void): void;
+    /**
+     * @see {@link ListObjectsCommand}
+     */
+    listObjects(args: ListObjectsCommandInput, options?: __HttpHandlerOptions): Promise<ListObjectsCommandOutput>;
+    listObjects(args: ListObjectsCommandInput, cb: (err: any, data?: ListObjectsCommandOutput) => void): void;
+    listObjects(args: ListObjectsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListObjectsCommandOutput) => void): void;
+    /**
+     * @see {@link ListObjectsV2Command}
+     */
+    listObjectsV2(args: ListObjectsV2CommandInput, options?: __HttpHandlerOptions): Promise<ListObjectsV2CommandOutput>;
+    listObjectsV2(args: ListObjectsV2CommandInput, cb: (err: any, data?: ListObjectsV2CommandOutput) => void): void;
+    listObjectsV2(args: ListObjectsV2CommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListObjectsV2CommandOutput) => void): void;
+    /**
+     * @see {@link ListObjectVersionsCommand}
+     */
+    listObjectVersions(args: ListObjectVersionsCommandInput, options?: __HttpHandlerOptions): Promise<ListObjectVersionsCommandOutput>;
+    listObjectVersions(args: ListObjectVersionsCommandInput, cb: (err: any, data?: ListObjectVersionsCommandOutput) => void): void;
+    listObjectVersions(args: ListObjectVersionsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListObjectVersionsCommandOutput) => void): void;
+    /**
+     * @see {@link ListPartsCommand}
+     */
+    listParts(args: ListPartsCommandInput, options?: __HttpHandlerOptions): Promise<ListPartsCommandOutput>;
+    listParts(args: ListPartsCommandInput, cb: (err: any, data?: ListPartsCommandOutput) => void): void;
+    listParts(args: ListPartsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListPartsCommandOutput) => void): void;
+    /**
+     * @see {@link PutBucketAccelerateConfigurationCommand}
+     */
+    putBucketAccelerateConfiguration(args: PutBucketAccelerateConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<PutBucketAccelerateConfigurationCommandOutput>;
+    putBucketAccelerateConfiguration(args: PutBucketAccelerateConfigurationCommandInput, cb: (err: any, data?: PutBucketAccelerateConfigurationCommandOutput) => void): void;
+    putBucketAccelerateConfiguration(args: PutBucketAccelerateConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutBucketAccelerateConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link PutBucketAclCommand}
+     */
+    putBucketAcl(args: PutBucketAclCommandInput, options?: __HttpHandlerOptions): Promise<PutBucketAclCommandOutput>;
+    putBucketAcl(args: PutBucketAclCommandInput, cb: (err: any, data?: PutBucketAclCommandOutput) => void): void;
+    putBucketAcl(args: PutBucketAclCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutBucketAclCommandOutput) => void): void;
+    /**
+     * @see {@link PutBucketAnalyticsConfigurationCommand}
+     */
+    putBucketAnalyticsConfiguration(args: PutBucketAnalyticsConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<PutBucketAnalyticsConfigurationCommandOutput>;
+    putBucketAnalyticsConfiguration(args: PutBucketAnalyticsConfigurationCommandInput, cb: (err: any, data?: PutBucketAnalyticsConfigurationCommandOutput) => void): void;
+    putBucketAnalyticsConfiguration(args: PutBucketAnalyticsConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutBucketAnalyticsConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link PutBucketCorsCommand}
+     */
+    putBucketCors(args: PutBucketCorsCommandInput, options?: __HttpHandlerOptions): Promise<PutBucketCorsCommandOutput>;
+    putBucketCors(args: PutBucketCorsCommandInput, cb: (err: any, data?: PutBucketCorsCommandOutput) => void): void;
+    putBucketCors(args: PutBucketCorsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutBucketCorsCommandOutput) => void): void;
+    /**
+     * @see {@link PutBucketEncryptionCommand}
+     */
+    putBucketEncryption(args: PutBucketEncryptionCommandInput, options?: __HttpHandlerOptions): Promise<PutBucketEncryptionCommandOutput>;
+    putBucketEncryption(args: PutBucketEncryptionCommandInput, cb: (err: any, data?: PutBucketEncryptionCommandOutput) => void): void;
+    putBucketEncryption(args: PutBucketEncryptionCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutBucketEncryptionCommandOutput) => void): void;
+    /**
+     * @see {@link PutBucketIntelligentTieringConfigurationCommand}
+     */
+    putBucketIntelligentTieringConfiguration(args: PutBucketIntelligentTieringConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<PutBucketIntelligentTieringConfigurationCommandOutput>;
+    putBucketIntelligentTieringConfiguration(args: PutBucketIntelligentTieringConfigurationCommandInput, cb: (err: any, data?: PutBucketIntelligentTieringConfigurationCommandOutput) => void): void;
+    putBucketIntelligentTieringConfiguration(args: PutBucketIntelligentTieringConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutBucketIntelligentTieringConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link PutBucketInventoryConfigurationCommand}
+     */
+    putBucketInventoryConfiguration(args: PutBucketInventoryConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<PutBucketInventoryConfigurationCommandOutput>;
+    putBucketInventoryConfiguration(args: PutBucketInventoryConfigurationCommandInput, cb: (err: any, data?: PutBucketInventoryConfigurationCommandOutput) => void): void;
+    putBucketInventoryConfiguration(args: PutBucketInventoryConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutBucketInventoryConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link PutBucketLifecycleConfigurationCommand}
+     */
+    putBucketLifecycleConfiguration(args: PutBucketLifecycleConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<PutBucketLifecycleConfigurationCommandOutput>;
+    putBucketLifecycleConfiguration(args: PutBucketLifecycleConfigurationCommandInput, cb: (err: any, data?: PutBucketLifecycleConfigurationCommandOutput) => void): void;
+    putBucketLifecycleConfiguration(args: PutBucketLifecycleConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutBucketLifecycleConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link PutBucketLoggingCommand}
+     */
+    putBucketLogging(args: PutBucketLoggingCommandInput, options?: __HttpHandlerOptions): Promise<PutBucketLoggingCommandOutput>;
+    putBucketLogging(args: PutBucketLoggingCommandInput, cb: (err: any, data?: PutBucketLoggingCommandOutput) => void): void;
+    putBucketLogging(args: PutBucketLoggingCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutBucketLoggingCommandOutput) => void): void;
+    /**
+     * @see {@link PutBucketMetricsConfigurationCommand}
+     */
+    putBucketMetricsConfiguration(args: PutBucketMetricsConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<PutBucketMetricsConfigurationCommandOutput>;
+    putBucketMetricsConfiguration(args: PutBucketMetricsConfigurationCommandInput, cb: (err: any, data?: PutBucketMetricsConfigurationCommandOutput) => void): void;
+    putBucketMetricsConfiguration(args: PutBucketMetricsConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutBucketMetricsConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link PutBucketNotificationConfigurationCommand}
+     */
+    putBucketNotificationConfiguration(args: PutBucketNotificationConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<PutBucketNotificationConfigurationCommandOutput>;
+    putBucketNotificationConfiguration(args: PutBucketNotificationConfigurationCommandInput, cb: (err: any, data?: PutBucketNotificationConfigurationCommandOutput) => void): void;
+    putBucketNotificationConfiguration(args: PutBucketNotificationConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutBucketNotificationConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link PutBucketOwnershipControlsCommand}
+     */
+    putBucketOwnershipControls(args: PutBucketOwnershipControlsCommandInput, options?: __HttpHandlerOptions): Promise<PutBucketOwnershipControlsCommandOutput>;
+    putBucketOwnershipControls(args: PutBucketOwnershipControlsCommandInput, cb: (err: any, data?: PutBucketOwnershipControlsCommandOutput) => void): void;
+    putBucketOwnershipControls(args: PutBucketOwnershipControlsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutBucketOwnershipControlsCommandOutput) => void): void;
+    /**
+     * @see {@link PutBucketPolicyCommand}
+     */
+    putBucketPolicy(args: PutBucketPolicyCommandInput, options?: __HttpHandlerOptions): Promise<PutBucketPolicyCommandOutput>;
+    putBucketPolicy(args: PutBucketPolicyCommandInput, cb: (err: any, data?: PutBucketPolicyCommandOutput) => void): void;
+    putBucketPolicy(args: PutBucketPolicyCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutBucketPolicyCommandOutput) => void): void;
+    /**
+     * @see {@link PutBucketReplicationCommand}
+     */
+    putBucketReplication(args: PutBucketReplicationCommandInput, options?: __HttpHandlerOptions): Promise<PutBucketReplicationCommandOutput>;
+    putBucketReplication(args: PutBucketReplicationCommandInput, cb: (err: any, data?: PutBucketReplicationCommandOutput) => void): void;
+    putBucketReplication(args: PutBucketReplicationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutBucketReplicationCommandOutput) => void): void;
+    /**
+     * @see {@link PutBucketRequestPaymentCommand}
+     */
+    putBucketRequestPayment(args: PutBucketRequestPaymentCommandInput, options?: __HttpHandlerOptions): Promise<PutBucketRequestPaymentCommandOutput>;
+    putBucketRequestPayment(args: PutBucketRequestPaymentCommandInput, cb: (err: any, data?: PutBucketRequestPaymentCommandOutput) => void): void;
+    putBucketRequestPayment(args: PutBucketRequestPaymentCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutBucketRequestPaymentCommandOutput) => void): void;
+    /**
+     * @see {@link PutBucketTaggingCommand}
+     */
+    putBucketTagging(args: PutBucketTaggingCommandInput, options?: __HttpHandlerOptions): Promise<PutBucketTaggingCommandOutput>;
+    putBucketTagging(args: PutBucketTaggingCommandInput, cb: (err: any, data?: PutBucketTaggingCommandOutput) => void): void;
+    putBucketTagging(args: PutBucketTaggingCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutBucketTaggingCommandOutput) => void): void;
+    /**
+     * @see {@link PutBucketVersioningCommand}
+     */
+    putBucketVersioning(args: PutBucketVersioningCommandInput, options?: __HttpHandlerOptions): Promise<PutBucketVersioningCommandOutput>;
+    putBucketVersioning(args: PutBucketVersioningCommandInput, cb: (err: any, data?: PutBucketVersioningCommandOutput) => void): void;
+    putBucketVersioning(args: PutBucketVersioningCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutBucketVersioningCommandOutput) => void): void;
+    /**
+     * @see {@link PutBucketWebsiteCommand}
+     */
+    putBucketWebsite(args: PutBucketWebsiteCommandInput, options?: __HttpHandlerOptions): Promise<PutBucketWebsiteCommandOutput>;
+    putBucketWebsite(args: PutBucketWebsiteCommandInput, cb: (err: any, data?: PutBucketWebsiteCommandOutput) => void): void;
+    putBucketWebsite(args: PutBucketWebsiteCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutBucketWebsiteCommandOutput) => void): void;
+    /**
+     * @see {@link PutObjectCommand}
+     */
+    putObject(args: PutObjectCommandInput, options?: __HttpHandlerOptions): Promise<PutObjectCommandOutput>;
+    putObject(args: PutObjectCommandInput, cb: (err: any, data?: PutObjectCommandOutput) => void): void;
+    putObject(args: PutObjectCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutObjectCommandOutput) => void): void;
+    /**
+     * @see {@link PutObjectAclCommand}
+     */
+    putObjectAcl(args: PutObjectAclCommandInput, options?: __HttpHandlerOptions): Promise<PutObjectAclCommandOutput>;
+    putObjectAcl(args: PutObjectAclCommandInput, cb: (err: any, data?: PutObjectAclCommandOutput) => void): void;
+    putObjectAcl(args: PutObjectAclCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutObjectAclCommandOutput) => void): void;
+    /**
+     * @see {@link PutObjectLegalHoldCommand}
+     */
+    putObjectLegalHold(args: PutObjectLegalHoldCommandInput, options?: __HttpHandlerOptions): Promise<PutObjectLegalHoldCommandOutput>;
+    putObjectLegalHold(args: PutObjectLegalHoldCommandInput, cb: (err: any, data?: PutObjectLegalHoldCommandOutput) => void): void;
+    putObjectLegalHold(args: PutObjectLegalHoldCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutObjectLegalHoldCommandOutput) => void): void;
+    /**
+     * @see {@link PutObjectLockConfigurationCommand}
+     */
+    putObjectLockConfiguration(args: PutObjectLockConfigurationCommandInput, options?: __HttpHandlerOptions): Promise<PutObjectLockConfigurationCommandOutput>;
+    putObjectLockConfiguration(args: PutObjectLockConfigurationCommandInput, cb: (err: any, data?: PutObjectLockConfigurationCommandOutput) => void): void;
+    putObjectLockConfiguration(args: PutObjectLockConfigurationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutObjectLockConfigurationCommandOutput) => void): void;
+    /**
+     * @see {@link PutObjectRetentionCommand}
+     */
+    putObjectRetention(args: PutObjectRetentionCommandInput, options?: __HttpHandlerOptions): Promise<PutObjectRetentionCommandOutput>;
+    putObjectRetention(args: PutObjectRetentionCommandInput, cb: (err: any, data?: PutObjectRetentionCommandOutput) => void): void;
+    putObjectRetention(args: PutObjectRetentionCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutObjectRetentionCommandOutput) => void): void;
+    /**
+     * @see {@link PutObjectTaggingCommand}
+     */
+    putObjectTagging(args: PutObjectTaggingCommandInput, options?: __HttpHandlerOptions): Promise<PutObjectTaggingCommandOutput>;
+    putObjectTagging(args: PutObjectTaggingCommandInput, cb: (err: any, data?: PutObjectTaggingCommandOutput) => void): void;
+    putObjectTagging(args: PutObjectTaggingCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutObjectTaggingCommandOutput) => void): void;
+    /**
+     * @see {@link PutPublicAccessBlockCommand}
+     */
+    putPublicAccessBlock(args: PutPublicAccessBlockCommandInput, options?: __HttpHandlerOptions): Promise<PutPublicAccessBlockCommandOutput>;
+    putPublicAccessBlock(args: PutPublicAccessBlockCommandInput, cb: (err: any, data?: PutPublicAccessBlockCommandOutput) => void): void;
+    putPublicAccessBlock(args: PutPublicAccessBlockCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutPublicAccessBlockCommandOutput) => void): void;
+    /**
+     * @see {@link RestoreObjectCommand}
+     */
+    restoreObject(args: RestoreObjectCommandInput, options?: __HttpHandlerOptions): Promise<RestoreObjectCommandOutput>;
+    restoreObject(args: RestoreObjectCommandInput, cb: (err: any, data?: RestoreObjectCommandOutput) => void): void;
+    restoreObject(args: RestoreObjectCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: RestoreObjectCommandOutput) => void): void;
+    /**
+     * @see {@link SelectObjectContentCommand}
+     */
+    selectObjectContent(args: SelectObjectContentCommandInput, options?: __HttpHandlerOptions): Promise<SelectObjectContentCommandOutput>;
+    selectObjectContent(args: SelectObjectContentCommandInput, cb: (err: any, data?: SelectObjectContentCommandOutput) => void): void;
+    selectObjectContent(args: SelectObjectContentCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: SelectObjectContentCommandOutput) => void): void;
+    /**
+     * @see {@link UploadPartCommand}
+     */
+    uploadPart(args: UploadPartCommandInput, options?: __HttpHandlerOptions): Promise<UploadPartCommandOutput>;
+    uploadPart(args: UploadPartCommandInput, cb: (err: any, data?: UploadPartCommandOutput) => void): void;
+    uploadPart(args: UploadPartCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UploadPartCommandOutput) => void): void;
+    /**
+     * @see {@link UploadPartCopyCommand}
+     */
+    uploadPartCopy(args: UploadPartCopyCommandInput, options?: __HttpHandlerOptions): Promise<UploadPartCopyCommandOutput>;
+    uploadPartCopy(args: UploadPartCopyCommandInput, cb: (err: any, data?: UploadPartCopyCommandOutput) => void): void;
+    uploadPartCopy(args: UploadPartCopyCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UploadPartCopyCommandOutput) => void): void;
+    /**
+     * @see {@link WriteGetObjectResponseCommand}
+     */
+    writeGetObjectResponse(args: WriteGetObjectResponseCommandInput, options?: __HttpHandlerOptions): Promise<WriteGetObjectResponseCommandOutput>;
+    writeGetObjectResponse(args: WriteGetObjectResponseCommandInput, cb: (err: any, data?: WriteGetObjectResponseCommandOutput) => void): void;
+    writeGetObjectResponse(args: WriteGetObjectResponseCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: WriteGetObjectResponseCommandOutput) => void): void;
+}
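Each operation above appears as the SDK's standard aggregated-client overload trio: promise-returning when no callback is supplied, Node-style callback otherwise. A small usage sketch; the operation, bucket, and key are illustrative only, not values from this PR:

    import { S3 } from "@aws-sdk/client-s3";

    const s3 = new S3({}); // placeholder config
    // Promise form (first overload of each trio):
    s3.headObject({ Bucket: "my-bucket", Key: "contacts.vcf" })
        .then((head) => console.log(head.ContentLength));
    // Node-style callback form (second overload):
    s3.headObject({ Bucket: "my-bucket", Key: "contacts.vcf" }, (err, data) => {
        if (err) return console.error(err.name);
        console.log(data?.ContentLength);
    });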

+/**
+ * @public
+ */
+export declare class S3 extends S3Client implements S3 {
+}
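Because this PR points the SDK at MinIO rather than AWS, the config surface declared in the S3Client typings below matters chiefly for the endpoint override. A hypothetical construction for a MinIO backend; the env var names are assumptions for illustration, not taken from this PR:

    import { S3Client } from "@aws-sdk/client-s3";

    const client = new S3Client({
        region: "us-east-1",                  // MinIO accepts any region string
        endpoint: process.env.MINIO_ENDPOINT, // assumed var, e.g. "http://localhost:9000"
        forcePathStyle: true,                 // MinIO serves buckets path-style, not vhost-style
        credentials: {
            accessKeyId: process.env.MINIO_ACCESS_KEY!,     // assumed var name
            secretAccessKey: process.env.MINIO_SECRET_KEY!, // assumed var name
        },
    });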
DeleteBucketEncryptionCommandInput, DeleteBucketEncryptionCommandOutput } from "./commands/DeleteBucketEncryptionCommand"; +import { DeleteBucketIntelligentTieringConfigurationCommandInput, DeleteBucketIntelligentTieringConfigurationCommandOutput } from "./commands/DeleteBucketIntelligentTieringConfigurationCommand"; +import { DeleteBucketInventoryConfigurationCommandInput, DeleteBucketInventoryConfigurationCommandOutput } from "./commands/DeleteBucketInventoryConfigurationCommand"; +import { DeleteBucketLifecycleCommandInput, DeleteBucketLifecycleCommandOutput } from "./commands/DeleteBucketLifecycleCommand"; +import { DeleteBucketMetadataTableConfigurationCommandInput, DeleteBucketMetadataTableConfigurationCommandOutput } from "./commands/DeleteBucketMetadataTableConfigurationCommand"; +import { DeleteBucketMetricsConfigurationCommandInput, DeleteBucketMetricsConfigurationCommandOutput } from "./commands/DeleteBucketMetricsConfigurationCommand"; +import { DeleteBucketOwnershipControlsCommandInput, DeleteBucketOwnershipControlsCommandOutput } from "./commands/DeleteBucketOwnershipControlsCommand"; +import { DeleteBucketPolicyCommandInput, DeleteBucketPolicyCommandOutput } from "./commands/DeleteBucketPolicyCommand"; +import { DeleteBucketReplicationCommandInput, DeleteBucketReplicationCommandOutput } from "./commands/DeleteBucketReplicationCommand"; +import { DeleteBucketTaggingCommandInput, DeleteBucketTaggingCommandOutput } from "./commands/DeleteBucketTaggingCommand"; +import { DeleteBucketWebsiteCommandInput, DeleteBucketWebsiteCommandOutput } from "./commands/DeleteBucketWebsiteCommand"; +import { DeleteObjectCommandInput, DeleteObjectCommandOutput } from "./commands/DeleteObjectCommand"; +import { DeleteObjectsCommandInput, DeleteObjectsCommandOutput } from "./commands/DeleteObjectsCommand"; +import { DeleteObjectTaggingCommandInput, DeleteObjectTaggingCommandOutput } from "./commands/DeleteObjectTaggingCommand"; +import { DeletePublicAccessBlockCommandInput, DeletePublicAccessBlockCommandOutput } from "./commands/DeletePublicAccessBlockCommand"; +import { GetBucketAccelerateConfigurationCommandInput, GetBucketAccelerateConfigurationCommandOutput } from "./commands/GetBucketAccelerateConfigurationCommand"; +import { GetBucketAclCommandInput, GetBucketAclCommandOutput } from "./commands/GetBucketAclCommand"; +import { GetBucketAnalyticsConfigurationCommandInput, GetBucketAnalyticsConfigurationCommandOutput } from "./commands/GetBucketAnalyticsConfigurationCommand"; +import { GetBucketCorsCommandInput, GetBucketCorsCommandOutput } from "./commands/GetBucketCorsCommand"; +import { GetBucketEncryptionCommandInput, GetBucketEncryptionCommandOutput } from "./commands/GetBucketEncryptionCommand"; +import { GetBucketIntelligentTieringConfigurationCommandInput, GetBucketIntelligentTieringConfigurationCommandOutput } from "./commands/GetBucketIntelligentTieringConfigurationCommand"; +import { GetBucketInventoryConfigurationCommandInput, GetBucketInventoryConfigurationCommandOutput } from "./commands/GetBucketInventoryConfigurationCommand"; +import { GetBucketLifecycleConfigurationCommandInput, GetBucketLifecycleConfigurationCommandOutput } from "./commands/GetBucketLifecycleConfigurationCommand"; +import { GetBucketLocationCommandInput, GetBucketLocationCommandOutput } from "./commands/GetBucketLocationCommand"; +import { GetBucketLoggingCommandInput, GetBucketLoggingCommandOutput } from "./commands/GetBucketLoggingCommand"; +import { GetBucketMetadataTableConfigurationCommandInput, 
GetBucketMetadataTableConfigurationCommandOutput } from "./commands/GetBucketMetadataTableConfigurationCommand"; +import { GetBucketMetricsConfigurationCommandInput, GetBucketMetricsConfigurationCommandOutput } from "./commands/GetBucketMetricsConfigurationCommand"; +import { GetBucketNotificationConfigurationCommandInput, GetBucketNotificationConfigurationCommandOutput } from "./commands/GetBucketNotificationConfigurationCommand"; +import { GetBucketOwnershipControlsCommandInput, GetBucketOwnershipControlsCommandOutput } from "./commands/GetBucketOwnershipControlsCommand"; +import { GetBucketPolicyCommandInput, GetBucketPolicyCommandOutput } from "./commands/GetBucketPolicyCommand"; +import { GetBucketPolicyStatusCommandInput, GetBucketPolicyStatusCommandOutput } from "./commands/GetBucketPolicyStatusCommand"; +import { GetBucketReplicationCommandInput, GetBucketReplicationCommandOutput } from "./commands/GetBucketReplicationCommand"; +import { GetBucketRequestPaymentCommandInput, GetBucketRequestPaymentCommandOutput } from "./commands/GetBucketRequestPaymentCommand"; +import { GetBucketTaggingCommandInput, GetBucketTaggingCommandOutput } from "./commands/GetBucketTaggingCommand"; +import { GetBucketVersioningCommandInput, GetBucketVersioningCommandOutput } from "./commands/GetBucketVersioningCommand"; +import { GetBucketWebsiteCommandInput, GetBucketWebsiteCommandOutput } from "./commands/GetBucketWebsiteCommand"; +import { GetObjectAclCommandInput, GetObjectAclCommandOutput } from "./commands/GetObjectAclCommand"; +import { GetObjectAttributesCommandInput, GetObjectAttributesCommandOutput } from "./commands/GetObjectAttributesCommand"; +import { GetObjectCommandInput, GetObjectCommandOutput } from "./commands/GetObjectCommand"; +import { GetObjectLegalHoldCommandInput, GetObjectLegalHoldCommandOutput } from "./commands/GetObjectLegalHoldCommand"; +import { GetObjectLockConfigurationCommandInput, GetObjectLockConfigurationCommandOutput } from "./commands/GetObjectLockConfigurationCommand"; +import { GetObjectRetentionCommandInput, GetObjectRetentionCommandOutput } from "./commands/GetObjectRetentionCommand"; +import { GetObjectTaggingCommandInput, GetObjectTaggingCommandOutput } from "./commands/GetObjectTaggingCommand"; +import { GetObjectTorrentCommandInput, GetObjectTorrentCommandOutput } from "./commands/GetObjectTorrentCommand"; +import { GetPublicAccessBlockCommandInput, GetPublicAccessBlockCommandOutput } from "./commands/GetPublicAccessBlockCommand"; +import { HeadBucketCommandInput, HeadBucketCommandOutput } from "./commands/HeadBucketCommand"; +import { HeadObjectCommandInput, HeadObjectCommandOutput } from "./commands/HeadObjectCommand"; +import { ListBucketAnalyticsConfigurationsCommandInput, ListBucketAnalyticsConfigurationsCommandOutput } from "./commands/ListBucketAnalyticsConfigurationsCommand"; +import { ListBucketIntelligentTieringConfigurationsCommandInput, ListBucketIntelligentTieringConfigurationsCommandOutput } from "./commands/ListBucketIntelligentTieringConfigurationsCommand"; +import { ListBucketInventoryConfigurationsCommandInput, ListBucketInventoryConfigurationsCommandOutput } from "./commands/ListBucketInventoryConfigurationsCommand"; +import { ListBucketMetricsConfigurationsCommandInput, ListBucketMetricsConfigurationsCommandOutput } from "./commands/ListBucketMetricsConfigurationsCommand"; +import { ListBucketsCommandInput, ListBucketsCommandOutput } from "./commands/ListBucketsCommand"; +import { ListDirectoryBucketsCommandInput, 
ListDirectoryBucketsCommandOutput } from "./commands/ListDirectoryBucketsCommand"; +import { ListMultipartUploadsCommandInput, ListMultipartUploadsCommandOutput } from "./commands/ListMultipartUploadsCommand"; +import { ListObjectsCommandInput, ListObjectsCommandOutput } from "./commands/ListObjectsCommand"; +import { ListObjectsV2CommandInput, ListObjectsV2CommandOutput } from "./commands/ListObjectsV2Command"; +import { ListObjectVersionsCommandInput, ListObjectVersionsCommandOutput } from "./commands/ListObjectVersionsCommand"; +import { ListPartsCommandInput, ListPartsCommandOutput } from "./commands/ListPartsCommand"; +import { PutBucketAccelerateConfigurationCommandInput, PutBucketAccelerateConfigurationCommandOutput } from "./commands/PutBucketAccelerateConfigurationCommand"; +import { PutBucketAclCommandInput, PutBucketAclCommandOutput } from "./commands/PutBucketAclCommand"; +import { PutBucketAnalyticsConfigurationCommandInput, PutBucketAnalyticsConfigurationCommandOutput } from "./commands/PutBucketAnalyticsConfigurationCommand"; +import { PutBucketCorsCommandInput, PutBucketCorsCommandOutput } from "./commands/PutBucketCorsCommand"; +import { PutBucketEncryptionCommandInput, PutBucketEncryptionCommandOutput } from "./commands/PutBucketEncryptionCommand"; +import { PutBucketIntelligentTieringConfigurationCommandInput, PutBucketIntelligentTieringConfigurationCommandOutput } from "./commands/PutBucketIntelligentTieringConfigurationCommand"; +import { PutBucketInventoryConfigurationCommandInput, PutBucketInventoryConfigurationCommandOutput } from "./commands/PutBucketInventoryConfigurationCommand"; +import { PutBucketLifecycleConfigurationCommandInput, PutBucketLifecycleConfigurationCommandOutput } from "./commands/PutBucketLifecycleConfigurationCommand"; +import { PutBucketLoggingCommandInput, PutBucketLoggingCommandOutput } from "./commands/PutBucketLoggingCommand"; +import { PutBucketMetricsConfigurationCommandInput, PutBucketMetricsConfigurationCommandOutput } from "./commands/PutBucketMetricsConfigurationCommand"; +import { PutBucketNotificationConfigurationCommandInput, PutBucketNotificationConfigurationCommandOutput } from "./commands/PutBucketNotificationConfigurationCommand"; +import { PutBucketOwnershipControlsCommandInput, PutBucketOwnershipControlsCommandOutput } from "./commands/PutBucketOwnershipControlsCommand"; +import { PutBucketPolicyCommandInput, PutBucketPolicyCommandOutput } from "./commands/PutBucketPolicyCommand"; +import { PutBucketReplicationCommandInput, PutBucketReplicationCommandOutput } from "./commands/PutBucketReplicationCommand"; +import { PutBucketRequestPaymentCommandInput, PutBucketRequestPaymentCommandOutput } from "./commands/PutBucketRequestPaymentCommand"; +import { PutBucketTaggingCommandInput, PutBucketTaggingCommandOutput } from "./commands/PutBucketTaggingCommand"; +import { PutBucketVersioningCommandInput, PutBucketVersioningCommandOutput } from "./commands/PutBucketVersioningCommand"; +import { PutBucketWebsiteCommandInput, PutBucketWebsiteCommandOutput } from "./commands/PutBucketWebsiteCommand"; +import { PutObjectAclCommandInput, PutObjectAclCommandOutput } from "./commands/PutObjectAclCommand"; +import { PutObjectCommandInput, PutObjectCommandOutput } from "./commands/PutObjectCommand"; +import { PutObjectLegalHoldCommandInput, PutObjectLegalHoldCommandOutput } from "./commands/PutObjectLegalHoldCommand"; +import { PutObjectLockConfigurationCommandInput, PutObjectLockConfigurationCommandOutput } from 
"./commands/PutObjectLockConfigurationCommand"; +import { PutObjectRetentionCommandInput, PutObjectRetentionCommandOutput } from "./commands/PutObjectRetentionCommand"; +import { PutObjectTaggingCommandInput, PutObjectTaggingCommandOutput } from "./commands/PutObjectTaggingCommand"; +import { PutPublicAccessBlockCommandInput, PutPublicAccessBlockCommandOutput } from "./commands/PutPublicAccessBlockCommand"; +import { RestoreObjectCommandInput, RestoreObjectCommandOutput } from "./commands/RestoreObjectCommand"; +import { SelectObjectContentCommandInput, SelectObjectContentCommandOutput } from "./commands/SelectObjectContentCommand"; +import { UploadPartCommandInput, UploadPartCommandOutput } from "./commands/UploadPartCommand"; +import { UploadPartCopyCommandInput, UploadPartCopyCommandOutput } from "./commands/UploadPartCopyCommand"; +import { WriteGetObjectResponseCommandInput, WriteGetObjectResponseCommandOutput } from "./commands/WriteGetObjectResponseCommand"; +import { ClientInputEndpointParameters, ClientResolvedEndpointParameters, EndpointParameters } from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +/** + * @public + */ +export type ServiceInputTypes = AbortMultipartUploadCommandInput | CompleteMultipartUploadCommandInput | CopyObjectCommandInput | CreateBucketCommandInput | CreateBucketMetadataTableConfigurationCommandInput | CreateMultipartUploadCommandInput | CreateSessionCommandInput | DeleteBucketAnalyticsConfigurationCommandInput | DeleteBucketCommandInput | DeleteBucketCorsCommandInput | DeleteBucketEncryptionCommandInput | DeleteBucketIntelligentTieringConfigurationCommandInput | DeleteBucketInventoryConfigurationCommandInput | DeleteBucketLifecycleCommandInput | DeleteBucketMetadataTableConfigurationCommandInput | DeleteBucketMetricsConfigurationCommandInput | DeleteBucketOwnershipControlsCommandInput | DeleteBucketPolicyCommandInput | DeleteBucketReplicationCommandInput | DeleteBucketTaggingCommandInput | DeleteBucketWebsiteCommandInput | DeleteObjectCommandInput | DeleteObjectTaggingCommandInput | DeleteObjectsCommandInput | DeletePublicAccessBlockCommandInput | GetBucketAccelerateConfigurationCommandInput | GetBucketAclCommandInput | GetBucketAnalyticsConfigurationCommandInput | GetBucketCorsCommandInput | GetBucketEncryptionCommandInput | GetBucketIntelligentTieringConfigurationCommandInput | GetBucketInventoryConfigurationCommandInput | GetBucketLifecycleConfigurationCommandInput | GetBucketLocationCommandInput | GetBucketLoggingCommandInput | GetBucketMetadataTableConfigurationCommandInput | GetBucketMetricsConfigurationCommandInput | GetBucketNotificationConfigurationCommandInput | GetBucketOwnershipControlsCommandInput | GetBucketPolicyCommandInput | GetBucketPolicyStatusCommandInput | GetBucketReplicationCommandInput | GetBucketRequestPaymentCommandInput | GetBucketTaggingCommandInput | GetBucketVersioningCommandInput | GetBucketWebsiteCommandInput | GetObjectAclCommandInput | GetObjectAttributesCommandInput | GetObjectCommandInput | GetObjectLegalHoldCommandInput | GetObjectLockConfigurationCommandInput | GetObjectRetentionCommandInput | GetObjectTaggingCommandInput | GetObjectTorrentCommandInput | GetPublicAccessBlockCommandInput | HeadBucketCommandInput | HeadObjectCommandInput | ListBucketAnalyticsConfigurationsCommandInput | ListBucketIntelligentTieringConfigurationsCommandInput | ListBucketInventoryConfigurationsCommandInput | 
ListBucketMetricsConfigurationsCommandInput | ListBucketsCommandInput | ListDirectoryBucketsCommandInput | ListMultipartUploadsCommandInput | ListObjectVersionsCommandInput | ListObjectsCommandInput | ListObjectsV2CommandInput | ListPartsCommandInput | PutBucketAccelerateConfigurationCommandInput | PutBucketAclCommandInput | PutBucketAnalyticsConfigurationCommandInput | PutBucketCorsCommandInput | PutBucketEncryptionCommandInput | PutBucketIntelligentTieringConfigurationCommandInput | PutBucketInventoryConfigurationCommandInput | PutBucketLifecycleConfigurationCommandInput | PutBucketLoggingCommandInput | PutBucketMetricsConfigurationCommandInput | PutBucketNotificationConfigurationCommandInput | PutBucketOwnershipControlsCommandInput | PutBucketPolicyCommandInput | PutBucketReplicationCommandInput | PutBucketRequestPaymentCommandInput | PutBucketTaggingCommandInput | PutBucketVersioningCommandInput | PutBucketWebsiteCommandInput | PutObjectAclCommandInput | PutObjectCommandInput | PutObjectLegalHoldCommandInput | PutObjectLockConfigurationCommandInput | PutObjectRetentionCommandInput | PutObjectTaggingCommandInput | PutPublicAccessBlockCommandInput | RestoreObjectCommandInput | SelectObjectContentCommandInput | UploadPartCommandInput | UploadPartCopyCommandInput | WriteGetObjectResponseCommandInput; +/** + * @public + */ +export type ServiceOutputTypes = AbortMultipartUploadCommandOutput | CompleteMultipartUploadCommandOutput | CopyObjectCommandOutput | CreateBucketCommandOutput | CreateBucketMetadataTableConfigurationCommandOutput | CreateMultipartUploadCommandOutput | CreateSessionCommandOutput | DeleteBucketAnalyticsConfigurationCommandOutput | DeleteBucketCommandOutput | DeleteBucketCorsCommandOutput | DeleteBucketEncryptionCommandOutput | DeleteBucketIntelligentTieringConfigurationCommandOutput | DeleteBucketInventoryConfigurationCommandOutput | DeleteBucketLifecycleCommandOutput | DeleteBucketMetadataTableConfigurationCommandOutput | DeleteBucketMetricsConfigurationCommandOutput | DeleteBucketOwnershipControlsCommandOutput | DeleteBucketPolicyCommandOutput | DeleteBucketReplicationCommandOutput | DeleteBucketTaggingCommandOutput | DeleteBucketWebsiteCommandOutput | DeleteObjectCommandOutput | DeleteObjectTaggingCommandOutput | DeleteObjectsCommandOutput | DeletePublicAccessBlockCommandOutput | GetBucketAccelerateConfigurationCommandOutput | GetBucketAclCommandOutput | GetBucketAnalyticsConfigurationCommandOutput | GetBucketCorsCommandOutput | GetBucketEncryptionCommandOutput | GetBucketIntelligentTieringConfigurationCommandOutput | GetBucketInventoryConfigurationCommandOutput | GetBucketLifecycleConfigurationCommandOutput | GetBucketLocationCommandOutput | GetBucketLoggingCommandOutput | GetBucketMetadataTableConfigurationCommandOutput | GetBucketMetricsConfigurationCommandOutput | GetBucketNotificationConfigurationCommandOutput | GetBucketOwnershipControlsCommandOutput | GetBucketPolicyCommandOutput | GetBucketPolicyStatusCommandOutput | GetBucketReplicationCommandOutput | GetBucketRequestPaymentCommandOutput | GetBucketTaggingCommandOutput | GetBucketVersioningCommandOutput | GetBucketWebsiteCommandOutput | GetObjectAclCommandOutput | GetObjectAttributesCommandOutput | GetObjectCommandOutput | GetObjectLegalHoldCommandOutput | GetObjectLockConfigurationCommandOutput | GetObjectRetentionCommandOutput | GetObjectTaggingCommandOutput | GetObjectTorrentCommandOutput | GetPublicAccessBlockCommandOutput | HeadBucketCommandOutput | HeadObjectCommandOutput | 
ListBucketAnalyticsConfigurationsCommandOutput | ListBucketIntelligentTieringConfigurationsCommandOutput | ListBucketInventoryConfigurationsCommandOutput | ListBucketMetricsConfigurationsCommandOutput | ListBucketsCommandOutput | ListDirectoryBucketsCommandOutput | ListMultipartUploadsCommandOutput | ListObjectVersionsCommandOutput | ListObjectsCommandOutput | ListObjectsV2CommandOutput | ListPartsCommandOutput | PutBucketAccelerateConfigurationCommandOutput | PutBucketAclCommandOutput | PutBucketAnalyticsConfigurationCommandOutput | PutBucketCorsCommandOutput | PutBucketEncryptionCommandOutput | PutBucketIntelligentTieringConfigurationCommandOutput | PutBucketInventoryConfigurationCommandOutput | PutBucketLifecycleConfigurationCommandOutput | PutBucketLoggingCommandOutput | PutBucketMetricsConfigurationCommandOutput | PutBucketNotificationConfigurationCommandOutput | PutBucketOwnershipControlsCommandOutput | PutBucketPolicyCommandOutput | PutBucketReplicationCommandOutput | PutBucketRequestPaymentCommandOutput | PutBucketTaggingCommandOutput | PutBucketVersioningCommandOutput | PutBucketWebsiteCommandOutput | PutObjectAclCommandOutput | PutObjectCommandOutput | PutObjectLegalHoldCommandOutput | PutObjectLockConfigurationCommandOutput | PutObjectRetentionCommandOutput | PutObjectTaggingCommandOutput | PutPublicAccessBlockCommandOutput | RestoreObjectCommandOutput | SelectObjectContentCommandOutput | UploadPartCommandOutput | UploadPartCopyCommandOutput | WriteGetObjectResponseCommandOutput; +/** + * @public + */ +export interface ClientDefaults extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use or its constructor options. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandlerUserInput; + /** + * A constructor for a class implementing the {@link @smithy/types#ChecksumConstructor} interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + * @internal + */ + sha256?: __ChecksumConstructor | __HashConstructor; + /** + * The function that will be used to convert strings into HTTP endpoints. + * @internal + */ + urlParser?: __UrlParser; + /** + * A function that can calculate the length of a request body. + * @internal + */ + bodyLengthChecker?: __BodyLengthCalculator; + /** + * A function that converts a stream into an array of bytes. + * @internal + */ + streamCollector?: __StreamCollector; + /** + * The function that will be used to convert a base64-encoded string to a byte array. + * @internal + */ + base64Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a base64-encoded string. + * @internal + */ + base64Encoder?: __Encoder; + /** + * The function that will be used to convert a UTF8-encoded string to a byte array. + * @internal + */ + utf8Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a UTF-8 encoded string. + * @internal + */ + utf8Encoder?: __Encoder; + /** + * The runtime environment. + * @internal + */ + runtime?: string; + /** + * Disable dynamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | __Provider; + /** + * Enables FIPS compatible endpoints. 
+ */ + useFipsEndpoint?: boolean | __Provider<boolean>; + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider<string>; + /** + * Setting a client profile is similar to setting a value for the + * AWS_PROFILE environment variable. Setting a profile on a client + * in code only affects the single client instance, unlike AWS_PROFILE. + * + * When set, and only for environments where an AWS configuration + * file exists, fields configurable by this file will be retrieved + * from the specified profile within that file. + * Conflicting code configuration and environment variables will + * still have higher priority. + * + * For client credential resolution that involves checking the AWS + * configuration file, the client's profile (this value) will be + * used unless a different profile is set in the credential + * provider options. + * + */ + profile?: string; + /** + * The provider populating default tracking information to be sent with `user-agent`, `x-amz-user-agent` header + * @internal + */ + defaultUserAgentProvider?: Provider<__UserAgent>; + /** + * A function that, given a hash constructor and a stream, calculates the + * hash of the streamed value. + * @internal + */ + streamHasher?: __StreamHasher<Readable> | __StreamHasher<Blob>; + /** + * A constructor for a class implementing the {@link __Checksum} interface + * that computes MD5 hashes. + * @internal + */ + md5?: __ChecksumConstructor | __HashConstructor; + /** + * A constructor for a class implementing the {@link __Checksum} interface + * that computes SHA1 hashes. + * @internal + */ + sha1?: __ChecksumConstructor | __HashConstructor; + /** + * A function that returns Readable Stream which follows aws-chunked encoding stream. + * @internal + */ + getAwsChunkedEncodingStream?: GetAwsChunkedEncodingStream; + /** + * Default credentials provider; Not available in browser runtime. + * @deprecated + * @internal + */ + credentialDefaultProvider?: (input: any) => AwsCredentialIdentityProvider; + /** + * Value for how many times a request will be made at most in case of retry. + */ + maxAttempts?: number | __Provider<number>; + /** + * Specifies which retry algorithm to use. + * @see https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-smithy-util-retry/Enum/RETRY_MODES/ + * + */ + retryMode?: string | __Provider<string>; + /** + * Optional logger for logging debug/info/warn/error. + */ + logger?: __Logger; + /** + * Optional extensions + */ + extensions?: RuntimeExtension[]; + /** + * The function that provides necessary utilities for generating and parsing event stream + */ + eventStreamSerdeProvider?: __EventStreamSerdeProvider; + /** + * The {@link @smithy/smithy-client#DefaultsMode} that will be used to determine how certain default configuration options are resolved in the SDK. + */ + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; + /** + * Whether to escape request path when signing the request. + */ + signingEscapePath?: boolean; + /** + * Whether to override the request region with the region inferred from requested resource's ARN. Defaults to false.
+ */ + useArnRegion?: boolean | Provider<boolean>; + /** + * The internal function that injects utilities to runtime-specific stream to help users consume the data + * @internal + */ + sdkStreamMixin?: __SdkStreamMixinInjector; +} +/** + * @public + */ +export type S3ClientConfigType = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & ClientDefaults & UserAgentInputConfig & FlexibleChecksumsInputConfig & RetryInputConfig & RegionInputConfig & HostHeaderInputConfig & EndpointInputConfig<EndpointParameters> & EventStreamSerdeInputConfig & HttpAuthSchemeInputConfig & S3InputConfig & ClientInputEndpointParameters; +/** + * @public + * + * The configuration interface of S3Client class constructor that set the region, credentials and other options. + */ +export interface S3ClientConfig extends S3ClientConfigType { +} +/** + * @public + */ +export type S3ClientResolvedConfigType = __SmithyResolvedConfiguration<__HttpHandlerOptions> & Required<ClientDefaults> & RuntimeExtensionsConfig & UserAgentResolvedConfig & FlexibleChecksumsResolvedConfig & RetryResolvedConfig & RegionResolvedConfig & HostHeaderResolvedConfig & EndpointResolvedConfig<EndpointParameters> & EventStreamSerdeResolvedConfig & HttpAuthSchemeResolvedConfig & S3ResolvedConfig & ClientResolvedEndpointParameters; +/** + * @public + * + * The resolved configuration interface of S3Client class. This is resolved and normalized from the {@link S3ClientConfig | constructor configuration interface}. + */ +export interface S3ClientResolvedConfig extends S3ClientResolvedConfigType { +} +/** + *
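+ * The client is configured through the S3ClientConfig interface above. A minimal
+ * hedged sketch (all values are illustrative; endpoint and forcePathStyle are shown
+ * only because they are common when pointing the client at an S3-compatible
+ * endpoint, and static credentials stand in for whatever provider the environment
+ * supplies):
+ * ```javascript
+ * import { S3Client } from "@aws-sdk/client-s3";
+ *
+ * const client = new S3Client({
+ *   region: "us-east-1",
+ *   endpoint: "http://localhost:9000",
+ *   forcePathStyle: true, // path-style addressing for non-virtual-hosted endpoints
+ *   credentials: { accessKeyId: "EXAMPLE_KEY", secretAccessKey: "EXAMPLE_SECRET" },
+ *   maxAttempts: 3, // see the retry options in ClientDefaults above
+ * });
+ * ```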

+ * @public + */ +export declare class S3Client extends __Client<__HttpHandlerOptions, ServiceInputTypes, ServiceOutputTypes, S3ClientResolvedConfig> { + /** + * The resolved configuration of S3Client class. This is resolved and normalized from the {@link S3ClientConfig | constructor configuration interface}. + */ + readonly config: S3ClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy(): void; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/auth/httpAuthExtensionConfiguration.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 00000000..2eb54d9c --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,29 @@ +import { AwsCredentialIdentity, AwsCredentialIdentityProvider, HttpAuthScheme } from "@smithy/types"; +import { S3HttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +/** + * @internal + */ +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider(httpAuthSchemeProvider: S3HttpAuthSchemeProvider): void; + httpAuthSchemeProvider(): S3HttpAuthSchemeProvider; + setCredentials(credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider): void; + credentials(): AwsCredentialIdentity | AwsCredentialIdentityProvider | undefined; +} +/** + * @internal + */ +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: S3HttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +/** + * @internal + */ +export declare const getHttpAuthExtensionConfiguration: (runtimeConfig: HttpAuthRuntimeConfig) => HttpAuthExtensionConfiguration; +/** + * @internal + */ +export declare const resolveHttpAuthRuntimeConfig: (config: HttpAuthExtensionConfiguration) => HttpAuthRuntimeConfig; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/auth/httpAuthSchemeProvider.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 00000000..8acdeb2d --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,83 @@ +import { AwsSdkSigV4AAuthInputConfig, AwsSdkSigV4AAuthResolvedConfig, AwsSdkSigV4APreviouslyResolved, AwsSdkSigV4AuthInputConfig, AwsSdkSigV4AuthResolvedConfig, AwsSdkSigV4PreviouslyResolved } from "@aws-sdk/core"; +import { HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, Provider } from "@smithy/types"; +import { EndpointParameters } from "../endpoint/EndpointParameters"; +import { S3ClientResolvedConfig } from "../S3Client"; +/** + * @internal + */ +interface _S3HttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +/** + * @internal + */ +export interface S3HttpAuthSchemeParameters extends _S3HttpAuthSchemeParameters, EndpointParameters { + region?: string; +} +/** + * @internal + */ +export interface S3HttpAuthSchemeParametersProvider extends HttpAuthSchemeParametersProvider { +} +/** + * @internal + */ +export 
declare const defaultS3HttpAuthSchemeParametersProvider: S3HttpAuthSchemeParametersProvider; +/** + * @internal + */ +export interface S3HttpAuthSchemeProvider extends HttpAuthSchemeProvider { +} +/** + * @internal + */ +export declare const defaultS3HttpAuthSchemeProvider: S3HttpAuthSchemeProvider; +/** + * @internal + */ +export interface HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig, AwsSdkSigV4AAuthInputConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + authSchemePreference?: string[] | Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + httpAuthSchemes?: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. + * @internal + */ + httpAuthSchemeProvider?: S3HttpAuthSchemeProvider; +} +/** + * @internal + */ +export interface HttpAuthSchemeResolvedConfig extends AwsSdkSigV4AuthResolvedConfig, AwsSdkSigV4AAuthResolvedConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + readonly authSchemePreference: Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + readonly httpAuthSchemes: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. + * @internal + */ + readonly httpAuthSchemeProvider: S3HttpAuthSchemeProvider; +} +/** + * @internal + */ +export declare const resolveHttpAuthSchemeConfig: (config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved & AwsSdkSigV4APreviouslyResolved) => T & HttpAuthSchemeResolvedConfig; +export {}; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/AbortMultipartUploadCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/AbortMultipartUploadCommand.d.ts new file mode 100644 index 00000000..dd65df2c --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/AbortMultipartUploadCommand.d.ts @@ -0,0 +1,182 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { AbortMultipartUploadOutput, AbortMultipartUploadRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link AbortMultipartUploadCommand}. + */ +export interface AbortMultipartUploadCommandInput extends AbortMultipartUploadRequest { +} +/** + * @public + * + * The output of {@link AbortMultipartUploadCommand}. 
+ */ +export interface AbortMultipartUploadCommandOutput extends AbortMultipartUploadOutput, __MetadataBearer { +} +declare const AbortMultipartUploadCommand_base: { + new (input: AbortMultipartUploadCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: AbortMultipartUploadCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * This operation aborts a multipart upload. After a multipart upload is aborted, no
+ * additional parts can be uploaded using that upload ID. The storage consumed by any
+ * previously uploaded parts will be freed. However, if any part uploads are currently in
+ * progress, those part uploads might or might not succeed. As a result, it might be necessary
+ * to abort a given multipart upload multiple times in order to completely free all storage
+ * consumed by all parts.
+ *
+ * To verify that all parts have been removed and prevent getting charged for the part
+ * storage, you should call the ListParts API operation and ensure that the parts list
+ * is empty (see the sketch after this description).
+ *
+ * - Directory buckets - If multipart uploads in a directory bucket are in progress, you
+ *   can't delete the bucket until all the in-progress multipart uploads are aborted or
+ *   completed. To delete these in-progress multipart uploads, use the
+ *   ListMultipartUploads operation to list the in-progress multipart uploads in the
+ *   bucket and use the AbortMultipartUpload operation to abort all the in-progress
+ *   multipart uploads.
+ * - Directory buckets - For directory buckets, you must make requests for this API
+ *   operation to the Zonal endpoint. These endpoints support virtual-hosted-style
+ *   requests in the format
+ *   https://amzn-s3-demo-bucket.s3express-zone-id.region-code.amazonaws.com/key-name.
+ *   Path-style requests are not supported. For more information about endpoints in
+ *   Availability Zones, see Regional and Zonal endpoints for directory buckets in
+ *   Availability Zones in the Amazon S3 User Guide. For more information about
+ *   endpoints in Local Zones, see Concepts for directory buckets in Local Zones in the
+ *   Amazon S3 User Guide.
+ *
+ * Permissions
+ * - General purpose bucket permissions - For information about permissions required to
+ *   use the multipart upload, see Multipart Upload and Permissions in the
+ *   Amazon S3 User Guide.
+ * - Directory bucket permissions - To grant access to this API operation on a directory
+ *   bucket, we recommend that you use the CreateSession API operation for session-based
+ *   authorization. Specifically, you grant the s3express:CreateSession permission to the
+ *   directory bucket in a bucket policy or an IAM identity-based policy. Then, you make
+ *   the CreateSession API call on the bucket to obtain a session token. With the session
+ *   token in your request header, you can make API requests to this operation. After the
+ *   session token expires, you make another CreateSession API call to generate a new
+ *   session token for use. Amazon Web Services CLI or SDKs create session and refresh
+ *   the session token automatically to avoid service interruptions when a session
+ *   expires. For more information about authorization, see CreateSession.
+ *
+ * HTTP Host header syntax
+ * - Directory buckets - The HTTP Host header syntax is
+ *   Bucket-name.s3express-zone-id.region-code.amazonaws.com.
+ *
+ * The following operations are related to AbortMultipartUpload:
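+ *
+ * As the description above suggests, a caller can confirm that an abort actually freed
+ * all part storage by listing the remaining parts. A minimal hedged sketch (the client,
+ * bucket, key, and upload ID are assumed to exist in the surrounding upload code):
+ * ```javascript
+ * import { ListPartsCommand } from "@aws-sdk/client-s3";
+ *
+ * const { Parts } = await client.send(new ListPartsCommand({
+ *   Bucket: "amzn-s3-demo-bucket",
+ *   Key: "key-name",
+ *   UploadId: uploadId,
+ * }));
+ * // An empty parts list (or a NoSuchUpload error once the upload is fully gone)
+ * // indicates no billable part storage remains; otherwise abort again.
+ * if (Parts?.length) {
+ *   // Some parts finished uploading after the abort; call AbortMultipartUpload again.
+ * }
+ * ```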

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, AbortMultipartUploadCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, AbortMultipartUploadCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // AbortMultipartUploadRequest + * Bucket: "STRING_VALUE", // required + * Key: "STRING_VALUE", // required + * UploadId: "STRING_VALUE", // required + * RequestPayer: "requester", + * ExpectedBucketOwner: "STRING_VALUE", + * IfMatchInitiatedTime: new Date("TIMESTAMP"), + * }; + * const command = new AbortMultipartUploadCommand(input); + * const response = await client.send(command); + * // { // AbortMultipartUploadOutput + * // RequestCharged: "requester", + * // }; + * + * ``` + * + * @param AbortMultipartUploadCommandInput - {@link AbortMultipartUploadCommandInput} + * @returns {@link AbortMultipartUploadCommandOutput} + * @see {@link AbortMultipartUploadCommandInput} for command's `input` shape. + * @see {@link AbortMultipartUploadCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link NoSuchUpload} (client fault) + *

The specified multipart upload does not exist.
+ *
+ * @throws {@link S3ServiceException}
+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @example To abort a multipart upload + * ```javascript + * // The following example aborts a multipart upload. + * const input = { + * Bucket: "examplebucket", + * Key: "bigobject", + * UploadId: "xadcOB_7YPBOJuoFiQ9cz4P3Pe6FIZwO4f7wN93uHsNBEw97pl5eNwzExg0LAT2dUN91cOmrEQHDsP3WA60CEg--" + * }; + * const command = new AbortMultipartUploadCommand(input); + * const response = await client.send(command); + * /* response is + * { /* empty *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class AbortMultipartUploadCommand extends AbortMultipartUploadCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: AbortMultipartUploadRequest; + output: AbortMultipartUploadOutput; + }; + sdk: { + input: AbortMultipartUploadCommandInput; + output: AbortMultipartUploadCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/CompleteMultipartUploadCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/CompleteMultipartUploadCommand.d.ts new file mode 100644 index 00000000..9e952f7f --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/CompleteMultipartUploadCommand.d.ts @@ -0,0 +1,317 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { CompleteMultipartUploadOutput, CompleteMultipartUploadRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link CompleteMultipartUploadCommand}. + */ +export interface CompleteMultipartUploadCommandInput extends CompleteMultipartUploadRequest { +} +/** + * @public + * + * The output of {@link CompleteMultipartUploadCommand}. + */ +export interface CompleteMultipartUploadCommandOutput extends CompleteMultipartUploadOutput, __MetadataBearer { +} +declare const CompleteMultipartUploadCommand_base: { + new (input: CompleteMultipartUploadCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: CompleteMultipartUploadCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * Completes a multipart upload by assembling previously uploaded parts.
+ *
+ * You first initiate the multipart upload and then upload all parts using the UploadPart
+ * operation or the UploadPartCopy operation. After successfully uploading all relevant
+ * parts of an upload, you call this CompleteMultipartUpload operation to complete the
+ * upload. Upon receiving this request, Amazon S3 concatenates all the parts in ascending
+ * order by part number to create a new object. In the CompleteMultipartUpload request,
+ * you must provide the parts list and ensure that the parts list is complete. The
+ * CompleteMultipartUpload API operation concatenates the parts that you provide in the
+ * list. For each part in the list, you must provide the PartNumber value and the ETag
+ * value that are returned after that part was uploaded.
+ *
+ * The processing of a CompleteMultipartUpload request could take several minutes to
+ * finalize. After Amazon S3 begins processing the request, it sends an HTTP response
+ * header that specifies a 200 OK response. While processing is in progress, Amazon S3
+ * periodically sends white space characters to keep the connection from timing out. A
+ * request could fail after the initial 200 OK response has been sent. This means that a
+ * 200 OK response can contain either a success or an error. The error response might be
+ * embedded in the 200 OK response. If you call this API operation directly, make sure to
+ * design your application to parse the contents of the response and handle it
+ * appropriately. If you use Amazon Web Services SDKs, SDKs handle this condition. The
+ * SDKs detect the embedded error and apply error handling per your configuration
+ * settings (including automatically retrying the request as appropriate). If the
+ * condition persists, the SDKs throw an exception (or, for the SDKs that don't use
+ * exceptions, they return an error).
+ *
+ * Note that if CompleteMultipartUpload fails, applications should be prepared to retry
+ * any failed requests (including 500 error responses). For more information, see
+ * Amazon S3 Error Best Practices.
+ *
+ * You can't use Content-Type: application/x-www-form-urlencoded for the
+ * CompleteMultipartUpload requests. Also, if you don't provide a Content-Type header,
+ * CompleteMultipartUpload can still return a 200 OK response.
+ *
+ * For more information about multipart uploads, see Uploading Objects Using Multipart
+ * Upload in the Amazon S3 User Guide.
+ *
+ * Directory buckets - For directory buckets, you must make requests for this API
+ * operation to the Zonal endpoint. These endpoints support virtual-hosted-style
+ * requests in the format
+ * https://amzn-s3-demo-bucket.s3express-zone-id.region-code.amazonaws.com/key-name.
+ * Path-style requests are not supported. For more information about endpoints in
+ * Availability Zones, see Regional and Zonal endpoints for directory buckets in
+ * Availability Zones in the Amazon S3 User Guide. For more information about endpoints
+ * in Local Zones, see Concepts for directory buckets in Local Zones in the
+ * Amazon S3 User Guide.
+ *
+ * Permissions
+ * - General purpose bucket permissions - For information about permissions required to
+ *   use the multipart upload API, see Multipart Upload and Permissions in the
+ *   Amazon S3 User Guide.
+ *   If you provide an additional checksum value in your MultipartUpload requests and
+ *   the object is encrypted with Key Management Service, you must have permission to
+ *   use the kms:Decrypt action for the CompleteMultipartUpload request to succeed.
+ * - Directory bucket permissions - To grant access to this API operation on a directory
+ *   bucket, we recommend that you use the CreateSession API operation for session-based
+ *   authorization. Specifically, you grant the s3express:CreateSession permission to the
+ *   directory bucket in a bucket policy or an IAM identity-based policy. Then, you make
+ *   the CreateSession API call on the bucket to obtain a session token. With the session
+ *   token in your request header, you can make API requests to this operation. After the
+ *   session token expires, you make another CreateSession API call to generate a new
+ *   session token for use. Amazon Web Services CLI or SDKs create session and refresh
+ *   the session token automatically to avoid service interruptions when a session
+ *   expires. For more information about authorization, see CreateSession.
+ *   If the object is encrypted with SSE-KMS, you must also have the
+ *   kms:GenerateDataKey and kms:Decrypt permissions in IAM identity-based policies and
+ *   KMS key policies for the KMS key.
+ *
+ * Special errors (see the error-handling sketch after this list)
+ * - Error Code: EntityTooSmall
+ *   - Description: Your proposed upload is smaller than the minimum allowed object
+ *     size. Each part must be at least 5 MB in size, except the last part.
+ *   - HTTP Status Code: 400 Bad Request
+ * - Error Code: InvalidPart
+ *   - Description: One or more of the specified parts could not be found. The part
+ *     might not have been uploaded, or the specified ETag might not have matched the
+ *     uploaded part's ETag.
+ *   - HTTP Status Code: 400 Bad Request
+ * - Error Code: InvalidPartOrder
+ *   - Description: The list of parts was not in ascending order. The parts list must
+ *     be specified in order by part number.
+ *   - HTTP Status Code: 400 Bad Request
+ * - Error Code: NoSuchUpload
+ *   - Description: The specified multipart upload does not exist. The upload ID might
+ *     be invalid, or the multipart upload might have been aborted or completed.
+ *   - HTTP Status Code: 404 Not Found
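+ *
+ * All of the special errors above surface through the SDK's service exception type. A
+ * minimal hedged sketch of checking for them (the client and command are assumed to be
+ * the ones constructed in the example below):
+ * ```javascript
+ * import { S3ServiceException } from "@aws-sdk/client-s3";
+ *
+ * try {
+ *   const response = await client.send(command);
+ * } catch (err) {
+ *   if (err instanceof S3ServiceException) {
+ *     // err.name carries the error code, e.g. "InvalidPart" or "NoSuchUpload".
+ *     console.error(`${err.name}: ${err.message}`);
+ *   }
+ *   throw err;
+ * }
+ * ```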
+ *
+ * HTTP Host header syntax
+ * - Directory buckets - The HTTP Host header syntax is
+ *   Bucket-name.s3express-zone-id.region-code.amazonaws.com.
+ *
+ * The following operations are related to CompleteMultipartUpload:
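+ *
+ * Because each entry in the parts list must carry the PartNumber and the ETag returned
+ * when that part was uploaded, a common pattern is to collect them as the parts go up.
+ * A minimal hedged sketch (client, Bucket, Key, UploadId, and a chunks array are
+ * assumed to exist in the surrounding upload code):
+ * ```javascript
+ * import { UploadPartCommand } from "@aws-sdk/client-s3";
+ *
+ * const completedParts = [];
+ * for (let partNumber = 1; partNumber <= chunks.length; partNumber++) {
+ *   const { ETag } = await client.send(new UploadPartCommand({
+ *     Bucket, Key, UploadId, PartNumber: partNumber, Body: chunks[partNumber - 1],
+ *   }));
+ *   completedParts.push({ ETag, PartNumber: partNumber }); // kept in ascending order
+ * }
+ * // completedParts becomes MultipartUpload.Parts in the CompleteMultipartUpload request.
+ * ```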

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, CompleteMultipartUploadCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, CompleteMultipartUploadCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // CompleteMultipartUploadRequest + * Bucket: "STRING_VALUE", // required + * Key: "STRING_VALUE", // required + * MultipartUpload: { // CompletedMultipartUpload + * Parts: [ // CompletedPartList + * { // CompletedPart + * ETag: "STRING_VALUE", + * ChecksumCRC32: "STRING_VALUE", + * ChecksumCRC32C: "STRING_VALUE", + * ChecksumCRC64NVME: "STRING_VALUE", + * ChecksumSHA1: "STRING_VALUE", + * ChecksumSHA256: "STRING_VALUE", + * PartNumber: Number("int"), + * }, + * ], + * }, + * UploadId: "STRING_VALUE", // required + * ChecksumCRC32: "STRING_VALUE", + * ChecksumCRC32C: "STRING_VALUE", + * ChecksumCRC64NVME: "STRING_VALUE", + * ChecksumSHA1: "STRING_VALUE", + * ChecksumSHA256: "STRING_VALUE", + * ChecksumType: "COMPOSITE" || "FULL_OBJECT", + * MpuObjectSize: Number("long"), + * RequestPayer: "requester", + * ExpectedBucketOwner: "STRING_VALUE", + * IfMatch: "STRING_VALUE", + * IfNoneMatch: "STRING_VALUE", + * SSECustomerAlgorithm: "STRING_VALUE", + * SSECustomerKey: "STRING_VALUE", + * SSECustomerKeyMD5: "STRING_VALUE", + * }; + * const command = new CompleteMultipartUploadCommand(input); + * const response = await client.send(command); + * // { // CompleteMultipartUploadOutput + * // Location: "STRING_VALUE", + * // Bucket: "STRING_VALUE", + * // Key: "STRING_VALUE", + * // Expiration: "STRING_VALUE", + * // ETag: "STRING_VALUE", + * // ChecksumCRC32: "STRING_VALUE", + * // ChecksumCRC32C: "STRING_VALUE", + * // ChecksumCRC64NVME: "STRING_VALUE", + * // ChecksumSHA1: "STRING_VALUE", + * // ChecksumSHA256: "STRING_VALUE", + * // ChecksumType: "COMPOSITE" || "FULL_OBJECT", + * // ServerSideEncryption: "AES256" || "aws:kms" || "aws:kms:dsse", + * // VersionId: "STRING_VALUE", + * // SSEKMSKeyId: "STRING_VALUE", + * // BucketKeyEnabled: true || false, + * // RequestCharged: "requester", + * // }; + * + * ``` + * + * @param CompleteMultipartUploadCommandInput - {@link CompleteMultipartUploadCommandInput} + * @returns {@link CompleteMultipartUploadCommandOutput} + * @see {@link CompleteMultipartUploadCommandInput} for command's `input` shape. + * @see {@link CompleteMultipartUploadCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.
+ * + * + * @example To complete multipart upload + * ```javascript + * // The following example completes a multipart upload. + * const input = { + * Bucket: "examplebucket", + * Key: "bigobject", + * MultipartUpload: { + * Parts: [ + * { + * ETag: `"d8c2eafd90c266e19ab9dcacc479f8af"`, + * PartNumber: 1 + * }, + * { + * ETag: `"d8c2eafd90c266e19ab9dcacc479f8af"`, + * PartNumber: 2 + * } + * ] + * }, + * UploadId: "7YPBOJuoFiQ9cz4P3Pe6FIZwO4f7wN93uHsNBEw97pl5eNwzExg0LAT2dUN91cOmrEQHDsP3WA60CEg--" + * }; + * const command = new CompleteMultipartUploadCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Bucket: "acexamplebucket", + * ETag: `"4d9031c7644d8081c2829f4ea23c55f7-2"`, + * Key: "bigobject", + * Location: "https://examplebucket.s3..amazonaws.com/bigobject" + * } + * *\/ + * ``` + * + * @public + */ +export declare class CompleteMultipartUploadCommand extends CompleteMultipartUploadCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: CompleteMultipartUploadRequest; + output: CompleteMultipartUploadOutput; + }; + sdk: { + input: CompleteMultipartUploadCommandInput; + output: CompleteMultipartUploadCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/CopyObjectCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/CopyObjectCommand.d.ts new file mode 100644 index 00000000..d190825f --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/CopyObjectCommand.d.ts @@ -0,0 +1,360 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { CopyObjectOutput, CopyObjectRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link CopyObjectCommand}. + */ +export interface CopyObjectCommandInput extends CopyObjectRequest { +} +/** + * @public + * + * The output of {@link CopyObjectCommand}. + */ +export interface CopyObjectCommandOutput extends CopyObjectOutput, __MetadataBearer { +} +declare const CopyObjectCommand_base: { + new (input: CopyObjectCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: CopyObjectCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Creates a copy of an object that is already stored in Amazon S3.

+ * + *

You can store individual objects of up to 5 TB in Amazon S3. You create a copy of your + * object up to 5 GB in size in a single atomic action using this API. However, to copy an + * object greater than 5 GB, you must use the multipart upload Upload Part - Copy + * (UploadPartCopy) API. For more information, see Copy Object Using the + * REST Multipart Upload API.

+ *
+ *

You can copy individual objects between general purpose buckets, between directory buckets, + * and between general purpose buckets and directory buckets.

+ * + *
    + *
  • + *

    Amazon S3 supports copy operations using Multi-Region Access Points only as a + * destination when using the Multi-Region Access Point ARN.

    + *
  • + *
  • + *

    + * Directory buckets - + * For directory buckets, you must make requests for this API operation to the Zonal endpoint. These endpoints support virtual-hosted-style requests in the format https://amzn-s3-demo-bucket.s3express-zone-id.region-code.amazonaws.com/key-name + * . Path-style requests are not supported. For more information about endpoints in Availability Zones, see Regional and Zonal endpoints for directory buckets in Availability Zones in the + * Amazon S3 User Guide. For more information about endpoints in Local Zones, see Concepts for directory buckets in Local Zones in the + * Amazon S3 User Guide.

    + *
  • + *
  • + *

    VPC endpoints don't support cross-Region requests (including copies). If you're + * using VPC endpoints, your source and destination buckets should be in the same + * Amazon Web Services Region as your VPC endpoint.

    + *
  • + *
+ *
+ *

Both the Region that you want to copy the object from and the Region that you want to + * copy the object to must be enabled for your account. For more information about how to + * enable a Region for your account, see Enable or disable a Region for standalone accounts in the Amazon Web Services + * Account Management Guide.

+ * + *

Amazon S3 transfer acceleration does not support cross-Region copies. If you request a + * cross-Region copy using a transfer acceleration endpoint, you get a 400 Bad + * Request error. For more information, see Transfer + * Acceleration.

+ *
+ *
+ *
Authentication and authorization
+ *
+ *

All CopyObject requests must be authenticated and signed by using + * IAM credentials (access key ID and secret access key for the IAM identities). + * All headers with the x-amz- prefix, including + * x-amz-copy-source, must be signed. For more information, see + * REST Authentication.

+ *

+ * Directory buckets - You must use the + * IAM credentials to authenticate and authorize your access to the + * CopyObject API operation, instead of using the temporary security + * credentials through the CreateSession API operation.

+ *

Amazon Web Services CLI or SDKs handles authentication and authorization on your + * behalf.

+ *
+ *
Permissions
+ *
+ *

You must have read access to the source object and + * write access to the destination bucket.

+ *
    + *
  • + *

    + * General purpose bucket permissions - You + * must have permissions in an IAM policy based on the source and destination + * bucket types in a CopyObject operation.

    + *
      + *
    • + *

      If the source object is in a general purpose bucket, you must have + * + * s3:GetObject + * + * permission to read the source object that is being copied.

      + *
    • + *
    • + *

      If the destination bucket is a general purpose bucket, you must have + * + * s3:PutObject + * + * permission to write the object copy to the destination bucket.

      + *
    • + *
    + *
  • + *
  • + *

    + * Directory bucket permissions - + * You must have permissions in a bucket policy or an IAM identity-based policy based on the + * source and destination bucket types in a CopyObject + * operation.

    + *
      + *
    • + *

      If the source object that you want to copy is in a + * directory bucket, you must have the + * s3express:CreateSession + * permission in + * the Action element of a policy to read the object. By + * default, the session is in the ReadWrite mode. If you + * want to restrict the access, you can explicitly set the + * s3express:SessionMode condition key to + * ReadOnly on the copy source bucket.

      + *
    • + *
    • + *

      If the copy destination is a directory bucket, you must have the + * + * s3express:CreateSession + * permission in the + * Action element of a policy to write the object to the + * destination. The s3express:SessionMode condition key + * can't be set to ReadOnly on the copy destination bucket. + *

      + *
    • + *
    + *

    If the object is encrypted with SSE-KMS, you must also have the + * kms:GenerateDataKey and kms:Decrypt permissions + * in IAM identity-based policies and KMS key policies for the KMS + * key.

    + *

    For example policies, see Example bucket policies for S3 Express One Zone and Amazon Web Services Identity and Access Management (IAM) identity-based policies for + * S3 Express One Zone in the Amazon S3 User Guide.

    + *
  • + *
+ *
+ *
Response and special errors

When the request is an HTTP 1.1 request, the response is chunk encoded. When the request is not an HTTP 1.1 request, the response would not contain the Content-Length. You always need to read the entire response body to check if the copy succeeds.

  • If the copy is successful, you receive a response with information about the copied object.

  • A copy request might return an error when Amazon S3 receives the copy request or while Amazon S3 is copying the files. A 200 OK response can contain either a success or an error.

    • If the error occurs before the copy action starts, you receive a standard Amazon S3 error.

    • If the error occurs during the copy operation, the error response is embedded in the 200 OK response. For example, in a cross-region copy, you may encounter throttling and receive a 200 OK response. For more information, see Resolve the Error 200 response when copying objects to Amazon S3. The 200 OK status code means the copy was accepted, but it doesn't mean the copy is complete. Another example is when you disconnect from Amazon S3 before the copy is complete, Amazon S3 might cancel the copy and you may receive a 200 OK response. You must stay connected to Amazon S3 until the entire response is successfully received and processed.

      If you call this API operation directly, make sure to design your application to parse the content of the response and handle it appropriately (a sketch of this check follows this list). If you use Amazon Web Services SDKs, SDKs handle this condition. The SDKs detect the embedded error and apply error handling per your configuration settings (including automatically retrying the request as appropriate). If the condition persists, the SDKs throw an exception (or, for the SDKs that don't use exceptions, they return an error).

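If you do call the REST API directly, the embedded-error case can be checked roughly as follows. This is a hypothetical sketch: `signedUrl` and `signedHeaders` are placeholders for a SigV4-signed CopyObject request you have prepared yourself, not SDK helpers.

```javascript
// Hypothetical sketch of handling CopyObject's "error inside a 200 OK" case when
// calling the REST API directly. Signing is out of scope here: `signedUrl` and
// `signedHeaders` stand in for a SigV4-signed CopyObject request.
const signedUrl = "https://amzn-s3-demo-bucket.s3.us-east-1.amazonaws.com/HappyFaceCopyjpg";
const signedHeaders = { "x-amz-copy-source": "/sourcebucket/HappyFacejpg" }; // plus auth headers

const response = await fetch(signedUrl, { method: "PUT", headers: signedHeaders });
const body = await response.text(); // read the ENTIRE body before judging success
if (!response.ok || body.includes("<Error>")) {
  // A 200 OK whose XML body contains <Error> means the copy failed mid-flight.
  throw new Error(`CopyObject failed (HTTP ${response.status}): ${body}`);
}
```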
Charge

The copy request charge is based on the storage class and Region that you specify for the destination object. The request can also result in a data retrieval charge for the source if the source storage class bills for data retrieval. If the copy source is in a different region, the data transfer is billed to the copy source account. For pricing information, see Amazon S3 pricing.

HTTP Host header syntax

  • Directory buckets - The HTTP Host header syntax is Bucket-name.s3express-zone-id.region-code.amazonaws.com.

  • Amazon S3 on Outposts - When you use this action with S3 on Outposts through the REST API, you must direct requests to the S3 on Outposts hostname. The S3 on Outposts hostname takes the form AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. The hostname isn't required when you use the Amazon Web Services CLI or SDKs.


The following operations are related to CopyObject:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, CopyObjectCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, CopyObjectCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // CopyObjectRequest + * ACL: "private" || "public-read" || "public-read-write" || "authenticated-read" || "aws-exec-read" || "bucket-owner-read" || "bucket-owner-full-control", + * Bucket: "STRING_VALUE", // required + * CacheControl: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * ContentDisposition: "STRING_VALUE", + * ContentEncoding: "STRING_VALUE", + * ContentLanguage: "STRING_VALUE", + * ContentType: "STRING_VALUE", + * CopySource: "STRING_VALUE", // required + * CopySourceIfMatch: "STRING_VALUE", + * CopySourceIfModifiedSince: new Date("TIMESTAMP"), + * CopySourceIfNoneMatch: "STRING_VALUE", + * CopySourceIfUnmodifiedSince: new Date("TIMESTAMP"), + * Expires: new Date("TIMESTAMP"), + * GrantFullControl: "STRING_VALUE", + * GrantRead: "STRING_VALUE", + * GrantReadACP: "STRING_VALUE", + * GrantWriteACP: "STRING_VALUE", + * Key: "STRING_VALUE", // required + * Metadata: { // Metadata + * "": "STRING_VALUE", + * }, + * MetadataDirective: "COPY" || "REPLACE", + * TaggingDirective: "COPY" || "REPLACE", + * ServerSideEncryption: "AES256" || "aws:kms" || "aws:kms:dsse", + * StorageClass: "STANDARD" || "REDUCED_REDUNDANCY" || "STANDARD_IA" || "ONEZONE_IA" || "INTELLIGENT_TIERING" || "GLACIER" || "DEEP_ARCHIVE" || "OUTPOSTS" || "GLACIER_IR" || "SNOW" || "EXPRESS_ONEZONE", + * WebsiteRedirectLocation: "STRING_VALUE", + * SSECustomerAlgorithm: "STRING_VALUE", + * SSECustomerKey: "STRING_VALUE", + * SSECustomerKeyMD5: "STRING_VALUE", + * SSEKMSKeyId: "STRING_VALUE", + * SSEKMSEncryptionContext: "STRING_VALUE", + * BucketKeyEnabled: true || false, + * CopySourceSSECustomerAlgorithm: "STRING_VALUE", + * CopySourceSSECustomerKey: "STRING_VALUE", + * CopySourceSSECustomerKeyMD5: "STRING_VALUE", + * RequestPayer: "requester", + * Tagging: "STRING_VALUE", + * ObjectLockMode: "GOVERNANCE" || "COMPLIANCE", + * ObjectLockRetainUntilDate: new Date("TIMESTAMP"), + * ObjectLockLegalHoldStatus: "ON" || "OFF", + * ExpectedBucketOwner: "STRING_VALUE", + * ExpectedSourceBucketOwner: "STRING_VALUE", + * }; + * const command = new CopyObjectCommand(input); + * const response = await client.send(command); + * // { // CopyObjectOutput + * // CopyObjectResult: { // CopyObjectResult + * // ETag: "STRING_VALUE", + * // LastModified: new Date("TIMESTAMP"), + * // ChecksumType: "COMPOSITE" || "FULL_OBJECT", + * // ChecksumCRC32: "STRING_VALUE", + * // ChecksumCRC32C: "STRING_VALUE", + * // ChecksumCRC64NVME: "STRING_VALUE", + * // ChecksumSHA1: "STRING_VALUE", + * // ChecksumSHA256: "STRING_VALUE", + * // }, + * // Expiration: "STRING_VALUE", + * // CopySourceVersionId: "STRING_VALUE", + * // VersionId: "STRING_VALUE", + * // ServerSideEncryption: "AES256" || "aws:kms" || "aws:kms:dsse", + * // SSECustomerAlgorithm: "STRING_VALUE", + * // SSECustomerKeyMD5: "STRING_VALUE", + * // SSEKMSKeyId: "STRING_VALUE", + * // SSEKMSEncryptionContext: "STRING_VALUE", + * // BucketKeyEnabled: true || false, + * // RequestCharged: "requester", + * // }; + * + * ``` + * + * @param CopyObjectCommandInput - {@link CopyObjectCommandInput} + * @returns {@link CopyObjectCommandOutput} + * @see {@link CopyObjectCommandInput} for 
command's `input` shape. + * @see {@link CopyObjectCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link ObjectNotInActiveTierError} (client fault) + *

The source object of the COPY action is not in the active tier and is only stored in Amazon S3 Glacier.

@throws {@link S3ServiceException}

Base exception class for all service exceptions from S3 service.

+ * + * + * @example To copy an object + * ```javascript + * // The following example copies an object from one bucket to another. + * const input = { + * Bucket: "destinationbucket", + * CopySource: "/sourcebucket/HappyFacejpg", + * Key: "HappyFaceCopyjpg" + * }; + * const command = new CopyObjectCommand(input); + * const response = await client.send(command); + * /* response is + * { + * CopyObjectResult: { + * ETag: `"6805f2cfc46c0f04559748bb039d69ae"`, + * LastModified: "2016-12-15T17:38:53.000Z" + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class CopyObjectCommand extends CopyObjectCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: CopyObjectRequest; + output: CopyObjectOutput; + }; + sdk: { + input: CopyObjectCommandInput; + output: CopyObjectCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/CreateBucketCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/CreateBucketCommand.d.ts new file mode 100644 index 00000000..b49c6d9e --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/CreateBucketCommand.d.ts @@ -0,0 +1,282 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { CreateBucketOutput, CreateBucketRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link CreateBucketCommand}. + */ +export interface CreateBucketCommandInput extends CreateBucketRequest { +} +/** + * @public + * + * The output of {@link CreateBucketCommand}. + */ +export interface CreateBucketCommandOutput extends CreateBucketOutput, __MetadataBearer { +} +declare const CreateBucketCommand_base: { + new (input: CreateBucketCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: CreateBucketCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

This action creates an Amazon S3 bucket. To create an Amazon S3 on Outposts bucket, see CreateBucket.

Creates a new S3 bucket. To create a bucket, you must set up Amazon S3 and have a valid Amazon Web Services Access Key ID to authenticate requests. Anonymous requests are never allowed to create buckets. By creating the bucket, you become the bucket owner.

There are two types of buckets: general purpose buckets and directory buckets. For more information about these bucket types, see Creating, configuring, and working with Amazon S3 buckets in the Amazon S3 User Guide.

  • General purpose buckets - If you send your CreateBucket request to the s3.amazonaws.com global endpoint, the request goes to the us-east-1 Region. So the signature calculations in Signature Version 4 must use us-east-1 as the Region, even if the location constraint in the request specifies another Region where the bucket is to be created. If you create a bucket in a Region other than US East (N. Virginia), your application must be able to handle a 307 redirect. For more information, see Virtual hosting of buckets in the Amazon S3 User Guide.

  • Directory buckets - For directory buckets, you must make requests for this API operation to the Regional endpoint. These endpoints support path-style requests in the format https://s3express-control.region-code.amazonaws.com/bucket-name. Virtual-hosted-style requests aren't supported. For more information about endpoints in Availability Zones, see Regional and Zonal endpoints for directory buckets in Availability Zones in the Amazon S3 User Guide. For more information about endpoints in Local Zones, see Concepts for directory buckets in Local Zones in the Amazon S3 User Guide.

Permissions

  • General purpose bucket permissions - In addition to the s3:CreateBucket permission, the following permissions are required in a policy when your CreateBucket request includes specific headers:

    • Access control lists (ACLs) - In your CreateBucket request, if you specify an access control list (ACL) and set it to public-read, public-read-write, authenticated-read, or if you explicitly specify any other custom ACLs, both s3:CreateBucket and s3:PutBucketAcl permissions are required. In your CreateBucket request, if you set the ACL to private, or if you don't specify any ACLs, only the s3:CreateBucket permission is required.

    • Object Lock - In your CreateBucket request, if you set x-amz-bucket-object-lock-enabled to true, the s3:PutBucketObjectLockConfiguration and s3:PutBucketVersioning permissions are required.

    • S3 Object Ownership - If your CreateBucket request includes the x-amz-object-ownership header, then the s3:PutBucketOwnershipControls permission is required.

      To set an ACL on a bucket as part of a CreateBucket request, you must explicitly set S3 Object Ownership for the bucket to a different value than the default, BucketOwnerEnforced. Additionally, if your desired bucket ACL grants public access, you must first create the bucket (without the bucket ACL) and then explicitly disable Block Public Access on the bucket before using PutBucketAcl to set the ACL, as shown in the sketch after this list. If you try to create a bucket with a public ACL, the request will fail.

      For the majority of modern use cases in S3, we recommend that you keep all Block Public Access settings enabled and keep ACLs disabled. If you would like to share data with users outside of your account, you can use bucket policies as needed. For more information, see Controlling ownership of objects and disabling ACLs for your bucket and Blocking public access to your Amazon S3 storage in the Amazon S3 User Guide.

    • S3 Block Public Access - If your specific use case requires granting public access to your S3 resources, you can disable Block Public Access. Specifically, you can create a new bucket with Block Public Access enabled, then separately call the DeletePublicAccessBlock API. To use this operation, you must have the s3:PutBucketPublicAccessBlock permission. For more information about S3 Block Public Access, see Blocking public access to your Amazon S3 storage in the Amazon S3 User Guide.

  • Directory bucket permissions - You must have the s3express:CreateBucket permission in an IAM identity-based policy instead of a bucket policy. Cross-account access to this API operation isn't supported. This operation can only be performed by the Amazon Web Services account that owns the resource. For more information about directory bucket policies and permissions, see Amazon Web Services Identity and Access Management (IAM) for S3 Express One Zone in the Amazon S3 User Guide.

    The permissions for ACLs, Object Lock, S3 Object Ownership, and S3 Block Public Access are not supported for directory buckets. For directory buckets, all Block Public Access settings are enabled at the bucket level and S3 Object Ownership is set to Bucket owner enforced (ACLs disabled). These settings can't be modified.

    For more information about permissions for creating and working with directory buckets, see Directory buckets in the Amazon S3 User Guide. For more information about supported S3 features for directory buckets, see Features of S3 Express One Zone in the Amazon S3 User Guide.

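The create-then-expose flow described in the S3 Object Ownership and Block Public Access notes can be sketched with commands this package exports. A minimal sketch: the bucket name is a placeholder, and keeping Block Public Access enabled remains the recommended default.

```javascript
import {
  S3Client,
  CreateBucketCommand,
  DeletePublicAccessBlockCommand,
  PutBucketAclCommand,
} from "@aws-sdk/client-s3";

const client = new S3Client({}); // region and credentials resolved from the environment
const Bucket = "amzn-s3-demo-bucket"; // placeholder name

// 1. Create the bucket without an ACL, with Object Ownership not BucketOwnerEnforced.
await client.send(new CreateBucketCommand({ Bucket, ObjectOwnership: "ObjectWriter" }));
// 2. Explicitly disable Block Public Access on the new bucket.
await client.send(new DeletePublicAccessBlockCommand({ Bucket }));
// 3. Only then can the public ACL be applied.
await client.send(new PutBucketAclCommand({ Bucket, ACL: "public-read" }));
```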
HTTP Host header syntax

Directory buckets - The HTTP Host header syntax is s3express-control.region-code.amazonaws.com.


The following operations are related to CreateBucket:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, CreateBucketCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, CreateBucketCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // CreateBucketRequest + * ACL: "private" || "public-read" || "public-read-write" || "authenticated-read", + * Bucket: "STRING_VALUE", // required + * CreateBucketConfiguration: { // CreateBucketConfiguration + * LocationConstraint: "af-south-1" || "ap-east-1" || "ap-northeast-1" || "ap-northeast-2" || "ap-northeast-3" || "ap-south-1" || "ap-south-2" || "ap-southeast-1" || "ap-southeast-2" || "ap-southeast-3" || "ap-southeast-4" || "ap-southeast-5" || "ca-central-1" || "cn-north-1" || "cn-northwest-1" || "EU" || "eu-central-1" || "eu-central-2" || "eu-north-1" || "eu-south-1" || "eu-south-2" || "eu-west-1" || "eu-west-2" || "eu-west-3" || "il-central-1" || "me-central-1" || "me-south-1" || "sa-east-1" || "us-east-2" || "us-gov-east-1" || "us-gov-west-1" || "us-west-1" || "us-west-2", + * Location: { // LocationInfo + * Type: "AvailabilityZone" || "LocalZone", + * Name: "STRING_VALUE", + * }, + * Bucket: { // BucketInfo + * DataRedundancy: "SingleAvailabilityZone" || "SingleLocalZone", + * Type: "Directory", + * }, + * }, + * GrantFullControl: "STRING_VALUE", + * GrantRead: "STRING_VALUE", + * GrantReadACP: "STRING_VALUE", + * GrantWrite: "STRING_VALUE", + * GrantWriteACP: "STRING_VALUE", + * ObjectLockEnabledForBucket: true || false, + * ObjectOwnership: "BucketOwnerPreferred" || "ObjectWriter" || "BucketOwnerEnforced", + * }; + * const command = new CreateBucketCommand(input); + * const response = await client.send(command); + * // { // CreateBucketOutput + * // Location: "STRING_VALUE", + * // }; + * + * ``` + * + * @param CreateBucketCommandInput - {@link CreateBucketCommandInput} + * @returns {@link CreateBucketCommandOutput} + * @see {@link CreateBucketCommandInput} for command's `input` shape. + * @see {@link CreateBucketCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link BucketAlreadyExists} (client fault) + *

The requested bucket name is not available. The bucket namespace is shared by all users of the system. Select a different name and try again.

@throws {@link BucketAlreadyOwnedByYou} (client fault)

The bucket you tried to create already exists, and you own it. Amazon S3 returns this error in all Amazon Web Services Regions except in the North Virginia Region. For legacy compatibility, if you re-create an existing bucket that you already own in the North Virginia Region, Amazon S3 returns 200 OK and resets the bucket access control lists (ACLs).

@throws {@link S3ServiceException}

Base exception class for all service exceptions from S3 service.

+ * + * + * @example To create a bucket in a specific region + * ```javascript + * // The following example creates a bucket. The request specifies an AWS region where to create the bucket. + * const input = { + * Bucket: "examplebucket", + * CreateBucketConfiguration: { + * LocationConstraint: "eu-west-1" + * } + * }; + * const command = new CreateBucketCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Location: "http://examplebucket..s3.amazonaws.com/" + * } + * *\/ + * ``` + * + * @example To create a bucket + * ```javascript + * // The following example creates a bucket. + * const input = { + * Bucket: "examplebucket" + * }; + * const command = new CreateBucketCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Location: "/examplebucket" + * } + * *\/ + * ``` + * + * @public + */ +export declare class CreateBucketCommand extends CreateBucketCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: CreateBucketRequest; + output: CreateBucketOutput; + }; + sdk: { + input: CreateBucketCommandInput; + output: CreateBucketCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/CreateBucketMetadataTableConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/CreateBucketMetadataTableConfigurationCommand.d.ts new file mode 100644 index 00000000..2469d6b9 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/CreateBucketMetadataTableConfigurationCommand.d.ts @@ -0,0 +1,135 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { CreateBucketMetadataTableConfigurationRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link CreateBucketMetadataTableConfigurationCommand}. + */ +export interface CreateBucketMetadataTableConfigurationCommandInput extends CreateBucketMetadataTableConfigurationRequest { +} +/** + * @public + * + * The output of {@link CreateBucketMetadataTableConfigurationCommand}. + */ +export interface CreateBucketMetadataTableConfigurationCommandOutput extends __MetadataBearer { +} +declare const CreateBucketMetadataTableConfigurationCommand_base: { + new (input: CreateBucketMetadataTableConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: CreateBucketMetadataTableConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Creates a metadata table configuration for a general purpose bucket. For more information, see Accelerating data discovery with S3 Metadata in the Amazon S3 User Guide.

Permissions

To use this operation, you must have the following permissions. For more information, see Setting up permissions for configuring metadata tables in the Amazon S3 User Guide.

If you also want to integrate your table bucket with Amazon Web Services analytics services so that you can query your metadata table, you need additional permissions. For more information, see Integrating Amazon S3 Tables with Amazon Web Services analytics services in the Amazon S3 User Guide.

  • s3:CreateBucketMetadataTableConfiguration

  • s3tables:CreateNamespace

  • s3tables:GetTable

  • s3tables:CreateTable

  • s3tables:PutTablePolicy


The following operations are related to CreateBucketMetadataTableConfiguration:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, CreateBucketMetadataTableConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, CreateBucketMetadataTableConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // CreateBucketMetadataTableConfigurationRequest + * Bucket: "STRING_VALUE", // required + * ContentMD5: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * MetadataTableConfiguration: { // MetadataTableConfiguration + * S3TablesDestination: { // S3TablesDestination + * TableBucketArn: "STRING_VALUE", // required + * TableName: "STRING_VALUE", // required + * }, + * }, + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new CreateBucketMetadataTableConfigurationCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param CreateBucketMetadataTableConfigurationCommandInput - {@link CreateBucketMetadataTableConfigurationCommandInput} + * @returns {@link CreateBucketMetadataTableConfigurationCommandOutput} + * @see {@link CreateBucketMetadataTableConfigurationCommandInput} for command's `input` shape. + * @see {@link CreateBucketMetadataTableConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class CreateBucketMetadataTableConfigurationCommand extends CreateBucketMetadataTableConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: CreateBucketMetadataTableConfigurationRequest; + output: {}; + }; + sdk: { + input: CreateBucketMetadataTableConfigurationCommandInput; + output: CreateBucketMetadataTableConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/CreateMultipartUploadCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/CreateMultipartUploadCommand.d.ts new file mode 100644 index 00000000..a60b70e4 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/CreateMultipartUploadCommand.d.ts @@ -0,0 +1,401 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { CreateMultipartUploadOutput, CreateMultipartUploadRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link CreateMultipartUploadCommand}. + */ +export interface CreateMultipartUploadCommandInput extends CreateMultipartUploadRequest { +} +/** + * @public + * + * The output of {@link CreateMultipartUploadCommand}. + */ +export interface CreateMultipartUploadCommandOutput extends CreateMultipartUploadOutput, __MetadataBearer { +} +declare const CreateMultipartUploadCommand_base: { + new (input: CreateMultipartUploadCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: CreateMultipartUploadCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

This action initiates a multipart upload and returns an upload ID. This upload ID is used to associate all of the parts in the specific multipart upload. You specify this upload ID in each of your subsequent upload part requests (see UploadPart). You also include this upload ID in the final request to either complete or abort the multipart upload request. For more information about multipart uploads, see Multipart Upload Overview in the Amazon S3 User Guide. A minimal end-to-end sketch of this flow appears after the notes below.

After you initiate a multipart upload and upload one or more parts, to stop being charged for storing the uploaded parts, you must either complete or abort the multipart upload. Amazon S3 frees up the space used to store the parts and stops charging you for storing them only after you either complete or abort a multipart upload.

If you have configured a lifecycle rule to abort incomplete multipart uploads, the created multipart upload must be completed within the number of days specified in the bucket lifecycle configuration. Otherwise, the incomplete multipart upload becomes eligible for an abort action and Amazon S3 aborts the multipart upload. For more information, see Aborting Incomplete Multipart Uploads Using a Bucket Lifecycle Configuration.

  • Directory buckets - S3 Lifecycle is not supported by directory buckets.

  • Directory buckets - For directory buckets, you must make requests for this API operation to the Zonal endpoint. These endpoints support virtual-hosted-style requests in the format https://amzn-s3-demo-bucket.s3express-zone-id.region-code.amazonaws.com/key-name. Path-style requests are not supported. For more information about endpoints in Availability Zones, see Regional and Zonal endpoints for directory buckets in Availability Zones in the Amazon S3 User Guide. For more information about endpoints in Local Zones, see Concepts for directory buckets in Local Zones in the Amazon S3 User Guide.

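A minimal sketch of the initiate/upload/complete-or-abort contract described above, using this package's exports. The bucket, key, and the single small part are placeholders; in a real upload every part except the last must be at least 5 MB.

```javascript
import {
  S3Client,
  CreateMultipartUploadCommand,
  UploadPartCommand,
  CompleteMultipartUploadCommand,
  AbortMultipartUploadCommand,
} from "@aws-sdk/client-s3";

const client = new S3Client({});
const target = { Bucket: "amzn-s3-demo-bucket", Key: "largeobject" }; // placeholders

// 1. Initiate: the returned UploadId ties all later part uploads together.
const { UploadId } = await client.send(new CreateMultipartUploadCommand(target));
try {
  // 2. Upload parts (one tiny part here; real uploads loop over >= 5 MB chunks).
  const { ETag } = await client.send(
    new UploadPartCommand({ ...target, UploadId, PartNumber: 1, Body: "part payload" })
  );
  // 3. Complete: completing (or aborting) is what stops per-part storage charges.
  await client.send(
    new CompleteMultipartUploadCommand({
      ...target,
      UploadId,
      MultipartUpload: { Parts: [{ ETag, PartNumber: 1 }] },
    })
  );
} catch (err) {
  // On failure, abort so the stored parts are freed and no longer billed.
  await client.send(new AbortMultipartUploadCommand({ ...target, UploadId }));
  throw err;
}
```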
Request signing

For request signing, multipart upload is just a series of regular requests. You initiate a multipart upload, send one or more requests to upload parts, and then complete the multipart upload process. You sign each request individually. There is nothing special about signing multipart upload requests. For more information about signing, see Authenticating Requests (Amazon Web Services Signature Version 4) in the Amazon S3 User Guide.

Permissions

  • General purpose bucket permissions - To perform a multipart upload with encryption using a Key Management Service (KMS) key, the requester must have permission to the kms:Decrypt and kms:GenerateDataKey actions on the key. The requester must also have permissions for the kms:GenerateDataKey action for the CreateMultipartUpload API. Then, the requester needs permissions for the kms:Decrypt action on the UploadPart and UploadPartCopy APIs. These permissions are required because Amazon S3 must decrypt and read data from the encrypted file parts before it completes the multipart upload. For more information, see Multipart upload API and permissions and Protecting data using server-side encryption with Amazon Web Services KMS in the Amazon S3 User Guide.

  • Directory bucket permissions - To grant access to this API operation on a directory bucket, we recommend that you use the CreateSession API operation for session-based authorization. Specifically, you grant the s3express:CreateSession permission to the directory bucket in a bucket policy or an IAM identity-based policy. Then, you make the CreateSession API call on the bucket to obtain a session token. With the session token in your request header, you can make API requests to this operation. After the session token expires, you make another CreateSession API call to generate a new session token for use. The Amazon Web Services CLI or SDKs create the session and refresh the session token automatically to avoid service interruptions when a session expires. For more information about authorization, see CreateSession.
Encryption

  • General purpose buckets - Server-side encryption is for data encryption at rest. Amazon S3 encrypts your data as it writes it to disks in its data centers and decrypts it when you access it. Amazon S3 automatically encrypts all new objects that are uploaded to an S3 bucket. When doing a multipart upload, if you don't specify encryption information in your request, the encryption setting of the uploaded parts is set to the default encryption configuration of the destination bucket. By default, all buckets have a base level of encryption configuration that uses server-side encryption with Amazon S3 managed keys (SSE-S3). If the destination bucket has a default encryption configuration that uses server-side encryption with a Key Management Service (KMS) key (SSE-KMS), or a customer-provided encryption key (SSE-C), Amazon S3 uses the corresponding KMS key, or a customer-provided key to encrypt the uploaded parts. When you perform a CreateMultipartUpload operation, if you want to use a different type of encryption setting for the uploaded parts, you can request that Amazon S3 encrypts the object with a different encryption key (such as an Amazon S3 managed key, a KMS key, or a customer-provided key). When the encryption setting in your request is different from the default encryption configuration of the destination bucket, the encryption setting in your request takes precedence. If you choose to provide your own encryption key, the request headers you provide in UploadPart and UploadPartCopy requests must match the headers you used in the CreateMultipartUpload request.

    • Use KMS keys (SSE-KMS) that include the Amazon Web Services managed key (aws/s3) and KMS customer managed keys stored in Key Management Service (KMS) – If you want Amazon Web Services to manage the keys used to encrypt data, specify the following headers in the request (their SDK parameter equivalents are sketched after this section).

      • x-amz-server-side-encryption

      • x-amz-server-side-encryption-aws-kms-key-id

      • x-amz-server-side-encryption-context

      Note the following:

      • If you specify x-amz-server-side-encryption:aws:kms, but don't provide x-amz-server-side-encryption-aws-kms-key-id, Amazon S3 uses the Amazon Web Services managed key (aws/s3 key) in KMS to protect the data.

      • To perform a multipart upload with encryption by using an Amazon Web Services KMS key, the requester must have permission to the kms:Decrypt and kms:GenerateDataKey* actions on the key. These permissions are required because Amazon S3 must decrypt and read data from the encrypted file parts before it completes the multipart upload. For more information, see Multipart upload API and permissions and Protecting data using server-side encryption with Amazon Web Services KMS in the Amazon S3 User Guide.

      • If your Identity and Access Management (IAM) user or role is in the same Amazon Web Services account as the KMS key, then you must have these permissions on the key policy. If your IAM user or role is in a different account from the key, then you must have the permissions on both the key policy and your IAM user or role.

      • All GET and PUT requests for an object protected by KMS fail if you don't make them by using Secure Sockets Layer (SSL), Transport Layer Security (TLS), or Signature Version 4. For information about configuring any of the officially supported Amazon Web Services SDKs and Amazon Web Services CLI, see Specifying the Signature Version in Request Authentication in the Amazon S3 User Guide.

      For more information about server-side encryption with KMS keys (SSE-KMS), see Protecting Data Using Server-Side Encryption with KMS keys in the Amazon S3 User Guide.

    • Use customer-provided encryption keys (SSE-C) – If you want to manage your own encryption keys, provide all the following headers in the request.

      • x-amz-server-side-encryption-customer-algorithm

      • x-amz-server-side-encryption-customer-key

      • x-amz-server-side-encryption-customer-key-MD5

      For more information about server-side encryption with customer-provided encryption keys (SSE-C), see Protecting data using server-side encryption with customer-provided encryption keys (SSE-C) in the Amazon S3 User Guide.

  • Directory buckets - For directory buckets, there are only two supported options for server-side encryption: server-side encryption with Amazon S3 managed keys (SSE-S3) (AES256) and server-side encryption with KMS keys (SSE-KMS) (aws:kms). We recommend that the bucket's default encryption uses the desired encryption configuration and you don't override the bucket default encryption in your CreateSession requests or PUT object requests. Then, new objects are automatically encrypted with the desired encryption settings. For more information, see Protecting data with server-side encryption in the Amazon S3 User Guide. For more information about the encryption overriding behaviors in directory buckets, see Specifying server-side encryption with KMS for new object uploads.

    In the Zonal endpoint API calls (except CopyObject and UploadPartCopy) using the REST API, the encryption request headers must match the encryption settings that are specified in the CreateSession request. You can't override the values of the encryption settings (x-amz-server-side-encryption, x-amz-server-side-encryption-aws-kms-key-id, x-amz-server-side-encryption-context, and x-amz-server-side-encryption-bucket-key-enabled) that are specified in the CreateSession request. You don't need to explicitly specify these encryption settings values in Zonal endpoint API calls, and Amazon S3 will use the encryption settings values from the CreateSession request to protect new objects in the directory bucket.

    When you use the CLI or the Amazon Web Services SDKs, for CreateSession, the session token refreshes automatically to avoid service interruptions when a session expires. The CLI or the Amazon Web Services SDKs use the bucket's default encryption configuration for the CreateSession request. It's not supported to override the encryption settings values in the CreateSession request. So in the Zonal endpoint API calls (except CopyObject and UploadPartCopy), the encryption request headers must match the default encryption configuration of the directory bucket.

    For directory buckets, when you perform a CreateMultipartUpload operation and an UploadPartCopy operation, the request headers you provide in the CreateMultipartUpload request must match the default encryption configuration of the destination bucket.

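In this SDK, the x-amz-server-side-encryption-* headers listed above surface as input parameters on CreateMultipartUploadCommand. A minimal sketch; the bucket, key, and KMS key ARN are placeholders.

```javascript
import { S3Client, CreateMultipartUploadCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});
// Each parameter below maps to one of the request headers listed above:
//   ServerSideEncryption    -> x-amz-server-side-encryption
//   SSEKMSKeyId             -> x-amz-server-side-encryption-aws-kms-key-id
//   SSEKMSEncryptionContext -> x-amz-server-side-encryption-context
const { UploadId } = await client.send(
  new CreateMultipartUploadCommand({
    Bucket: "amzn-s3-demo-bucket", // placeholder
    Key: "largeobject",            // placeholder
    ServerSideEncryption: "aws:kms",
    SSEKMSKeyId: "arn:aws:kms:us-east-1:111122223333:key/EXAMPLE-KEY-ID", // placeholder
  })
);
```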
HTTP Host header syntax

Directory buckets - The HTTP Host header syntax is Bucket-name.s3express-zone-id.region-code.amazonaws.com.


The following operations are related to CreateMultipartUpload:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, CreateMultipartUploadCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, CreateMultipartUploadCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // CreateMultipartUploadRequest + * ACL: "private" || "public-read" || "public-read-write" || "authenticated-read" || "aws-exec-read" || "bucket-owner-read" || "bucket-owner-full-control", + * Bucket: "STRING_VALUE", // required + * CacheControl: "STRING_VALUE", + * ContentDisposition: "STRING_VALUE", + * ContentEncoding: "STRING_VALUE", + * ContentLanguage: "STRING_VALUE", + * ContentType: "STRING_VALUE", + * Expires: new Date("TIMESTAMP"), + * GrantFullControl: "STRING_VALUE", + * GrantRead: "STRING_VALUE", + * GrantReadACP: "STRING_VALUE", + * GrantWriteACP: "STRING_VALUE", + * Key: "STRING_VALUE", // required + * Metadata: { // Metadata + * "": "STRING_VALUE", + * }, + * ServerSideEncryption: "AES256" || "aws:kms" || "aws:kms:dsse", + * StorageClass: "STANDARD" || "REDUCED_REDUNDANCY" || "STANDARD_IA" || "ONEZONE_IA" || "INTELLIGENT_TIERING" || "GLACIER" || "DEEP_ARCHIVE" || "OUTPOSTS" || "GLACIER_IR" || "SNOW" || "EXPRESS_ONEZONE", + * WebsiteRedirectLocation: "STRING_VALUE", + * SSECustomerAlgorithm: "STRING_VALUE", + * SSECustomerKey: "STRING_VALUE", + * SSECustomerKeyMD5: "STRING_VALUE", + * SSEKMSKeyId: "STRING_VALUE", + * SSEKMSEncryptionContext: "STRING_VALUE", + * BucketKeyEnabled: true || false, + * RequestPayer: "requester", + * Tagging: "STRING_VALUE", + * ObjectLockMode: "GOVERNANCE" || "COMPLIANCE", + * ObjectLockRetainUntilDate: new Date("TIMESTAMP"), + * ObjectLockLegalHoldStatus: "ON" || "OFF", + * ExpectedBucketOwner: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * ChecksumType: "COMPOSITE" || "FULL_OBJECT", + * }; + * const command = new CreateMultipartUploadCommand(input); + * const response = await client.send(command); + * // { // CreateMultipartUploadOutput + * // AbortDate: new Date("TIMESTAMP"), + * // AbortRuleId: "STRING_VALUE", + * // Bucket: "STRING_VALUE", + * // Key: "STRING_VALUE", + * // UploadId: "STRING_VALUE", + * // ServerSideEncryption: "AES256" || "aws:kms" || "aws:kms:dsse", + * // SSECustomerAlgorithm: "STRING_VALUE", + * // SSECustomerKeyMD5: "STRING_VALUE", + * // SSEKMSKeyId: "STRING_VALUE", + * // SSEKMSEncryptionContext: "STRING_VALUE", + * // BucketKeyEnabled: true || false, + * // RequestCharged: "requester", + * // ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * // ChecksumType: "COMPOSITE" || "FULL_OBJECT", + * // }; + * + * ``` + * + * @param CreateMultipartUploadCommandInput - {@link CreateMultipartUploadCommandInput} + * @returns {@link CreateMultipartUploadCommandOutput} + * @see {@link CreateMultipartUploadCommandInput} for command's `input` shape. + * @see {@link CreateMultipartUploadCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @example To initiate a multipart upload + * ```javascript + * // The following example initiates a multipart upload. + * const input = { + * Bucket: "examplebucket", + * Key: "largeobject" + * }; + * const command = new CreateMultipartUploadCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Bucket: "examplebucket", + * Key: "largeobject", + * UploadId: "ibZBv_75gd9r8lH_gqXatLdxMVpAlj6ZQjEs.OwyF3953YdwbcQnMA2BLGn8Lx12fQNICtMw5KyteFeHw.Sjng--" + * } + * *\/ + * ``` + * + * @public + */ +export declare class CreateMultipartUploadCommand extends CreateMultipartUploadCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: CreateMultipartUploadRequest; + output: CreateMultipartUploadOutput; + }; + sdk: { + input: CreateMultipartUploadCommandInput; + output: CreateMultipartUploadCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/CreateSessionCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/CreateSessionCommand.d.ts new file mode 100644 index 00000000..5099bf79 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/CreateSessionCommand.d.ts @@ -0,0 +1,197 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { CreateSessionOutput, CreateSessionRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link CreateSessionCommand}. + */ +export interface CreateSessionCommandInput extends CreateSessionRequest { +} +/** + * @public + * + * The output of {@link CreateSessionCommand}. + */ +export interface CreateSessionCommandOutput extends CreateSessionOutput, __MetadataBearer { +} +declare const CreateSessionCommand_base: { + new (input: CreateSessionCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: CreateSessionCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Creates a session that establishes temporary security credentials to support fast authentication and authorization for the Zonal endpoint API operations on directory buckets. For more information about Zonal endpoint API operations that include the Availability Zone in the request endpoint, see S3 Express One Zone APIs in the Amazon S3 User Guide.

To make Zonal endpoint API requests on a directory bucket, use the CreateSession API operation. Specifically, you grant s3express:CreateSession permission to a bucket in a bucket policy or an IAM identity-based policy. Then, you use IAM credentials to make the CreateSession API request on the bucket, which returns temporary security credentials that include the access key ID, secret access key, session token, and expiration. These credentials have associated permissions to access the Zonal endpoint API operations. After the session is created, you don't need to use other policies to grant permissions to each Zonal endpoint API individually. Instead, in your Zonal endpoint API requests, you sign your requests by applying the temporary security credentials of the session to the request headers and following the SigV4 protocol for authentication. You also apply the session token to the x-amz-s3session-token request header for authorization. Temporary security credentials are scoped to the bucket and expire after 5 minutes. After the expiration time, any calls that you make with those credentials will fail. You must use IAM credentials again to make a CreateSession API request that generates a new set of temporary credentials for use. Temporary credentials cannot be extended or refreshed beyond the original specified interval. A minimal sketch of this flow follows.

If you use Amazon Web Services SDKs, SDKs handle the session token refreshes automatically to avoid service interruptions when a session expires. We recommend that you use the Amazon Web Services SDKs to initiate and manage requests to the CreateSession API. For more information, see Performance guidelines and design patterns in the Amazon S3 User Guide.

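A minimal sketch of the credential handoff described above, using this package's exports. The directory bucket name is a placeholder; in practice the SDK issues and refreshes these sessions for you.

```javascript
import { S3Client, CreateSessionCommand } from "@aws-sdk/client-s3";

const client = new S3Client({}); // CreateSession itself is signed with your IAM credentials
const { Credentials, ServerSideEncryption } = await client.send(
  new CreateSessionCommand({
    Bucket: "amzn-s3-demo-bucket--usw2-az1--x-s3", // placeholder directory bucket name
    SessionMode: "ReadWrite", // or "ReadOnly" where s3express:SessionMode restricts access
  })
);

// The temporary credentials are bucket-scoped and expire after about 5 minutes.
// Zonal endpoint requests are then SigV4-signed with AccessKeyId/SecretAccessKey,
// carrying SessionToken in the x-amz-s3session-token header; the SDK does this for
// you and re-creates the session once Credentials.Expiration passes.
console.log(Credentials.AccessKeyId, Credentials.Expiration, ServerSideEncryption);
```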
  • You must make requests for this API operation to the Zonal endpoint. These endpoints support virtual-hosted-style requests in the format https://bucket-name.s3express-zone-id.region-code.amazonaws.com. Path-style requests are not supported. For more information about endpoints in Availability Zones, see Regional and Zonal endpoints for directory buckets in Availability Zones in the Amazon S3 User Guide. For more information about endpoints in Local Zones, see Concepts for directory buckets in Local Zones in the Amazon S3 User Guide.

  • CopyObject API operation - Unlike other Zonal endpoint API operations, the CopyObject API operation doesn't use the temporary security credentials returned from the CreateSession API operation for authentication and authorization. For information about authentication and authorization of the CopyObject API operation on directory buckets, see CopyObject.

  • HeadBucket API operation - Unlike other Zonal endpoint API operations, the HeadBucket API operation doesn't use the temporary security credentials returned from the CreateSession API operation for authentication and authorization. For information about authentication and authorization of the HeadBucket API operation on directory buckets, see HeadBucket.

Permissions

To obtain temporary security credentials, you must create a bucket policy or an IAM identity-based policy that grants s3express:CreateSession permission to the bucket. In a policy, you can have the s3express:SessionMode condition key to control who can create a ReadWrite or ReadOnly session. For more information about ReadWrite or ReadOnly sessions, see x-amz-create-session-mode. For example policies, see Example bucket policies for S3 Express One Zone and Amazon Web Services Identity and Access Management (IAM) identity-based policies for S3 Express One Zone in the Amazon S3 User Guide.

To grant cross-account access to Zonal endpoint API operations, the bucket policy should also grant both accounts the s3express:CreateSession permission.

If you want to encrypt objects with SSE-KMS, you must also have the kms:GenerateDataKey and the kms:Decrypt permissions in IAM identity-based policies and KMS key policies for the target KMS key.

Encryption

For directory buckets, there are only two supported options for server-side encryption: server-side encryption with Amazon S3 managed keys (SSE-S3) (AES256) and server-side encryption with KMS keys (SSE-KMS) (aws:kms). We recommend that the bucket's default encryption uses the desired encryption configuration and you don't override the bucket default encryption in your CreateSession requests or PUT object requests. Then, new objects are automatically encrypted with the desired encryption settings. For more information, see Protecting data with server-side encryption in the Amazon S3 User Guide. For more information about the encryption overriding behaviors in directory buckets, see Specifying server-side encryption with KMS for new object uploads.

For Zonal endpoint (object-level) API operations except CopyObject and UploadPartCopy, you authenticate and authorize requests through CreateSession for low latency. To encrypt new objects in a directory bucket with SSE-KMS, you must specify SSE-KMS as the directory bucket's default encryption configuration with a KMS key (specifically, a customer managed key). Then, when a session is created for Zonal endpoint API operations, new objects are automatically encrypted and decrypted with SSE-KMS and S3 Bucket Keys during the session.

Only 1 customer managed key is supported per directory bucket for the lifetime of the bucket. The Amazon Web Services managed key (aws/s3) isn't supported. After you specify SSE-KMS as your bucket's default encryption configuration with a customer managed key, you can't change the customer managed key for the bucket's SSE-KMS configuration.

In the Zonal endpoint API calls (except CopyObject and UploadPartCopy) using the REST API, you can't override the values of the encryption settings (x-amz-server-side-encryption, x-amz-server-side-encryption-aws-kms-key-id, x-amz-server-side-encryption-context, and x-amz-server-side-encryption-bucket-key-enabled) from the CreateSession request. You don't need to explicitly specify these encryption settings values in Zonal endpoint API calls, and Amazon S3 will use the encryption settings values from the CreateSession request to protect new objects in the directory bucket.

When you use the CLI or the Amazon Web Services SDKs, for CreateSession, the session token refreshes automatically to avoid service interruptions when a session expires. The CLI or the Amazon Web Services SDKs use the bucket's default encryption configuration for the CreateSession request. It's not supported to override the encryption settings values in the CreateSession request. Also, in the Zonal endpoint API calls (except CopyObject and UploadPartCopy), it's not supported to override the values of the encryption settings from the CreateSession request.

HTTP Host header syntax

Directory buckets - The HTTP Host header syntax is Bucket-name.s3express-zone-id.region-code.amazonaws.com.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, CreateSessionCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, CreateSessionCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // CreateSessionRequest + * SessionMode: "ReadOnly" || "ReadWrite", + * Bucket: "STRING_VALUE", // required + * ServerSideEncryption: "AES256" || "aws:kms" || "aws:kms:dsse", + * SSEKMSKeyId: "STRING_VALUE", + * SSEKMSEncryptionContext: "STRING_VALUE", + * BucketKeyEnabled: true || false, + * }; + * const command = new CreateSessionCommand(input); + * const response = await client.send(command); + * // { // CreateSessionOutput + * // ServerSideEncryption: "AES256" || "aws:kms" || "aws:kms:dsse", + * // SSEKMSKeyId: "STRING_VALUE", + * // SSEKMSEncryptionContext: "STRING_VALUE", + * // BucketKeyEnabled: true || false, + * // Credentials: { // SessionCredentials + * // AccessKeyId: "STRING_VALUE", // required + * // SecretAccessKey: "STRING_VALUE", // required + * // SessionToken: "STRING_VALUE", // required + * // Expiration: new Date("TIMESTAMP"), // required + * // }, + * // }; + * + * ``` + * + * @param CreateSessionCommandInput - {@link CreateSessionCommandInput} + * @returns {@link CreateSessionCommandOutput} + * @see {@link CreateSessionCommandInput} for command's `input` shape. + * @see {@link CreateSessionCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link NoSuchBucket} (client fault) + *

The specified bucket does not exist.

@throws {@link S3ServiceException}

Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class CreateSessionCommand extends CreateSessionCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: CreateSessionRequest; + output: CreateSessionOutput; + }; + sdk: { + input: CreateSessionCommandInput; + output: CreateSessionCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketAnalyticsConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketAnalyticsConfigurationCommand.d.ts new file mode 100644 index 00000000..ce3dfc55 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketAnalyticsConfigurationCommand.d.ts @@ -0,0 +1,102 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketAnalyticsConfigurationRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteBucketAnalyticsConfigurationCommand}. + */ +export interface DeleteBucketAnalyticsConfigurationCommandInput extends DeleteBucketAnalyticsConfigurationRequest { +} +/** + * @public + * + * The output of {@link DeleteBucketAnalyticsConfigurationCommand}. + */ +export interface DeleteBucketAnalyticsConfigurationCommandOutput extends __MetadataBearer { +} +declare const DeleteBucketAnalyticsConfigurationCommand_base: { + new (input: DeleteBucketAnalyticsConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteBucketAnalyticsConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

This operation is not supported for directory buckets.

Deletes an analytics configuration for the bucket (specified by the analytics configuration ID).

To use this operation, you must have permissions to perform the s3:PutAnalyticsConfiguration action. The bucket owner has this permission by default. The bucket owner can grant this permission to others. For more information about permissions, see Permissions Related to Bucket Subresource Operations and Managing Access Permissions to Your Amazon S3 Resources.

For information about the Amazon S3 analytics feature, see Amazon S3 Analytics – Storage Class Analysis.

The following operations are related to DeleteBucketAnalyticsConfiguration:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, DeleteBucketAnalyticsConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, DeleteBucketAnalyticsConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // DeleteBucketAnalyticsConfigurationRequest + * Bucket: "STRING_VALUE", // required + * Id: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new DeleteBucketAnalyticsConfigurationCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param DeleteBucketAnalyticsConfigurationCommandInput - {@link DeleteBucketAnalyticsConfigurationCommandInput} + * @returns {@link DeleteBucketAnalyticsConfigurationCommandOutput} + * @see {@link DeleteBucketAnalyticsConfigurationCommandInput} for command's `input` shape. + * @see {@link DeleteBucketAnalyticsConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
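+ *
+ * As a hedged illustration (not part of the official docs; the bucket name is a placeholder), analytics configurations are keyed by Id, so a cleanup pass might list them with the companion ListBucketAnalyticsConfigurationsCommand and delete each one:
+ * ```javascript
+ * import { S3Client, ListBucketAnalyticsConfigurationsCommand, DeleteBucketAnalyticsConfigurationCommand } from "@aws-sdk/client-s3";
+ *
+ * const client = new S3Client({});
+ * // List every analytics configuration on the bucket, then delete each by Id.
+ * const { AnalyticsConfigurationList = [] } = await client.send(
+ *   new ListBucketAnalyticsConfigurationsCommand({ Bucket: "example-bucket" })
+ * );
+ * for (const cfg of AnalyticsConfigurationList) {
+ *   await client.send(
+ *     new DeleteBucketAnalyticsConfigurationCommand({ Bucket: "example-bucket", Id: cfg.Id })
+ *   );
+ * }
+ * ```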

+ * + * + * @public + */ +export declare class DeleteBucketAnalyticsConfigurationCommand extends DeleteBucketAnalyticsConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteBucketAnalyticsConfigurationRequest; + output: {}; + }; + sdk: { + input: DeleteBucketAnalyticsConfigurationCommandInput; + output: DeleteBucketAnalyticsConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketCommand.d.ts new file mode 100644 index 00000000..5986988d --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketCommand.d.ts @@ -0,0 +1,141 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteBucketCommand}. + */ +export interface DeleteBucketCommandInput extends DeleteBucketRequest { +} +/** + * @public + * + * The output of {@link DeleteBucketCommand}. + */ +export interface DeleteBucketCommandOutput extends __MetadataBearer { +} +declare const DeleteBucketCommand_base: { + new (input: DeleteBucketCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteBucketCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * Deletes the S3 bucket. All objects (including all object versions and delete markers) in the bucket must be deleted before the bucket itself can be deleted.
+ *
+ *   • Directory buckets - If multipart uploads in a directory bucket are in progress, you can't delete the bucket until all the in-progress multipart uploads are aborted or completed.
+ *   • Directory buckets - For directory buckets, you must make requests for this API operation to the Regional endpoint. These endpoints support path-style requests in the format https://s3express-control.region-code.amazonaws.com/bucket-name. Virtual-hosted-style requests aren't supported. For more information about endpoints in Availability Zones, see Regional and Zonal endpoints for directory buckets in Availability Zones in the Amazon S3 User Guide. For more information about endpoints in Local Zones, see Concepts for directory buckets in Local Zones in the Amazon S3 User Guide.
+ *
+ * Permissions
+ *   • General purpose bucket permissions - You must have the s3:DeleteBucket permission on the specified bucket in a policy.
+ *   • Directory bucket permissions - You must have the s3express:DeleteBucket permission in an IAM identity-based policy instead of a bucket policy. Cross-account access to this API operation isn't supported. This operation can only be performed by the Amazon Web Services account that owns the resource. For more information about directory bucket policies and permissions, see Amazon Web Services Identity and Access Management (IAM) for S3 Express One Zone in the Amazon S3 User Guide.
+ *
+ * HTTP Host header syntax
+ *   Directory buckets - The HTTP Host header syntax is s3express-control.region-code.amazonaws.com.
+ *
+ * The following operations are related to DeleteBucket:
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, DeleteBucketCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, DeleteBucketCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // DeleteBucketRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new DeleteBucketCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param DeleteBucketCommandInput - {@link DeleteBucketCommandInput} + * @returns {@link DeleteBucketCommandOutput} + * @see {@link DeleteBucketCommandInput} for command's `input` shape. + * @see {@link DeleteBucketCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
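+ *
+ * Because the bucket must be empty first, a common pattern (sketched here for an unversioned general purpose bucket, with a placeholder bucket name) is to page through ListObjectsV2 and batch-delete keys before calling DeleteBucket:
+ * ```javascript
+ * import { S3Client, ListObjectsV2Command, DeleteObjectsCommand, DeleteBucketCommand } from "@aws-sdk/client-s3";
+ *
+ * const client = new S3Client({});
+ * const Bucket = "example-bucket";
+ *
+ * // Page through the bucket and batch-delete up to 1,000 keys per request.
+ * let ContinuationToken;
+ * do {
+ *   const page = await client.send(new ListObjectsV2Command({ Bucket, ContinuationToken }));
+ *   if (page.Contents?.length) {
+ *     await client.send(new DeleteObjectsCommand({
+ *       Bucket,
+ *       Delete: { Objects: page.Contents.map(({ Key }) => ({ Key })) },
+ *     }));
+ *   }
+ *   ContinuationToken = page.NextContinuationToken;
+ * } while (ContinuationToken);
+ *
+ * // Only an empty bucket can be deleted.
+ * await client.send(new DeleteBucketCommand({ Bucket }));
+ * ```
+ * Versioned buckets also require deleting all object versions and delete markers (via ListObjectVersions) before the bucket delete succeeds.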

+ * + * + * @example To delete a bucket + * ```javascript + * // The following example deletes the specified bucket. + * const input = { + * Bucket: "forrandall2" + * }; + * const command = new DeleteBucketCommand(input); + * const response = await client.send(command); + * /* response is + * { /* metadata only *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class DeleteBucketCommand extends DeleteBucketCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteBucketRequest; + output: {}; + }; + sdk: { + input: DeleteBucketCommandInput; + output: DeleteBucketCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketCorsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketCorsCommand.d.ts new file mode 100644 index 00000000..21c3bf77 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketCorsCommand.d.ts @@ -0,0 +1,107 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketCorsRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteBucketCorsCommand}. + */ +export interface DeleteBucketCorsCommandInput extends DeleteBucketCorsRequest { +} +/** + * @public + * + * The output of {@link DeleteBucketCorsCommand}. + */ +export interface DeleteBucketCorsCommandOutput extends __MetadataBearer { +} +declare const DeleteBucketCorsCommand_base: { + new (input: DeleteBucketCorsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteBucketCorsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Deletes the cors configuration information set for the bucket.
+ *
+ * To use this operation, you must have permission to perform the s3:PutBucketCORS action. The bucket owner has this permission by default and can grant this permission to others.
+ *
+ * For information about cors, see Enabling Cross-Origin Resource Sharing in the Amazon S3 User Guide.
+ *
+ * Related Resources
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, DeleteBucketCorsCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, DeleteBucketCorsCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // DeleteBucketCorsRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new DeleteBucketCorsCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param DeleteBucketCorsCommandInput - {@link DeleteBucketCorsCommandInput} + * @returns {@link DeleteBucketCorsCommandOutput} + * @see {@link DeleteBucketCorsCommandInput} for command's `input` shape. + * @see {@link DeleteBucketCorsCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
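+ *
+ * As a hedged sketch (placeholder bucket name; the error name is an assumption based on S3's NoSuchCORSConfiguration error code), deletion can be confirmed by the companion GetBucketCorsCommand failing afterwards:
+ * ```javascript
+ * import { S3Client, DeleteBucketCorsCommand, GetBucketCorsCommand } from "@aws-sdk/client-s3";
+ *
+ * const client = new S3Client({});
+ * await client.send(new DeleteBucketCorsCommand({ Bucket: "example-bucket" }));
+ *
+ * try {
+ *   await client.send(new GetBucketCorsCommand({ Bucket: "example-bucket" }));
+ * } catch (err) {
+ *   // Expected once the configuration is removed.
+ *   console.log("CORS configuration is gone:", err.name);
+ * }
+ * ```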

+ * + * + * @example To delete cors configuration on a bucket. + * ```javascript + * // The following example deletes CORS configuration on a bucket. + * const input = { + * Bucket: "examplebucket" + * }; + * const command = new DeleteBucketCorsCommand(input); + * const response = await client.send(command); + * /* response is + * { /* metadata only *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class DeleteBucketCorsCommand extends DeleteBucketCorsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteBucketCorsRequest; + output: {}; + }; + sdk: { + input: DeleteBucketCorsCommandInput; + output: DeleteBucketCorsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketEncryptionCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketEncryptionCommand.d.ts new file mode 100644 index 00000000..50625693 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketEncryptionCommand.d.ts @@ -0,0 +1,131 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketEncryptionRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteBucketEncryptionCommand}. + */ +export interface DeleteBucketEncryptionCommandInput extends DeleteBucketEncryptionRequest { +} +/** + * @public + * + * The output of {@link DeleteBucketEncryptionCommand}. + */ +export interface DeleteBucketEncryptionCommandOutput extends __MetadataBearer { +} +declare const DeleteBucketEncryptionCommand_base: { + new (input: DeleteBucketEncryptionCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteBucketEncryptionCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * This implementation of the DELETE action resets the default encryption for the bucket as server-side encryption with Amazon S3 managed keys (SSE-S3).
+ *
+ * Permissions
+ *   • General purpose bucket permissions - The s3:PutEncryptionConfiguration permission is required in a policy. The bucket owner has this permission by default. The bucket owner can grant this permission to others. For more information about permissions, see Permissions Related to Bucket Operations and Managing Access Permissions to Your Amazon S3 Resources.
+ *   • Directory bucket permissions - To grant access to this API operation, you must have the s3express:PutEncryptionConfiguration permission in an IAM identity-based policy instead of a bucket policy. Cross-account access to this API operation isn't supported. This operation can only be performed by the Amazon Web Services account that owns the resource. For more information about directory bucket policies and permissions, see Amazon Web Services Identity and Access Management (IAM) for S3 Express One Zone in the Amazon S3 User Guide.
+ *
+ * HTTP Host header syntax
+ *   Directory buckets - The HTTP Host header syntax is s3express-control.region-code.amazonaws.com.
+ *
+ * The following operations are related to DeleteBucketEncryption:
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, DeleteBucketEncryptionCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, DeleteBucketEncryptionCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // DeleteBucketEncryptionRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new DeleteBucketEncryptionCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param DeleteBucketEncryptionCommandInput - {@link DeleteBucketEncryptionCommandInput} + * @returns {@link DeleteBucketEncryptionCommandOutput} + * @see {@link DeleteBucketEncryptionCommandInput} for command's `input` shape. + * @see {@link DeleteBucketEncryptionCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
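+ *
+ * Since the delete resets the default to SSE-S3 rather than leaving the bucket unencrypted, a sketch like the following (placeholder bucket name) can confirm the reset with the companion GetBucketEncryptionCommand:
+ * ```javascript
+ * import { S3Client, DeleteBucketEncryptionCommand, GetBucketEncryptionCommand } from "@aws-sdk/client-s3";
+ *
+ * const client = new S3Client({});
+ * await client.send(new DeleteBucketEncryptionCommand({ Bucket: "example-bucket" }));
+ *
+ * // After the delete, the bucket falls back to Amazon S3 managed keys (SSE-S3).
+ * const { ServerSideEncryptionConfiguration } = await client.send(
+ *   new GetBucketEncryptionCommand({ Bucket: "example-bucket" })
+ * );
+ * console.log(ServerSideEncryptionConfiguration?.Rules);
+ * ```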

+ * + * + * @public + */ +export declare class DeleteBucketEncryptionCommand extends DeleteBucketEncryptionCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteBucketEncryptionRequest; + output: {}; + }; + sdk: { + input: DeleteBucketEncryptionCommandInput; + output: DeleteBucketEncryptionCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketIntelligentTieringConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketIntelligentTieringConfigurationCommand.d.ts new file mode 100644 index 00000000..66db5b4e --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketIntelligentTieringConfigurationCommand.d.ts @@ -0,0 +1,95 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketIntelligentTieringConfigurationRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteBucketIntelligentTieringConfigurationCommand}. + */ +export interface DeleteBucketIntelligentTieringConfigurationCommandInput extends DeleteBucketIntelligentTieringConfigurationRequest { +} +/** + * @public + * + * The output of {@link DeleteBucketIntelligentTieringConfigurationCommand}. + */ +export interface DeleteBucketIntelligentTieringConfigurationCommandOutput extends __MetadataBearer { +} +declare const DeleteBucketIntelligentTieringConfigurationCommand_base: { + new (input: DeleteBucketIntelligentTieringConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteBucketIntelligentTieringConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Deletes the S3 Intelligent-Tiering configuration from the specified bucket.
+ *
+ * The S3 Intelligent-Tiering storage class is designed to optimize storage costs by automatically moving data to the most cost-effective storage access tier, without performance impact or operational overhead. S3 Intelligent-Tiering delivers automatic cost savings in three low latency and high throughput access tiers. To get the lowest storage cost on data that can be accessed in minutes to hours, you can choose to activate additional archiving capabilities.
+ *
+ * The S3 Intelligent-Tiering storage class is the ideal storage class for data with unknown, changing, or unpredictable access patterns, independent of object size or retention period. If the size of an object is less than 128 KB, it is not monitored and not eligible for auto-tiering. Smaller objects can be stored, but they are always charged at the Frequent Access tier rates in the S3 Intelligent-Tiering storage class.
+ *
+ * For more information, see Storage class for automatically optimizing frequently and infrequently accessed objects.
+ *
+ * Operations related to DeleteBucketIntelligentTieringConfiguration include:
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, DeleteBucketIntelligentTieringConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, DeleteBucketIntelligentTieringConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // DeleteBucketIntelligentTieringConfigurationRequest + * Bucket: "STRING_VALUE", // required + * Id: "STRING_VALUE", // required + * }; + * const command = new DeleteBucketIntelligentTieringConfigurationCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param DeleteBucketIntelligentTieringConfigurationCommandInput - {@link DeleteBucketIntelligentTieringConfigurationCommandInput} + * @returns {@link DeleteBucketIntelligentTieringConfigurationCommandOutput} + * @see {@link DeleteBucketIntelligentTieringConfigurationCommandInput} for command's `input` shape. + * @see {@link DeleteBucketIntelligentTieringConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class DeleteBucketIntelligentTieringConfigurationCommand extends DeleteBucketIntelligentTieringConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteBucketIntelligentTieringConfigurationRequest; + output: {}; + }; + sdk: { + input: DeleteBucketIntelligentTieringConfigurationCommandInput; + output: DeleteBucketIntelligentTieringConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketInventoryConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketInventoryConfigurationCommand.d.ts new file mode 100644 index 00000000..d0c2a915 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketInventoryConfigurationCommand.d.ts @@ -0,0 +1,100 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketInventoryConfigurationRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteBucketInventoryConfigurationCommand}. + */ +export interface DeleteBucketInventoryConfigurationCommandInput extends DeleteBucketInventoryConfigurationRequest { +} +/** + * @public + * + * The output of {@link DeleteBucketInventoryConfigurationCommand}. + */ +export interface DeleteBucketInventoryConfigurationCommandOutput extends __MetadataBearer { +} +declare const DeleteBucketInventoryConfigurationCommand_base: { + new (input: DeleteBucketInventoryConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteBucketInventoryConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Deletes an inventory configuration (identified by the inventory ID) from the bucket.
+ *
+ * To use this operation, you must have permissions to perform the s3:PutInventoryConfiguration action. The bucket owner has this permission by default. The bucket owner can grant this permission to others. For more information about permissions, see Permissions Related to Bucket Subresource Operations and Managing Access Permissions to Your Amazon S3 Resources.
+ *
+ * For information about the Amazon S3 inventory feature, see Amazon S3 Inventory.
+ *
+ * Operations related to DeleteBucketInventoryConfiguration include:
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, DeleteBucketInventoryConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, DeleteBucketInventoryConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // DeleteBucketInventoryConfigurationRequest + * Bucket: "STRING_VALUE", // required + * Id: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new DeleteBucketInventoryConfigurationCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param DeleteBucketInventoryConfigurationCommandInput - {@link DeleteBucketInventoryConfigurationCommandInput} + * @returns {@link DeleteBucketInventoryConfigurationCommandOutput} + * @see {@link DeleteBucketInventoryConfigurationCommandInput} for command's `input` shape. + * @see {@link DeleteBucketInventoryConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class DeleteBucketInventoryConfigurationCommand extends DeleteBucketInventoryConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteBucketInventoryConfigurationRequest; + output: {}; + }; + sdk: { + input: DeleteBucketInventoryConfigurationCommandInput; + output: DeleteBucketInventoryConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketLifecycleCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketLifecycleCommand.d.ts new file mode 100644 index 00000000..7947a455 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketLifecycleCommand.d.ts @@ -0,0 +1,153 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketLifecycleRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteBucketLifecycleCommand}. + */ +export interface DeleteBucketLifecycleCommandInput extends DeleteBucketLifecycleRequest { +} +/** + * @public + * + * The output of {@link DeleteBucketLifecycleCommand}. + */ +export interface DeleteBucketLifecycleCommandOutput extends __MetadataBearer { +} +declare const DeleteBucketLifecycleCommand_base: { + new (input: DeleteBucketLifecycleCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteBucketLifecycleCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * Deletes the lifecycle configuration from the specified bucket. Amazon S3 removes all the lifecycle configuration rules in the lifecycle subresource associated with the bucket. Your objects never expire, and Amazon S3 no longer automatically deletes any objects on the basis of rules contained in the deleted lifecycle configuration.
+ *
+ * Permissions
+ *   • General purpose bucket permissions - By default, all Amazon S3 resources are private, including buckets, objects, and related subresources (for example, lifecycle configuration and website configuration). Only the resource owner (that is, the Amazon Web Services account that created it) can access the resource. The resource owner can optionally grant access permissions to others by writing an access policy. For this operation, a user must have the s3:PutLifecycleConfiguration permission. For more information about permissions, see Managing Access Permissions to Your Amazon S3 Resources.
+ *   • Directory bucket permissions - You must have the s3express:PutLifecycleConfiguration permission in an IAM identity-based policy to use this operation. Cross-account access to this API operation isn't supported. The resource owner can optionally grant access permissions to others by creating a role or user for them as long as they are within the same account as the owner and resource. For more information about directory bucket policies and permissions, see Authorizing Regional endpoint APIs with IAM in the Amazon S3 User Guide.
+ *   • Directory buckets - For directory buckets, you must make requests for this API operation to the Regional endpoint. These endpoints support path-style requests in the format https://s3express-control.region-code.amazonaws.com/bucket-name. Virtual-hosted-style requests aren't supported. For more information about endpoints in Availability Zones, see Regional and Zonal endpoints for directory buckets in Availability Zones in the Amazon S3 User Guide. For more information about endpoints in Local Zones, see Concepts for directory buckets in Local Zones in the Amazon S3 User Guide.
+ *
+ * HTTP Host header syntax
+ *   Directory buckets - The HTTP Host header syntax is s3express-control.region.amazonaws.com.
+ *
+ * For more information about the object expiration, see Elements to Describe Lifecycle Actions.
+ *
+ * Related actions include:
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, DeleteBucketLifecycleCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, DeleteBucketLifecycleCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // DeleteBucketLifecycleRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new DeleteBucketLifecycleCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param DeleteBucketLifecycleCommandInput - {@link DeleteBucketLifecycleCommandInput} + * @returns {@link DeleteBucketLifecycleCommandOutput} + * @see {@link DeleteBucketLifecycleCommandInput} for command's `input` shape. + * @see {@link DeleteBucketLifecycleCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
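+ *
+ * A hedged sketch (placeholder bucket name; the error name is an assumption based on S3's NoSuchLifecycleConfiguration error code): after the delete, the companion GetBucketLifecycleConfigurationCommand fails, confirming that no expiration rules remain:
+ * ```javascript
+ * import { S3Client, DeleteBucketLifecycleCommand, GetBucketLifecycleConfigurationCommand } from "@aws-sdk/client-s3";
+ *
+ * const client = new S3Client({});
+ * await client.send(new DeleteBucketLifecycleCommand({ Bucket: "example-bucket" }));
+ *
+ * try {
+ *   await client.send(new GetBucketLifecycleConfigurationCommand({ Bucket: "example-bucket" }));
+ * } catch (err) {
+ *   // Expected after deletion: objects in the bucket no longer expire automatically.
+ *   console.log("Lifecycle configuration removed:", err.name);
+ * }
+ * ```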

+ * + * + * @example To delete lifecycle configuration on a bucket. + * ```javascript + * // The following example deletes lifecycle configuration on a bucket. + * const input = { + * Bucket: "examplebucket" + * }; + * const command = new DeleteBucketLifecycleCommand(input); + * const response = await client.send(command); + * /* response is + * { /* metadata only *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class DeleteBucketLifecycleCommand extends DeleteBucketLifecycleCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteBucketLifecycleRequest; + output: {}; + }; + sdk: { + input: DeleteBucketLifecycleCommandInput; + output: DeleteBucketLifecycleCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketMetadataTableConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketMetadataTableConfigurationCommand.d.ts new file mode 100644 index 00000000..e225ff8e --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketMetadataTableConfigurationCommand.d.ts @@ -0,0 +1,96 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketMetadataTableConfigurationRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteBucketMetadataTableConfigurationCommand}. + */ +export interface DeleteBucketMetadataTableConfigurationCommandInput extends DeleteBucketMetadataTableConfigurationRequest { +} +/** + * @public + * + * The output of {@link DeleteBucketMetadataTableConfigurationCommand}. + */ +export interface DeleteBucketMetadataTableConfigurationCommandOutput extends __MetadataBearer { +} +declare const DeleteBucketMetadataTableConfigurationCommand_base: { + new (input: DeleteBucketMetadataTableConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteBucketMetadataTableConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * Deletes a metadata table configuration from a general purpose bucket. For more information, see Accelerating data discovery with S3 Metadata in the Amazon S3 User Guide.
+ *
+ * Permissions
+ *   To use this operation, you must have the s3:DeleteBucketMetadataTableConfiguration permission. For more information, see Setting up permissions for configuring metadata tables in the Amazon S3 User Guide.
+ *
+ * The following operations are related to DeleteBucketMetadataTableConfiguration:
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, DeleteBucketMetadataTableConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, DeleteBucketMetadataTableConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // DeleteBucketMetadataTableConfigurationRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new DeleteBucketMetadataTableConfigurationCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param DeleteBucketMetadataTableConfigurationCommandInput - {@link DeleteBucketMetadataTableConfigurationCommandInput} + * @returns {@link DeleteBucketMetadataTableConfigurationCommandOutput} + * @see {@link DeleteBucketMetadataTableConfigurationCommandInput} for command's `input` shape. + * @see {@link DeleteBucketMetadataTableConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class DeleteBucketMetadataTableConfigurationCommand extends DeleteBucketMetadataTableConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteBucketMetadataTableConfigurationRequest; + output: {}; + }; + sdk: { + input: DeleteBucketMetadataTableConfigurationCommandInput; + output: DeleteBucketMetadataTableConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketMetricsConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketMetricsConfigurationCommand.d.ts new file mode 100644 index 00000000..b19c28bb --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketMetricsConfigurationCommand.d.ts @@ -0,0 +1,108 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketMetricsConfigurationRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteBucketMetricsConfigurationCommand}. + */ +export interface DeleteBucketMetricsConfigurationCommandInput extends DeleteBucketMetricsConfigurationRequest { +} +/** + * @public + * + * The output of {@link DeleteBucketMetricsConfigurationCommand}. + */ +export interface DeleteBucketMetricsConfigurationCommandOutput extends __MetadataBearer { +} +declare const DeleteBucketMetricsConfigurationCommand_base: { + new (input: DeleteBucketMetricsConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteBucketMetricsConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Deletes a metrics configuration for the Amazon CloudWatch request metrics (specified by the metrics configuration ID) from the bucket. Note that this doesn't include the daily storage metrics.
+ *
+ * To use this operation, you must have permissions to perform the s3:PutMetricsConfiguration action. The bucket owner has this permission by default. The bucket owner can grant this permission to others. For more information about permissions, see Permissions Related to Bucket Subresource Operations and Managing Access Permissions to Your Amazon S3 Resources.
+ *
+ * For information about CloudWatch request metrics for Amazon S3, see Monitoring Metrics with Amazon CloudWatch.
+ *
+ * The following operations are related to DeleteBucketMetricsConfiguration:
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, DeleteBucketMetricsConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, DeleteBucketMetricsConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // DeleteBucketMetricsConfigurationRequest + * Bucket: "STRING_VALUE", // required + * Id: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new DeleteBucketMetricsConfigurationCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param DeleteBucketMetricsConfigurationCommandInput - {@link DeleteBucketMetricsConfigurationCommandInput} + * @returns {@link DeleteBucketMetricsConfigurationCommandOutput} + * @see {@link DeleteBucketMetricsConfigurationCommandInput} for command's `input` shape. + * @see {@link DeleteBucketMetricsConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class DeleteBucketMetricsConfigurationCommand extends DeleteBucketMetricsConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteBucketMetricsConfigurationRequest; + output: {}; + }; + sdk: { + input: DeleteBucketMetricsConfigurationCommandInput; + output: DeleteBucketMetricsConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketOwnershipControlsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketOwnershipControlsCommand.d.ts new file mode 100644 index 00000000..bfc5ee1d --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketOwnershipControlsCommand.d.ts @@ -0,0 +1,92 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketOwnershipControlsRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteBucketOwnershipControlsCommand}. + */ +export interface DeleteBucketOwnershipControlsCommandInput extends DeleteBucketOwnershipControlsRequest { +} +/** + * @public + * + * The output of {@link DeleteBucketOwnershipControlsCommand}. + */ +export interface DeleteBucketOwnershipControlsCommandOutput extends __MetadataBearer { +} +declare const DeleteBucketOwnershipControlsCommand_base: { + new (input: DeleteBucketOwnershipControlsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteBucketOwnershipControlsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Removes OwnershipControls for an Amazon S3 bucket. To use this operation, you must have the s3:PutBucketOwnershipControls permission. For more information about Amazon S3 permissions, see Specifying Permissions in a Policy.
+ *
+ * For information about Amazon S3 Object Ownership, see Using Object Ownership.
+ *
+ * The following operations are related to DeleteBucketOwnershipControls:
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, DeleteBucketOwnershipControlsCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, DeleteBucketOwnershipControlsCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // DeleteBucketOwnershipControlsRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new DeleteBucketOwnershipControlsCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param DeleteBucketOwnershipControlsCommandInput - {@link DeleteBucketOwnershipControlsCommandInput} + * @returns {@link DeleteBucketOwnershipControlsCommandOutput} + * @see {@link DeleteBucketOwnershipControlsCommandInput} for command's `input` shape. + * @see {@link DeleteBucketOwnershipControlsCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class DeleteBucketOwnershipControlsCommand extends DeleteBucketOwnershipControlsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteBucketOwnershipControlsRequest; + output: {}; + }; + sdk: { + input: DeleteBucketOwnershipControlsCommandInput; + output: DeleteBucketOwnershipControlsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketPolicyCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketPolicyCommand.d.ts new file mode 100644 index 00000000..c6a22080 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketPolicyCommand.d.ts @@ -0,0 +1,150 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketPolicyRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteBucketPolicyCommand}. + */ +export interface DeleteBucketPolicyCommandInput extends DeleteBucketPolicyRequest { +} +/** + * @public + * + * The output of {@link DeleteBucketPolicyCommand}. + */ +export interface DeleteBucketPolicyCommandOutput extends __MetadataBearer { +} +declare const DeleteBucketPolicyCommand_base: { + new (input: DeleteBucketPolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteBucketPolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * Deletes the policy of a specified bucket.
+ *
+ * Directory buckets - For directory buckets, you must make requests for this API operation to the Regional endpoint. These endpoints support path-style requests in the format https://s3express-control.region-code.amazonaws.com/bucket-name. Virtual-hosted-style requests aren't supported. For more information about endpoints in Availability Zones, see Regional and Zonal endpoints for directory buckets in Availability Zones in the Amazon S3 User Guide. For more information about endpoints in Local Zones, see Concepts for directory buckets in Local Zones in the Amazon S3 User Guide.
+ *
+ * Permissions
+ * If you are using an identity other than the root user of the Amazon Web Services account that owns the bucket, the calling identity must both have the DeleteBucketPolicy permissions on the specified bucket and belong to the bucket owner's account in order to use this operation.
+ *
+ * If you don't have DeleteBucketPolicy permissions, Amazon S3 returns a 403 Access Denied error. If you have the correct permissions, but you're not using an identity that belongs to the bucket owner's account, Amazon S3 returns a 405 Method Not Allowed error (see the error-handling sketch below).
+ *
+ * To ensure that bucket owners don't inadvertently lock themselves out of their own buckets, the root principal in a bucket owner's Amazon Web Services account can perform the GetBucketPolicy, PutBucketPolicy, and DeleteBucketPolicy API actions, even if their bucket policy explicitly denies the root principal's access. Bucket owner root principals can only be blocked from performing these API actions by VPC endpoint policies and Amazon Web Services Organizations policies.
+ *
+ *   • General purpose bucket permissions - The s3:DeleteBucketPolicy permission is required in a policy. For more information about general purpose bucket policies, see Using Bucket Policies and User Policies in the Amazon S3 User Guide.
+ *   • Directory bucket permissions - To grant access to this API operation, you must have the s3express:DeleteBucketPolicy permission in an IAM identity-based policy instead of a bucket policy. Cross-account access to this API operation isn't supported. This operation can only be performed by the Amazon Web Services account that owns the resource. For more information about directory bucket policies and permissions, see Amazon Web Services Identity and Access Management (IAM) for S3 Express One Zone in the Amazon S3 User Guide.
+ *
+ * HTTP Host header syntax
+ *   Directory buckets - The HTTP Host header syntax is s3express-control.region-code.amazonaws.com.
+ *
+ * The following operations are related to DeleteBucketPolicy:
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, DeleteBucketPolicyCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, DeleteBucketPolicyCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // DeleteBucketPolicyRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new DeleteBucketPolicyCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param DeleteBucketPolicyCommandInput - {@link DeleteBucketPolicyCommandInput} + * @returns {@link DeleteBucketPolicyCommandOutput} + * @see {@link DeleteBucketPolicyCommandInput} for command's `input` shape. + * @see {@link DeleteBucketPolicyCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
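+ *
+ * As a hedged sketch of the 403 versus 405 distinction described above (placeholder bucket name), the HTTP status is available on the service exception's $metadata in the JavaScript SDK:
+ * ```javascript
+ * import { S3Client, DeleteBucketPolicyCommand } from "@aws-sdk/client-s3";
+ *
+ * const client = new S3Client({});
+ * try {
+ *   await client.send(new DeleteBucketPolicyCommand({ Bucket: "example-bucket" }));
+ * } catch (err) {
+ *   const status = err.$metadata?.httpStatusCode;
+ *   if (status === 403) {
+ *     // Missing DeleteBucketPolicy permission on the bucket.
+ *     console.error("Access denied: check the calling identity's permissions.");
+ *   } else if (status === 405) {
+ *     // Correct permission, but the identity is outside the bucket owner's account.
+ *     console.error("Method not allowed: use an identity from the bucket owner's account.");
+ *   } else {
+ *     throw err;
+ *   }
+ * }
+ * ```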

+ * + * + * @example To delete bucket policy + * ```javascript + * // The following example deletes bucket policy on the specified bucket. + * const input = { + * Bucket: "examplebucket" + * }; + * const command = new DeleteBucketPolicyCommand(input); + * const response = await client.send(command); + * /* response is + * { /* metadata only *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class DeleteBucketPolicyCommand extends DeleteBucketPolicyCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteBucketPolicyRequest; + output: {}; + }; + sdk: { + input: DeleteBucketPolicyCommandInput; + output: DeleteBucketPolicyCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketReplicationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketReplicationCommand.d.ts new file mode 100644 index 00000000..23a1d35c --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketReplicationCommand.d.ts @@ -0,0 +1,111 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketReplicationRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteBucketReplicationCommand}. + */ +export interface DeleteBucketReplicationCommandInput extends DeleteBucketReplicationRequest { +} +/** + * @public + * + * The output of {@link DeleteBucketReplicationCommand}. + */ +export interface DeleteBucketReplicationCommandOutput extends __MetadataBearer { +} +declare const DeleteBucketReplicationCommand_base: { + new (input: DeleteBucketReplicationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteBucketReplicationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Deletes the replication configuration from the bucket.
+ *
+ * To use this operation, you must have permissions to perform the s3:PutReplicationConfiguration action. The bucket owner has these permissions by default and can grant them to others. For more information about permissions, see Permissions Related to Bucket Subresource Operations and Managing Access Permissions to Your Amazon S3 Resources.
+ *
+ * It can take a while for the deletion of a replication configuration to fully propagate.
+ *
+ * For information about replication configuration, see Replication in the Amazon S3 User Guide.
+ *
+ * The following operations are related to DeleteBucketReplication:
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, DeleteBucketReplicationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, DeleteBucketReplicationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // DeleteBucketReplicationRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new DeleteBucketReplicationCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param DeleteBucketReplicationCommandInput - {@link DeleteBucketReplicationCommandInput} + * @returns {@link DeleteBucketReplicationCommandOutput} + * @see {@link DeleteBucketReplicationCommandInput} for command's `input` shape. + * @see {@link DeleteBucketReplicationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
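+ *
+ * Because the deletion can take a while to propagate, a hedged polling sketch (placeholder bucket name and delay; that GetBucketReplication fails once the configuration is gone is an assumption) might look like:
+ * ```javascript
+ * import { S3Client, DeleteBucketReplicationCommand, GetBucketReplicationCommand } from "@aws-sdk/client-s3";
+ *
+ * const client = new S3Client({});
+ * await client.send(new DeleteBucketReplicationCommand({ Bucket: "example-bucket" }));
+ *
+ * // Poll until the configuration is no longer visible, since deletion propagates asynchronously.
+ * for (let attempt = 0; attempt < 10; attempt++) {
+ *   try {
+ *     await client.send(new GetBucketReplicationCommand({ Bucket: "example-bucket" }));
+ *     await new Promise((resolve) => setTimeout(resolve, 5000));
+ *   } catch {
+ *     break; // Replication configuration is gone.
+ *   }
+ * }
+ * ```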

+ * + * + * @example To delete bucket replication configuration + * ```javascript + * // The following example deletes replication configuration set on bucket. + * const input = { + * Bucket: "example" + * }; + * const command = new DeleteBucketReplicationCommand(input); + * const response = await client.send(command); + * /* response is + * { /* metadata only *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class DeleteBucketReplicationCommand extends DeleteBucketReplicationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteBucketReplicationRequest; + output: {}; + }; + sdk: { + input: DeleteBucketReplicationCommandInput; + output: DeleteBucketReplicationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketTaggingCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketTaggingCommand.d.ts new file mode 100644 index 00000000..cd6d57c9 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketTaggingCommand.d.ts @@ -0,0 +1,103 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketTaggingRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteBucketTaggingCommand}. + */ +export interface DeleteBucketTaggingCommandInput extends DeleteBucketTaggingRequest { +} +/** + * @public + * + * The output of {@link DeleteBucketTaggingCommand}. + */ +export interface DeleteBucketTaggingCommandOutput extends __MetadataBearer { +} +declare const DeleteBucketTaggingCommand_base: { + new (input: DeleteBucketTaggingCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteBucketTaggingCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Deletes the tags from the bucket.
+ *
+ * To use this operation, you must have permission to perform the s3:PutBucketTagging action. By default, the bucket owner has this permission and can grant this permission to others.
+ *
+ * The following operations are related to DeleteBucketTagging:
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, DeleteBucketTaggingCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, DeleteBucketTaggingCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // DeleteBucketTaggingRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new DeleteBucketTaggingCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param DeleteBucketTaggingCommandInput - {@link DeleteBucketTaggingCommandInput} + * @returns {@link DeleteBucketTaggingCommandOutput} + * @see {@link DeleteBucketTaggingCommandInput} for command's `input` shape. + * @see {@link DeleteBucketTaggingCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.

+ * + * + * @example To delete bucket tags + * ```javascript + * // The following example deletes bucket tags. + * const input = { + * Bucket: "examplebucket" + * }; + * const command = new DeleteBucketTaggingCommand(input); + * const response = await client.send(command); + * /* response is + * { /* metadata only *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class DeleteBucketTaggingCommand extends DeleteBucketTaggingCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteBucketTaggingRequest; + output: {}; + }; + sdk: { + input: DeleteBucketTaggingCommandInput; + output: DeleteBucketTaggingCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketWebsiteCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketWebsiteCommand.d.ts new file mode 100644 index 00000000..a2ff59f8 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteBucketWebsiteCommand.d.ts @@ -0,0 +1,110 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketWebsiteRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteBucketWebsiteCommand}. + */ +export interface DeleteBucketWebsiteCommandInput extends DeleteBucketWebsiteRequest { +} +/** + * @public + * + * The output of {@link DeleteBucketWebsiteCommand}. + */ +export interface DeleteBucketWebsiteCommandOutput extends __MetadataBearer { +} +declare const DeleteBucketWebsiteCommand_base: { + new (input: DeleteBucketWebsiteCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteBucketWebsiteCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * This action removes the website configuration for a bucket. Amazon S3 returns a 200 OK response upon successfully deleting a website configuration on the specified bucket. You will get a 200 OK response if the website configuration you are trying to delete does not exist on the bucket. Amazon S3 returns a 404 response if the bucket specified in the request does not exist.
+ *
+ * This DELETE action requires the S3:DeleteBucketWebsite permission. By default, only the bucket owner can delete the website configuration attached to a bucket. However, bucket owners can grant other users permission to delete the website configuration by writing a bucket policy granting them the S3:DeleteBucketWebsite permission.
+ *
+ * For more information about hosting websites, see Hosting Websites on Amazon S3.
+ *
+ * The following operations are related to DeleteBucketWebsite:
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, DeleteBucketWebsiteCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, DeleteBucketWebsiteCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // DeleteBucketWebsiteRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new DeleteBucketWebsiteCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param DeleteBucketWebsiteCommandInput - {@link DeleteBucketWebsiteCommandInput} + * @returns {@link DeleteBucketWebsiteCommandOutput} + * @see {@link DeleteBucketWebsiteCommandInput} for command's `input` shape. + * @see {@link DeleteBucketWebsiteCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *
+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @example To delete bucket website configuration + * ```javascript + * // The following example deletes bucket website configuration. + * const input = { + * Bucket: "examplebucket" + * }; + * const command = new DeleteBucketWebsiteCommand(input); + * const response = await client.send(command); + * /* response is + * { /* metadata only *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class DeleteBucketWebsiteCommand extends DeleteBucketWebsiteCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteBucketWebsiteRequest; + output: {}; + }; + sdk: { + input: DeleteBucketWebsiteCommandInput; + output: DeleteBucketWebsiteCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteObjectCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteObjectCommand.d.ts new file mode 100644 index 00000000..8b6a69ee --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteObjectCommand.d.ts @@ -0,0 +1,210 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteObjectOutput, DeleteObjectRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteObjectCommand}. + */ +export interface DeleteObjectCommandInput extends DeleteObjectRequest { +} +/** + * @public + * + * The output of {@link DeleteObjectCommand}. + */ +export interface DeleteObjectCommandOutput extends DeleteObjectOutput, __MetadataBearer { +} +declare const DeleteObjectCommand_base: { + new (input: DeleteObjectCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteObjectCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *
+ * Removes an object from a bucket. The behavior depends on the bucket's versioning state:
+ *
+ * - If bucket versioning is not enabled, the operation permanently deletes the object.
+ * - If bucket versioning is enabled, the operation inserts a delete marker, which becomes
+ *   the current version of the object. To permanently delete an object in a versioned
+ *   bucket, you must include the object’s versionId in the request. For more information
+ *   about versioning-enabled buckets, see Deleting object versions from a
+ *   versioning-enabled bucket.
+ * - If bucket versioning is suspended, the operation removes the object that has a null
+ *   versionId, if there is one, and inserts a delete marker that becomes the current version
+ *   of the object. If there isn't an object with a null versionId, and all versions of the
+ *   object have a versionId, Amazon S3 does not remove the object and only inserts a delete
+ *   marker. To permanently delete an object that has a versionId, you must include the
+ *   object’s versionId in the request. For more information about versioning-suspended
+ *   buckets, see Deleting objects from versioning-suspended buckets.
+ *
+ * Note:
+ * - Directory buckets - S3 Versioning isn't enabled or supported for directory buckets. For
+ *   this API operation, only the null value of the version ID is supported by directory
+ *   buckets. You can only specify null to the versionId query parameter in the request.
+ * - Directory buckets - For directory buckets, you must make requests for this API operation
+ *   to the Zonal endpoint. These endpoints support virtual-hosted-style requests in the
+ *   format https://amzn-s3-demo-bucket.s3express-zone-id.region-code.amazonaws.com/key-name.
+ *   Path-style requests are not supported. For more information about endpoints in
+ *   Availability Zones, see Regional and Zonal endpoints for directory buckets in
+ *   Availability Zones in the Amazon S3 User Guide. For more information about endpoints in
+ *   Local Zones, see Concepts for directory buckets in Local Zones in the Amazon S3 User Guide.
+ *
+ * To remove a specific version, you must use the versionId query parameter. Using this
+ * query parameter permanently deletes the version. If the object deleted is a delete marker,
+ * Amazon S3 sets the response header x-amz-delete-marker to true.
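+ *
+ * @example A minimal sketch of permanently deleting one version (bucket, key, and version ID
+ * are placeholders)
+ * ```javascript
+ * import { S3Client, DeleteObjectCommand } from "@aws-sdk/client-s3";
+ * const client = new S3Client({});
+ * const response = await client.send(new DeleteObjectCommand({
+ *   Bucket: "examplebucket",
+ *   Key: "HappyFace.jpg",
+ *   VersionId: "ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI", // the specific version to remove
+ * }));
+ * // If the deleted version was itself a delete marker, S3 sets the
+ * // x-amz-delete-marker response header, surfaced here as DeleteMarker === true.
+ * console.log(response.DeleteMarker, response.VersionId);
+ * ```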
+ *
+ * If the object you want to delete is in a bucket where the bucket versioning
+ * configuration is MFA Delete enabled, you must include the x-amz-mfa request
+ * header in the DELETE versionId request. Requests that include
+ * x-amz-mfa must use HTTPS. For more information about MFA Delete, see Using MFA Delete
+ * in the Amazon S3 User Guide. To see sample requests that use versioning, see Sample Request.
+ *
+ * Note: Directory buckets - MFA delete is not supported by directory buckets.
+ *
+ * You can delete objects by explicitly calling DELETE Object or calling
+ * (PutBucketLifecycle) to enable Amazon S3 to remove them for you. If you want to block
+ * users or accounts from removing or deleting objects from your bucket, you must deny them
+ * the s3:DeleteObject, s3:DeleteObjectVersion, and
+ * s3:PutLifeCycleConfiguration actions.
+ *
+ * Note: Directory buckets - S3 Lifecycle is not supported by directory buckets.
+ *
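+ * @example A minimal sketch of an MFA Delete request (the device serial number and token are
+ * placeholders; requests that include x-amz-mfa must use HTTPS)
+ * ```javascript
+ * import { S3Client, DeleteObjectCommand } from "@aws-sdk/client-s3";
+ * const client = new S3Client({});
+ * await client.send(new DeleteObjectCommand({
+ *   Bucket: "examplebucket",
+ *   Key: "HappyFace.jpg",
+ *   VersionId: "ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI",
+ *   // Sent as the x-amz-mfa header: "<device serial number> <one-time token>".
+ *   MFA: "arn:aws:iam::123456789012:mfa/user 123456",
+ * }));
+ * ```
+ *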
+ * Permissions
+ * - General purpose bucket permissions - The following permissions are required in your
+ *   policies when your DeleteObjects request includes specific headers.
+ *   - s3:DeleteObject - To delete an object from a bucket, you must always have the
+ *     s3:DeleteObject permission.
+ *   - s3:DeleteObjectVersion - To delete a specific version of an object from a
+ *     versioning-enabled bucket, you must have the s3:DeleteObjectVersion permission.
+ * - Directory bucket permissions - To grant access to this API operation on a directory
+ *   bucket, we recommend that you use the CreateSession API operation for session-based
+ *   authorization. Specifically, you grant the s3express:CreateSession permission to the
+ *   directory bucket in a bucket policy or an IAM identity-based policy. Then, you make the
+ *   CreateSession API call on the bucket to obtain a session token. With the session token in
+ *   your request header, you can make API requests to this operation. After the session token
+ *   expires, you make another CreateSession API call to generate a new session token for use.
+ *   The Amazon Web Services CLI or SDKs create the session and refresh the session token
+ *   automatically to avoid service interruptions when a session expires. For more information
+ *   about authorization, see CreateSession.
+ *
+ *
+ * HTTP Host header syntax
+ * - Directory buckets - The HTTP Host header syntax is
+ *   Bucket-name.s3express-zone-id.region-code.amazonaws.com.
+ *
+ * The following action is related to DeleteObject:
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, DeleteObjectCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, DeleteObjectCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // DeleteObjectRequest + * Bucket: "STRING_VALUE", // required + * Key: "STRING_VALUE", // required + * MFA: "STRING_VALUE", + * VersionId: "STRING_VALUE", + * RequestPayer: "requester", + * BypassGovernanceRetention: true || false, + * ExpectedBucketOwner: "STRING_VALUE", + * IfMatch: "STRING_VALUE", + * IfMatchLastModifiedTime: new Date("TIMESTAMP"), + * IfMatchSize: Number("long"), + * }; + * const command = new DeleteObjectCommand(input); + * const response = await client.send(command); + * // { // DeleteObjectOutput + * // DeleteMarker: true || false, + * // VersionId: "STRING_VALUE", + * // RequestCharged: "requester", + * // }; + * + * ``` + * + * @param DeleteObjectCommandInput - {@link DeleteObjectCommandInput} + * @returns {@link DeleteObjectCommandOutput} + * @see {@link DeleteObjectCommandInput} for command's `input` shape. + * @see {@link DeleteObjectCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *
+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @example To delete an object (from a non-versioned bucket) + * ```javascript + * // The following example deletes an object from a non-versioned bucket. + * const input = { + * Bucket: "ExampleBucket", + * Key: "HappyFace.jpg" + * }; + * const command = new DeleteObjectCommand(input); + * const response = await client.send(command); + * /* response is + * { /* metadata only *\/ } + * *\/ + * ``` + * + * @example To delete an object + * ```javascript + * // The following example deletes an object from an S3 bucket. + * const input = { + * Bucket: "examplebucket", + * Key: "objectkey.jpg" + * }; + * const command = new DeleteObjectCommand(input); + * const response = await client.send(command); + * /* response is + * { /* empty *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class DeleteObjectCommand extends DeleteObjectCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteObjectRequest; + output: DeleteObjectOutput; + }; + sdk: { + input: DeleteObjectCommandInput; + output: DeleteObjectCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteObjectTaggingCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteObjectTaggingCommand.d.ts new file mode 100644 index 00000000..3c207688 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteObjectTaggingCommand.d.ts @@ -0,0 +1,130 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteObjectTaggingOutput, DeleteObjectTaggingRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteObjectTaggingCommand}. + */ +export interface DeleteObjectTaggingCommandInput extends DeleteObjectTaggingRequest { +} +/** + * @public + * + * The output of {@link DeleteObjectTaggingCommand}. + */ +export interface DeleteObjectTaggingCommandOutput extends DeleteObjectTaggingOutput, __MetadataBearer { +} +declare const DeleteObjectTaggingCommand_base: { + new (input: DeleteObjectTaggingCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteObjectTaggingCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *
+ * Note: This operation is not supported for directory buckets.
+ *
+ * Removes the entire tag set from the specified object. For more information about
+ * managing object tags, see Object Tagging.
+ *
+ * To use this operation, you must have permission to perform the
+ * s3:DeleteObjectTagging action.
+ *
+ * To delete tags of a specific object version, add the versionId query
+ * parameter in the request. You will need permission for the
+ * s3:DeleteObjectVersionTagging action.
+ *
+ * The following operations are related to DeleteObjectTagging:
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, DeleteObjectTaggingCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, DeleteObjectTaggingCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // DeleteObjectTaggingRequest + * Bucket: "STRING_VALUE", // required + * Key: "STRING_VALUE", // required + * VersionId: "STRING_VALUE", + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new DeleteObjectTaggingCommand(input); + * const response = await client.send(command); + * // { // DeleteObjectTaggingOutput + * // VersionId: "STRING_VALUE", + * // }; + * + * ``` + * + * @param DeleteObjectTaggingCommandInput - {@link DeleteObjectTaggingCommandInput} + * @returns {@link DeleteObjectTaggingCommandOutput} + * @see {@link DeleteObjectTaggingCommandInput} for command's `input` shape. + * @see {@link DeleteObjectTaggingCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *
+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @example To remove tag set from an object + * ```javascript + * // The following example removes tag set associated with the specified object. If the bucket is versioning enabled, the operation removes tag set from the latest object version. + * const input = { + * Bucket: "examplebucket", + * Key: "HappyFace.jpg" + * }; + * const command = new DeleteObjectTaggingCommand(input); + * const response = await client.send(command); + * /* response is + * { + * VersionId: "null" + * } + * *\/ + * ``` + * + * @example To remove tag set from an object version + * ```javascript + * // The following example removes tag set associated with the specified object version. The request specifies both the object key and object version. + * const input = { + * Bucket: "examplebucket", + * Key: "HappyFace.jpg", + * VersionId: "ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI" + * }; + * const command = new DeleteObjectTaggingCommand(input); + * const response = await client.send(command); + * /* response is + * { + * VersionId: "ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI" + * } + * *\/ + * ``` + * + * @public + */ +export declare class DeleteObjectTaggingCommand extends DeleteObjectTaggingCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteObjectTaggingRequest; + output: DeleteObjectTaggingOutput; + }; + sdk: { + input: DeleteObjectTaggingCommandInput; + output: DeleteObjectTaggingCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteObjectsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteObjectsCommand.d.ts new file mode 100644 index 00000000..92784e00 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeleteObjectsCommand.d.ts @@ -0,0 +1,315 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteObjectsOutput, DeleteObjectsRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteObjectsCommand}. + */ +export interface DeleteObjectsCommandInput extends DeleteObjectsRequest { +} +/** + * @public + * + * The output of {@link DeleteObjectsCommand}. + */ +export interface DeleteObjectsCommandOutput extends DeleteObjectsOutput, __MetadataBearer { +} +declare const DeleteObjectsCommand_base: { + new (input: DeleteObjectsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteObjectsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *
+ * This operation enables you to delete multiple objects from a bucket using a single HTTP
+ * request. If you know the object keys that you want to delete, then this operation provides
+ * a suitable alternative to sending individual delete requests, reducing per-request
+ * overhead.
+ *
+ * The request can contain a list of up to 1,000 keys that you want to delete. In the XML, you
+ * provide the object key names, and optionally, version IDs if you want to delete a specific
+ * version of the object from a versioning-enabled bucket. For each key, Amazon S3 performs a
+ * delete operation and returns the result of that delete, success or failure, in the response.
+ * If the object specified in the request isn't found, Amazon S3 confirms the deletion by
+ * returning the result as deleted.
+ *
+ * Note:
+ * - Directory buckets - S3 Versioning isn't enabled or supported for directory buckets.
+ * - Directory buckets - For directory buckets, you must make requests for this API operation
+ *   to the Zonal endpoint. These endpoints support virtual-hosted-style requests in the
+ *   format https://amzn-s3-demo-bucket.s3express-zone-id.region-code.amazonaws.com/key-name.
+ *   Path-style requests are not supported. For more information about endpoints in
+ *   Availability Zones, see Regional and Zonal endpoints for directory buckets in
+ *   Availability Zones in the Amazon S3 User Guide. For more information about endpoints in
+ *   Local Zones, see Concepts for directory buckets in Local Zones in the Amazon S3 User Guide.
+ *
+ * The operation supports two modes for the response: verbose and quiet. By default, the
+ * operation uses verbose mode in which the response includes the result of deletion of each
+ * key in your request. In quiet mode the response includes only keys where the delete
+ * operation encountered an error. For a successful deletion in a quiet mode, the operation
+ * does not return any information about the delete in the response body.
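+ *
+ * @example A minimal sketch of quiet mode (bucket and keys are placeholders): only failed
+ * deletes come back in the response
+ * ```javascript
+ * import { S3Client, DeleteObjectsCommand } from "@aws-sdk/client-s3";
+ * const client = new S3Client({});
+ * const response = await client.send(new DeleteObjectsCommand({
+ *   Bucket: "examplebucket",
+ *   Delete: {
+ *     Objects: [{ Key: "objectkey1" }, { Key: "objectkey2" }],
+ *     Quiet: true, // suppress per-key success entries in the response
+ *   },
+ * }));
+ * // In quiet mode, successful deletes are omitted; inspect only the errors.
+ * for (const e of response.Errors ?? []) {
+ *   console.error(`${e.Key}: ${e.Code} - ${e.Message}`);
+ * }
+ * ```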
+ *
+ * When this action is performed on an MFA Delete enabled bucket and the request attempts to
+ * delete any versioned objects, you must include an MFA token. If you do not provide one, the
+ * entire request will fail, even if there are non-versioned objects you are trying to delete.
+ * If you provide an invalid token, whether there are versioned keys in the request or not,
+ * the entire Multi-Object Delete request will fail. For information about MFA Delete, see
+ * MFA Delete in the Amazon S3 User Guide.
+ *
+ * Note: Directory buckets - MFA delete is not supported by directory buckets.
+ *
+ *
+ * Permissions
+ * - General purpose bucket permissions - The following permissions are required in your
+ *   policies when your DeleteObjects request includes specific headers.
+ *   - s3:DeleteObject - To delete an object from a bucket, you must always specify
+ *     the s3:DeleteObject permission.
+ *   - s3:DeleteObjectVersion - To delete a specific version of an object from a
+ *     versioning-enabled bucket, you must specify the s3:DeleteObjectVersion permission.
+ * - Directory bucket permissions - To grant access to this API operation on a directory
+ *   bucket, we recommend that you use the CreateSession API operation for session-based
+ *   authorization. Specifically, you grant the s3express:CreateSession permission to the
+ *   directory bucket in a bucket policy or an IAM identity-based policy. Then, you make the
+ *   CreateSession API call on the bucket to obtain a session token. With the session token in
+ *   your request header, you can make API requests to this operation. After the session token
+ *   expires, you make another CreateSession API call to generate a new session token for use.
+ *   The Amazon Web Services CLI or SDKs create the session and refresh the session token
+ *   automatically to avoid service interruptions when a session expires. For more information
+ *   about authorization, see CreateSession.
+ *
+ *
+ * Content-MD5 request header
+ * - General purpose bucket - The Content-MD5 request header is required for all
+ *   Multi-Object Delete requests. Amazon S3 uses the header value to ensure that your
+ *   request body has not been altered in transit.
+ * - Directory bucket - The Content-MD5 request header or an additional checksum request
+ *   header (including x-amz-checksum-crc32, x-amz-checksum-crc32c, x-amz-checksum-sha1, or
+ *   x-amz-checksum-sha256) is required for all Multi-Object Delete requests.
+ *
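+ * @example A minimal sketch of supplying an additional checksum instead of Content-MD5
+ * (bucket and key are placeholders); with ChecksumAlgorithm set, the SDK is expected to
+ * compute and send the corresponding x-amz-checksum-* header
+ * ```javascript
+ * import { S3Client, DeleteObjectsCommand } from "@aws-sdk/client-s3";
+ * const client = new S3Client({});
+ * await client.send(new DeleteObjectsCommand({
+ *   Bucket: "examplebucket",
+ *   Delete: { Objects: [{ Key: "objectkey1" }] },
+ *   ChecksumAlgorithm: "CRC32", // request-body checksum for the delete payload
+ * }));
+ * ```
+ *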
+ * HTTP Host header syntax
+ * - Directory buckets - The HTTP Host header syntax is
+ *   Bucket-name.s3express-zone-id.region-code.amazonaws.com.
+ *
+ * The following operations are related to DeleteObjects:
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, DeleteObjectsCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, DeleteObjectsCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // DeleteObjectsRequest + * Bucket: "STRING_VALUE", // required + * Delete: { // Delete + * Objects: [ // ObjectIdentifierList // required + * { // ObjectIdentifier + * Key: "STRING_VALUE", // required + * VersionId: "STRING_VALUE", + * ETag: "STRING_VALUE", + * LastModifiedTime: new Date("TIMESTAMP"), + * Size: Number("long"), + * }, + * ], + * Quiet: true || false, + * }, + * MFA: "STRING_VALUE", + * RequestPayer: "requester", + * BypassGovernanceRetention: true || false, + * ExpectedBucketOwner: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * }; + * const command = new DeleteObjectsCommand(input); + * const response = await client.send(command); + * // { // DeleteObjectsOutput + * // Deleted: [ // DeletedObjects + * // { // DeletedObject + * // Key: "STRING_VALUE", + * // VersionId: "STRING_VALUE", + * // DeleteMarker: true || false, + * // DeleteMarkerVersionId: "STRING_VALUE", + * // }, + * // ], + * // RequestCharged: "requester", + * // Errors: [ // Errors + * // { // Error + * // Key: "STRING_VALUE", + * // VersionId: "STRING_VALUE", + * // Code: "STRING_VALUE", + * // Message: "STRING_VALUE", + * // }, + * // ], + * // }; + * + * ``` + * + * @param DeleteObjectsCommandInput - {@link DeleteObjectsCommandInput} + * @returns {@link DeleteObjectsCommandOutput} + * @see {@link DeleteObjectsCommandInput} for command's `input` shape. + * @see {@link DeleteObjectsCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *
+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @example To delete multiple object versions from a versioned bucket + * ```javascript + * // The following example deletes objects from a bucket. The request specifies object versions. S3 deletes specific object versions and returns the key and versions of deleted objects in the response. + * const input = { + * Bucket: "examplebucket", + * Delete: { + * Objects: [ + * { + * Key: "HappyFace.jpg", + * VersionId: "2LWg7lQLnY41.maGB5Z6SWW.dcq0vx7b" + * }, + * { + * Key: "HappyFace.jpg", + * VersionId: "yoz3HB.ZhCS_tKVEmIOr7qYyyAaZSKVd" + * } + * ], + * Quiet: false + * } + * }; + * const command = new DeleteObjectsCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Deleted: [ + * { + * Key: "HappyFace.jpg", + * VersionId: "yoz3HB.ZhCS_tKVEmIOr7qYyyAaZSKVd" + * }, + * { + * Key: "HappyFace.jpg", + * VersionId: "2LWg7lQLnY41.maGB5Z6SWW.dcq0vx7b" + * } + * ] + * } + * *\/ + * ``` + * + * @example To delete multiple objects from a versioned bucket + * ```javascript + * // The following example deletes objects from a bucket. The bucket is versioned, and the request does not specify the object version to delete. In this case, all versions remain in the bucket and S3 adds a delete marker. + * const input = { + * Bucket: "examplebucket", + * Delete: { + * Objects: [ + * { + * Key: "objectkey1" + * }, + * { + * Key: "objectkey2" + * } + * ], + * Quiet: false + * } + * }; + * const command = new DeleteObjectsCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Deleted: [ + * { + * DeleteMarker: true, + * DeleteMarkerVersionId: "A._w1z6EFiCF5uhtQMDal9JDkID9tQ7F", + * Key: "objectkey1" + * }, + * { + * DeleteMarker: true, + * DeleteMarkerVersionId: "iOd_ORxhkKe_e8G8_oSGxt2PjsCZKlkt", + * Key: "objectkey2" + * } + * ] + * } + * *\/ + * ``` + * + * @public + */ +export declare class DeleteObjectsCommand extends DeleteObjectsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteObjectsRequest; + output: DeleteObjectsOutput; + }; + sdk: { + input: DeleteObjectsCommandInput; + output: DeleteObjectsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/DeletePublicAccessBlockCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeletePublicAccessBlockCommand.d.ts new file mode 100644 index 00000000..bb522f14 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/DeletePublicAccessBlockCommand.d.ts @@ -0,0 +1,101 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeletePublicAccessBlockRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeletePublicAccessBlockCommand}. + */ +export interface DeletePublicAccessBlockCommandInput extends DeletePublicAccessBlockRequest { +} +/** + * @public + * + * The output of {@link DeletePublicAccessBlockCommand}. 
+ */ +export interface DeletePublicAccessBlockCommandOutput extends __MetadataBearer { +} +declare const DeletePublicAccessBlockCommand_base: { + new (input: DeletePublicAccessBlockCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeletePublicAccessBlockCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *
+ * Note: This operation is not supported for directory buckets.
+ *
+ * Removes the PublicAccessBlock configuration for an Amazon S3 bucket. To use this
+ * operation, you must have the s3:PutBucketPublicAccessBlock permission. For
+ * more information about permissions, see Permissions Related to Bucket Subresource
+ * Operations and Managing Access Permissions to Your Amazon S3 Resources.
+ *
+ * The following operations are related to DeletePublicAccessBlock:
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, DeletePublicAccessBlockCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, DeletePublicAccessBlockCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // DeletePublicAccessBlockRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new DeletePublicAccessBlockCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param DeletePublicAccessBlockCommandInput - {@link DeletePublicAccessBlockCommandInput} + * @returns {@link DeletePublicAccessBlockCommandOutput} + * @see {@link DeletePublicAccessBlockCommandInput} for command's `input` shape. + * @see {@link DeletePublicAccessBlockCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *
+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @public + */ +export declare class DeletePublicAccessBlockCommand extends DeletePublicAccessBlockCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeletePublicAccessBlockRequest; + output: {}; + }; + sdk: { + input: DeletePublicAccessBlockCommandInput; + output: DeletePublicAccessBlockCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketAccelerateConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketAccelerateConfigurationCommand.d.ts new file mode 100644 index 00000000..61c7656f --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketAccelerateConfigurationCommand.d.ts @@ -0,0 +1,103 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketAccelerateConfigurationOutput, GetBucketAccelerateConfigurationRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketAccelerateConfigurationCommand}. + */ +export interface GetBucketAccelerateConfigurationCommandInput extends GetBucketAccelerateConfigurationRequest { +} +/** + * @public + * + * The output of {@link GetBucketAccelerateConfigurationCommand}. + */ +export interface GetBucketAccelerateConfigurationCommandOutput extends GetBucketAccelerateConfigurationOutput, __MetadataBearer { +} +declare const GetBucketAccelerateConfigurationCommand_base: { + new (input: GetBucketAccelerateConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketAccelerateConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *
+ * Note: This operation is not supported for directory buckets.
+ *
+ * This implementation of the GET action uses the accelerate subresource to
+ * return the Transfer Acceleration state of a bucket, which is either Enabled or
+ * Suspended. Amazon S3 Transfer Acceleration is a bucket-level feature that
+ * enables you to perform faster data transfers to and from Amazon S3.
+ *
+ * To use this operation, you must have permission to perform the
+ * s3:GetAccelerateConfiguration action. The bucket owner has this permission
+ * by default. The bucket owner can grant this permission to others. For more information
+ * about permissions, see Permissions Related to Bucket Subresource Operations and Managing
+ * Access Permissions to your Amazon S3 Resources in the Amazon S3 User Guide.
+ *
+ * You set the Transfer Acceleration state of an existing bucket to Enabled or
+ * Suspended by using the PutBucketAccelerateConfiguration operation.
+ *
+ * A GET accelerate request does not return a state value for a bucket that
+ * has no transfer acceleration state. A bucket has no Transfer Acceleration state if a state
+ * has never been set on the bucket.
+ *
+ * For more information about transfer acceleration, see Transfer Acceleration in
+ * the Amazon S3 User Guide.
+ *
+ * The following operations are related to GetBucketAccelerateConfiguration:
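+ *
+ * @example A minimal sketch of reading the acceleration state (bucket name is a placeholder)
+ * ```javascript
+ * import { S3Client, GetBucketAccelerateConfigurationCommand } from "@aws-sdk/client-s3";
+ * const client = new S3Client({});
+ * const { Status } = await client.send(
+ *   new GetBucketAccelerateConfigurationCommand({ Bucket: "examplebucket" })
+ * );
+ * // Status is "Enabled", "Suspended", or undefined when a state has never been set.
+ * console.log(Status ?? "no Transfer Acceleration state set");
+ * ```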
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketAccelerateConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketAccelerateConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketAccelerateConfigurationRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * RequestPayer: "requester", + * }; + * const command = new GetBucketAccelerateConfigurationCommand(input); + * const response = await client.send(command); + * // { // GetBucketAccelerateConfigurationOutput + * // Status: "Enabled" || "Suspended", + * // RequestCharged: "requester", + * // }; + * + * ``` + * + * @param GetBucketAccelerateConfigurationCommandInput - {@link GetBucketAccelerateConfigurationCommandInput} + * @returns {@link GetBucketAccelerateConfigurationCommandOutput} + * @see {@link GetBucketAccelerateConfigurationCommandInput} for command's `input` shape. + * @see {@link GetBucketAccelerateConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *
+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @public + */ +export declare class GetBucketAccelerateConfigurationCommand extends GetBucketAccelerateConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketAccelerateConfigurationRequest; + output: GetBucketAccelerateConfigurationOutput; + }; + sdk: { + input: GetBucketAccelerateConfigurationCommandInput; + output: GetBucketAccelerateConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketAclCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketAclCommand.d.ts new file mode 100644 index 00000000..fd2df777 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketAclCommand.d.ts @@ -0,0 +1,116 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketAclOutput, GetBucketAclRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketAclCommand}. + */ +export interface GetBucketAclCommandInput extends GetBucketAclRequest { +} +/** + * @public + * + * The output of {@link GetBucketAclCommand}. + */ +export interface GetBucketAclCommandOutput extends GetBucketAclOutput, __MetadataBearer { +} +declare const GetBucketAclCommand_base: { + new (input: GetBucketAclCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketAclCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *
+ * Note: This operation is not supported for directory buckets.
+ *
+ * This implementation of the GET action uses the acl subresource
+ * to return the access control list (ACL) of a bucket. To use GET to return the
+ * ACL of the bucket, you must have the READ_ACP access to the bucket. If
+ * READ_ACP permission is granted to the anonymous user, you can return the
+ * ACL of the bucket without using an authorization header.
+ *
+ * When you use this API operation with an access point, provide the alias of the access
+ * point in place of the bucket name.
+ *
+ * When you use this API operation with an Object Lambda access point, provide the alias of
+ * the Object Lambda access point in place of the bucket name. If the Object Lambda access
+ * point alias in a request is not valid, the error code InvalidAccessPointAliasError is
+ * returned. For more information about InvalidAccessPointAliasError, see List of Error Codes.
+ *
+ * Note: If your bucket uses the bucket owner enforced setting for S3 Object Ownership,
+ * requests to read ACLs are still supported and return the
+ * bucket-owner-full-control ACL with the owner being the account that
+ * created the bucket. For more information, see Controlling object
+ * ownership and disabling ACLs in the Amazon S3 User Guide.
+ *
+ * The following operations are related to GetBucketAcl:
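+ *
+ * @example A minimal sketch of listing the grants in a bucket ACL (bucket name is a
+ * placeholder)
+ * ```javascript
+ * import { S3Client, GetBucketAclCommand } from "@aws-sdk/client-s3";
+ * const client = new S3Client({});
+ * const { Owner, Grants } = await client.send(
+ *   new GetBucketAclCommand({ Bucket: "examplebucket" })
+ * );
+ * console.log("Owner:", Owner?.ID);
+ * for (const grant of Grants ?? []) {
+ *   console.log(grant.Grantee?.Type, grant.Grantee?.ID ?? grant.Grantee?.URI, grant.Permission);
+ * }
+ * ```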
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketAclCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketAclCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketAclRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetBucketAclCommand(input); + * const response = await client.send(command); + * // { // GetBucketAclOutput + * // Owner: { // Owner + * // DisplayName: "STRING_VALUE", + * // ID: "STRING_VALUE", + * // }, + * // Grants: [ // Grants + * // { // Grant + * // Grantee: { // Grantee + * // DisplayName: "STRING_VALUE", + * // EmailAddress: "STRING_VALUE", + * // ID: "STRING_VALUE", + * // URI: "STRING_VALUE", + * // Type: "CanonicalUser" || "AmazonCustomerByEmail" || "Group", // required + * // }, + * // Permission: "FULL_CONTROL" || "WRITE" || "WRITE_ACP" || "READ" || "READ_ACP", + * // }, + * // ], + * // }; + * + * ``` + * + * @param GetBucketAclCommandInput - {@link GetBucketAclCommandInput} + * @returns {@link GetBucketAclCommandOutput} + * @see {@link GetBucketAclCommandInput} for command's `input` shape. + * @see {@link GetBucketAclCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *
+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @public + */ +export declare class GetBucketAclCommand extends GetBucketAclCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketAclRequest; + output: GetBucketAclOutput; + }; + sdk: { + input: GetBucketAclCommandInput; + output: GetBucketAclCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketAnalyticsConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketAnalyticsConfigurationCommand.d.ts new file mode 100644 index 00000000..8a5dc515 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketAnalyticsConfigurationCommand.d.ts @@ -0,0 +1,136 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketAnalyticsConfigurationOutput, GetBucketAnalyticsConfigurationRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketAnalyticsConfigurationCommand}. + */ +export interface GetBucketAnalyticsConfigurationCommandInput extends GetBucketAnalyticsConfigurationRequest { +} +/** + * @public + * + * The output of {@link GetBucketAnalyticsConfigurationCommand}. + */ +export interface GetBucketAnalyticsConfigurationCommandOutput extends GetBucketAnalyticsConfigurationOutput, __MetadataBearer { +} +declare const GetBucketAnalyticsConfigurationCommand_base: { + new (input: GetBucketAnalyticsConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketAnalyticsConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *
+ * Note: This operation is not supported for directory buckets.
+ *
+ * This implementation of the GET action returns an analytics configuration (identified by
+ * the analytics configuration ID) from the bucket.
+ *
+ * To use this operation, you must have permissions to perform the
+ * s3:GetAnalyticsConfiguration action. The bucket owner has this permission
+ * by default. The bucket owner can grant this permission to others. For more information
+ * about permissions, see Permissions Related to Bucket Subresource Operations and Managing
+ * Access Permissions to Your Amazon S3 Resources in the Amazon S3 User Guide.
+ *
+ * For information about the Amazon S3 analytics feature, see Amazon S3 Analytics – Storage
+ * Class Analysis in the Amazon S3 User Guide.
+ *
+ * The following operations are related to GetBucketAnalyticsConfiguration:
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketAnalyticsConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketAnalyticsConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketAnalyticsConfigurationRequest + * Bucket: "STRING_VALUE", // required + * Id: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetBucketAnalyticsConfigurationCommand(input); + * const response = await client.send(command); + * // { // GetBucketAnalyticsConfigurationOutput + * // AnalyticsConfiguration: { // AnalyticsConfiguration + * // Id: "STRING_VALUE", // required + * // Filter: { // AnalyticsFilter Union: only one key present + * // Prefix: "STRING_VALUE", + * // Tag: { // Tag + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // And: { // AnalyticsAndOperator + * // Prefix: "STRING_VALUE", + * // Tags: [ // TagSet + * // { + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // ], + * // }, + * // }, + * // StorageClassAnalysis: { // StorageClassAnalysis + * // DataExport: { // StorageClassAnalysisDataExport + * // OutputSchemaVersion: "V_1", // required + * // Destination: { // AnalyticsExportDestination + * // S3BucketDestination: { // AnalyticsS3BucketDestination + * // Format: "CSV", // required + * // BucketAccountId: "STRING_VALUE", + * // Bucket: "STRING_VALUE", // required + * // Prefix: "STRING_VALUE", + * // }, + * // }, + * // }, + * // }, + * // }, + * // }; + * + * ``` + * + * @param GetBucketAnalyticsConfigurationCommandInput - {@link GetBucketAnalyticsConfigurationCommandInput} + * @returns {@link GetBucketAnalyticsConfigurationCommandOutput} + * @see {@link GetBucketAnalyticsConfigurationCommandInput} for command's `input` shape. + * @see {@link GetBucketAnalyticsConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *
+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @public + */ +export declare class GetBucketAnalyticsConfigurationCommand extends GetBucketAnalyticsConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketAnalyticsConfigurationRequest; + output: GetBucketAnalyticsConfigurationOutput; + }; + sdk: { + input: GetBucketAnalyticsConfigurationCommandInput; + output: GetBucketAnalyticsConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketCorsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketCorsCommand.d.ts new file mode 100644 index 00000000..53d1ed34 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketCorsCommand.d.ts @@ -0,0 +1,145 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketCorsOutput, GetBucketCorsRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketCorsCommand}. + */ +export interface GetBucketCorsCommandInput extends GetBucketCorsRequest { +} +/** + * @public + * + * The output of {@link GetBucketCorsCommand}. + */ +export interface GetBucketCorsCommandOutput extends GetBucketCorsOutput, __MetadataBearer { +} +declare const GetBucketCorsCommand_base: { + new (input: GetBucketCorsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketCorsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *
+ * Note: This operation is not supported for directory buckets.
+ *
+ * Returns the Cross-Origin Resource Sharing (CORS) configuration information set for the
+ * bucket.
+ *
+ * To use this operation, you must have permission to perform the
+ * s3:GetBucketCORS action. By default, the bucket owner has this permission
+ * and can grant it to others.
+ *
+ * When you use this API operation with an access point, provide the alias of the access
+ * point in place of the bucket name.
+ *
+ * When you use this API operation with an Object Lambda access point, provide the alias of
+ * the Object Lambda access point in place of the bucket name. If the Object Lambda access
+ * point alias in a request is not valid, the error code InvalidAccessPointAliasError is
+ * returned. For more information about InvalidAccessPointAliasError, see List of Error Codes.
+ *
+ * For more information about CORS, see Enabling Cross-Origin Resource Sharing.
+ *
+ * The following operations are related to GetBucketCors:
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketCorsCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketCorsCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketCorsRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetBucketCorsCommand(input); + * const response = await client.send(command); + * // { // GetBucketCorsOutput + * // CORSRules: [ // CORSRules + * // { // CORSRule + * // ID: "STRING_VALUE", + * // AllowedHeaders: [ // AllowedHeaders + * // "STRING_VALUE", + * // ], + * // AllowedMethods: [ // AllowedMethods // required + * // "STRING_VALUE", + * // ], + * // AllowedOrigins: [ // AllowedOrigins // required + * // "STRING_VALUE", + * // ], + * // ExposeHeaders: [ // ExposeHeaders + * // "STRING_VALUE", + * // ], + * // MaxAgeSeconds: Number("int"), + * // }, + * // ], + * // }; + * + * ``` + * + * @param GetBucketCorsCommandInput - {@link GetBucketCorsCommandInput} + * @returns {@link GetBucketCorsCommandOutput} + * @see {@link GetBucketCorsCommandInput} for command's `input` shape. + * @see {@link GetBucketCorsCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *
+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @example To get cors configuration set on a bucket + * ```javascript + * // The following example returns cross-origin resource sharing (CORS) configuration set on a bucket. + * const input = { + * Bucket: "examplebucket" + * }; + * const command = new GetBucketCorsCommand(input); + * const response = await client.send(command); + * /* response is + * { + * CORSRules: [ + * { + * AllowedHeaders: [ + * "Authorization" + * ], + * AllowedMethods: [ + * "GET" + * ], + * AllowedOrigins: [ + * "*" + * ], + * MaxAgeSeconds: 3000 + * } + * ] + * } + * *\/ + * ``` + * + * @public + */ +export declare class GetBucketCorsCommand extends GetBucketCorsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketCorsRequest; + output: GetBucketCorsOutput; + }; + sdk: { + input: GetBucketCorsCommandInput; + output: GetBucketCorsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketEncryptionCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketEncryptionCommand.d.ts new file mode 100644 index 00000000..ef597d77 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketEncryptionCommand.d.ts @@ -0,0 +1,144 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketEncryptionOutput, GetBucketEncryptionRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketEncryptionCommand}. + */ +export interface GetBucketEncryptionCommandInput extends GetBucketEncryptionRequest { +} +/** + * @public + * + * The output of {@link GetBucketEncryptionCommand}. + */ +export interface GetBucketEncryptionCommandOutput extends GetBucketEncryptionOutput, __MetadataBearer { +} +declare const GetBucketEncryptionCommand_base: { + new (input: GetBucketEncryptionCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketEncryptionCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *
+ * Returns the default encryption configuration for an Amazon S3 bucket. By default, all
+ * buckets have a default encryption configuration that uses server-side encryption with
+ * Amazon S3 managed keys (SSE-S3).
+ *
+ * Permissions
+ * - General purpose bucket permissions - The s3:GetEncryptionConfiguration permission is
+ *   required in a policy. The bucket owner has this permission by default. The bucket owner
+ *   can grant this permission to others. For more information about permissions, see
+ *   Permissions Related to Bucket Operations and Managing Access Permissions to Your Amazon
+ *   S3 Resources.
+ * - Directory bucket permissions - To grant access to this API operation, you must have the
+ *   s3express:GetEncryptionConfiguration permission in an IAM identity-based policy instead
+ *   of a bucket policy. Cross-account access to this API operation isn't supported. This
+ *   operation can only be performed by the Amazon Web Services account that owns the
+ *   resource. For more information about directory bucket policies and permissions, see
+ *   Amazon Web Services Identity and Access Management (IAM) for S3 Express One Zone in the
+ *   Amazon S3 User Guide.
+ *
+ * HTTP Host header syntax
+ * - Directory buckets - The HTTP Host header syntax is
+ *   s3express-control.region-code.amazonaws.com.
+ *
+ * The following operations are related to GetBucketEncryption:
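+ *
+ * @example A minimal sketch of reading the default encryption algorithm (bucket name is a
+ * placeholder)
+ * ```javascript
+ * import { S3Client, GetBucketEncryptionCommand } from "@aws-sdk/client-s3";
+ * const client = new S3Client({});
+ * const { ServerSideEncryptionConfiguration } = await client.send(
+ *   new GetBucketEncryptionCommand({ Bucket: "examplebucket" })
+ * );
+ * const rule = ServerSideEncryptionConfiguration?.Rules?.[0];
+ * // "AES256" (SSE-S3), "aws:kms", or "aws:kms:dsse"
+ * console.log(rule?.ApplyServerSideEncryptionByDefault?.SSEAlgorithm);
+ * ```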
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketEncryptionCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketEncryptionCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketEncryptionRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetBucketEncryptionCommand(input); + * const response = await client.send(command); + * // { // GetBucketEncryptionOutput + * // ServerSideEncryptionConfiguration: { // ServerSideEncryptionConfiguration + * // Rules: [ // ServerSideEncryptionRules // required + * // { // ServerSideEncryptionRule + * // ApplyServerSideEncryptionByDefault: { // ServerSideEncryptionByDefault + * // SSEAlgorithm: "AES256" || "aws:kms" || "aws:kms:dsse", // required + * // KMSMasterKeyID: "STRING_VALUE", + * // }, + * // BucketKeyEnabled: true || false, + * // }, + * // ], + * // }, + * // }; + * + * ``` + * + * @param GetBucketEncryptionCommandInput - {@link GetBucketEncryptionCommandInput} + * @returns {@link GetBucketEncryptionCommandOutput} + * @see {@link GetBucketEncryptionCommandInput} for command's `input` shape. + * @see {@link GetBucketEncryptionCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *
+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @public + */ +export declare class GetBucketEncryptionCommand extends GetBucketEncryptionCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketEncryptionRequest; + output: GetBucketEncryptionOutput; + }; + sdk: { + input: GetBucketEncryptionCommandInput; + output: GetBucketEncryptionCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketIntelligentTieringConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketIntelligentTieringConfigurationCommand.d.ts new file mode 100644 index 00000000..a0234083 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketIntelligentTieringConfigurationCommand.d.ts @@ -0,0 +1,122 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketIntelligentTieringConfigurationOutput, GetBucketIntelligentTieringConfigurationRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketIntelligentTieringConfigurationCommand}. + */ +export interface GetBucketIntelligentTieringConfigurationCommandInput extends GetBucketIntelligentTieringConfigurationRequest { +} +/** + * @public + * + * The output of {@link GetBucketIntelligentTieringConfigurationCommand}. + */ +export interface GetBucketIntelligentTieringConfigurationCommandOutput extends GetBucketIntelligentTieringConfigurationOutput, __MetadataBearer { +} +declare const GetBucketIntelligentTieringConfigurationCommand_base: { + new (input: GetBucketIntelligentTieringConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketIntelligentTieringConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *
+ * Note: This operation is not supported for directory buckets.
+ *
+ * Gets the S3 Intelligent-Tiering configuration from the specified bucket.
+ *
+ * The S3 Intelligent-Tiering storage class is designed to optimize storage costs by
+ * automatically moving data to the most cost-effective storage access tier, without
+ * performance impact or operational overhead. S3 Intelligent-Tiering delivers automatic cost
+ * savings in three low latency and high throughput access tiers. To get the lowest storage
+ * cost on data that can be accessed in minutes to hours, you can choose to activate
+ * additional archiving capabilities.
+ *
+ * The S3 Intelligent-Tiering storage class is the ideal storage class for data with unknown,
+ * changing, or unpredictable access patterns, independent of object size or retention period.
+ * If the size of an object is less than 128 KB, it is not monitored and not eligible for
+ * auto-tiering. Smaller objects can be stored, but they are always charged at the Frequent
+ * Access tier rates in the S3 Intelligent-Tiering storage class.
+ *
+ * For more information, see Storage class for automatically optimizing frequently and
+ * infrequently accessed objects.
+ *
+ * Operations related to GetBucketIntelligentTieringConfiguration include:
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketIntelligentTieringConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketIntelligentTieringConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketIntelligentTieringConfigurationRequest + * Bucket: "STRING_VALUE", // required + * Id: "STRING_VALUE", // required + * }; + * const command = new GetBucketIntelligentTieringConfigurationCommand(input); + * const response = await client.send(command); + * // { // GetBucketIntelligentTieringConfigurationOutput + * // IntelligentTieringConfiguration: { // IntelligentTieringConfiguration + * // Id: "STRING_VALUE", // required + * // Filter: { // IntelligentTieringFilter + * // Prefix: "STRING_VALUE", + * // Tag: { // Tag + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // And: { // IntelligentTieringAndOperator + * // Prefix: "STRING_VALUE", + * // Tags: [ // TagSet + * // { + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // ], + * // }, + * // }, + * // Status: "Enabled" || "Disabled", // required + * // Tierings: [ // TieringList // required + * // { // Tiering + * // Days: Number("int"), // required + * // AccessTier: "ARCHIVE_ACCESS" || "DEEP_ARCHIVE_ACCESS", // required + * // }, + * // ], + * // }, + * // }; + * + * ``` + * + * @param GetBucketIntelligentTieringConfigurationCommandInput - {@link GetBucketIntelligentTieringConfigurationCommandInput} + * @returns {@link GetBucketIntelligentTieringConfigurationCommandOutput} + * @see {@link GetBucketIntelligentTieringConfigurationCommandInput} for command's `input` shape. + * @see {@link GetBucketIntelligentTieringConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class GetBucketIntelligentTieringConfigurationCommand extends GetBucketIntelligentTieringConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketIntelligentTieringConfigurationRequest; + output: GetBucketIntelligentTieringConfigurationOutput; + }; + sdk: { + input: GetBucketIntelligentTieringConfigurationCommandInput; + output: GetBucketIntelligentTieringConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketInventoryConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketInventoryConfigurationCommand.d.ts new file mode 100644 index 00000000..ba2aac0f --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketInventoryConfigurationCommand.d.ts @@ -0,0 +1,130 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketInventoryConfigurationOutput, GetBucketInventoryConfigurationRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketInventoryConfigurationCommand}. + */ +export interface GetBucketInventoryConfigurationCommandInput extends GetBucketInventoryConfigurationRequest { +} +/** + * @public + * + * The output of {@link GetBucketInventoryConfigurationCommand}. + */ +export interface GetBucketInventoryConfigurationCommandOutput extends GetBucketInventoryConfigurationOutput, __MetadataBearer { +} +declare const GetBucketInventoryConfigurationCommand_base: { + new (input: GetBucketInventoryConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketInventoryConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

This operation is not supported for directory buckets.

+ *
+ *

Returns an inventory configuration (identified by the inventory configuration ID) from the bucket.

+ *

To use this operation, you must have permissions to perform the s3:GetInventoryConfiguration action. The bucket owner has this permission by default and can grant this permission to others. For more information about permissions, see Permissions Related to Bucket Subresource Operations and Managing Access Permissions to Your Amazon S3 Resources.

+ *

For information about the Amazon S3 inventory feature, see Amazon S3 Inventory.
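For illustration, here is a minimal sketch (not part of the vendored file) that reads back where a given inventory configuration delivers its reports. The helper name is hypothetical; the field names follow the output shape shown in the example below.

```typescript
import { S3Client, GetBucketInventoryConfigurationCommand } from "@aws-sdk/client-s3";

// Sketch: look up one inventory configuration by ID and return its
// delivery destination and report format.
async function getInventoryDestination(client: S3Client, bucket: string, id: string) {
  const { InventoryConfiguration } = await client.send(
    new GetBucketInventoryConfigurationCommand({ Bucket: bucket, Id: id })
  );
  const dest = InventoryConfiguration?.Destination?.S3BucketDestination;
  return dest ? { bucket: dest.Bucket, format: dest.Format, prefix: dest.Prefix } : undefined;
}
```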

+ *

The following operations are related to GetBucketInventoryConfiguration:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketInventoryConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketInventoryConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketInventoryConfigurationRequest + * Bucket: "STRING_VALUE", // required + * Id: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetBucketInventoryConfigurationCommand(input); + * const response = await client.send(command); + * // { // GetBucketInventoryConfigurationOutput + * // InventoryConfiguration: { // InventoryConfiguration + * // Destination: { // InventoryDestination + * // S3BucketDestination: { // InventoryS3BucketDestination + * // AccountId: "STRING_VALUE", + * // Bucket: "STRING_VALUE", // required + * // Format: "CSV" || "ORC" || "Parquet", // required + * // Prefix: "STRING_VALUE", + * // Encryption: { // InventoryEncryption + * // SSES3: {}, + * // SSEKMS: { // SSEKMS + * // KeyId: "STRING_VALUE", // required + * // }, + * // }, + * // }, + * // }, + * // IsEnabled: true || false, // required + * // Filter: { // InventoryFilter + * // Prefix: "STRING_VALUE", // required + * // }, + * // Id: "STRING_VALUE", // required + * // IncludedObjectVersions: "All" || "Current", // required + * // OptionalFields: [ // InventoryOptionalFields + * // "Size" || "LastModifiedDate" || "StorageClass" || "ETag" || "IsMultipartUploaded" || "ReplicationStatus" || "EncryptionStatus" || "ObjectLockRetainUntilDate" || "ObjectLockMode" || "ObjectLockLegalHoldStatus" || "IntelligentTieringAccessTier" || "BucketKeyStatus" || "ChecksumAlgorithm" || "ObjectAccessControlList" || "ObjectOwner", + * // ], + * // Schedule: { // InventorySchedule + * // Frequency: "Daily" || "Weekly", // required + * // }, + * // }, + * // }; + * + * ``` + * + * @param GetBucketInventoryConfigurationCommandInput - {@link GetBucketInventoryConfigurationCommandInput} + * @returns {@link GetBucketInventoryConfigurationCommandOutput} + * @see {@link GetBucketInventoryConfigurationCommandInput} for command's `input` shape. + * @see {@link GetBucketInventoryConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class GetBucketInventoryConfigurationCommand extends GetBucketInventoryConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketInventoryConfigurationRequest; + output: GetBucketInventoryConfigurationOutput; + }; + sdk: { + input: GetBucketInventoryConfigurationCommandInput; + output: GetBucketInventoryConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketLifecycleConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketLifecycleConfigurationCommand.d.ts new file mode 100644 index 00000000..58c3c5fc --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketLifecycleConfigurationCommand.d.ts @@ -0,0 +1,254 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketLifecycleConfigurationOutput, GetBucketLifecycleConfigurationRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketLifecycleConfigurationCommand}. + */ +export interface GetBucketLifecycleConfigurationCommandInput extends GetBucketLifecycleConfigurationRequest { +} +/** + * @public + * + * The output of {@link GetBucketLifecycleConfigurationCommand}. + */ +export interface GetBucketLifecycleConfigurationCommandOutput extends GetBucketLifecycleConfigurationOutput, __MetadataBearer { +} +declare const GetBucketLifecycleConfigurationCommand_base: { + new (input: GetBucketLifecycleConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketLifecycleConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns the lifecycle configuration information set on the bucket. For information about lifecycle configuration, see Object Lifecycle Management.

Bucket lifecycle configuration now supports specifying a lifecycle rule using an object key name prefix, one or more object tags, object size, or any combination of these. Accordingly, this section describes the latest API, which is compatible with the new functionality. The previous version of the API supported filtering based only on an object key name prefix, which is supported for general purpose buckets for backward compatibility. For the related API description, see GetBucketLifecycle.

Lifecycle configurations for directory buckets only support expiring objects and cancelling multipart uploads. Expiring of versioned objects, transitions, and tag filters are not supported.

+ *
+ *
+ *
Permissions

• General purpose bucket permissions - By default, all Amazon S3 resources are private, including buckets, objects, and related subresources (for example, lifecycle configuration and website configuration). Only the resource owner (that is, the Amazon Web Services account that created it) can access the resource. The resource owner can optionally grant access permissions to others by writing an access policy. For this operation, a user must have the s3:GetLifecycleConfiguration permission. For more information about permissions, see Managing Access Permissions to Your Amazon S3 Resources.

• Directory bucket permissions - You must have the s3express:GetLifecycleConfiguration permission in an IAM identity-based policy to use this operation. Cross-account access to this API operation isn't supported. The resource owner can optionally grant access permissions to others by creating a role or user for them as long as they are within the same account as the owner and resource. For more information about directory bucket policies and permissions, see Authorizing Regional endpoint APIs with IAM in the Amazon S3 User Guide.

  Directory buckets - For directory buckets, you must make requests for this API operation to the Regional endpoint. These endpoints support path-style requests in the format https://s3express-control.region-code.amazonaws.com/bucket-name. Virtual-hosted-style requests aren't supported. For more information about endpoints in Availability Zones, see Regional and Zonal endpoints for directory buckets in Availability Zones in the Amazon S3 User Guide. For more information about endpoints in Local Zones, see Concepts for directory buckets in Local Zones in the Amazon S3 User Guide.

HTTP Host header syntax

Directory buckets - The HTTP Host header syntax is s3express-control.region.amazonaws.com.

GetBucketLifecycleConfiguration has the following special error (one way to handle it is sketched below):

• Error code: NoSuchLifecycleConfiguration
  • Description: The lifecycle configuration does not exist.
  • HTTP Status Code: 404 Not Found
  • SOAP Fault Code Prefix: Client

The following operations are related to GetBucketLifecycleConfiguration:
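As a minimal sketch of handling the special error above: the helper name is hypothetical, and matching on err.name === "NoSuchLifecycleConfiguration" is an assumption based on the error code listed above (this SDK surfaces service errors by name rather than as a modeled exception class here).

```typescript
import { S3Client, GetBucketLifecycleConfigurationCommand } from "@aws-sdk/client-s3";

// Sketch: fetch lifecycle rules, treating the documented
// NoSuchLifecycleConfiguration error (404) as "no rules configured".
async function getLifecycleRules(client: S3Client, bucket: string) {
  try {
    const { Rules } = await client.send(
      new GetBucketLifecycleConfigurationCommand({ Bucket: bucket })
    );
    return Rules ?? [];
  } catch (err) {
    if (err instanceof Error && err.name === "NoSuchLifecycleConfiguration") {
      return []; // bucket has no lifecycle configuration
    }
    throw err;
  }
}
```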

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketLifecycleConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketLifecycleConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketLifecycleConfigurationRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetBucketLifecycleConfigurationCommand(input); + * const response = await client.send(command); + * // { // GetBucketLifecycleConfigurationOutput + * // Rules: [ // LifecycleRules + * // { // LifecycleRule + * // Expiration: { // LifecycleExpiration + * // Date: new Date("TIMESTAMP"), + * // Days: Number("int"), + * // ExpiredObjectDeleteMarker: true || false, + * // }, + * // ID: "STRING_VALUE", + * // Prefix: "STRING_VALUE", + * // Filter: { // LifecycleRuleFilter + * // Prefix: "STRING_VALUE", + * // Tag: { // Tag + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // ObjectSizeGreaterThan: Number("long"), + * // ObjectSizeLessThan: Number("long"), + * // And: { // LifecycleRuleAndOperator + * // Prefix: "STRING_VALUE", + * // Tags: [ // TagSet + * // { + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // ], + * // ObjectSizeGreaterThan: Number("long"), + * // ObjectSizeLessThan: Number("long"), + * // }, + * // }, + * // Status: "Enabled" || "Disabled", // required + * // Transitions: [ // TransitionList + * // { // Transition + * // Date: new Date("TIMESTAMP"), + * // Days: Number("int"), + * // StorageClass: "GLACIER" || "STANDARD_IA" || "ONEZONE_IA" || "INTELLIGENT_TIERING" || "DEEP_ARCHIVE" || "GLACIER_IR", + * // }, + * // ], + * // NoncurrentVersionTransitions: [ // NoncurrentVersionTransitionList + * // { // NoncurrentVersionTransition + * // NoncurrentDays: Number("int"), + * // StorageClass: "GLACIER" || "STANDARD_IA" || "ONEZONE_IA" || "INTELLIGENT_TIERING" || "DEEP_ARCHIVE" || "GLACIER_IR", + * // NewerNoncurrentVersions: Number("int"), + * // }, + * // ], + * // NoncurrentVersionExpiration: { // NoncurrentVersionExpiration + * // NoncurrentDays: Number("int"), + * // NewerNoncurrentVersions: Number("int"), + * // }, + * // AbortIncompleteMultipartUpload: { // AbortIncompleteMultipartUpload + * // DaysAfterInitiation: Number("int"), + * // }, + * // }, + * // ], + * // TransitionDefaultMinimumObjectSize: "varies_by_storage_class" || "all_storage_classes_128K", + * // }; + * + * ``` + * + * @param GetBucketLifecycleConfigurationCommandInput - {@link GetBucketLifecycleConfigurationCommandInput} + * @returns {@link GetBucketLifecycleConfigurationCommandOutput} + * @see {@link GetBucketLifecycleConfigurationCommandInput} for command's `input` shape. + * @see {@link GetBucketLifecycleConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @example To get lifecycle configuration on a bucket + * ```javascript + * // The following example retrieves lifecycle configuration on set on a bucket. + * const input = { + * Bucket: "examplebucket" + * }; + * const command = new GetBucketLifecycleConfigurationCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Rules: [ + * { + * ID: "Rule for TaxDocs/", + * Prefix: "TaxDocs", + * Status: "Enabled", + * Transitions: [ + * { + * Days: 365, + * StorageClass: "STANDARD_IA" + * } + * ] + * } + * ] + * } + * *\/ + * ``` + * + * @public + */ +export declare class GetBucketLifecycleConfigurationCommand extends GetBucketLifecycleConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketLifecycleConfigurationRequest; + output: GetBucketLifecycleConfigurationOutput; + }; + sdk: { + input: GetBucketLifecycleConfigurationCommandInput; + output: GetBucketLifecycleConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketLocationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketLocationCommand.d.ts new file mode 100644 index 00000000..564bd490 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketLocationCommand.d.ts @@ -0,0 +1,116 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketLocationOutput, GetBucketLocationRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketLocationCommand}. + */ +export interface GetBucketLocationCommandInput extends GetBucketLocationRequest { +} +/** + * @public + * + * The output of {@link GetBucketLocationCommand}. + */ +export interface GetBucketLocationCommandOutput extends GetBucketLocationOutput, __MetadataBearer { +} +declare const GetBucketLocationCommand_base: { + new (input: GetBucketLocationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketLocationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

This operation is not supported for directory buckets.

+ *
+ *

Returns the Region the bucket resides in. You set the bucket's Region using the LocationConstraint request parameter in a CreateBucket request. For more information, see CreateBucket.

When you use this API operation with an access point, provide the alias of the access point in place of the bucket name.

When you use this API operation with an Object Lambda access point, provide the alias of the Object Lambda access point in place of the bucket name. If the Object Lambda access point alias in a request is not valid, the error code InvalidAccessPointAliasError is returned. For more information about InvalidAccessPointAliasError, see List of Error Codes.

We recommend that you use HeadBucket to return the Region that a bucket resides in. For backward compatibility, Amazon S3 continues to support GetBucketLocation.
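Since the note above recommends HeadBucket over GetBucketLocation, a sketch of that alternative follows. The helper name is hypothetical, and the BucketRegion field on the HeadBucket output is an assumption that holds only for sufficiently recent SDK releases; older versions expose the region only via the x-amz-bucket-region response header.

```typescript
import { S3Client, HeadBucketCommand } from "@aws-sdk/client-s3";

// Sketch of the recommended alternative to GetBucketLocation.
// BucketRegion is assumed to exist on the output (recent SDK versions).
async function getBucketRegion(client: S3Client, bucket: string): Promise<string | undefined> {
  const response = await client.send(new HeadBucketCommand({ Bucket: bucket }));
  return response.BucketRegion;
}
```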

+ *
+ *

The following operations are related to GetBucketLocation:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketLocationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketLocationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketLocationRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetBucketLocationCommand(input); + * const response = await client.send(command); + * // { // GetBucketLocationOutput + * // LocationConstraint: "af-south-1" || "ap-east-1" || "ap-northeast-1" || "ap-northeast-2" || "ap-northeast-3" || "ap-south-1" || "ap-south-2" || "ap-southeast-1" || "ap-southeast-2" || "ap-southeast-3" || "ap-southeast-4" || "ap-southeast-5" || "ca-central-1" || "cn-north-1" || "cn-northwest-1" || "EU" || "eu-central-1" || "eu-central-2" || "eu-north-1" || "eu-south-1" || "eu-south-2" || "eu-west-1" || "eu-west-2" || "eu-west-3" || "il-central-1" || "me-central-1" || "me-south-1" || "sa-east-1" || "us-east-2" || "us-gov-east-1" || "us-gov-west-1" || "us-west-1" || "us-west-2", + * // }; + * + * ``` + * + * @param GetBucketLocationCommandInput - {@link GetBucketLocationCommandInput} + * @returns {@link GetBucketLocationCommandOutput} + * @see {@link GetBucketLocationCommandInput} for command's `input` shape. + * @see {@link GetBucketLocationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @example To get bucket location + * ```javascript + * // The following example returns bucket location. + * const input = { + * Bucket: "examplebucket" + * }; + * const command = new GetBucketLocationCommand(input); + * const response = await client.send(command); + * /* response is + * { + * LocationConstraint: "us-west-2" + * } + * *\/ + * ``` + * + * @public + */ +export declare class GetBucketLocationCommand extends GetBucketLocationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketLocationRequest; + output: GetBucketLocationOutput; + }; + sdk: { + input: GetBucketLocationCommandInput; + output: GetBucketLocationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketLoggingCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketLoggingCommand.d.ts new file mode 100644 index 00000000..ab3ccbfd --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketLoggingCommand.d.ts @@ -0,0 +1,111 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketLoggingOutput, GetBucketLoggingRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketLoggingCommand}. + */ +export interface GetBucketLoggingCommandInput extends GetBucketLoggingRequest { +} +/** + * @public + * + * The output of {@link GetBucketLoggingCommand}. + */ +export interface GetBucketLoggingCommandOutput extends GetBucketLoggingOutput, __MetadataBearer { +} +declare const GetBucketLoggingCommand_base: { + new (input: GetBucketLoggingCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketLoggingCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

This operation is not supported for directory buckets.

+ *
+ *

Returns the logging status of a bucket and the permissions users have to view and modify that status.

+ *

The following operations are related to GetBucketLogging:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketLoggingCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketLoggingCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketLoggingRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetBucketLoggingCommand(input); + * const response = await client.send(command); + * // { // GetBucketLoggingOutput + * // LoggingEnabled: { // LoggingEnabled + * // TargetBucket: "STRING_VALUE", // required + * // TargetGrants: [ // TargetGrants + * // { // TargetGrant + * // Grantee: { // Grantee + * // DisplayName: "STRING_VALUE", + * // EmailAddress: "STRING_VALUE", + * // ID: "STRING_VALUE", + * // URI: "STRING_VALUE", + * // Type: "CanonicalUser" || "AmazonCustomerByEmail" || "Group", // required + * // }, + * // Permission: "FULL_CONTROL" || "READ" || "WRITE", + * // }, + * // ], + * // TargetPrefix: "STRING_VALUE", // required + * // TargetObjectKeyFormat: { // TargetObjectKeyFormat + * // SimplePrefix: {}, + * // PartitionedPrefix: { // PartitionedPrefix + * // PartitionDateSource: "EventTime" || "DeliveryTime", + * // }, + * // }, + * // }, + * // }; + * + * ``` + * + * @param GetBucketLoggingCommandInput - {@link GetBucketLoggingCommandInput} + * @returns {@link GetBucketLoggingCommandOutput} + * @see {@link GetBucketLoggingCommandInput} for command's `input` shape. + * @see {@link GetBucketLoggingCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class GetBucketLoggingCommand extends GetBucketLoggingCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketLoggingRequest; + output: GetBucketLoggingOutput; + }; + sdk: { + input: GetBucketLoggingCommandInput; + output: GetBucketLoggingCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketMetadataTableConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketMetadataTableConfigurationCommand.d.ts new file mode 100644 index 00000000..f8339cb9 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketMetadataTableConfigurationCommand.d.ts @@ -0,0 +1,112 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketMetadataTableConfigurationOutput, GetBucketMetadataTableConfigurationRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketMetadataTableConfigurationCommand}. + */ +export interface GetBucketMetadataTableConfigurationCommandInput extends GetBucketMetadataTableConfigurationRequest { +} +/** + * @public + * + * The output of {@link GetBucketMetadataTableConfigurationCommand}. + */ +export interface GetBucketMetadataTableConfigurationCommandOutput extends GetBucketMetadataTableConfigurationOutput, __MetadataBearer { +} +declare const GetBucketMetadataTableConfigurationCommand_base: { + new (input: GetBucketMetadataTableConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketMetadataTableConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Retrieves the metadata table configuration for a general purpose bucket. For more information, see Accelerating data discovery with S3 Metadata in the Amazon S3 User Guide.

Permissions

To use this operation, you must have the s3:GetBucketMetadataTableConfiguration permission. For more information, see Setting up permissions for configuring metadata tables in the Amazon S3 User Guide.
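As a sketch, reading back the configuration's status plus any error detail; the helper name is hypothetical, and the field names follow the GetBucketMetadataTableConfigurationResult shape shown in the example below.

```typescript
import { S3Client, GetBucketMetadataTableConfigurationCommand } from "@aws-sdk/client-s3";

// Sketch: surface the metadata table's status, destination table ARN,
// and any error message reported for the configuration.
async function getMetadataTableStatus(client: S3Client, bucket: string) {
  const response = await client.send(
    new GetBucketMetadataTableConfigurationCommand({ Bucket: bucket })
  );
  const result = response.GetBucketMetadataTableConfigurationResult;
  return {
    status: result?.Status,
    tableArn: result?.MetadataTableConfigurationResult?.S3TablesDestinationResult?.TableArn,
    error: result?.Error?.ErrorMessage,
  };
}
```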

+ *
+ *
+ *

The following operations are related to GetBucketMetadataTableConfiguration:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketMetadataTableConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketMetadataTableConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketMetadataTableConfigurationRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetBucketMetadataTableConfigurationCommand(input); + * const response = await client.send(command); + * // { // GetBucketMetadataTableConfigurationOutput + * // GetBucketMetadataTableConfigurationResult: { // GetBucketMetadataTableConfigurationResult + * // MetadataTableConfigurationResult: { // MetadataTableConfigurationResult + * // S3TablesDestinationResult: { // S3TablesDestinationResult + * // TableBucketArn: "STRING_VALUE", // required + * // TableName: "STRING_VALUE", // required + * // TableArn: "STRING_VALUE", // required + * // TableNamespace: "STRING_VALUE", // required + * // }, + * // }, + * // Status: "STRING_VALUE", // required + * // Error: { // ErrorDetails + * // ErrorCode: "STRING_VALUE", + * // ErrorMessage: "STRING_VALUE", + * // }, + * // }, + * // }; + * + * ``` + * + * @param GetBucketMetadataTableConfigurationCommandInput - {@link GetBucketMetadataTableConfigurationCommandInput} + * @returns {@link GetBucketMetadataTableConfigurationCommandOutput} + * @see {@link GetBucketMetadataTableConfigurationCommandInput} for command's `input` shape. + * @see {@link GetBucketMetadataTableConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class GetBucketMetadataTableConfigurationCommand extends GetBucketMetadataTableConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketMetadataTableConfigurationRequest; + output: GetBucketMetadataTableConfigurationOutput; + }; + sdk: { + input: GetBucketMetadataTableConfigurationCommandInput; + output: GetBucketMetadataTableConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketMetricsConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketMetricsConfigurationCommand.d.ts new file mode 100644 index 00000000..80437b38 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketMetricsConfigurationCommand.d.ts @@ -0,0 +1,129 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketMetricsConfigurationOutput, GetBucketMetricsConfigurationRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketMetricsConfigurationCommand}. + */ +export interface GetBucketMetricsConfigurationCommandInput extends GetBucketMetricsConfigurationRequest { +} +/** + * @public + * + * The output of {@link GetBucketMetricsConfigurationCommand}. + */ +export interface GetBucketMetricsConfigurationCommandOutput extends GetBucketMetricsConfigurationOutput, __MetadataBearer { +} +declare const GetBucketMetricsConfigurationCommand_base: { + new (input: GetBucketMetricsConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketMetricsConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

This operation is not supported for directory buckets.

+ *
+ *

Gets a metrics configuration (specified by the metrics configuration ID) from the bucket. Note that this doesn't include the daily storage metrics.

To use this operation, you must have permissions to perform the s3:GetMetricsConfiguration action. The bucket owner has this permission by default. The bucket owner can grant this permission to others. For more information about permissions, see Permissions Related to Bucket Subresource Operations and Managing Access Permissions to Your Amazon S3 Resources.

For information about CloudWatch request metrics for Amazon S3, see Monitoring Metrics with Amazon CloudWatch.

The following operations are related to GetBucketMetricsConfiguration:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketMetricsConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketMetricsConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketMetricsConfigurationRequest + * Bucket: "STRING_VALUE", // required + * Id: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetBucketMetricsConfigurationCommand(input); + * const response = await client.send(command); + * // { // GetBucketMetricsConfigurationOutput + * // MetricsConfiguration: { // MetricsConfiguration + * // Id: "STRING_VALUE", // required + * // Filter: { // MetricsFilter Union: only one key present + * // Prefix: "STRING_VALUE", + * // Tag: { // Tag + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // AccessPointArn: "STRING_VALUE", + * // And: { // MetricsAndOperator + * // Prefix: "STRING_VALUE", + * // Tags: [ // TagSet + * // { + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // ], + * // AccessPointArn: "STRING_VALUE", + * // }, + * // }, + * // }, + * // }; + * + * ``` + * + * @param GetBucketMetricsConfigurationCommandInput - {@link GetBucketMetricsConfigurationCommandInput} + * @returns {@link GetBucketMetricsConfigurationCommandOutput} + * @see {@link GetBucketMetricsConfigurationCommandInput} for command's `input` shape. + * @see {@link GetBucketMetricsConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class GetBucketMetricsConfigurationCommand extends GetBucketMetricsConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketMetricsConfigurationRequest; + output: GetBucketMetricsConfigurationOutput; + }; + sdk: { + input: GetBucketMetricsConfigurationCommandInput; + output: GetBucketMetricsConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketNotificationConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketNotificationConfigurationCommand.d.ts new file mode 100644 index 00000000..8b29de71 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketNotificationConfigurationCommand.d.ts @@ -0,0 +1,155 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketNotificationConfigurationRequest, NotificationConfiguration } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketNotificationConfigurationCommand}. + */ +export interface GetBucketNotificationConfigurationCommandInput extends GetBucketNotificationConfigurationRequest { +} +/** + * @public + * + * The output of {@link GetBucketNotificationConfigurationCommand}. + */ +export interface GetBucketNotificationConfigurationCommandOutput extends NotificationConfiguration, __MetadataBearer { +} +declare const GetBucketNotificationConfigurationCommand_base: { + new (input: GetBucketNotificationConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketNotificationConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

This operation is not supported for directory buckets.

+ *
+ *

Returns the notification configuration of a bucket.

+ *

If notifications are not enabled on the bucket, the action returns an empty NotificationConfiguration element.

By default, you must be the bucket owner to read the notification configuration of a bucket. However, the bucket owner can use a bucket policy to grant permission to other users to read this configuration with the s3:GetBucketNotification permission.

When you use this API operation with an access point, provide the alias of the access point in place of the bucket name.

When you use this API operation with an Object Lambda access point, provide the alias of the Object Lambda access point in place of the bucket name. If the Object Lambda access point alias in a request is not valid, the error code InvalidAccessPointAliasError is returned. For more information about InvalidAccessPointAliasError, see List of Error Codes.

For more information about setting and reading the notification configuration on a bucket, see Setting Up Notification of Bucket Events. For more information about bucket policies, see Using Bucket Policies.
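To make the "empty NotificationConfiguration" behavior above concrete, here is a minimal sketch; the helper name is hypothetical, and the field names come from the output shape in the example below.

```typescript
import { S3Client, GetBucketNotificationConfigurationCommand } from "@aws-sdk/client-s3";

// Sketch: when notifications are not enabled, all target lists come
// back empty or undefined, matching the note above.
async function hasNotifications(client: S3Client, bucket: string): Promise<boolean> {
  const config = await client.send(
    new GetBucketNotificationConfigurationCommand({ Bucket: bucket })
  );
  return Boolean(
    config.TopicConfigurations?.length ||
    config.QueueConfigurations?.length ||
    config.LambdaFunctionConfigurations?.length ||
    config.EventBridgeConfiguration
  );
}
```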

+ *

The following action is related to GetBucketNotification:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketNotificationConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketNotificationConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketNotificationConfigurationRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetBucketNotificationConfigurationCommand(input); + * const response = await client.send(command); + * // { // NotificationConfiguration + * // TopicConfigurations: [ // TopicConfigurationList + * // { // TopicConfiguration + * // Id: "STRING_VALUE", + * // TopicArn: "STRING_VALUE", // required + * // Events: [ // EventList // required + * // "s3:ReducedRedundancyLostObject" || "s3:ObjectCreated:*" || "s3:ObjectCreated:Put" || "s3:ObjectCreated:Post" || "s3:ObjectCreated:Copy" || "s3:ObjectCreated:CompleteMultipartUpload" || "s3:ObjectRemoved:*" || "s3:ObjectRemoved:Delete" || "s3:ObjectRemoved:DeleteMarkerCreated" || "s3:ObjectRestore:*" || "s3:ObjectRestore:Post" || "s3:ObjectRestore:Completed" || "s3:Replication:*" || "s3:Replication:OperationFailedReplication" || "s3:Replication:OperationNotTracked" || "s3:Replication:OperationMissedThreshold" || "s3:Replication:OperationReplicatedAfterThreshold" || "s3:ObjectRestore:Delete" || "s3:LifecycleTransition" || "s3:IntelligentTiering" || "s3:ObjectAcl:Put" || "s3:LifecycleExpiration:*" || "s3:LifecycleExpiration:Delete" || "s3:LifecycleExpiration:DeleteMarkerCreated" || "s3:ObjectTagging:*" || "s3:ObjectTagging:Put" || "s3:ObjectTagging:Delete", + * // ], + * // Filter: { // NotificationConfigurationFilter + * // Key: { // S3KeyFilter + * // FilterRules: [ // FilterRuleList + * // { // FilterRule + * // Name: "prefix" || "suffix", + * // Value: "STRING_VALUE", + * // }, + * // ], + * // }, + * // }, + * // }, + * // ], + * // QueueConfigurations: [ // QueueConfigurationList + * // { // QueueConfiguration + * // Id: "STRING_VALUE", + * // QueueArn: "STRING_VALUE", // required + * // Events: [ // required + * // "s3:ReducedRedundancyLostObject" || "s3:ObjectCreated:*" || "s3:ObjectCreated:Put" || "s3:ObjectCreated:Post" || "s3:ObjectCreated:Copy" || "s3:ObjectCreated:CompleteMultipartUpload" || "s3:ObjectRemoved:*" || "s3:ObjectRemoved:Delete" || "s3:ObjectRemoved:DeleteMarkerCreated" || "s3:ObjectRestore:*" || "s3:ObjectRestore:Post" || "s3:ObjectRestore:Completed" || "s3:Replication:*" || "s3:Replication:OperationFailedReplication" || "s3:Replication:OperationNotTracked" || "s3:Replication:OperationMissedThreshold" || "s3:Replication:OperationReplicatedAfterThreshold" || "s3:ObjectRestore:Delete" || "s3:LifecycleTransition" || "s3:IntelligentTiering" || "s3:ObjectAcl:Put" || "s3:LifecycleExpiration:*" || "s3:LifecycleExpiration:Delete" || "s3:LifecycleExpiration:DeleteMarkerCreated" || "s3:ObjectTagging:*" || "s3:ObjectTagging:Put" || "s3:ObjectTagging:Delete", + * // ], + * // Filter: { + * // Key: { + * // FilterRules: [ + * // { + * // Name: "prefix" || "suffix", + * // Value: "STRING_VALUE", + * // }, + * // ], + * // }, + * // }, + * // }, + * // ], + * // LambdaFunctionConfigurations: [ // LambdaFunctionConfigurationList + * // { // LambdaFunctionConfiguration + * // Id: "STRING_VALUE", + * // LambdaFunctionArn: "STRING_VALUE", // required + * // Events: [ // required + * // 
"s3:ReducedRedundancyLostObject" || "s3:ObjectCreated:*" || "s3:ObjectCreated:Put" || "s3:ObjectCreated:Post" || "s3:ObjectCreated:Copy" || "s3:ObjectCreated:CompleteMultipartUpload" || "s3:ObjectRemoved:*" || "s3:ObjectRemoved:Delete" || "s3:ObjectRemoved:DeleteMarkerCreated" || "s3:ObjectRestore:*" || "s3:ObjectRestore:Post" || "s3:ObjectRestore:Completed" || "s3:Replication:*" || "s3:Replication:OperationFailedReplication" || "s3:Replication:OperationNotTracked" || "s3:Replication:OperationMissedThreshold" || "s3:Replication:OperationReplicatedAfterThreshold" || "s3:ObjectRestore:Delete" || "s3:LifecycleTransition" || "s3:IntelligentTiering" || "s3:ObjectAcl:Put" || "s3:LifecycleExpiration:*" || "s3:LifecycleExpiration:Delete" || "s3:LifecycleExpiration:DeleteMarkerCreated" || "s3:ObjectTagging:*" || "s3:ObjectTagging:Put" || "s3:ObjectTagging:Delete", + * // ], + * // Filter: { + * // Key: { + * // FilterRules: [ + * // { + * // Name: "prefix" || "suffix", + * // Value: "STRING_VALUE", + * // }, + * // ], + * // }, + * // }, + * // }, + * // ], + * // EventBridgeConfiguration: {}, + * // }; + * + * ``` + * + * @param GetBucketNotificationConfigurationCommandInput - {@link GetBucketNotificationConfigurationCommandInput} + * @returns {@link GetBucketNotificationConfigurationCommandOutput} + * @see {@link GetBucketNotificationConfigurationCommandInput} for command's `input` shape. + * @see {@link GetBucketNotificationConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class GetBucketNotificationConfigurationCommand extends GetBucketNotificationConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketNotificationConfigurationRequest; + output: NotificationConfiguration; + }; + sdk: { + input: GetBucketNotificationConfigurationCommandInput; + output: GetBucketNotificationConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketOwnershipControlsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketOwnershipControlsCommand.d.ts new file mode 100644 index 00000000..52000d65 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketOwnershipControlsCommand.d.ts @@ -0,0 +1,100 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketOwnershipControlsOutput, GetBucketOwnershipControlsRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketOwnershipControlsCommand}. + */ +export interface GetBucketOwnershipControlsCommandInput extends GetBucketOwnershipControlsRequest { +} +/** + * @public + * + * The output of {@link GetBucketOwnershipControlsCommand}. + */ +export interface GetBucketOwnershipControlsCommandOutput extends GetBucketOwnershipControlsOutput, __MetadataBearer { +} +declare const GetBucketOwnershipControlsCommand_base: { + new (input: GetBucketOwnershipControlsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketOwnershipControlsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

This operation is not supported for directory buckets.

+ *
+ *

Retrieves OwnershipControls for an Amazon S3 bucket. To use this operation, you must have the s3:GetBucketOwnershipControls permission. For more information about Amazon S3 permissions, see Specifying permissions in a policy.

For information about Amazon S3 Object Ownership, see Using Object Ownership.
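For illustration, a small sketch that reads the effective object-ownership setting; the helper name is hypothetical, and taking the first rule is an assumption that mirrors the single-rule output shape shown in the example below.

```typescript
import { S3Client, GetBucketOwnershipControlsCommand } from "@aws-sdk/client-s3";

// Sketch: OwnershipControls carries a list of rules; in practice a
// bucket has one ObjectOwnership rule, so the first is read here.
async function getObjectOwnership(client: S3Client, bucket: string) {
  const { OwnershipControls } = await client.send(
    new GetBucketOwnershipControlsCommand({ Bucket: bucket })
  );
  return OwnershipControls?.Rules?.[0]?.ObjectOwnership; // e.g. "BucketOwnerEnforced"
}
```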

+ *

The following operations are related to GetBucketOwnershipControls:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketOwnershipControlsCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketOwnershipControlsCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketOwnershipControlsRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetBucketOwnershipControlsCommand(input); + * const response = await client.send(command); + * // { // GetBucketOwnershipControlsOutput + * // OwnershipControls: { // OwnershipControls + * // Rules: [ // OwnershipControlsRules // required + * // { // OwnershipControlsRule + * // ObjectOwnership: "BucketOwnerPreferred" || "ObjectWriter" || "BucketOwnerEnforced", // required + * // }, + * // ], + * // }, + * // }; + * + * ``` + * + * @param GetBucketOwnershipControlsCommandInput - {@link GetBucketOwnershipControlsCommandInput} + * @returns {@link GetBucketOwnershipControlsCommandOutput} + * @see {@link GetBucketOwnershipControlsCommandInput} for command's `input` shape. + * @see {@link GetBucketOwnershipControlsCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class GetBucketOwnershipControlsCommand extends GetBucketOwnershipControlsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketOwnershipControlsRequest; + output: GetBucketOwnershipControlsOutput; + }; + sdk: { + input: GetBucketOwnershipControlsCommandInput; + output: GetBucketOwnershipControlsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketPolicyCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketPolicyCommand.d.ts new file mode 100644 index 00000000..7f0b4448 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketPolicyCommand.d.ts @@ -0,0 +1,159 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketPolicyOutput, GetBucketPolicyRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketPolicyCommand}. + */ +export interface GetBucketPolicyCommandInput extends GetBucketPolicyRequest { +} +/** + * @public + * + * The output of {@link GetBucketPolicyCommand}. + */ +export interface GetBucketPolicyCommandOutput extends GetBucketPolicyOutput, __MetadataBearer { +} +declare const GetBucketPolicyCommand_base: { + new (input: GetBucketPolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketPolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns the policy of a specified bucket.

+ * + *

Directory buckets - For directory buckets, you must make requests for this API operation to the Regional endpoint. These endpoints support path-style requests in the format https://s3express-control.region-code.amazonaws.com/bucket-name. Virtual-hosted-style requests aren't supported. For more information about endpoints in Availability Zones, see Regional and Zonal endpoints for directory buckets in Availability Zones in the Amazon S3 User Guide. For more information about endpoints in Local Zones, see Concepts for directory buckets in Local Zones in the Amazon S3 User Guide.

+ *
+ *
+ *
Permissions
+ *
+ *

If you are using an identity other than the root user of the Amazon Web Services account that owns the bucket, the calling identity must both have the GetBucketPolicy permissions on the specified bucket and belong to the bucket owner's account in order to use this operation.

+ *

If you don't have GetBucketPolicy permissions, Amazon S3 returns a 403 Access Denied error. If you have the correct permissions, but you're not using an identity that belongs to the bucket owner's account, Amazon S3 returns a 405 Method Not Allowed error. (One way to handle both cases is sketched below.)
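A minimal sketch of distinguishing the two failure modes just described; the helper name is hypothetical, and matching on $metadata.httpStatusCode is one approach, not the only one.

```typescript
import { S3Client, GetBucketPolicyCommand } from "@aws-sdk/client-s3";

// Sketch: fetch and parse a bucket policy, mapping the documented
// 403 / 405 failure modes to explicit outcomes.
async function readBucketPolicy(client: S3Client, bucket: string) {
  try {
    const { Policy } = await client.send(new GetBucketPolicyCommand({ Bucket: bucket }));
    return Policy ? JSON.parse(Policy) : undefined;
  } catch (err: any) {
    const status = err?.$metadata?.httpStatusCode;
    if (status === 403) throw new Error(`Missing GetBucketPolicy permission on ${bucket}`);
    if (status === 405) throw new Error(`Caller is not in the bucket owner's account for ${bucket}`);
    throw err;
  }
}
```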

+ * + *

To ensure that bucket owners don't inadvertently lock themselves out of their own buckets, the root principal in a bucket owner's Amazon Web Services account can perform the GetBucketPolicy, PutBucketPolicy, and DeleteBucketPolicy API actions, even if their bucket policy explicitly denies the root principal's access. Bucket owner root principals can only be blocked from performing these API actions by VPC endpoint policies and Amazon Web Services Organizations policies.

+ *
+ *
• General purpose bucket permissions - The s3:GetBucketPolicy permission is required in a policy. For more information about general purpose bucket policies, see Using Bucket Policies and User Policies in the Amazon S3 User Guide.

• Directory bucket permissions - To grant access to this API operation, you must have the s3express:GetBucketPolicy permission in an IAM identity-based policy instead of a bucket policy. Cross-account access to this API operation isn't supported. This operation can only be performed by the Amazon Web Services account that owns the resource. For more information about directory bucket policies and permissions, see Amazon Web Services Identity and Access Management (IAM) for S3 Express One Zone in the Amazon S3 User Guide.

Example bucket policies

General purpose buckets example bucket policies - See Bucket policy examples in the Amazon S3 User Guide.

Directory bucket example bucket policies - See Example bucket policies for S3 Express One Zone in the Amazon S3 User Guide.

+ *
+ *
HTTP Host header syntax
+ *
+ *

Directory buckets - The HTTP Host header syntax is s3express-control.region-code.amazonaws.com.

+ *
+ *
+ *

The following action is related to GetBucketPolicy:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketPolicyCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketPolicyCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketPolicyRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetBucketPolicyCommand(input); + * const response = await client.send(command); + * // { // GetBucketPolicyOutput + * // Policy: "STRING_VALUE", + * // }; + * + * ``` + * + * @param GetBucketPolicyCommandInput - {@link GetBucketPolicyCommandInput} + * @returns {@link GetBucketPolicyCommandOutput} + * @see {@link GetBucketPolicyCommandInput} for command's `input` shape. + * @see {@link GetBucketPolicyCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @example To get bucket policy + * ```javascript + * // The following example returns bucket policy associated with a bucket. + * const input = { + * Bucket: "examplebucket" + * }; + * const command = new GetBucketPolicyCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Policy: `{"Version":"2008-10-17","Id":"LogPolicy","Statement":[{"Sid":"Enables the log delivery group to publish logs to your bucket ","Effect":"Allow","Principal":{"AWS":"111122223333"},"Action":["s3:GetBucketAcl","s3:GetObjectAcl","s3:PutObject"],"Resource":["arn:aws:s3:::policytest1/*","arn:aws:s3:::policytest1"]}]}` + * } + * *\/ + * ``` + * + * @public + */ +export declare class GetBucketPolicyCommand extends GetBucketPolicyCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketPolicyRequest; + output: GetBucketPolicyOutput; + }; + sdk: { + input: GetBucketPolicyCommandInput; + output: GetBucketPolicyCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketPolicyStatusCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketPolicyStatusCommand.d.ts new file mode 100644 index 00000000..3bc5ec9e --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketPolicyStatusCommand.d.ts @@ -0,0 +1,106 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketPolicyStatusOutput, GetBucketPolicyStatusRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketPolicyStatusCommand}. + */ +export interface GetBucketPolicyStatusCommandInput extends GetBucketPolicyStatusRequest { +} +/** + * @public + * + * The output of {@link GetBucketPolicyStatusCommand}. + */ +export interface GetBucketPolicyStatusCommandOutput extends GetBucketPolicyStatusOutput, __MetadataBearer { +} +declare const GetBucketPolicyStatusCommand_base: { + new (input: GetBucketPolicyStatusCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketPolicyStatusCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

This operation is not supported for directory buckets.

+ *
+ *

Retrieves the policy status for an Amazon S3 bucket, indicating whether the bucket is public. In order to use this operation, you must have the s3:GetBucketPolicyStatus permission. For more information about Amazon S3 permissions, see Specifying Permissions in a Policy.

+ *

For more information about when Amazon S3 considers a bucket public, see The Meaning of "Public".
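As a sketch of checking the policy status described above (the helper name is hypothetical; the fields match the output shape in the example below):

```typescript
import { S3Client, GetBucketPolicyStatusCommand } from "@aws-sdk/client-s3";

// Sketch: IsPublic reflects whether Amazon S3 considers the bucket's
// policy public, per the criteria referenced above.
async function isBucketPublic(client: S3Client, bucket: string): Promise<boolean> {
  const { PolicyStatus } = await client.send(
    new GetBucketPolicyStatusCommand({ Bucket: bucket })
  );
  return PolicyStatus?.IsPublic ?? false;
}
```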

+ *

The following operations are related to GetBucketPolicyStatus:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketPolicyStatusCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketPolicyStatusCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketPolicyStatusRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetBucketPolicyStatusCommand(input); + * const response = await client.send(command); + * // { // GetBucketPolicyStatusOutput + * // PolicyStatus: { // PolicyStatus + * // IsPublic: true || false, + * // }, + * // }; + * + * ``` + * + * @param GetBucketPolicyStatusCommandInput - {@link GetBucketPolicyStatusCommandInput} + * @returns {@link GetBucketPolicyStatusCommandOutput} + * @see {@link GetBucketPolicyStatusCommandInput} for command's `input` shape. + * @see {@link GetBucketPolicyStatusCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class GetBucketPolicyStatusCommand extends GetBucketPolicyStatusCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketPolicyStatusRequest; + output: GetBucketPolicyStatusOutput; + }; + sdk: { + input: GetBucketPolicyStatusCommandInput; + output: GetBucketPolicyStatusCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketReplicationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketReplicationCommand.d.ts new file mode 100644 index 00000000..64005d50 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketReplicationCommand.d.ts @@ -0,0 +1,195 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketReplicationOutput, GetBucketReplicationRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketReplicationCommand}. + */ +export interface GetBucketReplicationCommandInput extends GetBucketReplicationRequest { +} +/** + * @public + * + * The output of {@link GetBucketReplicationCommand}. + */ +export interface GetBucketReplicationCommandOutput extends GetBucketReplicationOutput, __MetadataBearer { +} +declare const GetBucketReplicationCommand_base: { + new (input: GetBucketReplicationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketReplicationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Returns the replication configuration of a bucket.
+ *
+ * Note: It can take a while to propagate the put or delete of a replication configuration
+ * to all Amazon S3 systems. Therefore, a get request soon after a put or delete can return
+ * a wrong result.
+ *
+ * For information about replication configuration, see Replication in the
+ * Amazon S3 User Guide.
+ *
+ * This action requires permissions for the `s3:GetReplicationConfiguration` action. For
+ * more information about permissions, see Using Bucket Policies and User Policies.
+ *
+ * If you include the `Filter` element in a replication configuration, you must also
+ * include the `DeleteMarkerReplication` and `Priority` elements. The response also returns
+ * those elements.
+ *
+ * For information about `GetBucketReplication` errors, see List of replication-related
+ * error codes.
+ *
+ * The following operations are related to `GetBucketReplication`:
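A sketch of walking the returned rules, grounded in the output shape below (bucket name illustrative; assumes an async context):

```typescript
import { S3Client, GetBucketReplicationCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});
const { ReplicationConfiguration } = await client.send(
  new GetBucketReplicationCommand({ Bucket: "amzn-s3-demo-bucket" }) // illustrative name
);

// Each rule carries its status and a destination bucket ARN.
for (const rule of ReplicationConfiguration?.Rules ?? []) {
  console.log(`${rule.ID ?? "<no id>"}: ${rule.Status} -> ${rule.Destination?.Bucket}`);
}
```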

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketReplicationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketReplicationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketReplicationRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetBucketReplicationCommand(input); + * const response = await client.send(command); + * // { // GetBucketReplicationOutput + * // ReplicationConfiguration: { // ReplicationConfiguration + * // Role: "STRING_VALUE", // required + * // Rules: [ // ReplicationRules // required + * // { // ReplicationRule + * // ID: "STRING_VALUE", + * // Priority: Number("int"), + * // Prefix: "STRING_VALUE", + * // Filter: { // ReplicationRuleFilter + * // Prefix: "STRING_VALUE", + * // Tag: { // Tag + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // And: { // ReplicationRuleAndOperator + * // Prefix: "STRING_VALUE", + * // Tags: [ // TagSet + * // { + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // ], + * // }, + * // }, + * // Status: "Enabled" || "Disabled", // required + * // SourceSelectionCriteria: { // SourceSelectionCriteria + * // SseKmsEncryptedObjects: { // SseKmsEncryptedObjects + * // Status: "Enabled" || "Disabled", // required + * // }, + * // ReplicaModifications: { // ReplicaModifications + * // Status: "Enabled" || "Disabled", // required + * // }, + * // }, + * // ExistingObjectReplication: { // ExistingObjectReplication + * // Status: "Enabled" || "Disabled", // required + * // }, + * // Destination: { // Destination + * // Bucket: "STRING_VALUE", // required + * // Account: "STRING_VALUE", + * // StorageClass: "STANDARD" || "REDUCED_REDUNDANCY" || "STANDARD_IA" || "ONEZONE_IA" || "INTELLIGENT_TIERING" || "GLACIER" || "DEEP_ARCHIVE" || "OUTPOSTS" || "GLACIER_IR" || "SNOW" || "EXPRESS_ONEZONE", + * // AccessControlTranslation: { // AccessControlTranslation + * // Owner: "Destination", // required + * // }, + * // EncryptionConfiguration: { // EncryptionConfiguration + * // ReplicaKmsKeyID: "STRING_VALUE", + * // }, + * // ReplicationTime: { // ReplicationTime + * // Status: "Enabled" || "Disabled", // required + * // Time: { // ReplicationTimeValue + * // Minutes: Number("int"), + * // }, + * // }, + * // Metrics: { // Metrics + * // Status: "Enabled" || "Disabled", // required + * // EventThreshold: { + * // Minutes: Number("int"), + * // }, + * // }, + * // }, + * // DeleteMarkerReplication: { // DeleteMarkerReplication + * // Status: "Enabled" || "Disabled", + * // }, + * // }, + * // ], + * // }, + * // }; + * + * ``` + * + * @param GetBucketReplicationCommandInput - {@link GetBucketReplicationCommandInput} + * @returns {@link GetBucketReplicationCommandOutput} + * @see {@link GetBucketReplicationCommandInput} for command's `input` shape. + * @see {@link GetBucketReplicationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @example To get replication configuration set on a bucket + * ```javascript + * // The following example returns replication configuration set on a bucket. + * const input = { + * Bucket: "examplebucket" + * }; + * const command = new GetBucketReplicationCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ReplicationConfiguration: { + * Role: "arn:aws:iam::acct-id:role/example-role", + * Rules: [ + * { + * Destination: { + * Bucket: "arn:aws:s3:::destination-bucket" + * }, + * ID: "MWIwNTkwZmItMTE3MS00ZTc3LWJkZDEtNzRmODQwYzc1OTQy", + * Prefix: "Tax", + * Status: "Enabled" + * } + * ] + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class GetBucketReplicationCommand extends GetBucketReplicationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketReplicationRequest; + output: GetBucketReplicationOutput; + }; + sdk: { + input: GetBucketReplicationCommandInput; + output: GetBucketReplicationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketRequestPaymentCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketRequestPaymentCommand.d.ts new file mode 100644 index 00000000..3c83ccc3 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketRequestPaymentCommand.d.ts @@ -0,0 +1,101 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketRequestPaymentOutput, GetBucketRequestPaymentRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketRequestPaymentCommand}. + */ +export interface GetBucketRequestPaymentCommandInput extends GetBucketRequestPaymentRequest { +} +/** + * @public + * + * The output of {@link GetBucketRequestPaymentCommand}. + */ +export interface GetBucketRequestPaymentCommandOutput extends GetBucketRequestPaymentOutput, __MetadataBearer { +} +declare const GetBucketRequestPaymentCommand_base: { + new (input: GetBucketRequestPaymentCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketRequestPaymentCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Returns the request payment configuration of a bucket. To use this version of the
+ * operation, you must be the bucket owner. For more information, see Requester Pays
+ * Buckets.
+ *
+ * The following operations are related to `GetBucketRequestPayment`:
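One practical use of the returned `Payer` flag is deciding whether follow-up object requests must acknowledge Requester Pays; a sketch (bucket and key are illustrative):

```typescript
import {
  S3Client,
  GetBucketRequestPaymentCommand,
  GetObjectCommand,
  type GetObjectCommandInput,
} from "@aws-sdk/client-s3";

const client = new S3Client({});
const bucket = "amzn-s3-demo-bucket"; // illustrative name
const { Payer } = await client.send(new GetBucketRequestPaymentCommand({ Bucket: bucket }));

const input: GetObjectCommandInput = { Bucket: bucket, Key: "report.csv" }; // illustrative key
if (Payer === "Requester") {
  // Requester Pays buckets reject object requests that don't accept the charge.
  input.RequestPayer = "requester";
}
const object = await client.send(new GetObjectCommand(input));
```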

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketRequestPaymentCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketRequestPaymentCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketRequestPaymentRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetBucketRequestPaymentCommand(input); + * const response = await client.send(command); + * // { // GetBucketRequestPaymentOutput + * // Payer: "Requester" || "BucketOwner", + * // }; + * + * ``` + * + * @param GetBucketRequestPaymentCommandInput - {@link GetBucketRequestPaymentCommandInput} + * @returns {@link GetBucketRequestPaymentCommandOutput} + * @see {@link GetBucketRequestPaymentCommandInput} for command's `input` shape. + * @see {@link GetBucketRequestPaymentCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @example To get bucket versioning configuration + * ```javascript + * // The following example retrieves bucket versioning configuration. + * const input = { + * Bucket: "examplebucket" + * }; + * const command = new GetBucketRequestPaymentCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Payer: "BucketOwner" + * } + * *\/ + * ``` + * + * @public + */ +export declare class GetBucketRequestPaymentCommand extends GetBucketRequestPaymentCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketRequestPaymentRequest; + output: GetBucketRequestPaymentOutput; + }; + sdk: { + input: GetBucketRequestPaymentCommandInput; + output: GetBucketRequestPaymentCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketTaggingCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketTaggingCommand.d.ts new file mode 100644 index 00000000..d692b1fc --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketTaggingCommand.d.ts @@ -0,0 +1,134 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketTaggingOutput, GetBucketTaggingRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketTaggingCommand}. + */ +export interface GetBucketTaggingCommandInput extends GetBucketTaggingRequest { +} +/** + * @public + * + * The output of {@link GetBucketTaggingCommand}. + */ +export interface GetBucketTaggingCommandOutput extends GetBucketTaggingOutput, __MetadataBearer { +} +declare const GetBucketTaggingCommand_base: { + new (input: GetBucketTaggingCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketTaggingCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Returns the tag set associated with the bucket.
+ *
+ * To use this operation, you must have permission to perform the
+ * `s3:GetBucketTagging` action. By default, the bucket owner has this permission and can
+ * grant this permission to others.
+ *
+ * `GetBucketTagging` has the following special error:
+ *
+ * - Error code: `NoSuchTagSet`
+ *   - Description: There is no tag set associated with the bucket.
+ *
+ * The following operations are related to `GetBucketTagging`:
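Because a bucket without tags surfaces as an error rather than an empty list, callers typically catch it. A sketch, assuming the `NoSuchTagSet` code above arrives as the error's name (no dedicated exception class appears to be modeled for it):

```typescript
import { S3Client, GetBucketTaggingCommand, S3ServiceException } from "@aws-sdk/client-s3";

const client = new S3Client({});

async function getBucketTags(bucket: string) {
  try {
    const { TagSet } = await client.send(new GetBucketTaggingCommand({ Bucket: bucket }));
    return TagSet ?? [];
  } catch (err) {
    // Assumption: the special error documented above is reported via err.name.
    if (err instanceof S3ServiceException && err.name === "NoSuchTagSet") {
      return []; // no tag set configured on this bucket
    }
    throw err;
  }
}
```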

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketTaggingCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketTaggingCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketTaggingRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetBucketTaggingCommand(input); + * const response = await client.send(command); + * // { // GetBucketTaggingOutput + * // TagSet: [ // TagSet // required + * // { // Tag + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // ], + * // }; + * + * ``` + * + * @param GetBucketTaggingCommandInput - {@link GetBucketTaggingCommandInput} + * @returns {@link GetBucketTaggingCommandOutput} + * @see {@link GetBucketTaggingCommandInput} for command's `input` shape. + * @see {@link GetBucketTaggingCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @example To get tag set associated with a bucket + * ```javascript + * // The following example returns tag set associated with a bucket + * const input = { + * Bucket: "examplebucket" + * }; + * const command = new GetBucketTaggingCommand(input); + * const response = await client.send(command); + * /* response is + * { + * TagSet: [ + * { + * Key: "key1", + * Value: "value1" + * }, + * { + * Key: "key2", + * Value: "value2" + * } + * ] + * } + * *\/ + * ``` + * + * @public + */ +export declare class GetBucketTaggingCommand extends GetBucketTaggingCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketTaggingRequest; + output: GetBucketTaggingOutput; + }; + sdk: { + input: GetBucketTaggingCommandInput; + output: GetBucketTaggingCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketVersioningCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketVersioningCommand.d.ts new file mode 100644 index 00000000..f074d8ee --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketVersioningCommand.d.ts @@ -0,0 +1,115 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketVersioningOutput, GetBucketVersioningRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketVersioningCommand}. + */ +export interface GetBucketVersioningCommandInput extends GetBucketVersioningRequest { +} +/** + * @public + * + * The output of {@link GetBucketVersioningCommand}. + */ +export interface GetBucketVersioningCommandOutput extends GetBucketVersioningOutput, __MetadataBearer { +} +declare const GetBucketVersioningCommand_base: { + new (input: GetBucketVersioningCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketVersioningCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Returns the versioning state of a bucket.
+ *
+ * To retrieve the versioning state of a bucket, you must be the bucket owner.
+ *
+ * This implementation also returns the MFA Delete status of the versioning state. If the
+ * MFA Delete status is `enabled`, the bucket owner must use an authentication device to
+ * change the versioning state of the bucket.
+ *
+ * The following operations are related to `GetBucketVersioning`:
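A sketch of reading both flags (bucket name illustrative; in practice `Status` comes back undefined on buckets where versioning was never enabled):

```typescript
import { S3Client, GetBucketVersioningCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});
const { Status, MFADelete } = await client.send(
  new GetBucketVersioningCommand({ Bucket: "amzn-s3-demo-bucket" }) // illustrative name
);

// Status is undefined when versioning has never been enabled on the bucket.
console.log(`versioning: ${Status ?? "never enabled"}, MFA delete: ${MFADelete ?? "Disabled"}`);
```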

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketVersioningCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketVersioningCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketVersioningRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetBucketVersioningCommand(input); + * const response = await client.send(command); + * // { // GetBucketVersioningOutput + * // Status: "Enabled" || "Suspended", + * // MFADelete: "Enabled" || "Disabled", + * // }; + * + * ``` + * + * @param GetBucketVersioningCommandInput - {@link GetBucketVersioningCommandInput} + * @returns {@link GetBucketVersioningCommandOutput} + * @see {@link GetBucketVersioningCommandInput} for command's `input` shape. + * @see {@link GetBucketVersioningCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @example To get bucket versioning configuration + * ```javascript + * // The following example retrieves bucket versioning configuration. + * const input = { + * Bucket: "examplebucket" + * }; + * const command = new GetBucketVersioningCommand(input); + * const response = await client.send(command); + * /* response is + * { + * MFADelete: "Disabled", + * Status: "Enabled" + * } + * *\/ + * ``` + * + * @public + */ +export declare class GetBucketVersioningCommand extends GetBucketVersioningCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketVersioningRequest; + output: GetBucketVersioningOutput; + }; + sdk: { + input: GetBucketVersioningCommandInput; + output: GetBucketVersioningCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketWebsiteCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketWebsiteCommand.d.ts new file mode 100644 index 00000000..d713d9d4 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetBucketWebsiteCommand.d.ts @@ -0,0 +1,139 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketWebsiteOutput, GetBucketWebsiteRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetBucketWebsiteCommand}. + */ +export interface GetBucketWebsiteCommandInput extends GetBucketWebsiteRequest { +} +/** + * @public + * + * The output of {@link GetBucketWebsiteCommand}. + */ +export interface GetBucketWebsiteCommandOutput extends GetBucketWebsiteOutput, __MetadataBearer { +} +declare const GetBucketWebsiteCommand_base: { + new (input: GetBucketWebsiteCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetBucketWebsiteCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Returns the website configuration for a bucket. To host a website on Amazon S3, you can
+ * configure a bucket as a website by adding a website configuration. For more information
+ * about hosting websites, see Hosting Websites on Amazon S3.
+ *
+ * This GET action requires the `S3:GetBucketWebsite` permission. By default, only the
+ * bucket owner can read the bucket website configuration. However, bucket owners can allow
+ * other users to read the website configuration by writing a bucket policy granting them
+ * the `S3:GetBucketWebsite` permission.
+ *
+ * The following operations are related to `GetBucketWebsite`:
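The output is either a blanket redirect or a document-serving configuration; a sketch of branching on that (bucket name illustrative):

```typescript
import { S3Client, GetBucketWebsiteCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});
const site = await client.send(
  new GetBucketWebsiteCommand({ Bucket: "amzn-s3-demo-bucket" }) // illustrative name
);

if (site.RedirectAllRequestsTo) {
  // The bucket forwards every request to another host.
  console.log(`redirects all requests to ${site.RedirectAllRequestsTo.HostName}`);
} else {
  console.log(`index: ${site.IndexDocument?.Suffix}, error page: ${site.ErrorDocument?.Key}`);
}
```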

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetBucketWebsiteCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetBucketWebsiteCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetBucketWebsiteRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetBucketWebsiteCommand(input); + * const response = await client.send(command); + * // { // GetBucketWebsiteOutput + * // RedirectAllRequestsTo: { // RedirectAllRequestsTo + * // HostName: "STRING_VALUE", // required + * // Protocol: "http" || "https", + * // }, + * // IndexDocument: { // IndexDocument + * // Suffix: "STRING_VALUE", // required + * // }, + * // ErrorDocument: { // ErrorDocument + * // Key: "STRING_VALUE", // required + * // }, + * // RoutingRules: [ // RoutingRules + * // { // RoutingRule + * // Condition: { // Condition + * // HttpErrorCodeReturnedEquals: "STRING_VALUE", + * // KeyPrefixEquals: "STRING_VALUE", + * // }, + * // Redirect: { // Redirect + * // HostName: "STRING_VALUE", + * // HttpRedirectCode: "STRING_VALUE", + * // Protocol: "http" || "https", + * // ReplaceKeyPrefixWith: "STRING_VALUE", + * // ReplaceKeyWith: "STRING_VALUE", + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param GetBucketWebsiteCommandInput - {@link GetBucketWebsiteCommandInput} + * @returns {@link GetBucketWebsiteCommandOutput} + * @see {@link GetBucketWebsiteCommandInput} for command's `input` shape. + * @see {@link GetBucketWebsiteCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @example To get bucket website configuration + * ```javascript + * // The following example retrieves website configuration of a bucket. + * const input = { + * Bucket: "examplebucket" + * }; + * const command = new GetBucketWebsiteCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ErrorDocument: { + * Key: "error.html" + * }, + * IndexDocument: { + * Suffix: "index.html" + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class GetBucketWebsiteCommand extends GetBucketWebsiteCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetBucketWebsiteRequest; + output: GetBucketWebsiteOutput; + }; + sdk: { + input: GetBucketWebsiteCommandInput; + output: GetBucketWebsiteCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectAclCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectAclCommand.d.ts new file mode 100644 index 00000000..ceb81f15 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectAclCommand.d.ts @@ -0,0 +1,189 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetObjectAclOutput, GetObjectAclRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetObjectAclCommand}. + */ +export interface GetObjectAclCommandInput extends GetObjectAclRequest { +} +/** + * @public + * + * The output of {@link GetObjectAclCommand}. + */ +export interface GetObjectAclCommandOutput extends GetObjectAclOutput, __MetadataBearer { +} +declare const GetObjectAclCommand_base: { + new (input: GetObjectAclCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetObjectAclCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Returns the access control list (ACL) of an object. To use this operation, you must have
+ * `s3:GetObjectAcl` permissions or `READ_ACP` access to the object. For more information,
+ * see Mapping of ACL permissions and access policy permissions in the
+ * Amazon S3 User Guide.
+ *
+ * This functionality is not supported for Amazon S3 on Outposts.
+ *
+ * By default, GET returns ACL information about the current version of an object. To
+ * return ACL information about a different version, use the `versionId` subresource.
+ *
+ * Note: If your bucket uses the bucket owner enforced setting for S3 Object Ownership,
+ * requests to read ACLs are still supported and return the `bucket-owner-full-control` ACL
+ * with the owner being the account that created the bucket. For more information, see
+ * Controlling object ownership and disabling ACLs in the Amazon S3 User Guide.
+ *
+ * The following operations are related to `GetObjectAcl`:
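A sketch of reading the ACL of a specific, non-current version via the `VersionId` parameter (all identifiers illustrative):

```typescript
import { S3Client, GetObjectAclCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});
const { Owner, Grants } = await client.send(
  new GetObjectAclCommand({
    Bucket: "amzn-s3-demo-bucket",
    Key: "photos/2006/February/sample.jpg",
    VersionId: "3HL4kqtJlcpXroDTDmjVBH40Nrjfkd", // illustrative version id
  })
);

console.log(`owner: ${Owner?.DisplayName}`);
for (const grant of Grants ?? []) {
  console.log(`${grant.Grantee?.Type}: ${grant.Permission}`);
}
```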

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetObjectAclCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetObjectAclCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetObjectAclRequest + * Bucket: "STRING_VALUE", // required + * Key: "STRING_VALUE", // required + * VersionId: "STRING_VALUE", + * RequestPayer: "requester", + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetObjectAclCommand(input); + * const response = await client.send(command); + * // { // GetObjectAclOutput + * // Owner: { // Owner + * // DisplayName: "STRING_VALUE", + * // ID: "STRING_VALUE", + * // }, + * // Grants: [ // Grants + * // { // Grant + * // Grantee: { // Grantee + * // DisplayName: "STRING_VALUE", + * // EmailAddress: "STRING_VALUE", + * // ID: "STRING_VALUE", + * // URI: "STRING_VALUE", + * // Type: "CanonicalUser" || "AmazonCustomerByEmail" || "Group", // required + * // }, + * // Permission: "FULL_CONTROL" || "WRITE" || "WRITE_ACP" || "READ" || "READ_ACP", + * // }, + * // ], + * // RequestCharged: "requester", + * // }; + * + * ``` + * + * @param GetObjectAclCommandInput - {@link GetObjectAclCommandInput} + * @returns {@link GetObjectAclCommandOutput} + * @see {@link GetObjectAclCommandInput} for command's `input` shape. + * @see {@link GetObjectAclCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link NoSuchKey} (client fault) + *

+ * The specified key does not exist.
+ *
+ * @throws {@link S3ServiceException}
+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @example To retrieve object ACL + * ```javascript + * // The following example retrieves access control list (ACL) of an object. + * const input = { + * Bucket: "examplebucket", + * Key: "HappyFace.jpg" + * }; + * const command = new GetObjectAclCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Grants: [ + * { + * Grantee: { + * DisplayName: "owner-display-name", + * ID: "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc", + * Type: "CanonicalUser" + * }, + * Permission: "WRITE" + * }, + * { + * Grantee: { + * DisplayName: "owner-display-name", + * ID: "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc", + * Type: "CanonicalUser" + * }, + * Permission: "WRITE_ACP" + * }, + * { + * Grantee: { + * DisplayName: "owner-display-name", + * ID: "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc", + * Type: "CanonicalUser" + * }, + * Permission: "READ" + * }, + * { + * Grantee: { + * DisplayName: "owner-display-name", + * ID: "852b113eexamplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc", + * Type: "CanonicalUser" + * }, + * Permission: "READ_ACP" + * } + * ], + * Owner: { + * DisplayName: "owner-display-name", + * ID: "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class GetObjectAclCommand extends GetObjectAclCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetObjectAclRequest; + output: GetObjectAclOutput; + }; + sdk: { + input: GetObjectAclCommandInput; + output: GetObjectAclCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectAttributesCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectAttributesCommand.d.ts new file mode 100644 index 00000000..018718dd --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectAttributesCommand.d.ts @@ -0,0 +1,330 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetObjectAttributesOutput, GetObjectAttributesRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetObjectAttributesCommand}. + */ +export interface GetObjectAttributesCommandInput extends GetObjectAttributesRequest { +} +/** + * @public + * + * The output of {@link GetObjectAttributesCommand}. + */ +export interface GetObjectAttributesCommandOutput extends GetObjectAttributesOutput, __MetadataBearer { +} +declare const GetObjectAttributesCommand_base: { + new (input: GetObjectAttributesCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetObjectAttributesCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * Retrieves all the metadata from an object without returning the object itself. This
+ * operation is useful if you're interested only in an object's metadata.
+ *
+ * `GetObjectAttributes` combines the functionality of `HeadObject` and `ListParts`. All of
+ * the data returned with each of those individual calls can be returned with a single call
+ * to `GetObjectAttributes`.
+ *
+ * Directory buckets - For directory buckets, you must make requests for this API operation
+ * to the Zonal endpoint. These endpoints support virtual-hosted-style requests in the
+ * format
+ * `https://amzn-s3-demo-bucket.s3express-zone-id.region-code.amazonaws.com/key-name`.
+ * Path-style requests are not supported. For more information about endpoints in
+ * Availability Zones, see Regional and Zonal endpoints for directory buckets in
+ * Availability Zones in the Amazon S3 User Guide. For more information about endpoints in
+ * Local Zones, see Concepts for directory buckets in Local Zones in the
+ * Amazon S3 User Guide.
+ *
+ * Permissions:
+ *
+ * - General purpose bucket permissions - To use `GetObjectAttributes`, you must have READ
+ *   access to the object. The permissions that you need to use this operation depend on
+ *   whether the bucket is versioned. If the bucket is versioned, you need both the
+ *   `s3:GetObjectVersion` and `s3:GetObjectVersionAttributes` permissions for this
+ *   operation. If the bucket is not versioned, you need the `s3:GetObject` and
+ *   `s3:GetObjectAttributes` permissions. For more information, see Specifying Permissions
+ *   in a Policy in the Amazon S3 User Guide. If the object that you request does not
+ *   exist, the error Amazon S3 returns depends on whether you also have the
+ *   `s3:ListBucket` permission.
+ *   - If you have the `s3:ListBucket` permission on the bucket, Amazon S3 returns an HTTP
+ *     status code `404 Not Found` ("no such key") error.
+ *   - If you don't have the `s3:ListBucket` permission, Amazon S3 returns an HTTP status
+ *     code `403 Forbidden` ("access denied") error.
+ * - Directory bucket permissions - To grant access to this API operation on a directory
+ *   bucket, we recommend that you use the `CreateSession` API operation for session-based
+ *   authorization. Specifically, you grant the `s3express:CreateSession` permission to the
+ *   directory bucket in a bucket policy or an IAM identity-based policy. Then, you make
+ *   the `CreateSession` API call on the bucket to obtain a session token. With the session
+ *   token in your request header, you can make API requests to this operation. After the
+ *   session token expires, you make another `CreateSession` API call to generate a new
+ *   session token for use. The Amazon Web Services CLI and SDKs create the session and
+ *   refresh the session token automatically to avoid service interruptions when a session
+ *   expires. For more information about authorization, see `CreateSession`.
+ *
+ *   If the object is encrypted with SSE-KMS, you must also have the `kms:GenerateDataKey`
+ *   and `kms:Decrypt` permissions in IAM identity-based policies and KMS key policies for
+ *   the KMS key.
+ *
+ * Encryption:
+ *
+ * Note: Encryption request headers, like `x-amz-server-side-encryption`, should not be
+ * sent for `HEAD` requests if your object uses server-side encryption with Key Management
+ * Service (KMS) keys (SSE-KMS), dual-layer server-side encryption with Amazon Web Services
+ * KMS keys (DSSE-KMS), or server-side encryption with Amazon S3 managed encryption keys
+ * (SSE-S3). The `x-amz-server-side-encryption` header is used when you `PUT` an object to
+ * S3 and want to specify the encryption method. If you include this header in a `GET`
+ * request for an object that uses these types of keys, you'll get an HTTP
+ * `400 Bad Request` error. This is because the encryption method can't be changed when you
+ * retrieve the object.
+ *
+ * If you encrypt an object by using server-side encryption with customer-provided
+ * encryption keys (SSE-C) when you store the object in Amazon S3, then when you retrieve
+ * the metadata from the object, you must use the following headers to provide the
+ * encryption key for the server to be able to retrieve the object's metadata. The headers
+ * are:
+ *
+ * - `x-amz-server-side-encryption-customer-algorithm`
+ * - `x-amz-server-side-encryption-customer-key`
+ * - `x-amz-server-side-encryption-customer-key-MD5`
+ *
+ * For more information about SSE-C, see Server-Side Encryption (Using Customer-Provided
+ * Encryption Keys) in the Amazon S3 User Guide.
+ *
+ * Directory bucket permissions - For directory buckets, there are only two supported
+ * options for server-side encryption: server-side encryption with Amazon S3 managed keys
+ * (SSE-S3) (`AES256`) and server-side encryption with KMS keys (SSE-KMS) (`aws:kms`). We
+ * recommend that the bucket's default encryption uses the desired encryption configuration
+ * and that you don't override the bucket default encryption in your `CreateSession`
+ * requests or `PUT` object requests. Then, new objects are automatically encrypted with
+ * the desired encryption settings. For more information, see Protecting data with
+ * server-side encryption in the Amazon S3 User Guide. For more information about the
+ * encryption overriding behaviors in directory buckets, see Specifying server-side
+ * encryption with KMS for new object uploads.
+ *
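A sketch of supplying the SSE-C material through the command input rather than raw headers; the SDK's SSE-C middleware is expected to base64-encode the key and derive the MD5 header when omitted (an assumption worth verifying), and all names are illustrative:

```typescript
import { S3Client, GetObjectAttributesCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});
const response = await client.send(
  new GetObjectAttributesCommand({
    Bucket: "amzn-s3-demo-bucket", // illustrative
    Key: "encrypted-object",       // illustrative
    ObjectAttributes: ["ETag", "ObjectSize", "StorageClass"],
    SSECustomerAlgorithm: "AES256",
    // Must be the same 256-bit key used when the object was stored.
    SSECustomerKey: "0123456789abcdef0123456789abcdef", // illustrative 32-byte key
  })
);

console.log(response.ETag, response.ObjectSize, response.StorageClass);
```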
+ * Versioning:
+ *
+ * Directory buckets - S3 Versioning isn't enabled and supported for directory buckets. For
+ * this API operation, only the `null` value of the version ID is supported by directory
+ * buckets. You can only specify `null` to the `versionId` query parameter in the request.
+ *
+ * Conditional request headers:
+ *
+ * Consider the following when using request headers:
+ *
+ * - If both of the `If-Match` and `If-Unmodified-Since` headers are present in the request
+ *   as follows, then Amazon S3 returns the HTTP status code `200 OK` and the data
+ *   requested:
+ *   - `If-Match` condition evaluates to `true`.
+ *   - `If-Unmodified-Since` condition evaluates to `false`.
+ *
+ *   For more information about conditional requests, see RFC 7232.
+ * - If both of the `If-None-Match` and `If-Modified-Since` headers are present in the
+ *   request as follows, then Amazon S3 returns the HTTP status code `304 Not Modified`:
+ *   - `If-None-Match` condition evaluates to `false`.
+ *   - `If-Modified-Since` condition evaluates to `true`.
+ *
+ *   For more information about conditional requests, see RFC 7232.
+ *
+ * HTTP Host header syntax:
+ *
+ * Directory buckets - The HTTP Host header syntax is
+ * `Bucket-name.s3express-zone-id.region-code.amazonaws.com`.
+ *
+ * The following actions are related to `GetObjectAttributes`:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetObjectAttributesCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetObjectAttributesCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetObjectAttributesRequest + * Bucket: "STRING_VALUE", // required + * Key: "STRING_VALUE", // required + * VersionId: "STRING_VALUE", + * MaxParts: Number("int"), + * PartNumberMarker: "STRING_VALUE", + * SSECustomerAlgorithm: "STRING_VALUE", + * SSECustomerKey: "STRING_VALUE", + * SSECustomerKeyMD5: "STRING_VALUE", + * RequestPayer: "requester", + * ExpectedBucketOwner: "STRING_VALUE", + * ObjectAttributes: [ // ObjectAttributesList // required + * "ETag" || "Checksum" || "ObjectParts" || "StorageClass" || "ObjectSize", + * ], + * }; + * const command = new GetObjectAttributesCommand(input); + * const response = await client.send(command); + * // { // GetObjectAttributesOutput + * // DeleteMarker: true || false, + * // LastModified: new Date("TIMESTAMP"), + * // VersionId: "STRING_VALUE", + * // RequestCharged: "requester", + * // ETag: "STRING_VALUE", + * // Checksum: { // Checksum + * // ChecksumCRC32: "STRING_VALUE", + * // ChecksumCRC32C: "STRING_VALUE", + * // ChecksumCRC64NVME: "STRING_VALUE", + * // ChecksumSHA1: "STRING_VALUE", + * // ChecksumSHA256: "STRING_VALUE", + * // ChecksumType: "COMPOSITE" || "FULL_OBJECT", + * // }, + * // ObjectParts: { // GetObjectAttributesParts + * // TotalPartsCount: Number("int"), + * // PartNumberMarker: "STRING_VALUE", + * // NextPartNumberMarker: "STRING_VALUE", + * // MaxParts: Number("int"), + * // IsTruncated: true || false, + * // Parts: [ // PartsList + * // { // ObjectPart + * // PartNumber: Number("int"), + * // Size: Number("long"), + * // ChecksumCRC32: "STRING_VALUE", + * // ChecksumCRC32C: "STRING_VALUE", + * // ChecksumCRC64NVME: "STRING_VALUE", + * // ChecksumSHA1: "STRING_VALUE", + * // ChecksumSHA256: "STRING_VALUE", + * // }, + * // ], + * // }, + * // StorageClass: "STANDARD" || "REDUCED_REDUNDANCY" || "STANDARD_IA" || "ONEZONE_IA" || "INTELLIGENT_TIERING" || "GLACIER" || "DEEP_ARCHIVE" || "OUTPOSTS" || "GLACIER_IR" || "SNOW" || "EXPRESS_ONEZONE", + * // ObjectSize: Number("long"), + * // }; + * + * ``` + * + * @param GetObjectAttributesCommandInput - {@link GetObjectAttributesCommandInput} + * @returns {@link GetObjectAttributesCommandOutput} + * @see {@link GetObjectAttributesCommandInput} for command's `input` shape. + * @see {@link GetObjectAttributesCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link NoSuchKey} (client fault) + *

+ * The specified key does not exist.
+ *
+ * @throws {@link S3ServiceException}
+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @public + */ +export declare class GetObjectAttributesCommand extends GetObjectAttributesCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetObjectAttributesRequest; + output: GetObjectAttributesOutput; + }; + sdk: { + input: GetObjectAttributesCommandInput; + output: GetObjectAttributesCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectCommand.d.ts new file mode 100644 index 00000000..841fc3a4 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectCommand.d.ts @@ -0,0 +1,383 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer, StreamingBlobPayloadOutputTypes } from "@smithy/types"; +import { GetObjectOutput, GetObjectRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetObjectCommand}. + */ +export interface GetObjectCommandInput extends GetObjectRequest { +} +/** + * @public + * + * The output of {@link GetObjectCommand}. + */ +export interface GetObjectCommandOutput extends Omit, __MetadataBearer { + Body?: StreamingBlobPayloadOutputTypes; +} +declare const GetObjectCommand_base: { + new (input: GetObjectCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetObjectCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * Retrieves an object from Amazon S3.
+ *
+ * In the `GetObject` request, specify the full key name for the object.
+ *
+ * General purpose buckets - Both the virtual-hosted-style requests and the path-style
+ * requests are supported. For a virtual hosted-style request example, if you have the
+ * object `photos/2006/February/sample.jpg`, specify the object key name as
+ * `/photos/2006/February/sample.jpg`. For a path-style request example, if you have the
+ * object `photos/2006/February/sample.jpg` in the bucket named `examplebucket`, specify
+ * the object key name as `/examplebucket/photos/2006/February/sample.jpg`. For more
+ * information about request types, see HTTP Host Header Bucket Specification in the
+ * Amazon S3 User Guide.
+ *
+ * Directory buckets - Only virtual-hosted-style requests are supported. For a virtual
+ * hosted-style request example, if you have the object `photos/2006/February/sample.jpg`
+ * in the bucket named `amzn-s3-demo-bucket--usw2-az1--x-s3`, specify the object key name
+ * as `/photos/2006/February/sample.jpg`. Also, when you make requests to this API
+ * operation, your requests are sent to the Zonal endpoint. These endpoints support
+ * virtual-hosted-style requests in the format
+ * `https://bucket-name.s3express-zone-id.region-code.amazonaws.com/key-name`. Path-style
+ * requests are not supported. For more information about endpoints in Availability Zones,
+ * see Regional and Zonal endpoints for directory buckets in Availability Zones in the
+ * Amazon S3 User Guide. For more information about endpoints in Local Zones, see Concepts
+ * for directory buckets in Local Zones in the Amazon S3 User Guide.
+ *
+ * Permissions:
+ *
+ * - General purpose bucket permissions - You must have the required permissions in a
+ *   policy. To use `GetObject`, you must have the `READ` access to the object (or
+ *   version). If you grant `READ` access to the anonymous user, the `GetObject` operation
+ *   returns the object without using an authorization header. For more information, see
+ *   Specifying permissions in a policy in the Amazon S3 User Guide.
+ *
+ *   If you include a `versionId` in your request header, you must have the
+ *   `s3:GetObjectVersion` permission to access a specific version of an object. The
+ *   `s3:GetObject` permission is not required in this scenario.
+ *
+ *   If you request the current version of an object without a specific `versionId` in the
+ *   request header, only the `s3:GetObject` permission is required. The
+ *   `s3:GetObjectVersion` permission is not required in this scenario.
+ *
+ *   If the object that you request doesn't exist, the error that Amazon S3 returns depends
+ *   on whether you also have the `s3:ListBucket` permission.
+ *   - If you have the `s3:ListBucket` permission on the bucket, Amazon S3 returns an HTTP
+ *     status code `404 Not Found` error.
+ *   - If you don't have the `s3:ListBucket` permission, Amazon S3 returns an HTTP status
+ *     code `403 Access Denied` error.
+ * - Directory bucket permissions - To grant access to this API operation on a directory
+ *   bucket, we recommend that you use the `CreateSession` API operation for session-based
+ *   authorization. Specifically, you grant the `s3express:CreateSession` permission to the
+ *   directory bucket in a bucket policy or an IAM identity-based policy. Then, you make
+ *   the `CreateSession` API call on the bucket to obtain a session token. With the session
+ *   token in your request header, you can make API requests to this operation. After the
+ *   session token expires, you make another `CreateSession` API call to generate a new
+ *   session token for use. The Amazon Web Services CLI and SDKs create the session and
+ *   refresh the session token automatically to avoid service interruptions when a session
+ *   expires. For more information about authorization, see `CreateSession`.
+ *
+ *   If the object is encrypted using SSE-KMS, you must also have the
+ *   `kms:GenerateDataKey` and `kms:Decrypt` permissions in IAM identity-based policies and
+ *   KMS key policies for the KMS key.
+ *
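A sketch of translating the 404/403 split above into code; `NoSuchKey` is the modeled not-found error, while the access-denied case is detected from the HTTP status (function and message are illustrative):

```typescript
import { S3Client, GetObjectCommand, NoSuchKey, S3ServiceException } from "@aws-sdk/client-s3";

const client = new S3Client({});

async function fetchObjectText(bucket: string, key: string): Promise<string | undefined> {
  try {
    const response = await client.send(new GetObjectCommand({ Bucket: bucket, Key: key }));
    return await response.Body?.transformToString();
  } catch (err) {
    // With s3:ListBucket, a missing key arrives as the modeled NoSuchKey (404).
    if (err instanceof NoSuchKey) return undefined;
    // Without s3:ListBucket, the same situation surfaces as a 403.
    if (err instanceof S3ServiceException && err.$metadata.httpStatusCode === 403) {
      throw new Error(`access denied for s3://${bucket}/${key}`);
    }
    throw err;
  }
}
```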
+ * Storage classes:
+ *
+ * If the object you are retrieving is stored in the S3 Glacier Flexible Retrieval storage
+ * class, the S3 Glacier Deep Archive storage class, the S3 Intelligent-Tiering Archive
+ * Access tier, or the S3 Intelligent-Tiering Deep Archive Access tier, before you can
+ * retrieve the object you must first restore a copy using `RestoreObject`. Otherwise, this
+ * operation returns an `InvalidObjectState` error. For information about restoring
+ * archived objects, see Restoring Archived Objects in the Amazon S3 User Guide.
+ *
+ * Directory buckets - Directory buckets only support `EXPRESS_ONEZONE` (the S3 Express One
+ * Zone storage class) in Availability Zones and `ONEZONE_IA` (the S3 One Zone-Infrequent
+ * Access storage class) in Dedicated Local Zones. Unsupported storage class values won't
+ * write a destination object and will respond with the HTTP status code `400 Bad Request`.
+ *
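A sketch of the restore-then-retry flow for archived objects; the `RestoreRequest` shape here (days plus retrieval tier) is a plausible configuration, not a prescription, and names are illustrative:

```typescript
import {
  S3Client,
  GetObjectCommand,
  RestoreObjectCommand,
  InvalidObjectState,
} from "@aws-sdk/client-s3";

const client = new S3Client({});
const location = { Bucket: "amzn-s3-demo-bucket", Key: "archive/2019.tar" }; // illustrative

try {
  await client.send(new GetObjectCommand(location));
} catch (err) {
  if (err instanceof InvalidObjectState) {
    // Start a restore; the object becomes retrievable once the copy is ready.
    await client.send(
      new RestoreObjectCommand({
        ...location,
        RestoreRequest: { Days: 7, GlacierJobParameters: { Tier: "Standard" } },
      })
    );
  } else {
    throw err;
  }
}
```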
+ * Encryption:
+ *
+ * Encryption request headers, like `x-amz-server-side-encryption`, should not be sent for
+ * `GetObject` requests if your object uses server-side encryption with Amazon S3 managed
+ * encryption keys (SSE-S3), server-side encryption with Key Management Service (KMS) keys
+ * (SSE-KMS), or dual-layer server-side encryption with Amazon Web Services KMS keys
+ * (DSSE-KMS). If you include the header in your `GetObject` requests for an object that
+ * uses these types of keys, you'll get an HTTP `400 Bad Request` error.
+ *
+ * Directory buckets - For directory buckets, there are only two supported options for
+ * server-side encryption: SSE-S3 and SSE-KMS. SSE-C isn't supported. For more information,
+ * see Protecting data with server-side encryption in the Amazon S3 User Guide.
+ *
+ * Overriding response header values through the request:
+ *
+ * There are times when you want to override certain response header values of a
+ * `GetObject` response. For example, you might override the `Content-Disposition` response
+ * header value through your `GetObject` request.
+ *
+ * You can override values for a set of response headers. These modified response header
+ * values are included only in a successful response, that is, when the HTTP status code
+ * `200 OK` is returned. The headers you can override using the following query parameters
+ * in the request are a subset of the headers that Amazon S3 accepts when you create an
+ * object.
+ *
+ * The response headers that you can override for the `GetObject` response are
+ * `Cache-Control`, `Content-Disposition`, `Content-Encoding`, `Content-Language`,
+ * `Content-Type`, and `Expires`.
+ *
+ * To override values for a set of response headers in the `GetObject` response, you can
+ * use the following query parameters in the request:
+ *
+ * - `response-cache-control`
+ * - `response-content-disposition`
+ * - `response-content-encoding`
+ * - `response-content-language`
+ * - `response-content-type`
+ * - `response-expires`
+ *
+ * Note: When you use these parameters, you must sign the request by using either an
+ * Authorization header or a presigned URL. These parameters cannot be used with an
+ * unsigned (anonymous) request.
+ *
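Because the `response-*` overrides must be signed, a common pattern is baking them into a presigned URL; a sketch using `@aws-sdk/s3-request-presigner` (bucket, key, and filename are illustrative):

```typescript
import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";

const client = new S3Client({});
const command = new GetObjectCommand({
  Bucket: "amzn-s3-demo-bucket", // illustrative
  Key: "reports/q3.pdf",         // illustrative
  ResponseContentType: "application/pdf",
  ResponseContentDisposition: 'attachment; filename="q3-report.pdf"',
  ResponseCacheControl: "no-store",
});

// The overrides become part of the signed query string, satisfying the
// requirement that they never travel on an unsigned request.
const url = await getSignedUrl(client, command, { expiresIn: 900 }); // 15 minutes
```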
+ * HTTP Host header syntax:
+ *
+ * Directory buckets - The HTTP Host header syntax is
+ * `Bucket-name.s3express-zone-id.region-code.amazonaws.com`.
+ *
+ * The following operations are related to `GetObject`:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetObjectCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetObjectRequest + * Bucket: "STRING_VALUE", // required + * IfMatch: "STRING_VALUE", + * IfModifiedSince: new Date("TIMESTAMP"), + * IfNoneMatch: "STRING_VALUE", + * IfUnmodifiedSince: new Date("TIMESTAMP"), + * Key: "STRING_VALUE", // required + * Range: "STRING_VALUE", + * ResponseCacheControl: "STRING_VALUE", + * ResponseContentDisposition: "STRING_VALUE", + * ResponseContentEncoding: "STRING_VALUE", + * ResponseContentLanguage: "STRING_VALUE", + * ResponseContentType: "STRING_VALUE", + * ResponseExpires: new Date("TIMESTAMP"), + * VersionId: "STRING_VALUE", + * SSECustomerAlgorithm: "STRING_VALUE", + * SSECustomerKey: "STRING_VALUE", + * SSECustomerKeyMD5: "STRING_VALUE", + * RequestPayer: "requester", + * PartNumber: Number("int"), + * ExpectedBucketOwner: "STRING_VALUE", + * ChecksumMode: "ENABLED", + * }; + * const command = new GetObjectCommand(input); + * const response = await client.send(command); + * // consume or destroy the stream to free the socket. + * const bytes = await response.Body.transformToByteArray(); + * // const str = await response.Body.transformToString(); + * // response.Body.destroy(); // only applicable to Node.js Readable streams. + * + * // { // GetObjectOutput + * // Body: "", // see \@smithy/types -> StreamingBlobPayloadOutputTypes + * // DeleteMarker: true || false, + * // AcceptRanges: "STRING_VALUE", + * // Expiration: "STRING_VALUE", + * // Restore: "STRING_VALUE", + * // LastModified: new Date("TIMESTAMP"), + * // ContentLength: Number("long"), + * // ETag: "STRING_VALUE", + * // ChecksumCRC32: "STRING_VALUE", + * // ChecksumCRC32C: "STRING_VALUE", + * // ChecksumCRC64NVME: "STRING_VALUE", + * // ChecksumSHA1: "STRING_VALUE", + * // ChecksumSHA256: "STRING_VALUE", + * // ChecksumType: "COMPOSITE" || "FULL_OBJECT", + * // MissingMeta: Number("int"), + * // VersionId: "STRING_VALUE", + * // CacheControl: "STRING_VALUE", + * // ContentDisposition: "STRING_VALUE", + * // ContentEncoding: "STRING_VALUE", + * // ContentLanguage: "STRING_VALUE", + * // ContentRange: "STRING_VALUE", + * // ContentType: "STRING_VALUE", + * // Expires: new Date("TIMESTAMP"), + * // ExpiresString: "STRING_VALUE", + * // WebsiteRedirectLocation: "STRING_VALUE", + * // ServerSideEncryption: "AES256" || "aws:kms" || "aws:kms:dsse", + * // Metadata: { // Metadata + * // "": "STRING_VALUE", + * // }, + * // SSECustomerAlgorithm: "STRING_VALUE", + * // SSECustomerKeyMD5: "STRING_VALUE", + * // SSEKMSKeyId: "STRING_VALUE", + * // BucketKeyEnabled: true || false, + * // StorageClass: "STANDARD" || "REDUCED_REDUNDANCY" || "STANDARD_IA" || "ONEZONE_IA" || "INTELLIGENT_TIERING" || "GLACIER" || "DEEP_ARCHIVE" || "OUTPOSTS" || "GLACIER_IR" || "SNOW" || "EXPRESS_ONEZONE", + * // RequestCharged: "requester", + * // ReplicationStatus: "COMPLETE" || "PENDING" || "FAILED" || "REPLICA" || "COMPLETED", + * // PartsCount: Number("int"), + * // TagCount: Number("int"), + * // ObjectLockMode: "GOVERNANCE" || "COMPLIANCE", + * // ObjectLockRetainUntilDate: new Date("TIMESTAMP"), + * // ObjectLockLegalHoldStatus: "ON" || "OFF", + * // }; + * + * ``` + * + * @param GetObjectCommandInput - {@link GetObjectCommandInput} + * @returns {@link 
GetObjectCommandOutput}
+ * @see {@link GetObjectCommandInput} for command's `input` shape.
+ * @see {@link GetObjectCommandOutput} for command's `response` shape.
+ * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape.
+ *
+ * @throws {@link InvalidObjectState} (client fault)

+ * Object is archived and inaccessible until restored.
+ *
+ * If the object you are retrieving is stored in the S3 Glacier Flexible Retrieval storage
+ * class, the S3 Glacier Deep Archive storage class, the S3 Intelligent-Tiering Archive
+ * Access tier, or the S3 Intelligent-Tiering Deep Archive Access tier, before you can
+ * retrieve the object you must first restore a copy using `RestoreObject`. Otherwise, this
+ * operation returns an `InvalidObjectState` error. For information about restoring
+ * archived objects, see Restoring Archived Objects in the Amazon S3 User Guide.
+ *
+ * @throws {@link NoSuchKey} (client fault)
+ * The specified key does not exist.
+ *
+ * @throws {@link S3ServiceException}
+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @example To retrieve a byte range of an object + * ```javascript + * // The following example retrieves an object for an S3 bucket. The request specifies the range header to retrieve a specific byte range. + * const input = { + * Bucket: "examplebucket", + * Key: "SampleFile.txt", + * Range: "bytes=0-9" + * }; + * const command = new GetObjectCommand(input); + * const response = await client.send(command); + * // consume or destroy the stream to free the socket. + * const bytes = await response.Body.transformToByteArray(); + * // const str = await response.Body.transformToString(); + * // response.Body.destroy(); // only applicable to Node.js Readable streams. + * + * /* response is + * { + * AcceptRanges: "bytes", + * ContentLength: 10, + * ContentRange: "bytes 0-9/43", + * ContentType: "text/plain", + * ETag: `"0d94420ffd0bc68cd3d152506b97a9cc"`, + * LastModified: "2014-10-09T22:57:28.000Z", + * Metadata: { /* empty *\/ }, + * VersionId: "null" + * } + * *\/ + * ``` + * + * @example To retrieve an object + * ```javascript + * // The following example retrieves an object for an S3 bucket. + * const input = { + * Bucket: "examplebucket", + * Key: "HappyFace.jpg" + * }; + * const command = new GetObjectCommand(input); + * const response = await client.send(command); + * // consume or destroy the stream to free the socket. + * const bytes = await response.Body.transformToByteArray(); + * // const str = await response.Body.transformToString(); + * // response.Body.destroy(); // only applicable to Node.js Readable streams. + * + * /* response is + * { + * AcceptRanges: "bytes", + * ContentLength: 3191, + * ContentType: "image/jpeg", + * ETag: `"6805f2cfc46c0f04559748bb039d69ae"`, + * LastModified: "2016-12-15T01:19:41.000Z", + * Metadata: { /* empty *\/ }, + * TagCount: 2, + * VersionId: "null" + * } + * *\/ + * ``` + * + * @public + */ +export declare class GetObjectCommand extends GetObjectCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetObjectRequest; + output: GetObjectOutput; + }; + sdk: { + input: GetObjectCommandInput; + output: GetObjectCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectLegalHoldCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectLegalHoldCommand.d.ts new file mode 100644 index 00000000..8ed6b98d --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectLegalHoldCommand.d.ts @@ -0,0 +1,91 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetObjectLegalHoldOutput, GetObjectLegalHoldRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetObjectLegalHoldCommand}. + */ +export interface GetObjectLegalHoldCommandInput extends GetObjectLegalHoldRequest { +} +/** + * @public + * + * The output of {@link GetObjectLegalHoldCommand}. 
+ */ +export interface GetObjectLegalHoldCommandOutput extends GetObjectLegalHoldOutput, __MetadataBearer { +} +declare const GetObjectLegalHoldCommand_base: { + new (input: GetObjectLegalHoldCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetObjectLegalHoldCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Gets an object's current legal hold status. For more information, see Locking
+ * Objects.
+ *
+ * This functionality is not supported for Amazon S3 on Outposts.
+ *
+ * The following action is related to GetObjectLegalHold:
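A short illustrative sketch (assumed bucket/key placeholders, not part of the vendored typings): read the legal hold status and skip a delete while a hold is in place.

```ts
import { S3Client, GetObjectLegalHoldCommand, DeleteObjectCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});

async function deleteIfNoLegalHold(bucket: string, key: string): Promise<boolean> {
  const { LegalHold } = await client.send(
    new GetObjectLegalHoldCommand({ Bucket: bucket, Key: key })
  );
  if (LegalHold?.Status === "ON") {
    return false; // skip the delete while a legal hold is in place
  }
  await client.send(new DeleteObjectCommand({ Bucket: bucket, Key: key }));
  return true;
}
```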

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetObjectLegalHoldCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetObjectLegalHoldCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetObjectLegalHoldRequest + * Bucket: "STRING_VALUE", // required + * Key: "STRING_VALUE", // required + * VersionId: "STRING_VALUE", + * RequestPayer: "requester", + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetObjectLegalHoldCommand(input); + * const response = await client.send(command); + * // { // GetObjectLegalHoldOutput + * // LegalHold: { // ObjectLockLegalHold + * // Status: "ON" || "OFF", + * // }, + * // }; + * + * ``` + * + * @param GetObjectLegalHoldCommandInput - {@link GetObjectLegalHoldCommandInput} + * @returns {@link GetObjectLegalHoldCommandOutput} + * @see {@link GetObjectLegalHoldCommandInput} for command's `input` shape. + * @see {@link GetObjectLegalHoldCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class GetObjectLegalHoldCommand extends GetObjectLegalHoldCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetObjectLegalHoldRequest; + output: GetObjectLegalHoldOutput; + }; + sdk: { + input: GetObjectLegalHoldCommandInput; + output: GetObjectLegalHoldCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectLockConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectLockConfigurationCommand.d.ts new file mode 100644 index 00000000..3b08142d --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectLockConfigurationCommand.d.ts @@ -0,0 +1,95 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetObjectLockConfigurationOutput, GetObjectLockConfigurationRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetObjectLockConfigurationCommand}. + */ +export interface GetObjectLockConfigurationCommandInput extends GetObjectLockConfigurationRequest { +} +/** + * @public + * + * The output of {@link GetObjectLockConfigurationCommand}. + */ +export interface GetObjectLockConfigurationCommandOutput extends GetObjectLockConfigurationOutput, __MetadataBearer { +} +declare const GetObjectLockConfigurationCommand_base: { + new (input: GetObjectLockConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetObjectLockConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Gets the Object Lock configuration for a bucket. The rule specified in the Object Lock
+ * configuration will be applied by default to every new object placed in the specified
+ * bucket. For more information, see Locking Objects.
+ *
+ * The following action is related to GetObjectLockConfiguration:
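A hedged sketch of reading the configuration shape shown in the example below (the bucket name and the human-readable summary are illustrative assumptions):

```ts
import { S3Client, GetObjectLockConfigurationCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});

async function describeDefaultRetention(bucket: string): Promise<string> {
  const { ObjectLockConfiguration } = await client.send(
    new GetObjectLockConfigurationCommand({ Bucket: bucket })
  );
  if (ObjectLockConfiguration?.ObjectLockEnabled !== "Enabled") {
    return "Object Lock is not enabled on this bucket";
  }
  const retention = ObjectLockConfiguration.Rule?.DefaultRetention;
  if (!retention) {
    return "Object Lock enabled without a default retention rule";
  }
  // A default retention rule carries either Days or Years, never both.
  const period = retention.Days ? `${retention.Days} day(s)` : `${retention.Years} year(s)`;
  return `${retention.Mode} retention for ${period}`;
}
```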

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetObjectLockConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetObjectLockConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetObjectLockConfigurationRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetObjectLockConfigurationCommand(input); + * const response = await client.send(command); + * // { // GetObjectLockConfigurationOutput + * // ObjectLockConfiguration: { // ObjectLockConfiguration + * // ObjectLockEnabled: "Enabled", + * // Rule: { // ObjectLockRule + * // DefaultRetention: { // DefaultRetention + * // Mode: "GOVERNANCE" || "COMPLIANCE", + * // Days: Number("int"), + * // Years: Number("int"), + * // }, + * // }, + * // }, + * // }; + * + * ``` + * + * @param GetObjectLockConfigurationCommandInput - {@link GetObjectLockConfigurationCommandInput} + * @returns {@link GetObjectLockConfigurationCommandOutput} + * @see {@link GetObjectLockConfigurationCommandInput} for command's `input` shape. + * @see {@link GetObjectLockConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class GetObjectLockConfigurationCommand extends GetObjectLockConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetObjectLockConfigurationRequest; + output: GetObjectLockConfigurationOutput; + }; + sdk: { + input: GetObjectLockConfigurationCommandInput; + output: GetObjectLockConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectRetentionCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectRetentionCommand.d.ts new file mode 100644 index 00000000..d22b70bd --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectRetentionCommand.d.ts @@ -0,0 +1,92 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetObjectRetentionOutput, GetObjectRetentionRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetObjectRetentionCommand}. + */ +export interface GetObjectRetentionCommandInput extends GetObjectRetentionRequest { +} +/** + * @public + * + * The output of {@link GetObjectRetentionCommand}. + */ +export interface GetObjectRetentionCommandOutput extends GetObjectRetentionOutput, __MetadataBearer { +} +declare const GetObjectRetentionCommand_base: { + new (input: GetObjectRetentionCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetObjectRetentionCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Retrieves an object's retention settings. For more information, see Locking
+ * Objects.
+ *
+ * This functionality is not supported for Amazon S3 on Outposts.
+ *
+ * The following action is related to GetObjectRetention:
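A minimal sketch (placeholder names, not part of the vendored typings): decide whether an object is still under retention by comparing `RetainUntilDate` with the current time.

```ts
import { S3Client, GetObjectRetentionCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});

async function isUnderRetention(bucket: string, key: string): Promise<boolean> {
  const { Retention } = await client.send(
    new GetObjectRetentionCommand({ Bucket: bucket, Key: key })
  );
  // RetainUntilDate is a Date; the object is still protected while it lies in the future.
  return Retention?.RetainUntilDate !== undefined && Retention.RetainUntilDate > new Date();
}
```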

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetObjectRetentionCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetObjectRetentionCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetObjectRetentionRequest + * Bucket: "STRING_VALUE", // required + * Key: "STRING_VALUE", // required + * VersionId: "STRING_VALUE", + * RequestPayer: "requester", + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetObjectRetentionCommand(input); + * const response = await client.send(command); + * // { // GetObjectRetentionOutput + * // Retention: { // ObjectLockRetention + * // Mode: "GOVERNANCE" || "COMPLIANCE", + * // RetainUntilDate: new Date("TIMESTAMP"), + * // }, + * // }; + * + * ``` + * + * @param GetObjectRetentionCommandInput - {@link GetObjectRetentionCommandInput} + * @returns {@link GetObjectRetentionCommandOutput} + * @see {@link GetObjectRetentionCommandInput} for command's `input` shape. + * @see {@link GetObjectRetentionCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class GetObjectRetentionCommand extends GetObjectRetentionCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetObjectRetentionRequest; + output: GetObjectRetentionOutput; + }; + sdk: { + input: GetObjectRetentionCommandInput; + output: GetObjectRetentionCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectTaggingCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectTaggingCommand.d.ts new file mode 100644 index 00000000..e7975966 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectTaggingCommand.d.ts @@ -0,0 +1,162 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetObjectTaggingOutput, GetObjectTaggingRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetObjectTaggingCommand}. + */ +export interface GetObjectTaggingCommandInput extends GetObjectTaggingRequest { +} +/** + * @public + * + * The output of {@link GetObjectTaggingCommand}. + */ +export interface GetObjectTaggingCommandOutput extends GetObjectTaggingOutput, __MetadataBearer { +} +declare const GetObjectTaggingCommand_base: { + new (input: GetObjectTaggingCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetObjectTaggingCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Returns the tag-set of an object. You send the GET request against the tagging
+ * subresource associated with the object.
+ *
+ * To use this operation, you must have permission to perform the
+ * s3:GetObjectTagging action. By default, the GET action returns information
+ * about the current version of an object. For a versioned bucket, you can have multiple versions
+ * of an object in your bucket. To retrieve tags of any other version, use the versionId query
+ * parameter. You also need permission for the s3:GetObjectVersionTagging
+ * action.
+ *
+ * By default, the bucket owner has this permission and can grant this permission to
+ * others.
+ *
+ * For information about the Amazon S3 object tagging feature, see Object Tagging.
+ *
+ * The following actions are related to GetObjectTagging:
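A small convenience sketch (the helper name is an illustrative assumption): flatten the returned `TagSet` array of `{ Key, Value }` pairs into a plain record for easier lookups.

```ts
import { S3Client, GetObjectTaggingCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});

async function getObjectTags(bucket: string, key: string, versionId?: string) {
  const { TagSet } = await client.send(
    new GetObjectTaggingCommand({ Bucket: bucket, Key: key, VersionId: versionId })
  );
  // Fold the tag list into a { [key]: value } record.
  return Object.fromEntries((TagSet ?? []).map((tag) => [tag.Key ?? "", tag.Value ?? ""]));
}
```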

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetObjectTaggingCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetObjectTaggingCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetObjectTaggingRequest + * Bucket: "STRING_VALUE", // required + * Key: "STRING_VALUE", // required + * VersionId: "STRING_VALUE", + * ExpectedBucketOwner: "STRING_VALUE", + * RequestPayer: "requester", + * }; + * const command = new GetObjectTaggingCommand(input); + * const response = await client.send(command); + * // { // GetObjectTaggingOutput + * // VersionId: "STRING_VALUE", + * // TagSet: [ // TagSet // required + * // { // Tag + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // ], + * // }; + * + * ``` + * + * @param GetObjectTaggingCommandInput - {@link GetObjectTaggingCommandInput} + * @returns {@link GetObjectTaggingCommandOutput} + * @see {@link GetObjectTaggingCommandInput} for command's `input` shape. + * @see {@link GetObjectTaggingCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.

+ * + * + * @example To retrieve tag set of a specific object version + * ```javascript + * // The following example retrieves tag set of an object. The request specifies object version. + * const input = { + * Bucket: "examplebucket", + * Key: "exampleobject", + * VersionId: "ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI" + * }; + * const command = new GetObjectTaggingCommand(input); + * const response = await client.send(command); + * /* response is + * { + * TagSet: [ + * { + * Key: "Key1", + * Value: "Value1" + * } + * ], + * VersionId: "ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI" + * } + * *\/ + * ``` + * + * @example To retrieve tag set of an object + * ```javascript + * // The following example retrieves tag set of an object. + * const input = { + * Bucket: "examplebucket", + * Key: "HappyFace.jpg" + * }; + * const command = new GetObjectTaggingCommand(input); + * const response = await client.send(command); + * /* response is + * { + * TagSet: [ + * { + * Key: "Key4", + * Value: "Value4" + * }, + * { + * Key: "Key3", + * Value: "Value3" + * } + * ], + * VersionId: "null" + * } + * *\/ + * ``` + * + * @public + */ +export declare class GetObjectTaggingCommand extends GetObjectTaggingCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetObjectTaggingRequest; + output: GetObjectTaggingOutput; + }; + sdk: { + input: GetObjectTaggingCommandInput; + output: GetObjectTaggingCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectTorrentCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectTorrentCommand.d.ts new file mode 100644 index 00000000..d644465c --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetObjectTorrentCommand.d.ts @@ -0,0 +1,120 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer, StreamingBlobPayloadOutputTypes } from "@smithy/types"; +import { GetObjectTorrentOutput, GetObjectTorrentRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetObjectTorrentCommand}. + */ +export interface GetObjectTorrentCommandInput extends GetObjectTorrentRequest { +} +/** + * @public + * + * The output of {@link GetObjectTorrentCommand}. + */ +export interface GetObjectTorrentCommandOutput extends Omit, __MetadataBearer { + Body?: StreamingBlobPayloadOutputTypes; +} +declare const GetObjectTorrentCommand_base: { + new (input: GetObjectTorrentCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetObjectTorrentCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Returns torrent files from a bucket. BitTorrent can save you bandwidth when you're
+ * distributing large files.
+ *
+ * You can get torrent only for objects that are less than 5 GB in size, and that are
+ * not encrypted using server-side encryption with a customer-provided encryption
+ * key.
+ *
+ * To use GET, you must have READ access to the object.
+ *
+ * This functionality is not supported for Amazon S3 on Outposts.
+ *
+ * The following action is related to GetObjectTorrent:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetObjectTorrentCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetObjectTorrentCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetObjectTorrentRequest + * Bucket: "STRING_VALUE", // required + * Key: "STRING_VALUE", // required + * RequestPayer: "requester", + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetObjectTorrentCommand(input); + * const response = await client.send(command); + * // consume or destroy the stream to free the socket. + * const bytes = await response.Body.transformToByteArray(); + * // const str = await response.Body.transformToString(); + * // response.Body.destroy(); // only applicable to Node.js Readable streams. + * + * // { // GetObjectTorrentOutput + * // Body: "", // see \@smithy/types -> StreamingBlobPayloadOutputTypes + * // RequestCharged: "requester", + * // }; + * + * ``` + * + * @param GetObjectTorrentCommandInput - {@link GetObjectTorrentCommandInput} + * @returns {@link GetObjectTorrentCommandOutput} + * @see {@link GetObjectTorrentCommandInput} for command's `input` shape. + * @see {@link GetObjectTorrentCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.

+ * + * + * @example To retrieve torrent files for an object + * ```javascript + * // The following example retrieves torrent files of an object. + * const input = { + * Bucket: "examplebucket", + * Key: "HappyFace.jpg" + * }; + * const command = new GetObjectTorrentCommand(input); + * const response = await client.send(command); + * // consume or destroy the stream to free the socket. + * const bytes = await response.Body.transformToByteArray(); + * // const str = await response.Body.transformToString(); + * // response.Body.destroy(); // only applicable to Node.js Readable streams. + * + * /* response is + * { /* empty *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class GetObjectTorrentCommand extends GetObjectTorrentCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetObjectTorrentRequest; + output: GetObjectTorrentOutput; + }; + sdk: { + input: GetObjectTorrentCommandInput; + output: GetObjectTorrentCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/GetPublicAccessBlockCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetPublicAccessBlockCommand.d.ts new file mode 100644 index 00000000..0f5761b7 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/GetPublicAccessBlockCommand.d.ts @@ -0,0 +1,117 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetPublicAccessBlockOutput, GetPublicAccessBlockRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetPublicAccessBlockCommand}. + */ +export interface GetPublicAccessBlockCommandInput extends GetPublicAccessBlockRequest { +} +/** + * @public + * + * The output of {@link GetPublicAccessBlockCommand}. + */ +export interface GetPublicAccessBlockCommandOutput extends GetPublicAccessBlockOutput, __MetadataBearer { +} +declare const GetPublicAccessBlockCommand_base: { + new (input: GetPublicAccessBlockCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetPublicAccessBlockCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Retrieves the PublicAccessBlock configuration for an Amazon S3 bucket. To use
+ * this operation, you must have the s3:GetBucketPublicAccessBlock permission.
+ * For more information about Amazon S3 permissions, see Specifying Permissions in a
+ * Policy.
+ *
+ * When Amazon S3 evaluates the PublicAccessBlock configuration for a bucket or
+ * an object, it checks the PublicAccessBlock configuration for both the
+ * bucket (or the bucket that contains the object) and the bucket owner's account. If the
+ * PublicAccessBlock settings are different between the bucket and the
+ * account, Amazon S3 uses the most restrictive combination of the bucket-level and
+ * account-level settings.
+ *
+ * For more information about when Amazon S3 considers a bucket or an object public, see The Meaning of "Public".
+ *
+ * The following operations are related to GetPublicAccessBlock:
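A sketch of the "most restrictive combination" rule described above: given the bucket-level configuration (from this command) and the account-level configuration (typically fetched via the S3 Control API, assumed here as an input), an option is blocked if either level blocks it.

```ts
import type { PublicAccessBlockConfiguration } from "@aws-sdk/client-s3";

function effectivePublicAccessBlock(
  bucketLevel: PublicAccessBlockConfiguration = {},
  accountLevel: PublicAccessBlockConfiguration = {}
): PublicAccessBlockConfiguration {
  // Each flag is restrictive when true, so the effective value is the logical OR.
  return {
    BlockPublicAcls: Boolean(bucketLevel.BlockPublicAcls || accountLevel.BlockPublicAcls),
    IgnorePublicAcls: Boolean(bucketLevel.IgnorePublicAcls || accountLevel.IgnorePublicAcls),
    BlockPublicPolicy: Boolean(bucketLevel.BlockPublicPolicy || accountLevel.BlockPublicPolicy),
    RestrictPublicBuckets: Boolean(
      bucketLevel.RestrictPublicBuckets || accountLevel.RestrictPublicBuckets
    ),
  };
}
```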

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, GetPublicAccessBlockCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, GetPublicAccessBlockCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // GetPublicAccessBlockRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new GetPublicAccessBlockCommand(input); + * const response = await client.send(command); + * // { // GetPublicAccessBlockOutput + * // PublicAccessBlockConfiguration: { // PublicAccessBlockConfiguration + * // BlockPublicAcls: true || false, + * // IgnorePublicAcls: true || false, + * // BlockPublicPolicy: true || false, + * // RestrictPublicBuckets: true || false, + * // }, + * // }; + * + * ``` + * + * @param GetPublicAccessBlockCommandInput - {@link GetPublicAccessBlockCommandInput} + * @returns {@link GetPublicAccessBlockCommandOutput} + * @see {@link GetPublicAccessBlockCommandInput} for command's `input` shape. + * @see {@link GetPublicAccessBlockCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class GetPublicAccessBlockCommand extends GetPublicAccessBlockCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetPublicAccessBlockRequest; + output: GetPublicAccessBlockOutput; + }; + sdk: { + input: GetPublicAccessBlockCommandInput; + output: GetPublicAccessBlockCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/HeadBucketCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/HeadBucketCommand.d.ts new file mode 100644 index 00000000..eaba84a8 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/HeadBucketCommand.d.ts @@ -0,0 +1,160 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { HeadBucketOutput, HeadBucketRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link HeadBucketCommand}. + */ +export interface HeadBucketCommandInput extends HeadBucketRequest { +} +/** + * @public + * + * The output of {@link HeadBucketCommand}. + */ +export interface HeadBucketCommandOutput extends HeadBucketOutput, __MetadataBearer { +} +declare const HeadBucketCommand_base: { + new (input: HeadBucketCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: HeadBucketCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * You can use this operation to determine if a bucket exists and if you have permission to
+ * access it. The action returns a 200 OK if the bucket exists and you have
+ * permission to access it.
+ *
+ * If the bucket does not exist or you do not have permission to access it, the
+ * HEAD request returns a generic 400 Bad Request, 403
+ * Forbidden or 404 Not Found code. A message body is not included,
+ * so you cannot determine the exception beyond these HTTP response codes.
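A possible pattern built on that behavior (the helper and its tri-state result are illustrative assumptions, not SDK API): probe the bucket with HeadBucket and map the body-less status codes onto outcomes.

```ts
import { S3Client, HeadBucketCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});

async function checkBucket(bucket: string): Promise<"accessible" | "forbidden" | "missing"> {
  try {
    await client.send(new HeadBucketCommand({ Bucket: bucket }));
    return "accessible";
  } catch (err: any) {
    // HEAD responses carry no body, so only the HTTP status code distinguishes the cases.
    const status = err?.$metadata?.httpStatusCode;
    if (status === 403) return "forbidden";
    if (status === 404) return "missing";
    throw err;
  }
}
```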

+ *
+ *
+ *
+ * Authentication and authorization
+ * - General purpose buckets - Requests to public
+ *   buckets that grant the s3:ListBucket permission publicly do not need to be signed.
+ *   All other HeadBucket requests must be authenticated and signed by
+ *   using IAM credentials (access key ID and secret access key for the IAM
+ *   identities). All headers with the x-amz- prefix, including
+ *   x-amz-copy-source, must be signed. For more information, see
+ *   REST Authentication.
+ * - Directory buckets - You must use IAM
+ *   credentials to authenticate and authorize your access to the
+ *   HeadBucket API operation, instead of using the temporary security
+ *   credentials through the CreateSession API operation.
+ * - The Amazon Web Services CLI and SDKs handle authentication and authorization on your
+ *   behalf.
+ *
+ * Permissions
+ *
+ * HTTP Host header syntax
+ * Directory buckets - The HTTP Host header syntax is
+ * Bucket-name.s3express-zone-id.region-code.amazonaws.com.
+ *
+ * You must make requests for this API operation to the Zonal endpoint. These endpoints support virtual-hosted-style requests in the format https://bucket-name.s3express-zone-id.region-code.amazonaws.com. Path-style requests are not supported. For more information about endpoints in Availability Zones, see Regional and Zonal endpoints for directory buckets in Availability Zones in the
+ * Amazon S3 User Guide. For more information about endpoints in Local Zones, see Concepts for directory buckets in Local Zones in the
+ * Amazon S3 User Guide.

+ *
+ *
+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, HeadBucketCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, HeadBucketCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // HeadBucketRequest + * Bucket: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new HeadBucketCommand(input); + * const response = await client.send(command); + * // { // HeadBucketOutput + * // BucketLocationType: "AvailabilityZone" || "LocalZone", + * // BucketLocationName: "STRING_VALUE", + * // BucketRegion: "STRING_VALUE", + * // AccessPointAlias: true || false, + * // }; + * + * ``` + * + * @param HeadBucketCommandInput - {@link HeadBucketCommandInput} + * @returns {@link HeadBucketCommandOutput} + * @see {@link HeadBucketCommandInput} for command's `input` shape. + * @see {@link HeadBucketCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link NotFound} (client fault) + *

+ * The specified content does not exist.

+ * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.

+ * + * + * @example To determine if bucket exists + * ```javascript + * // This operation checks to see if a bucket exists. + * const input = { + * Bucket: "acl1" + * }; + * const command = new HeadBucketCommand(input); + * const response = await client.send(command); + * /* response is + * { /* metadata only *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class HeadBucketCommand extends HeadBucketCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: HeadBucketRequest; + output: HeadBucketOutput; + }; + sdk: { + input: HeadBucketCommandInput; + output: HeadBucketCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/HeadObjectCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/HeadObjectCommand.d.ts new file mode 100644 index 00000000..10ec6a40 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/HeadObjectCommand.d.ts @@ -0,0 +1,318 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { HeadObjectOutput, HeadObjectRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link HeadObjectCommand}. + */ +export interface HeadObjectCommandInput extends HeadObjectRequest { +} +/** + * @public + * + * The output of {@link HeadObjectCommand}. + */ +export interface HeadObjectCommandOutput extends HeadObjectOutput, __MetadataBearer { +} +declare const HeadObjectCommand_base: { + new (input: HeadObjectCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: HeadObjectCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * The HEAD operation retrieves metadata from an object without returning the
+ * object itself. This operation is useful if you're interested only in an object's
+ * metadata.
+ *
+ * A HEAD request has the same options as a GET operation on
+ * an object. The response is identical to the GET response except that there
+ * is no response body. Because of this, if the HEAD request generates an
+ * error, it returns a generic code, such as 400 Bad Request, 403
+ * Forbidden, 404 Not Found, 405 Method Not Allowed,
+ * 412 Precondition Failed, or 304 Not Modified. It's not
+ * possible to retrieve the exact exception of these error codes.
+ *
+ * Request headers are limited to 8 KB in size. For more information, see Common
+ * Request Headers.

+ *
+ *
+ * Permissions
+ * - General purpose bucket permissions - To
+ *   use HEAD, you must have the s3:GetObject
+ *   permission. You need the relevant read object (or version) permission for
+ *   this operation. For more information, see Actions, resources, and
+ *   condition keys for Amazon S3 in the Amazon S3 User
+ *   Guide. For more information about the permissions to S3 API
+ *   operations by S3 resource types, see Required permissions for Amazon S3 API operations in the
+ *   Amazon S3 User Guide.
+ *   If the object you request doesn't exist, the error that Amazon S3 returns
+ *   depends on whether you also have the s3:ListBucket
+ *   permission.
+ *   - If you have the s3:ListBucket permission on the
+ *     bucket, Amazon S3 returns an HTTP status code 404 Not Found
+ *     error.
+ *   - If you don't have the s3:ListBucket permission, Amazon S3
+ *     returns an HTTP status code 403 Forbidden error.
+ * - Directory bucket permissions - To grant access to this API operation on a directory bucket, we recommend that you use the
+ *   CreateSession
+ *   API operation for session-based authorization. Specifically, you grant the s3express:CreateSession permission to the directory bucket in a bucket policy or an IAM identity-based policy. Then, you make the CreateSession API call on the bucket to obtain a session token. With the session token in your request header, you can make API requests to this operation. After the session token expires, you make another CreateSession API call to generate a new session token for use.
+ *   The Amazon Web Services CLI and SDKs create the session and refresh the session token automatically to avoid service interruptions when a session expires. For more information about authorization, see
+ *   CreateSession.
+ *
+ * If you enable x-amz-checksum-mode in the request and the
+ * object is encrypted with Amazon Web Services Key Management Service (Amazon Web Services KMS), you must
+ * also have the kms:GenerateDataKey and kms:Decrypt
+ * permissions in IAM identity-based policies and KMS key policies for the
+ * KMS key to retrieve the checksum of the object.
+ *
+ *
+ * Encryption
+ * Encryption request headers, like x-amz-server-side-encryption,
+ * should not be sent for HEAD requests if your object uses
+ * server-side encryption with Key Management Service (KMS) keys (SSE-KMS), dual-layer
+ * server-side encryption with Amazon Web Services KMS keys (DSSE-KMS), or server-side
+ * encryption with Amazon S3 managed encryption keys (SSE-S3). The
+ * x-amz-server-side-encryption header is used when you
+ * PUT an object to S3 and want to specify the encryption method.
+ * If you include this header in a HEAD request for an object that
+ * uses these types of keys, you'll get an HTTP 400 Bad Request
+ * error. It's because the encryption method can't be changed when you retrieve
+ * the object.
+ *
+ * If you encrypt an object by using server-side encryption with customer-provided
+ * encryption keys (SSE-C) when you store the object in Amazon S3, then when you retrieve
+ * the metadata from the object, you must use the following headers to provide the
+ * encryption key for the server to be able to retrieve the object's metadata. The
+ * headers are:
+ * - x-amz-server-side-encryption-customer-algorithm
+ * - x-amz-server-side-encryption-customer-key
+ * - x-amz-server-side-encryption-customer-key-MD5
+ * For more information about SSE-C, see Server-Side
+ * Encryption (Using Customer-Provided Encryption Keys) in the
+ * Amazon S3 User Guide.
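A hedged sketch of passing those SSE-C headers (placeholder names; the raw key is supplied as-is on the assumption that the SDK's SSE-C middleware base64-encodes it and derives the -key-MD5 header, which may vary by SDK version):

```ts
import { S3Client, HeadObjectCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});

async function headSseCObject(bucket: string, key: string, rawCustomerKey: string) {
  const { ContentLength, LastModified } = await client.send(
    new HeadObjectCommand({
      Bucket: bucket,
      Key: key,
      SSECustomerAlgorithm: "AES256",
      SSECustomerKey: rawCustomerKey, // raw 256-bit key material; placeholder for illustration
    })
  );
  return { ContentLength, LastModified };
}
```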

+ * + *

+ * Directory bucket - For directory buckets, there are only two supported options for server-side encryption: SSE-S3 and SSE-KMS. SSE-C isn't supported. For more
+ * information, see Protecting data with server-side encryption in the Amazon S3 User Guide.

+ *
+ *
+ *
+ * Versioning
+ * - If the current version of the object is a delete marker, Amazon S3 behaves as
+ *   if the object was deleted and includes x-amz-delete-marker:
+ *   true in the response.
+ * - If the specified version is a delete marker, the response returns a
+ *   405 Method Not Allowed error and the Last-Modified:
+ *   timestamp response header.
+ * - Directory buckets -
+ *   Delete marker is not supported for directory buckets.
+ * - Directory buckets -
+ *   S3 Versioning isn't enabled and supported for directory buckets. For this API operation, only the null value of the version ID is supported by directory buckets. You can only specify null
+ *   to the versionId query parameter in the request.
+ *
+ *
+ * HTTP Host header syntax
+ * Directory buckets - The HTTP Host header syntax is
+ * Bucket-name.s3express-zone-id.region-code.amazonaws.com.
+ *
+ * For directory buckets, you must make requests for this API operation to the Zonal endpoint. These endpoints support virtual-hosted-style requests in the format https://amzn-s3-demo-bucket.s3express-zone-id.region-code.amazonaws.com/key-name. Path-style requests are not supported. For more information about endpoints in Availability Zones, see Regional and Zonal endpoints for directory buckets in Availability Zones in the
+ * Amazon S3 User Guide. For more information about endpoints in Local Zones, see Concepts for directory buckets in Local Zones in the
+ * Amazon S3 User Guide.
+ *
+ *

+ * The following actions are related to HeadObject:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, HeadObjectCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, HeadObjectCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // HeadObjectRequest + * Bucket: "STRING_VALUE", // required + * IfMatch: "STRING_VALUE", + * IfModifiedSince: new Date("TIMESTAMP"), + * IfNoneMatch: "STRING_VALUE", + * IfUnmodifiedSince: new Date("TIMESTAMP"), + * Key: "STRING_VALUE", // required + * Range: "STRING_VALUE", + * ResponseCacheControl: "STRING_VALUE", + * ResponseContentDisposition: "STRING_VALUE", + * ResponseContentEncoding: "STRING_VALUE", + * ResponseContentLanguage: "STRING_VALUE", + * ResponseContentType: "STRING_VALUE", + * ResponseExpires: new Date("TIMESTAMP"), + * VersionId: "STRING_VALUE", + * SSECustomerAlgorithm: "STRING_VALUE", + * SSECustomerKey: "STRING_VALUE", + * SSECustomerKeyMD5: "STRING_VALUE", + * RequestPayer: "requester", + * PartNumber: Number("int"), + * ExpectedBucketOwner: "STRING_VALUE", + * ChecksumMode: "ENABLED", + * }; + * const command = new HeadObjectCommand(input); + * const response = await client.send(command); + * // { // HeadObjectOutput + * // DeleteMarker: true || false, + * // AcceptRanges: "STRING_VALUE", + * // Expiration: "STRING_VALUE", + * // Restore: "STRING_VALUE", + * // ArchiveStatus: "ARCHIVE_ACCESS" || "DEEP_ARCHIVE_ACCESS", + * // LastModified: new Date("TIMESTAMP"), + * // ContentLength: Number("long"), + * // ChecksumCRC32: "STRING_VALUE", + * // ChecksumCRC32C: "STRING_VALUE", + * // ChecksumCRC64NVME: "STRING_VALUE", + * // ChecksumSHA1: "STRING_VALUE", + * // ChecksumSHA256: "STRING_VALUE", + * // ChecksumType: "COMPOSITE" || "FULL_OBJECT", + * // ETag: "STRING_VALUE", + * // MissingMeta: Number("int"), + * // VersionId: "STRING_VALUE", + * // CacheControl: "STRING_VALUE", + * // ContentDisposition: "STRING_VALUE", + * // ContentEncoding: "STRING_VALUE", + * // ContentLanguage: "STRING_VALUE", + * // ContentType: "STRING_VALUE", + * // ContentRange: "STRING_VALUE", + * // Expires: new Date("TIMESTAMP"), + * // ExpiresString: "STRING_VALUE", + * // WebsiteRedirectLocation: "STRING_VALUE", + * // ServerSideEncryption: "AES256" || "aws:kms" || "aws:kms:dsse", + * // Metadata: { // Metadata + * // "": "STRING_VALUE", + * // }, + * // SSECustomerAlgorithm: "STRING_VALUE", + * // SSECustomerKeyMD5: "STRING_VALUE", + * // SSEKMSKeyId: "STRING_VALUE", + * // BucketKeyEnabled: true || false, + * // StorageClass: "STANDARD" || "REDUCED_REDUNDANCY" || "STANDARD_IA" || "ONEZONE_IA" || "INTELLIGENT_TIERING" || "GLACIER" || "DEEP_ARCHIVE" || "OUTPOSTS" || "GLACIER_IR" || "SNOW" || "EXPRESS_ONEZONE", + * // RequestCharged: "requester", + * // ReplicationStatus: "COMPLETE" || "PENDING" || "FAILED" || "REPLICA" || "COMPLETED", + * // PartsCount: Number("int"), + * // ObjectLockMode: "GOVERNANCE" || "COMPLIANCE", + * // ObjectLockRetainUntilDate: new Date("TIMESTAMP"), + * // ObjectLockLegalHoldStatus: "ON" || "OFF", + * // }; + * + * ``` + * + * @param HeadObjectCommandInput - {@link HeadObjectCommandInput} + * @returns {@link HeadObjectCommandOutput} + * @see {@link HeadObjectCommandInput} for command's `input` shape. + * @see {@link HeadObjectCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link NotFound} (client fault) + *

+ * The specified content does not exist.

+ * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.

+ * + * + * @example To retrieve metadata of an object without returning the object itself + * ```javascript + * // The following example retrieves an object metadata. + * const input = { + * Bucket: "examplebucket", + * Key: "HappyFace.jpg" + * }; + * const command = new HeadObjectCommand(input); + * const response = await client.send(command); + * /* response is + * { + * AcceptRanges: "bytes", + * ContentLength: 3191, + * ContentType: "image/jpeg", + * ETag: `"6805f2cfc46c0f04559748bb039d69ae"`, + * LastModified: "2016-12-15T01:19:41.000Z", + * Metadata: { /* empty *\/ }, + * VersionId: "null" + * } + * *\/ + * ``` + * + * @public + */ +export declare class HeadObjectCommand extends HeadObjectCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: HeadObjectRequest; + output: HeadObjectOutput; + }; + sdk: { + input: HeadObjectCommandInput; + output: HeadObjectCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/ListBucketAnalyticsConfigurationsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListBucketAnalyticsConfigurationsCommand.d.ts new file mode 100644 index 00000000..73828af6 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListBucketAnalyticsConfigurationsCommand.d.ts @@ -0,0 +1,148 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListBucketAnalyticsConfigurationsOutput, ListBucketAnalyticsConfigurationsRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListBucketAnalyticsConfigurationsCommand}. + */ +export interface ListBucketAnalyticsConfigurationsCommandInput extends ListBucketAnalyticsConfigurationsRequest { +} +/** + * @public + * + * The output of {@link ListBucketAnalyticsConfigurationsCommand}. + */ +export interface ListBucketAnalyticsConfigurationsCommandOutput extends ListBucketAnalyticsConfigurationsOutput, __MetadataBearer { +} +declare const ListBucketAnalyticsConfigurationsCommand_base: { + new (input: ListBucketAnalyticsConfigurationsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ListBucketAnalyticsConfigurationsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Lists the analytics configurations for the bucket. You can have up to 1,000 analytics
+ * configurations per bucket.
+ *
+ * This action supports list pagination and does not return more than 100 configurations at
+ * a time. You should always check the IsTruncated element in the response. If
+ * there are no more configurations to list, IsTruncated is set to false. If
+ * there are more configurations to list, IsTruncated is set to true, and there
+ * will be a value in NextContinuationToken. You use the
+ * NextContinuationToken value to continue the pagination of the list by
+ * passing the value in continuation-token in the request to GET the next
+ * page.
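A sketch of the IsTruncated / NextContinuationToken loop described above (the helper name is an illustrative assumption), collecting every analytics configuration across pages; the same pattern applies to the other ListBucket*Configurations operations below:

```ts
import {
  S3Client,
  ListBucketAnalyticsConfigurationsCommand,
  type AnalyticsConfiguration,
} from "@aws-sdk/client-s3";

const client = new S3Client({});

async function listAllAnalyticsConfigurations(bucket: string): Promise<AnalyticsConfiguration[]> {
  const configurations: AnalyticsConfiguration[] = [];
  let continuationToken: string | undefined;
  do {
    const page = await client.send(
      new ListBucketAnalyticsConfigurationsCommand({
        Bucket: bucket,
        ContinuationToken: continuationToken,
      })
    );
    configurations.push(...(page.AnalyticsConfigurationList ?? []));
    // While IsTruncated is true, feed NextContinuationToken back as continuation-token.
    continuationToken = page.IsTruncated ? page.NextContinuationToken : undefined;
  } while (continuationToken);
  return configurations;
}
```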

+ *

+ * To use this operation, you must have permissions to perform the
+ * s3:GetAnalyticsConfiguration action. The bucket owner has this permission
+ * by default. The bucket owner can grant this permission to others. For more information
+ * about permissions, see Permissions Related to Bucket Subresource Operations and Managing
+ * Access Permissions to Your Amazon S3 Resources.
+ *
+ * For information about the Amazon S3 analytics feature, see Amazon S3 Analytics – Storage Class
+ * Analysis.
+ *
+ * The following operations are related to
+ * ListBucketAnalyticsConfigurations:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, ListBucketAnalyticsConfigurationsCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, ListBucketAnalyticsConfigurationsCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // ListBucketAnalyticsConfigurationsRequest + * Bucket: "STRING_VALUE", // required + * ContinuationToken: "STRING_VALUE", + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new ListBucketAnalyticsConfigurationsCommand(input); + * const response = await client.send(command); + * // { // ListBucketAnalyticsConfigurationsOutput + * // IsTruncated: true || false, + * // ContinuationToken: "STRING_VALUE", + * // NextContinuationToken: "STRING_VALUE", + * // AnalyticsConfigurationList: [ // AnalyticsConfigurationList + * // { // AnalyticsConfiguration + * // Id: "STRING_VALUE", // required + * // Filter: { // AnalyticsFilter Union: only one key present + * // Prefix: "STRING_VALUE", + * // Tag: { // Tag + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // And: { // AnalyticsAndOperator + * // Prefix: "STRING_VALUE", + * // Tags: [ // TagSet + * // { + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // ], + * // }, + * // }, + * // StorageClassAnalysis: { // StorageClassAnalysis + * // DataExport: { // StorageClassAnalysisDataExport + * // OutputSchemaVersion: "V_1", // required + * // Destination: { // AnalyticsExportDestination + * // S3BucketDestination: { // AnalyticsS3BucketDestination + * // Format: "CSV", // required + * // BucketAccountId: "STRING_VALUE", + * // Bucket: "STRING_VALUE", // required + * // Prefix: "STRING_VALUE", + * // }, + * // }, + * // }, + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param ListBucketAnalyticsConfigurationsCommandInput - {@link ListBucketAnalyticsConfigurationsCommandInput} + * @returns {@link ListBucketAnalyticsConfigurationsCommandOutput} + * @see {@link ListBucketAnalyticsConfigurationsCommandInput} for command's `input` shape. + * @see {@link ListBucketAnalyticsConfigurationsCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class ListBucketAnalyticsConfigurationsCommand extends ListBucketAnalyticsConfigurationsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListBucketAnalyticsConfigurationsRequest; + output: ListBucketAnalyticsConfigurationsOutput; + }; + sdk: { + input: ListBucketAnalyticsConfigurationsCommandInput; + output: ListBucketAnalyticsConfigurationsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/ListBucketIntelligentTieringConfigurationsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListBucketIntelligentTieringConfigurationsCommand.d.ts new file mode 100644 index 00000000..cac0a30b --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListBucketIntelligentTieringConfigurationsCommand.d.ts @@ -0,0 +1,127 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListBucketIntelligentTieringConfigurationsOutput, ListBucketIntelligentTieringConfigurationsRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListBucketIntelligentTieringConfigurationsCommand}. + */ +export interface ListBucketIntelligentTieringConfigurationsCommandInput extends ListBucketIntelligentTieringConfigurationsRequest { +} +/** + * @public + * + * The output of {@link ListBucketIntelligentTieringConfigurationsCommand}. + */ +export interface ListBucketIntelligentTieringConfigurationsCommandOutput extends ListBucketIntelligentTieringConfigurationsOutput, __MetadataBearer { +} +declare const ListBucketIntelligentTieringConfigurationsCommand_base: { + new (input: ListBucketIntelligentTieringConfigurationsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ListBucketIntelligentTieringConfigurationsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Lists the S3 Intelligent-Tiering configurations from the specified bucket.
+ *
+ * The S3 Intelligent-Tiering storage class is designed to optimize storage costs by automatically moving data to the most cost-effective storage access tier, without performance impact or operational overhead. S3 Intelligent-Tiering delivers automatic cost savings in three low latency and high throughput access tiers. To get the lowest storage cost on data that can be accessed in minutes to hours, you can choose to activate additional archiving capabilities.
+ *
+ * The S3 Intelligent-Tiering storage class is the ideal storage class for data with unknown, changing, or unpredictable access patterns, independent of object size or retention period. If the size of an object is less than 128 KB, it is not monitored and not eligible for auto-tiering. Smaller objects can be stored, but they are always charged at the Frequent Access tier rates in the S3 Intelligent-Tiering storage class.
+ *
+ * For more information, see Storage class for automatically optimizing frequently and infrequently accessed objects.
+ *
+ * Operations related to ListBucketIntelligentTieringConfigurations include:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, ListBucketIntelligentTieringConfigurationsCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, ListBucketIntelligentTieringConfigurationsCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // ListBucketIntelligentTieringConfigurationsRequest + * Bucket: "STRING_VALUE", // required + * ContinuationToken: "STRING_VALUE", + * }; + * const command = new ListBucketIntelligentTieringConfigurationsCommand(input); + * const response = await client.send(command); + * // { // ListBucketIntelligentTieringConfigurationsOutput + * // IsTruncated: true || false, + * // ContinuationToken: "STRING_VALUE", + * // NextContinuationToken: "STRING_VALUE", + * // IntelligentTieringConfigurationList: [ // IntelligentTieringConfigurationList + * // { // IntelligentTieringConfiguration + * // Id: "STRING_VALUE", // required + * // Filter: { // IntelligentTieringFilter + * // Prefix: "STRING_VALUE", + * // Tag: { // Tag + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // And: { // IntelligentTieringAndOperator + * // Prefix: "STRING_VALUE", + * // Tags: [ // TagSet + * // { + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // ], + * // }, + * // }, + * // Status: "Enabled" || "Disabled", // required + * // Tierings: [ // TieringList // required + * // { // Tiering + * // Days: Number("int"), // required + * // AccessTier: "ARCHIVE_ACCESS" || "DEEP_ARCHIVE_ACCESS", // required + * // }, + * // ], + * // }, + * // ], + * // }; + * + * ``` + * + * @param ListBucketIntelligentTieringConfigurationsCommandInput - {@link ListBucketIntelligentTieringConfigurationsCommandInput} + * @returns {@link ListBucketIntelligentTieringConfigurationsCommandOutput} + * @see {@link ListBucketIntelligentTieringConfigurationsCommandInput} for command's `input` shape. + * @see {@link ListBucketIntelligentTieringConfigurationsCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class ListBucketIntelligentTieringConfigurationsCommand extends ListBucketIntelligentTieringConfigurationsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListBucketIntelligentTieringConfigurationsRequest; + output: ListBucketIntelligentTieringConfigurationsOutput; + }; + sdk: { + input: ListBucketIntelligentTieringConfigurationsCommandInput; + output: ListBucketIntelligentTieringConfigurationsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/ListBucketInventoryConfigurationsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListBucketInventoryConfigurationsCommand.d.ts new file mode 100644 index 00000000..1e25e136 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListBucketInventoryConfigurationsCommand.d.ts @@ -0,0 +1,143 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListBucketInventoryConfigurationsOutput, ListBucketInventoryConfigurationsRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListBucketInventoryConfigurationsCommand}. + */ +export interface ListBucketInventoryConfigurationsCommandInput extends ListBucketInventoryConfigurationsRequest { +} +/** + * @public + * + * The output of {@link ListBucketInventoryConfigurationsCommand}. + */ +export interface ListBucketInventoryConfigurationsCommandOutput extends ListBucketInventoryConfigurationsOutput, __MetadataBearer { +} +declare const ListBucketInventoryConfigurationsCommand_base: { + new (input: ListBucketInventoryConfigurationsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ListBucketInventoryConfigurationsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Returns a list of inventory configurations for the bucket. You can have up to 1,000
+ * inventory configurations per bucket.
+ *
+ * This action supports list pagination and does not return more than 100 configurations at
+ * a time. Always check the IsTruncated element in the response. If there are no
+ * more configurations to list, IsTruncated is set to false. If there are more
+ * configurations to list, IsTruncated is set to true, and there is a value in
+ * NextContinuationToken. You use the NextContinuationToken value
+ * to continue the pagination of the list by passing the value in continuation-token in the
+ * request to GET the next page.
+ *
+ * To use this operation, you must have permissions to perform the
+ * s3:GetInventoryConfiguration action. The bucket owner has this permission
+ * by default. The bucket owner can grant this permission to others. For more information
+ * about permissions, see Permissions Related to Bucket Subresource Operations and Managing
+ * Access Permissions to Your Amazon S3 Resources.
+ *
+ * For information about the Amazon S3 inventory feature, see Amazon S3 Inventory.
+ *
+ * The following operations are related to
+ * ListBucketInventoryConfigurations:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, ListBucketInventoryConfigurationsCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, ListBucketInventoryConfigurationsCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // ListBucketInventoryConfigurationsRequest + * Bucket: "STRING_VALUE", // required + * ContinuationToken: "STRING_VALUE", + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new ListBucketInventoryConfigurationsCommand(input); + * const response = await client.send(command); + * // { // ListBucketInventoryConfigurationsOutput + * // ContinuationToken: "STRING_VALUE", + * // InventoryConfigurationList: [ // InventoryConfigurationList + * // { // InventoryConfiguration + * // Destination: { // InventoryDestination + * // S3BucketDestination: { // InventoryS3BucketDestination + * // AccountId: "STRING_VALUE", + * // Bucket: "STRING_VALUE", // required + * // Format: "CSV" || "ORC" || "Parquet", // required + * // Prefix: "STRING_VALUE", + * // Encryption: { // InventoryEncryption + * // SSES3: {}, + * // SSEKMS: { // SSEKMS + * // KeyId: "STRING_VALUE", // required + * // }, + * // }, + * // }, + * // }, + * // IsEnabled: true || false, // required + * // Filter: { // InventoryFilter + * // Prefix: "STRING_VALUE", // required + * // }, + * // Id: "STRING_VALUE", // required + * // IncludedObjectVersions: "All" || "Current", // required + * // OptionalFields: [ // InventoryOptionalFields + * // "Size" || "LastModifiedDate" || "StorageClass" || "ETag" || "IsMultipartUploaded" || "ReplicationStatus" || "EncryptionStatus" || "ObjectLockRetainUntilDate" || "ObjectLockMode" || "ObjectLockLegalHoldStatus" || "IntelligentTieringAccessTier" || "BucketKeyStatus" || "ChecksumAlgorithm" || "ObjectAccessControlList" || "ObjectOwner", + * // ], + * // Schedule: { // InventorySchedule + * // Frequency: "Daily" || "Weekly", // required + * // }, + * // }, + * // ], + * // IsTruncated: true || false, + * // NextContinuationToken: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListBucketInventoryConfigurationsCommandInput - {@link ListBucketInventoryConfigurationsCommandInput} + * @returns {@link ListBucketInventoryConfigurationsCommandOutput} + * @see {@link ListBucketInventoryConfigurationsCommandInput} for command's `input` shape. + * @see {@link ListBucketInventoryConfigurationsCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class ListBucketInventoryConfigurationsCommand extends ListBucketInventoryConfigurationsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListBucketInventoryConfigurationsRequest; + output: ListBucketInventoryConfigurationsOutput; + }; + sdk: { + input: ListBucketInventoryConfigurationsCommandInput; + output: ListBucketInventoryConfigurationsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/ListBucketMetricsConfigurationsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListBucketMetricsConfigurationsCommand.d.ts new file mode 100644 index 00000000..a2c53d86 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListBucketMetricsConfigurationsCommand.d.ts @@ -0,0 +1,137 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListBucketMetricsConfigurationsOutput, ListBucketMetricsConfigurationsRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListBucketMetricsConfigurationsCommand}. + */ +export interface ListBucketMetricsConfigurationsCommandInput extends ListBucketMetricsConfigurationsRequest { +} +/** + * @public + * + * The output of {@link ListBucketMetricsConfigurationsCommand}. + */ +export interface ListBucketMetricsConfigurationsCommandOutput extends ListBucketMetricsConfigurationsOutput, __MetadataBearer { +} +declare const ListBucketMetricsConfigurationsCommand_base: { + new (input: ListBucketMetricsConfigurationsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ListBucketMetricsConfigurationsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

This operation is not supported for directory buckets.

+ *
+ *

Lists the metrics configurations for the bucket. The metrics configurations are only for + * the request metrics of the bucket and do not provide information on daily storage metrics. + * You can have up to 1,000 configurations per bucket.

+ *

This action supports list pagination and does not return more than 100 configurations at + * a time. Always check the IsTruncated element in the response. If there are no + * more configurations to list, IsTruncated is set to false. If there are more + * configurations to list, IsTruncated is set to true, and there is a value in + * NextContinuationToken. You use the NextContinuationToken value + * to continue the pagination of the list by passing the value in + * continuation-token in the request to GET the next page.

+ *

To use this operation, you must have permissions to perform the + * s3:GetMetricsConfiguration action. The bucket owner has this permission by + * default. The bucket owner can grant this permission to others. For more information about + * permissions, see Permissions Related to Bucket Subresource Operations and Managing + * Access Permissions to Your Amazon S3 Resources.

+ *

For more information about metrics configurations and CloudWatch request metrics, see + * Monitoring Metrics with Amazon CloudWatch.
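+ *
+ * A short sketch of reading the returned configurations (illustrative only;
+ * the bucket name is a placeholder, and Filter is a union, so at most one of
+ * its keys is present on any configuration):
+ * ```javascript
+ * const { MetricsConfigurationList = [] } = await client.send(
+ *   new ListBucketMetricsConfigurationsCommand({ Bucket: "amzn-s3-demo-bucket" })
+ * );
+ * for (const cfg of MetricsConfigurationList) {
+ *   const f = cfg.Filter;
+ *   if (!f) console.log(cfg.Id, "applies to the whole bucket");
+ *   else if (f.Prefix !== undefined) console.log(cfg.Id, "filters by prefix", f.Prefix);
+ *   else if (f.Tag) console.log(cfg.Id, "filters by tag", f.Tag.Key);
+ *   else if (f.AccessPointArn) console.log(cfg.Id, "filters by access point");
+ *   else if (f.And) console.log(cfg.Id, "combines filters with And");
+ * }
+ * ```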

+ *

The following operations are related to + * ListBucketMetricsConfigurations:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, ListBucketMetricsConfigurationsCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, ListBucketMetricsConfigurationsCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // ListBucketMetricsConfigurationsRequest + * Bucket: "STRING_VALUE", // required + * ContinuationToken: "STRING_VALUE", + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new ListBucketMetricsConfigurationsCommand(input); + * const response = await client.send(command); + * // { // ListBucketMetricsConfigurationsOutput + * // IsTruncated: true || false, + * // ContinuationToken: "STRING_VALUE", + * // NextContinuationToken: "STRING_VALUE", + * // MetricsConfigurationList: [ // MetricsConfigurationList + * // { // MetricsConfiguration + * // Id: "STRING_VALUE", // required + * // Filter: { // MetricsFilter Union: only one key present + * // Prefix: "STRING_VALUE", + * // Tag: { // Tag + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // AccessPointArn: "STRING_VALUE", + * // And: { // MetricsAndOperator + * // Prefix: "STRING_VALUE", + * // Tags: [ // TagSet + * // { + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // ], + * // AccessPointArn: "STRING_VALUE", + * // }, + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param ListBucketMetricsConfigurationsCommandInput - {@link ListBucketMetricsConfigurationsCommandInput} + * @returns {@link ListBucketMetricsConfigurationsCommandOutput} + * @see {@link ListBucketMetricsConfigurationsCommandInput} for command's `input` shape. + * @see {@link ListBucketMetricsConfigurationsCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class ListBucketMetricsConfigurationsCommand extends ListBucketMetricsConfigurationsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListBucketMetricsConfigurationsRequest; + output: ListBucketMetricsConfigurationsOutput; + }; + sdk: { + input: ListBucketMetricsConfigurationsCommandInput; + output: ListBucketMetricsConfigurationsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/ListBucketsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListBucketsCommand.d.ts new file mode 100644 index 00000000..ca12226f --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListBucketsCommand.d.ts @@ -0,0 +1,130 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListBucketsOutput, ListBucketsRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListBucketsCommand}. + */ +export interface ListBucketsCommandInput extends ListBucketsRequest { +} +/** + * @public + * + * The output of {@link ListBucketsCommand}. + */ +export interface ListBucketsCommandOutput extends ListBucketsOutput, __MetadataBearer { +} +declare const ListBucketsCommand_base: { + new (input: ListBucketsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [ListBucketsCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

This operation is not supported for directory buckets.

+ *
+ *

Returns a list of all buckets owned by the authenticated sender of the request. To grant IAM permission to use + * this operation, you must add the s3:ListAllMyBuckets policy action.

+ *

For information about Amazon S3 buckets, see Creating, configuring, and + * working with Amazon S3 buckets.

+ * + *

We strongly recommend using only paginated ListBuckets requests. Unpaginated ListBuckets requests are only supported for + * Amazon Web Services accounts set to the default general purpose bucket quota of 10,000. If you have an approved + * general purpose bucket quota above 10,000, you must send paginated ListBuckets requests to list your account’s buckets. + * All unpaginated ListBuckets requests will be rejected for Amazon Web Services accounts with a general purpose bucket quota + * greater than 10,000.
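+ *
+ * A minimal sketch of such a paginated listing (illustrative only; assumes a
+ * configured client as in the example below):
+ * ```javascript
+ * // Page through all general purpose buckets, 100 at a time.
+ * const buckets = [];
+ * let token = undefined;
+ * do {
+ *   const page = await client.send(new ListBucketsCommand({
+ *     MaxBuckets: 100,
+ *     ContinuationToken: token,
+ *   }));
+ *   buckets.push(...(page.Buckets ?? []));
+ *   token = page.ContinuationToken; // present only when more buckets remain
+ * } while (token);
+ * ```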

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, ListBucketsCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, ListBucketsCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // ListBucketsRequest + * MaxBuckets: Number("int"), + * ContinuationToken: "STRING_VALUE", + * Prefix: "STRING_VALUE", + * BucketRegion: "STRING_VALUE", + * }; + * const command = new ListBucketsCommand(input); + * const response = await client.send(command); + * // { // ListBucketsOutput + * // Buckets: [ // Buckets + * // { // Bucket + * // Name: "STRING_VALUE", + * // CreationDate: new Date("TIMESTAMP"), + * // BucketRegion: "STRING_VALUE", + * // }, + * // ], + * // Owner: { // Owner + * // DisplayName: "STRING_VALUE", + * // ID: "STRING_VALUE", + * // }, + * // ContinuationToken: "STRING_VALUE", + * // Prefix: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListBucketsCommandInput - {@link ListBucketsCommandInput} + * @returns {@link ListBucketsCommandOutput} + * @see {@link ListBucketsCommandInput} for command's `input` shape. + * @see {@link ListBucketsCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @example To list all buckets + * ```javascript + * // The following example returns all the buckets owned by the sender of this request. + * const input = { /* empty *\/ }; + * const command = new ListBucketsCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Buckets: [ + * { + * CreationDate: "2012-02-15T21:03:02.000Z", + * Name: "examplebucket" + * }, + * { + * CreationDate: "2011-07-24T19:33:50.000Z", + * Name: "examplebucket2" + * }, + * { + * CreationDate: "2010-12-17T00:56:49.000Z", + * Name: "examplebucket3" + * } + * ], + * Owner: { + * DisplayName: "own-display-name", + * ID: "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31" + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class ListBucketsCommand extends ListBucketsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListBucketsRequest; + output: ListBucketsOutput; + }; + sdk: { + input: ListBucketsCommandInput; + output: ListBucketsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/ListDirectoryBucketsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListDirectoryBucketsCommand.d.ts new file mode 100644 index 00000000..6d5f9681 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListDirectoryBucketsCommand.d.ts @@ -0,0 +1,108 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListDirectoryBucketsOutput, ListDirectoryBucketsRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListDirectoryBucketsCommand}. + */ +export interface ListDirectoryBucketsCommandInput extends ListDirectoryBucketsRequest { +} +/** + * @public + * + * The output of {@link ListDirectoryBucketsCommand}. + */ +export interface ListDirectoryBucketsCommandOutput extends ListDirectoryBucketsOutput, __MetadataBearer { +} +declare const ListDirectoryBucketsCommand_base: { + new (input: ListDirectoryBucketsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [ListDirectoryBucketsCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns a list of all Amazon S3 directory buckets owned by the authenticated sender of the + * request. For more information about directory buckets, see Directory buckets in the Amazon S3 User Guide.

+ * + *

+ * Directory buckets - For directory buckets, you must make requests for this API operation to the Regional endpoint. These endpoints support path-style requests in the format https://s3express-control.region-code.amazonaws.com/bucket-name + * . Virtual-hosted-style requests aren't supported. + * For more information about endpoints in Availability Zones, see Regional and Zonal endpoints for directory buckets in Availability Zones in the + * Amazon S3 User Guide. For more information about endpoints in Local Zones, see Concepts for directory buckets in Local Zones in the + * Amazon S3 User Guide.

+ *
+ *
+ *
Permissions
+ *
+ *

You must have the s3express:ListAllMyDirectoryBuckets permission + * in an IAM identity-based policy instead of a bucket policy. Cross-account access to this API operation isn't supported. This operation can only be performed by the Amazon Web Services account that owns the resource. + * For more information about directory bucket policies and permissions, see Amazon Web Services Identity and Access Management (IAM) for S3 Express One Zone in the Amazon S3 User Guide.

+ *
+ *
HTTP Host header syntax
+ *
+ *

+ * Directory buckets - The HTTP Host + * header syntax is + * s3express-control.region.amazonaws.com.

+ *
+ *
+ * + *

The BucketRegion response element is not part of the + * ListDirectoryBuckets Response Syntax.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, ListDirectoryBucketsCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, ListDirectoryBucketsCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // ListDirectoryBucketsRequest + * ContinuationToken: "STRING_VALUE", + * MaxDirectoryBuckets: Number("int"), + * }; + * const command = new ListDirectoryBucketsCommand(input); + * const response = await client.send(command); + * // { // ListDirectoryBucketsOutput + * // Buckets: [ // Buckets + * // { // Bucket + * // Name: "STRING_VALUE", + * // CreationDate: new Date("TIMESTAMP"), + * // BucketRegion: "STRING_VALUE", + * // }, + * // ], + * // ContinuationToken: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListDirectoryBucketsCommandInput - {@link ListDirectoryBucketsCommandInput} + * @returns {@link ListDirectoryBucketsCommandOutput} + * @see {@link ListDirectoryBucketsCommandInput} for command's `input` shape. + * @see {@link ListDirectoryBucketsCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class ListDirectoryBucketsCommand extends ListDirectoryBucketsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListDirectoryBucketsRequest; + output: ListDirectoryBucketsOutput; + }; + sdk: { + input: ListDirectoryBucketsCommandInput; + output: ListDirectoryBucketsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/ListMultipartUploadsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListMultipartUploadsCommand.d.ts new file mode 100644 index 00000000..27010639 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListMultipartUploadsCommand.d.ts @@ -0,0 +1,344 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListMultipartUploadsOutput, ListMultipartUploadsRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListMultipartUploadsCommand}. + */ +export interface ListMultipartUploadsCommandInput extends ListMultipartUploadsRequest { +} +/** + * @public + * + * The output of {@link ListMultipartUploadsCommand}. + */ +export interface ListMultipartUploadsCommandOutput extends ListMultipartUploadsOutput, __MetadataBearer { +} +declare const ListMultipartUploadsCommand_base: { + new (input: ListMultipartUploadsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ListMultipartUploadsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

This operation lists in-progress multipart uploads in a bucket. An in-progress multipart + * upload is a multipart upload that has been initiated by the + * CreateMultipartUpload request, but has not yet been completed or + * aborted.

+ * + *

+ * Directory buckets - If multipart uploads in + * a directory bucket are in progress, you can't delete the bucket until all the + * in-progress multipart uploads are aborted or completed. To delete these in-progress + * multipart uploads, use the ListMultipartUploads operation to list the + * in-progress multipart uploads in the bucket and use the + * AbortMultipartUpload operation to abort all the in-progress multipart + * uploads.

+ *
+ *

The ListMultipartUploads operation returns a maximum of 1,000 multipart + * uploads in the response. The limit of 1,000 multipart uploads is also the default value. + * You can further limit the number of uploads in a response by specifying the + * max-uploads request parameter. If there are more than 1,000 multipart + * uploads that satisfy your ListMultipartUploads request, the response returns + * an IsTruncated element with the value of true, a + * NextKeyMarker element, and a NextUploadIdMarker element. To + * list the remaining multipart uploads, you need to make subsequent + * ListMultipartUploads requests. In these requests, include two query + * parameters: key-marker and upload-id-marker. Set the value of + * key-marker to the NextKeyMarker value from the previous + * response. Similarly, set the value of upload-id-marker to the + * NextUploadIdMarker value from the previous response.
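+ *
+ * A minimal sketch of that marker-based pagination (illustrative only; the
+ * bucket name is a placeholder, and for directory buckets you would carry
+ * forward only key-marker, as the note below explains):
+ * ```javascript
+ * // Page through in-progress multipart uploads with key-marker / upload-id-marker.
+ * const uploads = [];
+ * let keyMarker, uploadIdMarker;
+ * do {
+ *   const page = await client.send(new ListMultipartUploadsCommand({
+ *     Bucket: "amzn-s3-demo-bucket", // placeholder bucket name
+ *     KeyMarker: keyMarker,
+ *     UploadIdMarker: uploadIdMarker,
+ *   }));
+ *   uploads.push(...(page.Uploads ?? []));
+ *   keyMarker = page.IsTruncated ? page.NextKeyMarker : undefined;
+ *   uploadIdMarker = page.IsTruncated ? page.NextUploadIdMarker : undefined;
+ * } while (keyMarker || uploadIdMarker);
+ * ```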

+ * + *

+ * Directory buckets - The + * upload-id-marker element and the NextUploadIdMarker element + * aren't supported by directory buckets. To list the additional multipart uploads, you + * only need to set the value of key-marker to the NextKeyMarker + * value from the previous response.

+ *
+ *

For more information about multipart uploads, see Uploading Objects Using Multipart + * Upload in the Amazon S3 User Guide.

+ * + *

+ * Directory buckets - + * For directory buckets, you must make requests for this API operation to the Zonal endpoint. These endpoints support virtual-hosted-style requests in the format https://amzn-s3-demo-bucket.s3express-zone-id.region-code.amazonaws.com/key-name + * . Path-style requests are not supported. For more information about endpoints in Availability Zones, see Regional and Zonal endpoints for directory buckets in Availability Zones in the + * Amazon S3 User Guide. For more information about endpoints in Local Zones, see Concepts for directory buckets in Local Zones in the + * Amazon S3 User Guide.

+ *
+ *
+ *
Permissions
+ *
+ *
+ *   • General purpose bucket permissions - For information about permissions
+ *     required to use the multipart upload API, see Multipart Upload and
+ *     Permissions in the Amazon S3 User Guide.
+ *   • Directory bucket permissions - To grant access to this API operation on a
+ *     directory bucket, we recommend that you use the CreateSession API operation
+ *     for session-based authorization. Specifically, you grant the
+ *     s3express:CreateSession permission to the directory bucket in a bucket
+ *     policy or an IAM identity-based policy. Then, you make the CreateSession
+ *     API call on the bucket to obtain a session token. With the session token in
+ *     your request header, you can make API requests to this operation. After the
+ *     session token expires, you make another CreateSession API call to generate
+ *     a new session token for use. The Amazon Web Services CLI and SDKs create
+ *     the session and refresh the session token automatically to avoid service
+ *     interruptions when a session expires. For more information about
+ *     authorization, see CreateSession.
+ *
+ *
Sorting of multipart uploads in response
+ *
+ *
+ *   • General purpose bucket - In the ListMultipartUploads response, the
+ *     multipart uploads are sorted based on two criteria:
+ *       • Key-based sorting - Multipart uploads are initially sorted in
+ *         ascending order based on their object keys.
+ *       • Time-based sorting - For uploads that share the same object key, they
+ *         are further sorted in ascending order based on the upload initiation
+ *         time. Among uploads with the same key, the one that was initiated
+ *         first will appear before the ones that were initiated later.
+ *   • Directory bucket - In the ListMultipartUploads response, the multipart
+ *     uploads aren't sorted lexicographically based on the object keys.
+ *
+ *
HTTP Host header syntax
+ *
+ *

+ * Directory buckets - The HTTP Host header syntax is + * Bucket-name.s3express-zone-id.region-code.amazonaws.com.

+ *
+ *
+ *

The following operations are related to ListMultipartUploads:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, ListMultipartUploadsCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, ListMultipartUploadsCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // ListMultipartUploadsRequest + * Bucket: "STRING_VALUE", // required + * Delimiter: "STRING_VALUE", + * EncodingType: "url", + * KeyMarker: "STRING_VALUE", + * MaxUploads: Number("int"), + * Prefix: "STRING_VALUE", + * UploadIdMarker: "STRING_VALUE", + * ExpectedBucketOwner: "STRING_VALUE", + * RequestPayer: "requester", + * }; + * const command = new ListMultipartUploadsCommand(input); + * const response = await client.send(command); + * // { // ListMultipartUploadsOutput + * // Bucket: "STRING_VALUE", + * // KeyMarker: "STRING_VALUE", + * // UploadIdMarker: "STRING_VALUE", + * // NextKeyMarker: "STRING_VALUE", + * // Prefix: "STRING_VALUE", + * // Delimiter: "STRING_VALUE", + * // NextUploadIdMarker: "STRING_VALUE", + * // MaxUploads: Number("int"), + * // IsTruncated: true || false, + * // Uploads: [ // MultipartUploadList + * // { // MultipartUpload + * // UploadId: "STRING_VALUE", + * // Key: "STRING_VALUE", + * // Initiated: new Date("TIMESTAMP"), + * // StorageClass: "STANDARD" || "REDUCED_REDUNDANCY" || "STANDARD_IA" || "ONEZONE_IA" || "INTELLIGENT_TIERING" || "GLACIER" || "DEEP_ARCHIVE" || "OUTPOSTS" || "GLACIER_IR" || "SNOW" || "EXPRESS_ONEZONE", + * // Owner: { // Owner + * // DisplayName: "STRING_VALUE", + * // ID: "STRING_VALUE", + * // }, + * // Initiator: { // Initiator + * // ID: "STRING_VALUE", + * // DisplayName: "STRING_VALUE", + * // }, + * // ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * // ChecksumType: "COMPOSITE" || "FULL_OBJECT", + * // }, + * // ], + * // CommonPrefixes: [ // CommonPrefixList + * // { // CommonPrefix + * // Prefix: "STRING_VALUE", + * // }, + * // ], + * // EncodingType: "url", + * // RequestCharged: "requester", + * // }; + * + * ``` + * + * @param ListMultipartUploadsCommandInput - {@link ListMultipartUploadsCommandInput} + * @returns {@link ListMultipartUploadsCommandOutput} + * @see {@link ListMultipartUploadsCommandInput} for command's `input` shape. + * @see {@link ListMultipartUploadsCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @example List next set of multipart uploads when previous result is truncated + * ```javascript + * // The following example specifies the upload-id-marker and key-marker from previous truncated response to retrieve next setup of multipart uploads. + * const input = { + * Bucket: "examplebucket", + * KeyMarker: "nextkeyfrompreviousresponse", + * MaxUploads: 2, + * UploadIdMarker: "valuefrompreviousresponse" + * }; + * const command = new ListMultipartUploadsCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Bucket: "acl1", + * IsTruncated: true, + * KeyMarker: "", + * MaxUploads: 2, + * NextKeyMarker: "someobjectkey", + * NextUploadIdMarker: "examplelo91lv1iwvWpvCiJWugw2xXLPAD7Z8cJyX9.WiIRgNrdG6Ldsn.9FtS63TCl1Uf5faTB.1U5Ckcbmdw--", + * UploadIdMarker: "", + * Uploads: [ + * { + * Initiated: "2014-05-01T05:40:58.000Z", + * Initiator: { + * DisplayName: "ownder-display-name", + * ID: "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" + * }, + * Key: "JavaFile", + * Owner: { + * DisplayName: "mohanataws", + * ID: "852b113e7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" + * }, + * StorageClass: "STANDARD", + * UploadId: "gZ30jIqlUa.CInXklLQtSMJITdUnoZ1Y5GACB5UckOtspm5zbDMCkPF_qkfZzMiFZ6dksmcnqxJyIBvQMG9X9Q--" + * }, + * { + * Initiated: "2014-05-01T05:41:27.000Z", + * Initiator: { + * DisplayName: "ownder-display-name", + * ID: "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" + * }, + * Key: "JavaFile", + * Owner: { + * DisplayName: "ownder-display-name", + * ID: "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" + * }, + * StorageClass: "STANDARD", + * UploadId: "b7tZSqIlo91lv1iwvWpvCiJWugw2xXLPAD7Z8cJyX9.WiIRgNrdG6Ldsn.9FtS63TCl1Uf5faTB.1U5Ckcbmdw--" + * } + * ] + * } + * *\/ + * ``` + * + * @example To list in-progress multipart uploads on a bucket + * ```javascript + * // The following example lists in-progress multipart uploads on a specific bucket. + * const input = { + * Bucket: "examplebucket" + * }; + * const command = new ListMultipartUploadsCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Uploads: [ + * { + * Initiated: "2014-05-01T05:40:58.000Z", + * Initiator: { + * DisplayName: "display-name", + * ID: "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" + * }, + * Key: "JavaFile", + * Owner: { + * DisplayName: "display-name", + * ID: "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" + * }, + * StorageClass: "STANDARD", + * UploadId: "examplelUa.CInXklLQtSMJITdUnoZ1Y5GACB5UckOtspm5zbDMCkPF_qkfZzMiFZ6dksmcnqxJyIBvQMG9X9Q--" + * }, + * { + * Initiated: "2014-05-01T05:41:27.000Z", + * Initiator: { + * DisplayName: "display-name", + * ID: "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" + * }, + * Key: "JavaFile", + * Owner: { + * DisplayName: "display-name", + * ID: "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" + * }, + * StorageClass: "STANDARD", + * UploadId: "examplelo91lv1iwvWpvCiJWugw2xXLPAD7Z8cJyX9.WiIRgNrdG6Ldsn.9FtS63TCl1Uf5faTB.1U5Ckcbmdw--" + * } + * ] + * } + * *\/ + * ``` + * + * @public + */ +export declare class ListMultipartUploadsCommand extends ListMultipartUploadsCommand_base { + /** @internal type navigation helper, not in runtime. 
*/ + protected static __types: { + api: { + input: ListMultipartUploadsRequest; + output: ListMultipartUploadsOutput; + }; + sdk: { + input: ListMultipartUploadsCommandInput; + output: ListMultipartUploadsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/ListObjectVersionsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListObjectVersionsCommand.d.ts new file mode 100644 index 00000000..c1d15044 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListObjectVersionsCommand.d.ts @@ -0,0 +1,214 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListObjectVersionsOutput, ListObjectVersionsRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListObjectVersionsCommand}. + */ +export interface ListObjectVersionsCommandInput extends ListObjectVersionsRequest { +} +/** + * @public + * + * The output of {@link ListObjectVersionsCommand}. + */ +export interface ListObjectVersionsCommandOutput extends ListObjectVersionsOutput, __MetadataBearer { +} +declare const ListObjectVersionsCommand_base: { + new (input: ListObjectVersionsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ListObjectVersionsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

This operation is not supported for directory buckets.

+ *
+ *

Returns metadata about all versions of the objects in a bucket. You can also use request + * parameters as selection criteria to return metadata about a subset of all the object + * versions.

+ * + *

To use this operation, you must have permission to perform the + * s3:ListBucketVersions action. Be aware of the name difference.

+ *
+ * + *

A 200 OK response can contain valid or invalid XML. Make sure to design + * your application to parse the contents of the response and handle it + * appropriately.

+ *
+ *

To use this operation, you must have READ access to the bucket.
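+ *
+ * A minimal sketch of walking every version and delete marker (illustrative
+ * only; the bucket name is a placeholder):
+ * ```javascript
+ * // Page through all object versions with key-marker / version-id-marker.
+ * let keyMarker, versionIdMarker;
+ * do {
+ *   const page = await client.send(new ListObjectVersionsCommand({
+ *     Bucket: "amzn-s3-demo-bucket", // placeholder bucket name
+ *     KeyMarker: keyMarker,
+ *     VersionIdMarker: versionIdMarker,
+ *   }));
+ *   for (const v of page.Versions ?? []) console.log("version", v.Key, v.VersionId);
+ *   for (const d of page.DeleteMarkers ?? []) console.log("delete marker", d.Key, d.VersionId);
+ *   keyMarker = page.IsTruncated ? page.NextKeyMarker : undefined;
+ *   versionIdMarker = page.IsTruncated ? page.NextVersionIdMarker : undefined;
+ * } while (keyMarker || versionIdMarker);
+ * ```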

+ *

The following operations are related to ListObjectVersions:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, ListObjectVersionsCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, ListObjectVersionsCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // ListObjectVersionsRequest + * Bucket: "STRING_VALUE", // required + * Delimiter: "STRING_VALUE", + * EncodingType: "url", + * KeyMarker: "STRING_VALUE", + * MaxKeys: Number("int"), + * Prefix: "STRING_VALUE", + * VersionIdMarker: "STRING_VALUE", + * ExpectedBucketOwner: "STRING_VALUE", + * RequestPayer: "requester", + * OptionalObjectAttributes: [ // OptionalObjectAttributesList + * "RestoreStatus", + * ], + * }; + * const command = new ListObjectVersionsCommand(input); + * const response = await client.send(command); + * // { // ListObjectVersionsOutput + * // IsTruncated: true || false, + * // KeyMarker: "STRING_VALUE", + * // VersionIdMarker: "STRING_VALUE", + * // NextKeyMarker: "STRING_VALUE", + * // NextVersionIdMarker: "STRING_VALUE", + * // Versions: [ // ObjectVersionList + * // { // ObjectVersion + * // ETag: "STRING_VALUE", + * // ChecksumAlgorithm: [ // ChecksumAlgorithmList + * // "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * // ], + * // ChecksumType: "COMPOSITE" || "FULL_OBJECT", + * // Size: Number("long"), + * // StorageClass: "STANDARD", + * // Key: "STRING_VALUE", + * // VersionId: "STRING_VALUE", + * // IsLatest: true || false, + * // LastModified: new Date("TIMESTAMP"), + * // Owner: { // Owner + * // DisplayName: "STRING_VALUE", + * // ID: "STRING_VALUE", + * // }, + * // RestoreStatus: { // RestoreStatus + * // IsRestoreInProgress: true || false, + * // RestoreExpiryDate: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // DeleteMarkers: [ // DeleteMarkers + * // { // DeleteMarkerEntry + * // Owner: { + * // DisplayName: "STRING_VALUE", + * // ID: "STRING_VALUE", + * // }, + * // Key: "STRING_VALUE", + * // VersionId: "STRING_VALUE", + * // IsLatest: true || false, + * // LastModified: new Date("TIMESTAMP"), + * // }, + * // ], + * // Name: "STRING_VALUE", + * // Prefix: "STRING_VALUE", + * // Delimiter: "STRING_VALUE", + * // MaxKeys: Number("int"), + * // CommonPrefixes: [ // CommonPrefixList + * // { // CommonPrefix + * // Prefix: "STRING_VALUE", + * // }, + * // ], + * // EncodingType: "url", + * // RequestCharged: "requester", + * // }; + * + * ``` + * + * @param ListObjectVersionsCommandInput - {@link ListObjectVersionsCommandInput} + * @returns {@link ListObjectVersionsCommandOutput} + * @see {@link ListObjectVersionsCommandInput} for command's `input` shape. + * @see {@link ListObjectVersionsCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @example To list object versions + * ```javascript + * // The following example returns versions of an object with specific key name prefix. + * const input = { + * Bucket: "examplebucket", + * Prefix: "HappyFace.jpg" + * }; + * const command = new ListObjectVersionsCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Versions: [ + * { + * ETag: `"6805f2cfc46c0f04559748bb039d69ae"`, + * IsLatest: true, + * Key: "HappyFace.jpg", + * LastModified: "2016-12-15T01:19:41.000Z", + * Owner: { + * DisplayName: "owner-display-name", + * ID: "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" + * }, + * Size: 3191, + * StorageClass: "STANDARD", + * VersionId: "null" + * }, + * { + * ETag: `"6805f2cfc46c0f04559748bb039d69ae"`, + * IsLatest: false, + * Key: "HappyFace.jpg", + * LastModified: "2016-12-13T00:58:26.000Z", + * Owner: { + * DisplayName: "owner-display-name", + * ID: "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" + * }, + * Size: 3191, + * StorageClass: "STANDARD", + * VersionId: "PHtexPGjH2y.zBgT8LmB7wwLI2mpbz.k" + * } + * ] + * } + * *\/ + * ``` + * + * @public + */ +export declare class ListObjectVersionsCommand extends ListObjectVersionsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListObjectVersionsRequest; + output: ListObjectVersionsOutput; + }; + sdk: { + input: ListObjectVersionsCommandInput; + output: ListObjectVersionsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/ListObjectsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListObjectsCommand.d.ts new file mode 100644 index 00000000..7cf87776 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListObjectsCommand.d.ts @@ -0,0 +1,197 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListObjectsOutput, ListObjectsRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListObjectsCommand}. + */ +export interface ListObjectsCommandInput extends ListObjectsRequest { +} +/** + * @public + * + * The output of {@link ListObjectsCommand}. + */ +export interface ListObjectsCommandOutput extends ListObjectsOutput, __MetadataBearer { +} +declare const ListObjectsCommand_base: { + new (input: ListObjectsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ListObjectsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

This operation is not supported for directory buckets.

+ *
+ *

Returns some or all (up to 1,000) of the objects in a bucket. You can use the request + * parameters as selection criteria to return a subset of the objects in a bucket. A 200 OK + * response can contain valid or invalid XML. Be sure to design your application to parse the + * contents of the response and handle it appropriately.

+ * + *

This action has been revised. We recommend that you use the newer version, ListObjectsV2, when developing applications. For backward compatibility, + * Amazon S3 continues to support ListObjects.

+ *
+ *

The following operations are related to ListObjects:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, ListObjectsCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, ListObjectsCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // ListObjectsRequest + * Bucket: "STRING_VALUE", // required + * Delimiter: "STRING_VALUE", + * EncodingType: "url", + * Marker: "STRING_VALUE", + * MaxKeys: Number("int"), + * Prefix: "STRING_VALUE", + * RequestPayer: "requester", + * ExpectedBucketOwner: "STRING_VALUE", + * OptionalObjectAttributes: [ // OptionalObjectAttributesList + * "RestoreStatus", + * ], + * }; + * const command = new ListObjectsCommand(input); + * const response = await client.send(command); + * // { // ListObjectsOutput + * // IsTruncated: true || false, + * // Marker: "STRING_VALUE", + * // NextMarker: "STRING_VALUE", + * // Contents: [ // ObjectList + * // { // Object + * // Key: "STRING_VALUE", + * // LastModified: new Date("TIMESTAMP"), + * // ETag: "STRING_VALUE", + * // ChecksumAlgorithm: [ // ChecksumAlgorithmList + * // "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * // ], + * // ChecksumType: "COMPOSITE" || "FULL_OBJECT", + * // Size: Number("long"), + * // StorageClass: "STANDARD" || "REDUCED_REDUNDANCY" || "GLACIER" || "STANDARD_IA" || "ONEZONE_IA" || "INTELLIGENT_TIERING" || "DEEP_ARCHIVE" || "OUTPOSTS" || "GLACIER_IR" || "SNOW" || "EXPRESS_ONEZONE", + * // Owner: { // Owner + * // DisplayName: "STRING_VALUE", + * // ID: "STRING_VALUE", + * // }, + * // RestoreStatus: { // RestoreStatus + * // IsRestoreInProgress: true || false, + * // RestoreExpiryDate: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // Name: "STRING_VALUE", + * // Prefix: "STRING_VALUE", + * // Delimiter: "STRING_VALUE", + * // MaxKeys: Number("int"), + * // CommonPrefixes: [ // CommonPrefixList + * // { // CommonPrefix + * // Prefix: "STRING_VALUE", + * // }, + * // ], + * // EncodingType: "url", + * // RequestCharged: "requester", + * // }; + * + * ``` + * + * @param ListObjectsCommandInput - {@link ListObjectsCommandInput} + * @returns {@link ListObjectsCommandOutput} + * @see {@link ListObjectsCommandInput} for command's `input` shape. + * @see {@link ListObjectsCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link NoSuchBucket} (client fault) + *

The specified bucket does not exist.

+ * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @example To list objects in a bucket + * ```javascript + * // The following example list two objects in a bucket. + * const input = { + * Bucket: "examplebucket", + * MaxKeys: 2 + * }; + * const command = new ListObjectsCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Contents: [ + * { + * ETag: `"70ee1738b6b21e2c8a43f3a5ab0eee71"`, + * Key: "example1.jpg", + * LastModified: "2014-11-21T19:40:05.000Z", + * Owner: { + * DisplayName: "myname", + * ID: "12345example25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" + * }, + * Size: 11, + * StorageClass: "STANDARD" + * }, + * { + * ETag: `"9c8af9a76df052144598c115ef33e511"`, + * Key: "example2.jpg", + * LastModified: "2013-11-15T01:10:49.000Z", + * Owner: { + * DisplayName: "myname", + * ID: "12345example25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" + * }, + * Size: 713193, + * StorageClass: "STANDARD" + * } + * ], + * NextMarker: "eyJNYXJrZXIiOiBudWxsLCAiYm90b190cnVuY2F0ZV9hbW91bnQiOiAyfQ==" + * } + * *\/ + * ``` + * + * @public + */ +export declare class ListObjectsCommand extends ListObjectsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListObjectsRequest; + output: ListObjectsOutput; + }; + sdk: { + input: ListObjectsCommandInput; + output: ListObjectsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/ListObjectsV2Command.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListObjectsV2Command.d.ts new file mode 100644 index 00000000..895195bb --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListObjectsV2Command.d.ts @@ -0,0 +1,262 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListObjectsV2Output, ListObjectsV2Request } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListObjectsV2Command}. + */ +export interface ListObjectsV2CommandInput extends ListObjectsV2Request { +} +/** + * @public + * + * The output of {@link ListObjectsV2Command}. + */ +export interface ListObjectsV2CommandOutput extends ListObjectsV2Output, __MetadataBearer { +} +declare const ListObjectsV2Command_base: { + new (input: ListObjectsV2CommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ListObjectsV2CommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns some or all (up to 1,000) of the objects in a bucket with each request. You can + * use the request parameters as selection criteria to return a subset of the objects in a + * bucket. A 200 OK response can contain valid or invalid XML. Make sure to + * design your application to parse the contents of the response and handle it appropriately. + * For more information about listing objects, see Listing object keys + * programmatically in the Amazon S3 User Guide. To get a list of + * your buckets, see ListBuckets.

+ * + *
+ *   • General purpose bucket - For general purpose buckets,
+ *     ListObjectsV2 doesn't return prefixes that are related only to
+ *     in-progress multipart uploads.
+ *   • Directory buckets - For directory buckets, the ListObjectsV2
+ *     response includes the prefixes that are related only to in-progress
+ *     multipart uploads.
+ *   • Directory buckets - For directory buckets, you must make requests for
+ *     this API operation to the Zonal endpoint. These endpoints support
+ *     virtual-hosted-style requests in the format
+ *     https://amzn-s3-demo-bucket.s3express-zone-id.region-code.amazonaws.com/key-name.
+ *     Path-style requests are not supported. For more information about
+ *     endpoints in Availability Zones, see Regional and Zonal endpoints for
+ *     directory buckets in Availability Zones in the Amazon S3 User Guide.
+ *     For more information about endpoints in Local Zones, see Concepts for
+ *     directory buckets in Local Zones in the Amazon S3 User Guide.
+ *
+ *
+ *
Permissions
+ *
+ *
+ *   • General purpose bucket permissions - To use this operation, you must
+ *     have READ access to the bucket. You must have permission to perform the
+ *     s3:ListBucket action. The bucket owner has this permission by default
+ *     and can grant this permission to others. For more information about
+ *     permissions, see Permissions Related to Bucket Subresource Operations
+ *     and Managing Access Permissions to Your Amazon S3 Resources in the
+ *     Amazon S3 User Guide.
+ *   • Directory bucket permissions - To grant access to this API operation on a
+ *     directory bucket, we recommend that you use the CreateSession API operation
+ *     for session-based authorization. Specifically, you grant the
+ *     s3express:CreateSession permission to the directory bucket in a bucket
+ *     policy or an IAM identity-based policy. Then, you make the CreateSession
+ *     API call on the bucket to obtain a session token. With the session token in
+ *     your request header, you can make API requests to this operation. After the
+ *     session token expires, you make another CreateSession API call to generate
+ *     a new session token for use. The Amazon Web Services CLI and SDKs create
+ *     the session and refresh the session token automatically to avoid service
+ *     interruptions when a session expires. For more information about
+ *     authorization, see CreateSession.
+ *
+ *
Sorting order of returned objects
+ *
+ *
+ *   • General purpose bucket - For general purpose buckets,
+ *     ListObjectsV2 returns objects in lexicographical order based on their
+ *     key names.
+ *   • Directory bucket - For directory buckets, ListObjectsV2 does not
+ *     return objects in lexicographical order.
+ *
+ *
HTTP Host header syntax
+ *
+ *

+ * Directory buckets - The HTTP Host header syntax is + * Bucket-name.s3express-zone-id.region-code.amazonaws.com.

+ *
+ *
+ * + *

This section describes the latest revision of this action. We recommend that you use + * this revised API operation for application development. For backward compatibility, Amazon S3 + * continues to support the prior version of this API operation, ListObjects.
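+ *
+ * A minimal sketch of the continuation-token loop this operation is built
+ * around (illustrative only; the bucket name and prefix are placeholders):
+ * ```javascript
+ * // List every key under a prefix, up to 1,000 keys per page.
+ * const keys = [];
+ * let token = undefined;
+ * do {
+ *   const page = await client.send(new ListObjectsV2Command({
+ *     Bucket: "amzn-s3-demo-bucket", // placeholder bucket name
+ *     Prefix: "photos/",             // placeholder prefix
+ *     ContinuationToken: token,
+ *   }));
+ *   keys.push(...(page.Contents ?? []).map((o) => o.Key));
+ *   token = page.IsTruncated ? page.NextContinuationToken : undefined;
+ * } while (token);
+ * ```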

+ *
+ *

The following operations are related to ListObjectsV2:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, ListObjectsV2Command } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, ListObjectsV2Command } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // ListObjectsV2Request + * Bucket: "STRING_VALUE", // required + * Delimiter: "STRING_VALUE", + * EncodingType: "url", + * MaxKeys: Number("int"), + * Prefix: "STRING_VALUE", + * ContinuationToken: "STRING_VALUE", + * FetchOwner: true || false, + * StartAfter: "STRING_VALUE", + * RequestPayer: "requester", + * ExpectedBucketOwner: "STRING_VALUE", + * OptionalObjectAttributes: [ // OptionalObjectAttributesList + * "RestoreStatus", + * ], + * }; + * const command = new ListObjectsV2Command(input); + * const response = await client.send(command); + * // { // ListObjectsV2Output + * // IsTruncated: true || false, + * // Contents: [ // ObjectList + * // { // Object + * // Key: "STRING_VALUE", + * // LastModified: new Date("TIMESTAMP"), + * // ETag: "STRING_VALUE", + * // ChecksumAlgorithm: [ // ChecksumAlgorithmList + * // "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * // ], + * // ChecksumType: "COMPOSITE" || "FULL_OBJECT", + * // Size: Number("long"), + * // StorageClass: "STANDARD" || "REDUCED_REDUNDANCY" || "GLACIER" || "STANDARD_IA" || "ONEZONE_IA" || "INTELLIGENT_TIERING" || "DEEP_ARCHIVE" || "OUTPOSTS" || "GLACIER_IR" || "SNOW" || "EXPRESS_ONEZONE", + * // Owner: { // Owner + * // DisplayName: "STRING_VALUE", + * // ID: "STRING_VALUE", + * // }, + * // RestoreStatus: { // RestoreStatus + * // IsRestoreInProgress: true || false, + * // RestoreExpiryDate: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // Name: "STRING_VALUE", + * // Prefix: "STRING_VALUE", + * // Delimiter: "STRING_VALUE", + * // MaxKeys: Number("int"), + * // CommonPrefixes: [ // CommonPrefixList + * // { // CommonPrefix + * // Prefix: "STRING_VALUE", + * // }, + * // ], + * // EncodingType: "url", + * // KeyCount: Number("int"), + * // ContinuationToken: "STRING_VALUE", + * // NextContinuationToken: "STRING_VALUE", + * // StartAfter: "STRING_VALUE", + * // RequestCharged: "requester", + * // }; + * + * ``` + * + * @param ListObjectsV2CommandInput - {@link ListObjectsV2CommandInput} + * @returns {@link ListObjectsV2CommandOutput} + * @see {@link ListObjectsV2CommandInput} for command's `input` shape. + * @see {@link ListObjectsV2CommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link NoSuchBucket} (client fault) + *

The specified bucket does not exist.

+ * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @example To get object list + * ```javascript + * // The following example retrieves object list. The request specifies max keys to limit response to include only 2 object keys. + * const input = { + * Bucket: "DOC-EXAMPLE-BUCKET", + * MaxKeys: 2 + * }; + * const command = new ListObjectsV2Command(input); + * const response = await client.send(command); + * /* response is + * { + * Contents: [ + * { + * ETag: `"70ee1738b6b21e2c8a43f3a5ab0eee71"`, + * Key: "happyface.jpg", + * LastModified: "2014-11-21T19:40:05.000Z", + * Size: 11, + * StorageClass: "STANDARD" + * }, + * { + * ETag: `"becf17f89c30367a9a44495d62ed521a-1"`, + * Key: "test.jpg", + * LastModified: "2014-05-02T04:51:50.000Z", + * Size: 4192256, + * StorageClass: "STANDARD" + * } + * ], + * IsTruncated: true, + * KeyCount: 2, + * MaxKeys: 2, + * Name: "DOC-EXAMPLE-BUCKET", + * NextContinuationToken: "1w41l63U0xa8q7smH50vCxyTQqdxo69O3EmK28Bi5PcROI4wI/EyIJg==", + * Prefix: "" + * } + * *\/ + * ``` + * + * @public + */ +export declare class ListObjectsV2Command extends ListObjectsV2Command_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListObjectsV2Request; + output: ListObjectsV2Output; + }; + sdk: { + input: ListObjectsV2CommandInput; + output: ListObjectsV2CommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/ListPartsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListPartsCommand.d.ts new file mode 100644 index 00000000..1c838442 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/ListPartsCommand.d.ts @@ -0,0 +1,240 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListPartsOutput, ListPartsRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListPartsCommand}. + */ +export interface ListPartsCommandInput extends ListPartsRequest { +} +/** + * @public + * + * The output of {@link ListPartsCommand}. + */ +export interface ListPartsCommandOutput extends ListPartsOutput, __MetadataBearer { +} +declare const ListPartsCommand_base: { + new (input: ListPartsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ListPartsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Lists the parts that have been uploaded for a specific multipart upload.

+ *

To use this operation, you must provide the upload ID in the request. You
+ * obtain this upload ID by sending the initiate multipart upload request through CreateMultipartUpload.

+ *

The ListParts request returns a maximum of 1,000 uploaded parts. The limit + * of 1,000 parts is also the default value. You can restrict the number of parts in a + * response by specifying the max-parts request parameter. If your multipart + * upload consists of more than 1,000 parts, the response returns an IsTruncated + * field with the value of true, and a NextPartNumberMarker element. + * To list remaining uploaded parts, in subsequent ListParts requests, include + * the part-number-marker query string parameter and set its value to the + * NextPartNumberMarker field value from the previous response.
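+ *
+ * A minimal sketch of that part-number-marker loop (illustrative only; the
+ * bucket name, key, and upload ID are placeholders):
+ * ```javascript
+ * // Page through all uploaded parts of one multipart upload.
+ * const parts = [];
+ * let marker = undefined;
+ * do {
+ *   const page = await client.send(new ListPartsCommand({
+ *     Bucket: "amzn-s3-demo-bucket", // placeholder bucket name
+ *     Key: "large-object",           // placeholder object key
+ *     UploadId: "exampleUploadId",   // placeholder upload ID
+ *     PartNumberMarker: marker,
+ *   }));
+ *   parts.push(...(page.Parts ?? []));
+ *   marker = page.IsTruncated ? page.NextPartNumberMarker : undefined;
+ * } while (marker);
+ * ```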

+ *

For more information on multipart uploads, see Uploading Objects Using Multipart + * Upload in the Amazon S3 User Guide.

+ * + *

+ * Directory buckets - + * For directory buckets, you must make requests for this API operation to the Zonal endpoint. These endpoints support virtual-hosted-style requests in the format https://amzn-s3-demo-bucket.s3express-zone-id.region-code.amazonaws.com/key-name + * . Path-style requests are not supported. For more information about endpoints in Availability Zones, see Regional and Zonal endpoints for directory buckets in Availability Zones in the + * Amazon S3 User Guide. For more information about endpoints in Local Zones, see Concepts for directory buckets in Local Zones in the + * Amazon S3 User Guide.

+ *
+ *
+ *
Permissions
+ *
+ *
+ *   • General purpose bucket permissions - For information about permissions
+ *     required to use the multipart upload API, see Multipart Upload and
+ *     Permissions in the Amazon S3 User Guide.
+ *     If the upload was created using server-side encryption with Key Management
+ *     Service (KMS) keys (SSE-KMS) or dual-layer server-side encryption with
+ *     Amazon Web Services KMS keys (DSSE-KMS), you must have permission to the
+ *     kms:Decrypt action for the ListParts request to succeed.
+ *   • Directory bucket permissions - To grant access to this API operation on a
+ *     directory bucket, we recommend that you use the CreateSession API operation
+ *     for session-based authorization. Specifically, you grant the
+ *     s3express:CreateSession permission to the directory bucket in a bucket
+ *     policy or an IAM identity-based policy. Then, you make the CreateSession
+ *     API call on the bucket to obtain a session token. With the session token in
+ *     your request header, you can make API requests to this operation. After the
+ *     session token expires, you make another CreateSession API call to generate
+ *     a new session token for use. The Amazon Web Services CLI and SDKs create
+ *     the session and refresh the session token automatically to avoid service
+ *     interruptions when a session expires. For more information about
+ *     authorization, see CreateSession.
+ *
+ *
HTTP Host header syntax
+ *
+ *

+ * Directory buckets - The HTTP Host header syntax is + * Bucket-name.s3express-zone-id.region-code.amazonaws.com.

+ *
+ *
+ *

The following operations are related to ListParts:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, ListPartsCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, ListPartsCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // ListPartsRequest + * Bucket: "STRING_VALUE", // required + * Key: "STRING_VALUE", // required + * MaxParts: Number("int"), + * PartNumberMarker: "STRING_VALUE", + * UploadId: "STRING_VALUE", // required + * RequestPayer: "requester", + * ExpectedBucketOwner: "STRING_VALUE", + * SSECustomerAlgorithm: "STRING_VALUE", + * SSECustomerKey: "STRING_VALUE", + * SSECustomerKeyMD5: "STRING_VALUE", + * }; + * const command = new ListPartsCommand(input); + * const response = await client.send(command); + * // { // ListPartsOutput + * // AbortDate: new Date("TIMESTAMP"), + * // AbortRuleId: "STRING_VALUE", + * // Bucket: "STRING_VALUE", + * // Key: "STRING_VALUE", + * // UploadId: "STRING_VALUE", + * // PartNumberMarker: "STRING_VALUE", + * // NextPartNumberMarker: "STRING_VALUE", + * // MaxParts: Number("int"), + * // IsTruncated: true || false, + * // Parts: [ // Parts + * // { // Part + * // PartNumber: Number("int"), + * // LastModified: new Date("TIMESTAMP"), + * // ETag: "STRING_VALUE", + * // Size: Number("long"), + * // ChecksumCRC32: "STRING_VALUE", + * // ChecksumCRC32C: "STRING_VALUE", + * // ChecksumCRC64NVME: "STRING_VALUE", + * // ChecksumSHA1: "STRING_VALUE", + * // ChecksumSHA256: "STRING_VALUE", + * // }, + * // ], + * // Initiator: { // Initiator + * // ID: "STRING_VALUE", + * // DisplayName: "STRING_VALUE", + * // }, + * // Owner: { // Owner + * // DisplayName: "STRING_VALUE", + * // ID: "STRING_VALUE", + * // }, + * // StorageClass: "STANDARD" || "REDUCED_REDUNDANCY" || "STANDARD_IA" || "ONEZONE_IA" || "INTELLIGENT_TIERING" || "GLACIER" || "DEEP_ARCHIVE" || "OUTPOSTS" || "GLACIER_IR" || "SNOW" || "EXPRESS_ONEZONE", + * // RequestCharged: "requester", + * // ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * // ChecksumType: "COMPOSITE" || "FULL_OBJECT", + * // }; + * + * ``` + * + * @param ListPartsCommandInput - {@link ListPartsCommandInput} + * @returns {@link ListPartsCommandOutput} + * @see {@link ListPartsCommandInput} for command's `input` shape. + * @see {@link ListPartsCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
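+ *
+ * @example
+ * A minimal pagination sketch (not from the official SDK docs): it walks every part of a
+ * multipart upload by feeding NextPartNumberMarker back in as PartNumberMarker. The bucket,
+ * key, and upload ID are placeholders.
+ * ```javascript
+ * import { S3Client, ListPartsCommand } from "@aws-sdk/client-s3";
+ *
+ * const client = new S3Client({});
+ * let marker; // PartNumberMarker for the next page, if any
+ * do {
+ *   const page = await client.send(new ListPartsCommand({
+ *     Bucket: "example-bucket",      // placeholder
+ *     Key: "example-key",            // placeholder
+ *     UploadId: "example-upload-id", // placeholder
+ *     PartNumberMarker: marker,
+ *   }));
+ *   for (const part of page.Parts ?? []) {
+ *     console.log(part.PartNumber, part.Size, part.ETag);
+ *   }
+ *   marker = page.IsTruncated ? page.NextPartNumberMarker : undefined;
+ * } while (marker);
+ * ```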

+ * + * + * @example To list parts of a multipart upload. + * ```javascript + * // The following example lists parts uploaded for a specific multipart upload. + * const input = { + * Bucket: "examplebucket", + * Key: "bigobject", + * UploadId: "example7YPBOJuoFiQ9cz4P3Pe6FIZwO4f7wN93uHsNBEw97pl5eNwzExg0LAT2dUN91cOmrEQHDsP3WA60CEg--" + * }; + * const command = new ListPartsCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Initiator: { + * DisplayName: "owner-display-name", + * ID: "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" + * }, + * Owner: { + * DisplayName: "owner-display-name", + * ID: "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" + * }, + * Parts: [ + * { + * ETag: `"d8c2eafd90c266e19ab9dcacc479f8af"`, + * LastModified: "2016-12-16T00:11:42.000Z", + * PartNumber: 1, + * Size: 26246026 + * }, + * { + * ETag: `"d8c2eafd90c266e19ab9dcacc479f8af"`, + * LastModified: "2016-12-16T00:15:01.000Z", + * PartNumber: 2, + * Size: 26246026 + * } + * ], + * StorageClass: "STANDARD" + * } + * *\/ + * ``` + * + * @public + */ +export declare class ListPartsCommand extends ListPartsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListPartsRequest; + output: ListPartsOutput; + }; + sdk: { + input: ListPartsCommandInput; + output: ListPartsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketAccelerateConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketAccelerateConfigurationCommand.d.ts new file mode 100644 index 00000000..f6c5e544 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketAccelerateConfigurationCommand.d.ts @@ -0,0 +1,116 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketAccelerateConfigurationRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutBucketAccelerateConfigurationCommand}. + */ +export interface PutBucketAccelerateConfigurationCommandInput extends PutBucketAccelerateConfigurationRequest { +} +/** + * @public + * + * The output of {@link PutBucketAccelerateConfigurationCommand}. + */ +export interface PutBucketAccelerateConfigurationCommandOutput extends __MetadataBearer { +} +declare const PutBucketAccelerateConfigurationCommand_base: { + new (input: PutBucketAccelerateConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutBucketAccelerateConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Sets the accelerate configuration of an existing bucket. Amazon S3 Transfer Acceleration is a bucket-level feature that enables you to perform faster data transfers to Amazon S3.
+ *
+ * To use this operation, you must have permission to perform the s3:PutAccelerateConfiguration action. The bucket owner has this permission by default. The bucket owner can grant this permission to others. For more information about permissions, see Permissions Related to Bucket Subresource Operations and Managing Access Permissions to Your Amazon S3 Resources.
+ *
+ * The Transfer Acceleration state of a bucket can be set to one of the following two values:
+ *
+ *   • Enabled – Enables accelerated data transfers to the bucket.
+ *
+ *   • Suspended – Disables accelerated data transfers to the bucket.
+ *
+ * The GetBucketAccelerateConfiguration action returns the transfer acceleration state of a bucket.
+ *
+ * After setting the Transfer Acceleration state of a bucket to Enabled, it might take up to thirty minutes before the data transfer rates to the bucket increase.
+ *
+ * The name of the bucket used for Transfer Acceleration must be DNS-compliant and must not contain periods (".").
+ *
+ * For more information about transfer acceleration, see Transfer Acceleration.
+ *
+ * The following operations are related to PutBucketAccelerateConfiguration:
+ *
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutBucketAccelerateConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutBucketAccelerateConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutBucketAccelerateConfigurationRequest + * Bucket: "STRING_VALUE", // required + * AccelerateConfiguration: { // AccelerateConfiguration + * Status: "Enabled" || "Suspended", + * }, + * ExpectedBucketOwner: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * }; + * const command = new PutBucketAccelerateConfigurationCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param PutBucketAccelerateConfigurationCommandInput - {@link PutBucketAccelerateConfigurationCommandInput} + * @returns {@link PutBucketAccelerateConfigurationCommandOutput} + * @see {@link PutBucketAccelerateConfigurationCommandInput} for command's `input` shape. + * @see {@link PutBucketAccelerateConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
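+ *
+ * @example
+ * A small companion sketch (the bucket name is a placeholder): enable Transfer
+ * Acceleration, then read the state back with GetBucketAccelerateConfiguration,
+ * since the PUT itself returns no body.
+ * ```javascript
+ * import {
+ *   S3Client,
+ *   PutBucketAccelerateConfigurationCommand,
+ *   GetBucketAccelerateConfigurationCommand,
+ * } from "@aws-sdk/client-s3";
+ *
+ * const client = new S3Client({});
+ * const Bucket = "example-bucket"; // placeholder; must be DNS-compliant, no periods
+ *
+ * await client.send(new PutBucketAccelerateConfigurationCommand({
+ *   Bucket,
+ *   AccelerateConfiguration: { Status: "Enabled" },
+ * }));
+ *
+ * const { Status } = await client.send(
+ *   new GetBucketAccelerateConfigurationCommand({ Bucket })
+ * );
+ * console.log(Status); // "Enabled" (rate increases may take up to thirty minutes)
+ * ```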

+ * + * + * @public + */ +export declare class PutBucketAccelerateConfigurationCommand extends PutBucketAccelerateConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutBucketAccelerateConfigurationRequest; + output: {}; + }; + sdk: { + input: PutBucketAccelerateConfigurationCommandInput; + output: PutBucketAccelerateConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketAclCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketAclCommand.d.ts new file mode 100644 index 00000000..54366dc6 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketAclCommand.d.ts @@ -0,0 +1,312 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketAclRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutBucketAclCommand}. + */ +export interface PutBucketAclCommandInput extends PutBucketAclRequest { +} +/** + * @public + * + * The output of {@link PutBucketAclCommand}. + */ +export interface PutBucketAclCommandOutput extends __MetadataBearer { +} +declare const PutBucketAclCommand_base: { + new (input: PutBucketAclCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutBucketAclCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Sets the permissions on an existing bucket using access control lists (ACL). For more information, see Using ACLs. To set the ACL of a bucket, you must have the WRITE_ACP permission.
+ *
+ * You can use one of the following two ways to set a bucket's permissions:
+ *
+ *   • Specify the ACL in the request body
+ *
+ *   • Specify permissions using request headers
+ *
+ * You cannot specify access permission using both the body and the request headers.
+ *
+ * Depending on your application needs, you may choose to set the ACL on a bucket using either the request body or the headers. For example, if you have an existing application that updates a bucket ACL using the request body, then you can continue to use that approach.
+ *
+ * If your bucket uses the bucket owner enforced setting for S3 Object Ownership, ACLs are disabled and no longer affect permissions. You must use policies to grant access to your bucket and the objects in it. Requests to set ACLs or update ACLs fail and return the AccessControlListNotSupported error code. Requests to read ACLs are still supported. For more information, see Controlling object ownership in the Amazon S3 User Guide.
+ *
+ * Permissions
+ *
+ * You can set access permissions by using one of the following methods:
+ *
+ *   • Specify a canned ACL with the x-amz-acl request header. Amazon S3 supports a set of predefined ACLs, known as canned ACLs. Each canned ACL has a predefined set of grantees and permissions. Specify the canned ACL name as the value of x-amz-acl. If you use this header, you cannot use other access control-specific headers in your request. For more information, see Canned ACL.
+ *
+ *   • Specify access permissions explicitly with the x-amz-grant-read, x-amz-grant-read-acp, x-amz-grant-write-acp, and x-amz-grant-full-control headers. When using these headers, you specify explicit access permissions and grantees (Amazon Web Services accounts or Amazon S3 groups) who will receive the permission. If you use these ACL-specific headers, you cannot use the x-amz-acl header to set a canned ACL. These parameters map to the set of permissions that Amazon S3 supports in an ACL. For more information, see Access Control List (ACL) Overview.
+ *
+ *     You specify each grantee as a type=value pair, where the type is one of the following:
+ *
+ *       • id – if the value specified is the canonical user ID of an Amazon Web Services account
+ *
+ *       • uri – if you are granting permissions to a predefined group
+ *
+ *       • emailAddress – if the value specified is the email address of an Amazon Web Services account
+ *
+ *         Using email addresses to specify a grantee is only supported in the following Amazon Web Services Regions:
+ *
+ *           • US East (N. Virginia)
+ *           • US West (N. California)
+ *           • US West (Oregon)
+ *           • Asia Pacific (Singapore)
+ *           • Asia Pacific (Sydney)
+ *           • Asia Pacific (Tokyo)
+ *           • Europe (Ireland)
+ *           • South America (São Paulo)
+ *
+ *         For a list of all the Amazon S3 supported Regions and endpoints, see Regions and Endpoints in the Amazon Web Services General Reference.
+ *
+ *     For example, the following x-amz-grant-write header grants create, overwrite, and delete objects permission to the LogDelivery group predefined by Amazon S3 and two Amazon Web Services accounts identified by their email addresses.
+ *
+ *     x-amz-grant-write: uri="http://acs.amazonaws.com/groups/s3/LogDelivery", id="111122223333", id="555566667777"
+ *
+ * You can use either a canned ACL or specify access permissions explicitly. You cannot do both.
+ *
+ * Grantee Values
+ *
+ * You can specify the person (grantee) to whom you're assigning access rights (using request elements) in the following ways:
+ *
+ *   • By the person's ID:
+ *
+ *     <>ID<><>GranteesEmail<>
+ *
+ *     DisplayName is optional and ignored in the request.
+ *
+ *   • By URI:
+ *
+ *     <>http://acs.amazonaws.com/groups/global/AuthenticatedUsers<>
+ *
+ *   • By Email address:
+ *
+ *     <>Grantees@email.com<>&
+ *
+ *     The grantee is resolved to the CanonicalUser and, in a response to a GET Object acl request, appears as the CanonicalUser.
+ *
+ *     Using email addresses to specify a grantee is only supported in the following Amazon Web Services Regions:
+ *
+ *       • US East (N. Virginia)
+ *       • US West (N. California)
+ *       • US West (Oregon)
+ *       • Asia Pacific (Singapore)
+ *       • Asia Pacific (Sydney)
+ *       • Asia Pacific (Tokyo)
+ *       • Europe (Ireland)
+ *       • South America (São Paulo)
+ *
+ *     For a list of all the Amazon S3 supported Regions and endpoints, see Regions and Endpoints in the Amazon Web Services General Reference.
+ *
+ * The following operations are related to PutBucketAcl:
+ *
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutBucketAclCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutBucketAclCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutBucketAclRequest + * ACL: "private" || "public-read" || "public-read-write" || "authenticated-read", + * AccessControlPolicy: { // AccessControlPolicy + * Grants: [ // Grants + * { // Grant + * Grantee: { // Grantee + * DisplayName: "STRING_VALUE", + * EmailAddress: "STRING_VALUE", + * ID: "STRING_VALUE", + * URI: "STRING_VALUE", + * Type: "CanonicalUser" || "AmazonCustomerByEmail" || "Group", // required + * }, + * Permission: "FULL_CONTROL" || "WRITE" || "WRITE_ACP" || "READ" || "READ_ACP", + * }, + * ], + * Owner: { // Owner + * DisplayName: "STRING_VALUE", + * ID: "STRING_VALUE", + * }, + * }, + * Bucket: "STRING_VALUE", // required + * ContentMD5: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * GrantFullControl: "STRING_VALUE", + * GrantRead: "STRING_VALUE", + * GrantReadACP: "STRING_VALUE", + * GrantWrite: "STRING_VALUE", + * GrantWriteACP: "STRING_VALUE", + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new PutBucketAclCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param PutBucketAclCommandInput - {@link PutBucketAclCommandInput} + * @returns {@link PutBucketAclCommandOutput} + * @see {@link PutBucketAclCommandInput} for command's `input` shape. + * @see {@link PutBucketAclCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
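+ *
+ * @example
+ * A minimal canned-ACL sketch (the bucket name is a placeholder). It takes the x-amz-acl
+ * path via the ACL parameter; per the notes above, don't combine it with the
+ * x-amz-grant-* headers or an AccessControlPolicy body.
+ * ```javascript
+ * import { S3Client, PutBucketAclCommand } from "@aws-sdk/client-s3";
+ *
+ * const client = new S3Client({});
+ * await client.send(new PutBucketAclCommand({
+ *   Bucket: "example-bucket", // placeholder
+ *   ACL: "private",           // canned ACL; excludes explicit grant parameters
+ * }));
+ * ```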

+ * + * + * @example Put bucket acl + * ```javascript + * // The following example replaces existing ACL on a bucket. The ACL grants the bucket owner (specified using the owner ID) and write permission to the LogDelivery group. Because this is a replace operation, you must specify all the grants in your request. To incrementally add or remove ACL grants, you might use the console. + * const input = { + * Bucket: "examplebucket", + * GrantFullControl: "id=examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484", + * GrantWrite: "uri=http://acs.amazonaws.com/groups/s3/LogDelivery" + * }; + * const command = new PutBucketAclCommand(input); + * const response = await client.send(command); + * /* response is + * { /* metadata only *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class PutBucketAclCommand extends PutBucketAclCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutBucketAclRequest; + output: {}; + }; + sdk: { + input: PutBucketAclCommandInput; + output: PutBucketAclCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketAnalyticsConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketAnalyticsConfigurationCommand.d.ts new file mode 100644 index 00000000..c8da4335 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketAnalyticsConfigurationCommand.d.ts @@ -0,0 +1,209 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketAnalyticsConfigurationRequest } from "../models/models_0"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutBucketAnalyticsConfigurationCommand}. + */ +export interface PutBucketAnalyticsConfigurationCommandInput extends PutBucketAnalyticsConfigurationRequest { +} +/** + * @public + * + * The output of {@link PutBucketAnalyticsConfigurationCommand}. + */ +export interface PutBucketAnalyticsConfigurationCommandOutput extends __MetadataBearer { +} +declare const PutBucketAnalyticsConfigurationCommand_base: { + new (input: PutBucketAnalyticsConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutBucketAnalyticsConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Sets an analytics configuration for the bucket (specified by the analytics configuration ID). You can have up to 1,000 analytics configurations per bucket.
+ *
+ * You can choose to have storage class analysis export analysis reports sent to a comma-separated values (CSV) flat file. See the DataExport request element. Reports are updated daily and are based on the object filters that you configure. When selecting data export, you specify a destination bucket and an optional destination prefix where the file is written. You can export the data to a destination bucket in a different account. However, the destination bucket must be in the same Region as the bucket that you are making the PUT analytics configuration to. For more information, see Amazon S3 Analytics – Storage Class Analysis.
+ *
+ * You must create a bucket policy on the destination bucket where the exported file is written to grant permissions to Amazon S3 to write objects to the bucket. For an example policy, see Granting Permissions for Amazon S3 Inventory and Storage Class Analysis.
+ *
+ * To use this operation, you must have permissions to perform the s3:PutAnalyticsConfiguration action. The bucket owner has this permission by default. The bucket owner can grant this permission to others. For more information about permissions, see Permissions Related to Bucket Subresource Operations and Managing Access Permissions to Your Amazon S3 Resources.
+ *
+ * PutBucketAnalyticsConfiguration has the following special errors:
+ *
+ *   • HTTP Error: HTTP 400 Bad Request
+ *     Code: InvalidArgument
+ *     Cause: Invalid argument.
+ *
+ *   • HTTP Error: HTTP 400 Bad Request
+ *     Code: TooManyConfigurations
+ *     Cause: You are attempting to create a new configuration but have already reached the 1,000-configuration limit.
+ *
+ *   • HTTP Error: HTTP 403 Forbidden
+ *     Code: AccessDenied
+ *     Cause: You are not the owner of the specified bucket, or you do not have the s3:PutAnalyticsConfiguration bucket permission to set the configuration on the bucket.
+ *
+ * The following operations are related to PutBucketAnalyticsConfiguration:
+ *
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutBucketAnalyticsConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutBucketAnalyticsConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutBucketAnalyticsConfigurationRequest + * Bucket: "STRING_VALUE", // required + * Id: "STRING_VALUE", // required + * AnalyticsConfiguration: { // AnalyticsConfiguration + * Id: "STRING_VALUE", // required + * Filter: { // AnalyticsFilter Union: only one key present + * Prefix: "STRING_VALUE", + * Tag: { // Tag + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * And: { // AnalyticsAndOperator + * Prefix: "STRING_VALUE", + * Tags: [ // TagSet + * { + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * ], + * }, + * }, + * StorageClassAnalysis: { // StorageClassAnalysis + * DataExport: { // StorageClassAnalysisDataExport + * OutputSchemaVersion: "V_1", // required + * Destination: { // AnalyticsExportDestination + * S3BucketDestination: { // AnalyticsS3BucketDestination + * Format: "CSV", // required + * BucketAccountId: "STRING_VALUE", + * Bucket: "STRING_VALUE", // required + * Prefix: "STRING_VALUE", + * }, + * }, + * }, + * }, + * }, + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new PutBucketAnalyticsConfigurationCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param PutBucketAnalyticsConfigurationCommandInput - {@link PutBucketAnalyticsConfigurationCommandInput} + * @returns {@link PutBucketAnalyticsConfigurationCommandOutput} + * @see {@link PutBucketAnalyticsConfigurationCommandInput} for command's `input` shape. + * @see {@link PutBucketAnalyticsConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
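+ *
+ * @example
+ * A filled-in sketch under stated assumptions: the source and destination bucket names,
+ * the configuration ID, and the prefixes are placeholders, and the destination bucket is
+ * in the same Region with a policy that lets Amazon S3 write to it.
+ * ```javascript
+ * import { S3Client, PutBucketAnalyticsConfigurationCommand } from "@aws-sdk/client-s3";
+ *
+ * const client = new S3Client({});
+ * await client.send(new PutBucketAnalyticsConfigurationCommand({
+ *   Bucket: "example-source-bucket",
+ *   Id: "reports-prefix-analysis",
+ *   AnalyticsConfiguration: {
+ *     Id: "reports-prefix-analysis",   // must match the Id above
+ *     Filter: { Prefix: "reports/" },  // analyze only this key prefix
+ *     StorageClassAnalysis: {
+ *       DataExport: {
+ *         OutputSchemaVersion: "V_1",
+ *         Destination: {
+ *           S3BucketDestination: {
+ *             Format: "CSV",
+ *             Bucket: "arn:aws:s3:::example-destination-bucket", // bucket ARN
+ *             Prefix: "analysis-exports/",
+ *           },
+ *         },
+ *       },
+ *     },
+ *   },
+ * }));
+ * ```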

+ * + * + * @public + */ +export declare class PutBucketAnalyticsConfigurationCommand extends PutBucketAnalyticsConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutBucketAnalyticsConfigurationRequest; + output: {}; + }; + sdk: { + input: PutBucketAnalyticsConfigurationCommandInput; + output: PutBucketAnalyticsConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketCorsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketCorsCommand.d.ts new file mode 100644 index 00000000..191a4b19 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketCorsCommand.d.ts @@ -0,0 +1,196 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketCorsRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutBucketCorsCommand}. + */ +export interface PutBucketCorsCommandInput extends PutBucketCorsRequest { +} +/** + * @public + * + * The output of {@link PutBucketCorsCommand}. + */ +export interface PutBucketCorsCommandOutput extends __MetadataBearer { +} +declare const PutBucketCorsCommand_base: { + new (input: PutBucketCorsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutBucketCorsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Sets the cors configuration for your bucket. If the configuration exists, Amazon S3 replaces it.
+ *
+ * To use this operation, you must be allowed to perform the s3:PutBucketCORS action. By default, the bucket owner has this permission and can grant it to others.
+ *
+ * You set this configuration on a bucket so that the bucket can service cross-origin requests. For example, you might want to enable a request whose origin is http://www.example.com to access your Amazon S3 bucket at my.example.bucket.com by using the browser's XMLHttpRequest capability.
+ *
+ * To enable cross-origin resource sharing (CORS) on a bucket, you add the cors subresource to the bucket. The cors subresource is an XML document in which you configure rules that identify origins and the HTTP methods that can be executed on your bucket. The document is limited to 64 KB in size.
+ *
+ * When Amazon S3 receives a cross-origin request (or a pre-flight OPTIONS request) against a bucket, it evaluates the cors configuration on the bucket and uses the first CORSRule rule that matches the incoming browser request to enable a cross-origin request. For a rule to match, the following conditions must be met (an illustrative sketch of this matching logic appears further below):
+ *
+ *   • The request's Origin header must match AllowedOrigin elements.
+ *
+ *   • The request method (for example, GET, PUT, HEAD, and so on) or the Access-Control-Request-Method header in case of a pre-flight OPTIONS request must be one of the AllowedMethod elements.
+ *
+ *   • Every header specified in the Access-Control-Request-Headers request header of a pre-flight request must match an AllowedHeader element.
+ *
+ * For more information about CORS, go to Enabling Cross-Origin Resource Sharing in the Amazon S3 User Guide.
+ *
+ * The following operations are related to PutBucketCors:
+ *
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutBucketCorsCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutBucketCorsCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutBucketCorsRequest + * Bucket: "STRING_VALUE", // required + * CORSConfiguration: { // CORSConfiguration + * CORSRules: [ // CORSRules // required + * { // CORSRule + * ID: "STRING_VALUE", + * AllowedHeaders: [ // AllowedHeaders + * "STRING_VALUE", + * ], + * AllowedMethods: [ // AllowedMethods // required + * "STRING_VALUE", + * ], + * AllowedOrigins: [ // AllowedOrigins // required + * "STRING_VALUE", + * ], + * ExposeHeaders: [ // ExposeHeaders + * "STRING_VALUE", + * ], + * MaxAgeSeconds: Number("int"), + * }, + * ], + * }, + * ContentMD5: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new PutBucketCorsCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param PutBucketCorsCommandInput - {@link PutBucketCorsCommandInput} + * @returns {@link PutBucketCorsCommandOutput} + * @see {@link PutBucketCorsCommandInput} for command's `input` shape. + * @see {@link PutBucketCorsCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
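+ *
+ * @example
+ * An illustrative sketch of the rule-matching conditions listed above; it approximates
+ * the documented semantics and is not Amazon S3's actual implementation (for instance,
+ * it only handles the bare "*" wildcard).
+ * ```javascript
+ * // First matching CORSRule wins: corsRules.find((r) => matchesCorsRule(r, req))
+ * function matchesCorsRule(rule, { origin, method, requestHeaders = [] }) {
+ *   const originOk = rule.AllowedOrigins.some((o) => o === "*" || o === origin);
+ *   const methodOk = rule.AllowedMethods.includes(method);
+ *   const headersOk = requestHeaders.every((h) =>
+ *     (rule.AllowedHeaders ?? []).some(
+ *       (a) => a === "*" || a.toLowerCase() === h.toLowerCase()
+ *     )
+ *   );
+ *   return originOk && methodOk && headersOk;
+ * }
+ * ```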

+ * + * + * @example To set cors configuration on a bucket. + * ```javascript + * // The following example enables PUT, POST, and DELETE requests from www.example.com, and enables GET requests from any domain. + * const input = { + * Bucket: "", + * CORSConfiguration: { + * CORSRules: [ + * { + * AllowedHeaders: [ + * "*" + * ], + * AllowedMethods: [ + * "PUT", + * "POST", + * "DELETE" + * ], + * AllowedOrigins: [ + * "http://www.example.com" + * ], + * ExposeHeaders: [ + * "x-amz-server-side-encryption" + * ], + * MaxAgeSeconds: 3000 + * }, + * { + * AllowedHeaders: [ + * "Authorization" + * ], + * AllowedMethods: [ + * "GET" + * ], + * AllowedOrigins: [ + * "*" + * ], + * MaxAgeSeconds: 3000 + * } + * ] + * }, + * ContentMD5: "" + * }; + * const command = new PutBucketCorsCommand(input); + * const response = await client.send(command); + * /* response is + * { /* metadata only *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class PutBucketCorsCommand extends PutBucketCorsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutBucketCorsRequest; + output: {}; + }; + sdk: { + input: PutBucketCorsCommandInput; + output: PutBucketCorsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketEncryptionCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketEncryptionCommand.d.ts new file mode 100644 index 00000000..5a9f2ad2 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketEncryptionCommand.d.ts @@ -0,0 +1,210 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketEncryptionRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutBucketEncryptionCommand}. + */ +export interface PutBucketEncryptionCommandInput extends PutBucketEncryptionRequest { +} +/** + * @public + * + * The output of {@link PutBucketEncryptionCommand}. + */ +export interface PutBucketEncryptionCommandOutput extends __MetadataBearer { +} +declare const PutBucketEncryptionCommand_base: { + new (input: PutBucketEncryptionCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutBucketEncryptionCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * This operation configures default encryption and Amazon S3 Bucket Keys for an existing bucket.
+ *
+ * Directory buckets - For directory buckets, you must make requests for this API operation to the Regional endpoint. These endpoints support path-style requests in the format https://s3express-control.region-code.amazonaws.com/bucket-name. Virtual-hosted-style requests aren't supported. For more information about endpoints in Availability Zones, see Regional and Zonal endpoints for directory buckets in Availability Zones in the Amazon S3 User Guide. For more information about endpoints in Local Zones, see Concepts for directory buckets in Local Zones in the Amazon S3 User Guide.
+ *
+ * By default, all buckets have a default encryption configuration that uses server-side encryption with Amazon S3 managed keys (SSE-S3).
+ *
+ *   • General purpose buckets
+ *
+ *       • You can optionally configure default encryption for a bucket by using server-side encryption with Key Management Service (KMS) keys (SSE-KMS) or dual-layer server-side encryption with Amazon Web Services KMS keys (DSSE-KMS). If you specify default encryption by using SSE-KMS, you can also configure Amazon S3 Bucket Keys. For information about the bucket default encryption feature, see Amazon S3 Bucket Default Encryption in the Amazon S3 User Guide.
+ *
+ *       • If you use PutBucketEncryption to set your default bucket encryption to SSE-KMS, you should verify that your KMS key ID is correct. Amazon S3 doesn't validate the KMS key ID provided in PutBucketEncryption requests.
+ *
+ *   • Directory buckets - You can optionally configure default encryption for a bucket by using server-side encryption with Key Management Service (KMS) keys (SSE-KMS).
+ *
+ *       • We recommend that the bucket's default encryption uses the desired encryption configuration and that you don't override the bucket default encryption in your CreateSession requests or PUT object requests. Then, new objects are automatically encrypted with the desired encryption settings. For more information about the encryption overriding behaviors in directory buckets, see Specifying server-side encryption with KMS for new object uploads.
+ *
+ *       • Your SSE-KMS configuration can only support 1 customer managed key per directory bucket's lifetime. The Amazon Web Services managed key (aws/s3) isn't supported.
+ *
+ *       • S3 Bucket Keys are always enabled for GET and PUT operations in a directory bucket and can't be disabled. S3 Bucket Keys aren't supported when you copy SSE-KMS encrypted objects from general purpose buckets to directory buckets, from directory buckets to general purpose buckets, or between directory buckets, through CopyObject, UploadPartCopy, the Copy operation in Batch Operations, or the import jobs. In this case, Amazon S3 makes a call to KMS every time a copy request is made for a KMS-encrypted object.
+ *
+ *       • When you specify a KMS customer managed key for encryption in your directory bucket, only use the key ID or key ARN. The key alias format of the KMS key isn't supported.
+ *
+ *       • For directory buckets, if you use PutBucketEncryption to set your default bucket encryption to SSE-KMS, Amazon S3 validates the KMS key ID provided in PutBucketEncryption requests.
+ *
+ * If you're specifying a customer managed KMS key, we recommend using a fully qualified KMS key ARN. If you use a KMS key alias instead, then KMS resolves the key within the requester's account. This behavior can result in data that's encrypted with a KMS key that belongs to the requester, and not the bucket owner.
+ *
+ * Also, this action requires Amazon Web Services Signature Version 4. For more information, see Authenticating Requests (Amazon Web Services Signature Version 4).
+ *
+ * Permissions
+ *
+ *   • General purpose bucket permissions - The s3:PutEncryptionConfiguration permission is required in a policy. The bucket owner has this permission by default. The bucket owner can grant this permission to others. For more information about permissions, see Permissions Related to Bucket Operations and Managing Access Permissions to Your Amazon S3 Resources in the Amazon S3 User Guide.
+ *
+ *   • Directory bucket permissions - To grant access to this API operation, you must have the s3express:PutEncryptionConfiguration permission in an IAM identity-based policy instead of a bucket policy. Cross-account access to this API operation isn't supported. This operation can only be performed by the Amazon Web Services account that owns the resource. For more information about directory bucket policies and permissions, see Amazon Web Services Identity and Access Management (IAM) for S3 Express One Zone in the Amazon S3 User Guide.
+ *
+ *     To set a directory bucket default encryption with SSE-KMS, you must also have the kms:GenerateDataKey and the kms:Decrypt permissions in IAM identity-based policies and KMS key policies for the target KMS key.
+ *
+ * HTTP Host header syntax
+ *
+ * Directory buckets - The HTTP Host header syntax is s3express-control.region-code.amazonaws.com.
+ *
+ * The following operations are related to PutBucketEncryption:
+ *
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutBucketEncryptionCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutBucketEncryptionCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutBucketEncryptionRequest + * Bucket: "STRING_VALUE", // required + * ContentMD5: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * ServerSideEncryptionConfiguration: { // ServerSideEncryptionConfiguration + * Rules: [ // ServerSideEncryptionRules // required + * { // ServerSideEncryptionRule + * ApplyServerSideEncryptionByDefault: { // ServerSideEncryptionByDefault + * SSEAlgorithm: "AES256" || "aws:kms" || "aws:kms:dsse", // required + * KMSMasterKeyID: "STRING_VALUE", + * }, + * BucketKeyEnabled: true || false, + * }, + * ], + * }, + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new PutBucketEncryptionCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param PutBucketEncryptionCommandInput - {@link PutBucketEncryptionCommandInput} + * @returns {@link PutBucketEncryptionCommandOutput} + * @see {@link PutBucketEncryptionCommandInput} for command's `input` shape. + * @see {@link PutBucketEncryptionCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
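+ *
+ * @example
+ * A minimal SSE-KMS sketch (the bucket name and KMS key ARN are placeholders). It follows
+ * the recommendation above to pass a fully qualified key ARN rather than an alias, and
+ * enables S3 Bucket Keys to reduce KMS request costs.
+ * ```javascript
+ * import { S3Client, PutBucketEncryptionCommand } from "@aws-sdk/client-s3";
+ *
+ * const client = new S3Client({});
+ * await client.send(new PutBucketEncryptionCommand({
+ *   Bucket: "example-bucket", // placeholder
+ *   ServerSideEncryptionConfiguration: {
+ *     Rules: [
+ *       {
+ *         ApplyServerSideEncryptionByDefault: {
+ *           SSEAlgorithm: "aws:kms",
+ *           KMSMasterKeyID:
+ *             "arn:aws:kms:us-east-1:111122223333:key/EXAMPLE-KEY-ID", // placeholder ARN
+ *         },
+ *         BucketKeyEnabled: true,
+ *       },
+ *     ],
+ *   },
+ * }));
+ * ```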

+ * + * + * @public + */ +export declare class PutBucketEncryptionCommand extends PutBucketEncryptionCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutBucketEncryptionRequest; + output: {}; + }; + sdk: { + input: PutBucketEncryptionCommandInput; + output: PutBucketEncryptionCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketIntelligentTieringConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketIntelligentTieringConfigurationCommand.d.ts new file mode 100644 index 00000000..2a1a118a --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketIntelligentTieringConfigurationCommand.d.ts @@ -0,0 +1,154 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketIntelligentTieringConfigurationRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutBucketIntelligentTieringConfigurationCommand}. + */ +export interface PutBucketIntelligentTieringConfigurationCommandInput extends PutBucketIntelligentTieringConfigurationRequest { +} +/** + * @public + * + * The output of {@link PutBucketIntelligentTieringConfigurationCommand}. + */ +export interface PutBucketIntelligentTieringConfigurationCommandOutput extends __MetadataBearer { +} +declare const PutBucketIntelligentTieringConfigurationCommand_base: { + new (input: PutBucketIntelligentTieringConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutBucketIntelligentTieringConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * Puts an S3 Intelligent-Tiering configuration to the specified bucket. You can have up to 1,000 S3 Intelligent-Tiering configurations per bucket.
+ *
+ * The S3 Intelligent-Tiering storage class is designed to optimize storage costs by automatically moving data to the most cost-effective storage access tier, without performance impact or operational overhead. S3 Intelligent-Tiering delivers automatic cost savings in three low latency and high throughput access tiers. To get the lowest storage cost on data that can be accessed in minutes to hours, you can choose to activate additional archiving capabilities.
+ *
+ * The S3 Intelligent-Tiering storage class is the ideal storage class for data with unknown, changing, or unpredictable access patterns, independent of object size or retention period. If the size of an object is less than 128 KB, it is not monitored and not eligible for auto-tiering. Smaller objects can be stored, but they are always charged at the Frequent Access tier rates in the S3 Intelligent-Tiering storage class.
+ *
+ * For more information, see Storage class for automatically optimizing frequently and infrequently accessed objects.
+ *
+ * Operations related to PutBucketIntelligentTieringConfiguration include:
+ *
+ * You only need S3 Intelligent-Tiering enabled on a bucket if you want to automatically move objects stored in the S3 Intelligent-Tiering storage class to the Archive Access or Deep Archive Access tier.
+ *
+ * PutBucketIntelligentTieringConfiguration has the following special errors:
+ *
+ *   • HTTP 400 Bad Request Error
+ *     Code: InvalidArgument
+ *     Cause: Invalid Argument
+ *
+ *   • HTTP 400 Bad Request Error
+ *     Code: TooManyConfigurations
+ *     Cause: You are attempting to create a new configuration but have already reached the 1,000-configuration limit.
+ *
+ *   • HTTP 403 Forbidden Error
+ *     Cause: You are not the owner of the specified bucket, or you do not have the s3:PutIntelligentTieringConfiguration bucket permission to set the configuration on the bucket.
+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutBucketIntelligentTieringConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutBucketIntelligentTieringConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutBucketIntelligentTieringConfigurationRequest + * Bucket: "STRING_VALUE", // required + * Id: "STRING_VALUE", // required + * IntelligentTieringConfiguration: { // IntelligentTieringConfiguration + * Id: "STRING_VALUE", // required + * Filter: { // IntelligentTieringFilter + * Prefix: "STRING_VALUE", + * Tag: { // Tag + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * And: { // IntelligentTieringAndOperator + * Prefix: "STRING_VALUE", + * Tags: [ // TagSet + * { + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * ], + * }, + * }, + * Status: "Enabled" || "Disabled", // required + * Tierings: [ // TieringList // required + * { // Tiering + * Days: Number("int"), // required + * AccessTier: "ARCHIVE_ACCESS" || "DEEP_ARCHIVE_ACCESS", // required + * }, + * ], + * }, + * }; + * const command = new PutBucketIntelligentTieringConfigurationCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param PutBucketIntelligentTieringConfigurationCommandInput - {@link PutBucketIntelligentTieringConfigurationCommandInput} + * @returns {@link PutBucketIntelligentTieringConfigurationCommandOutput} + * @see {@link PutBucketIntelligentTieringConfigurationCommandInput} for command's `input` shape. + * @see {@link PutBucketIntelligentTieringConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
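+ *
+ * @example
+ * A filled-in sketch (the bucket name, configuration ID, and prefix are placeholders):
+ * archive objects under the given prefix to the Archive Access tier after 90 consecutive
+ * days without access, and to Deep Archive Access after 180 days.
+ * ```javascript
+ * import { S3Client, PutBucketIntelligentTieringConfigurationCommand } from "@aws-sdk/client-s3";
+ *
+ * const client = new S3Client({});
+ * await client.send(new PutBucketIntelligentTieringConfigurationCommand({
+ *   Bucket: "example-bucket",
+ *   Id: "archive-logs",
+ *   IntelligentTieringConfiguration: {
+ *     Id: "archive-logs",          // must match the Id above
+ *     Filter: { Prefix: "logs/" }, // only objects under this prefix
+ *     Status: "Enabled",
+ *     Tierings: [
+ *       { Days: 90, AccessTier: "ARCHIVE_ACCESS" },
+ *       { Days: 180, AccessTier: "DEEP_ARCHIVE_ACCESS" },
+ *     ],
+ *   },
+ * }));
+ * ```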

+ * + * + * @public + */ +export declare class PutBucketIntelligentTieringConfigurationCommand extends PutBucketIntelligentTieringConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutBucketIntelligentTieringConfigurationRequest; + output: {}; + }; + sdk: { + input: PutBucketIntelligentTieringConfigurationCommandInput; + output: PutBucketIntelligentTieringConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketInventoryConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketInventoryConfigurationCommand.d.ts new file mode 100644 index 00000000..cbba6580 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketInventoryConfigurationCommand.d.ts @@ -0,0 +1,186 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketInventoryConfigurationRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutBucketInventoryConfigurationCommand}. + */ +export interface PutBucketInventoryConfigurationCommandInput extends PutBucketInventoryConfigurationRequest { +} +/** + * @public + * + * The output of {@link PutBucketInventoryConfigurationCommand}. + */ +export interface PutBucketInventoryConfigurationCommandOutput extends __MetadataBearer { +} +declare const PutBucketInventoryConfigurationCommand_base: { + new (input: PutBucketInventoryConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutBucketInventoryConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * This operation is not supported for directory buckets.
+ *
+ * This implementation of the PUT action adds an inventory configuration (identified by the inventory ID) to the bucket. You can have up to 1,000 inventory configurations per bucket.
+ *
+ * Amazon S3 inventory generates inventories of the objects in the bucket on a daily or weekly basis, and the results are published to a flat file. The bucket that is inventoried is called the source bucket, and the bucket where the inventory flat file is stored is called the destination bucket. The destination bucket must be in the same Amazon Web Services Region as the source bucket.
+ *
+ * When you configure an inventory for a source bucket, you specify the destination bucket where you want the inventory to be stored, and whether to generate the inventory daily or weekly. You can also configure what object metadata to include and whether to inventory all object versions or only current versions. For more information, see Amazon S3 Inventory in the Amazon S3 User Guide.
+ *
+ * You must create a bucket policy on the destination bucket to grant permissions to Amazon S3 to write objects to the bucket in the defined location. For an example policy, see Granting Permissions for Amazon S3 Inventory and Storage Class Analysis.
+ *
+ * Permissions
+ *
+ * To use this operation, you must have permission to perform the s3:PutInventoryConfiguration action. The bucket owner has this permission by default and can grant this permission to others.
+ *
+ * The s3:PutInventoryConfiguration permission allows a user to create an S3 Inventory report that includes all object metadata fields available and to specify the destination bucket to store the inventory. A user with read access to objects in the destination bucket can also access all object metadata fields that are available in the inventory report.
+ *
+ * To restrict access to an inventory report, see Restricting access to an Amazon S3 Inventory report in the Amazon S3 User Guide. For more information about the metadata fields available in S3 Inventory, see Amazon S3 Inventory lists in the Amazon S3 User Guide. For more information about permissions, see Permissions related to bucket subresource operations and Identity and access management in Amazon S3 in the Amazon S3 User Guide.
+ *
+ * PutBucketInventoryConfiguration has the following special errors:
+ *
+ *   • HTTP 400 Bad Request Error
+ *     Code: InvalidArgument
+ *     Cause: Invalid Argument
+ *
+ *   • HTTP 400 Bad Request Error
+ *     Code: TooManyConfigurations
+ *     Cause: You are attempting to create a new configuration but have already reached the 1,000-configuration limit.
+ *
+ *   • HTTP 403 Forbidden Error
+ *     Cause: You are not the owner of the specified bucket, or you do not have the s3:PutInventoryConfiguration bucket permission to set the configuration on the bucket.
+ *
+ * The following operations are related to PutBucketInventoryConfiguration:
+ *
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutBucketInventoryConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutBucketInventoryConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutBucketInventoryConfigurationRequest + * Bucket: "STRING_VALUE", // required + * Id: "STRING_VALUE", // required + * InventoryConfiguration: { // InventoryConfiguration + * Destination: { // InventoryDestination + * S3BucketDestination: { // InventoryS3BucketDestination + * AccountId: "STRING_VALUE", + * Bucket: "STRING_VALUE", // required + * Format: "CSV" || "ORC" || "Parquet", // required + * Prefix: "STRING_VALUE", + * Encryption: { // InventoryEncryption + * SSES3: {}, + * SSEKMS: { // SSEKMS + * KeyId: "STRING_VALUE", // required + * }, + * }, + * }, + * }, + * IsEnabled: true || false, // required + * Filter: { // InventoryFilter + * Prefix: "STRING_VALUE", // required + * }, + * Id: "STRING_VALUE", // required + * IncludedObjectVersions: "All" || "Current", // required + * OptionalFields: [ // InventoryOptionalFields + * "Size" || "LastModifiedDate" || "StorageClass" || "ETag" || "IsMultipartUploaded" || "ReplicationStatus" || "EncryptionStatus" || "ObjectLockRetainUntilDate" || "ObjectLockMode" || "ObjectLockLegalHoldStatus" || "IntelligentTieringAccessTier" || "BucketKeyStatus" || "ChecksumAlgorithm" || "ObjectAccessControlList" || "ObjectOwner", + * ], + * Schedule: { // InventorySchedule + * Frequency: "Daily" || "Weekly", // required + * }, + * }, + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new PutBucketInventoryConfigurationCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param PutBucketInventoryConfigurationCommandInput - {@link PutBucketInventoryConfigurationCommandInput} + * @returns {@link PutBucketInventoryConfigurationCommandOutput} + * @see {@link PutBucketInventoryConfigurationCommandInput} for command's `input` shape. + * @see {@link PutBucketInventoryConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
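+ *
+ * @example
+ * A filled-in sketch under stated assumptions: the bucket names, inventory ID, and prefix
+ * are placeholders, and the destination bucket already has a policy allowing Amazon S3 to
+ * write the inventory files.
+ * ```javascript
+ * import { S3Client, PutBucketInventoryConfigurationCommand } from "@aws-sdk/client-s3";
+ *
+ * const client = new S3Client({});
+ * await client.send(new PutBucketInventoryConfigurationCommand({
+ *   Bucket: "example-source-bucket",
+ *   Id: "weekly-current-objects",
+ *   InventoryConfiguration: {
+ *     Id: "weekly-current-objects",      // must match the Id above
+ *     IsEnabled: true,
+ *     IncludedObjectVersions: "Current", // or "All" for every version
+ *     Schedule: { Frequency: "Weekly" },
+ *     OptionalFields: ["Size", "LastModifiedDate", "StorageClass"],
+ *     Destination: {
+ *       S3BucketDestination: {
+ *         Bucket: "arn:aws:s3:::example-destination-bucket", // bucket ARN, same Region
+ *         Format: "CSV",
+ *         Prefix: "inventory/",
+ *       },
+ *     },
+ *   },
+ * }));
+ * ```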

+ * + * + * @public + */ +export declare class PutBucketInventoryConfigurationCommand extends PutBucketInventoryConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutBucketInventoryConfigurationRequest; + output: {}; + }; + sdk: { + input: PutBucketInventoryConfigurationCommandInput; + output: PutBucketInventoryConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketLifecycleConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketLifecycleConfigurationCommand.d.ts new file mode 100644 index 00000000..aab04b85 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketLifecycleConfigurationCommand.d.ts @@ -0,0 +1,296 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketLifecycleConfigurationOutput, PutBucketLifecycleConfigurationRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutBucketLifecycleConfigurationCommand}. + */ +export interface PutBucketLifecycleConfigurationCommandInput extends PutBucketLifecycleConfigurationRequest { +} +/** + * @public + * + * The output of {@link PutBucketLifecycleConfigurationCommand}. + */ +export interface PutBucketLifecycleConfigurationCommandOutput extends PutBucketLifecycleConfigurationOutput, __MetadataBearer { +} +declare const PutBucketLifecycleConfigurationCommand_base: { + new (input: PutBucketLifecycleConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutBucketLifecycleConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Creates a new lifecycle configuration for the bucket or replaces an existing lifecycle + * configuration. Keep in mind that this will overwrite an existing lifecycle configuration, + * so if you want to retain any configuration details, they must be included in the new + * lifecycle configuration. For information about lifecycle configuration, see Managing + * your storage lifecycle.

+ * + *

Bucket lifecycle configuration now supports specifying a lifecycle rule using an object key name prefix, one or more object tags, object size, or any combination of these. Accordingly, this section describes the latest API. The previous version of the API supported filtering based only on an object key name prefix, which is supported for backward compatibility. + * For the related API description, see PutBucketLifecycle.

+ *
+ *
+ *
Rules
+ *
Permissions
+ *
HTTP Host header syntax
+ *
+ *

You specify the lifecycle configuration in your request body. The lifecycle + * configuration is specified as XML consisting of one or more rules. An Amazon S3 + * Lifecycle configuration can have up to 1,000 rules. This limit is not + * adjustable.

+ *

Bucket lifecycle configuration supports specifying a lifecycle rule using an + * object key name prefix, one or more object tags, object size, or any combination + * of these. Accordingly, this section describes the latest API. The previous version + * of the API supported filtering based only on an object key name prefix, which is + * supported for backward compatibility for general purpose buckets. For the related + * API description, see PutBucketLifecycle.

+ * + *

Lifecyle configurations for directory buckets only support expiring objects and + * cancelling multipart uploads. Expiring of versioned objects,transitions and tag + * filters are not supported.

+ *
+ *

A lifecycle rule consists of the following:

  • A filter identifying a subset of objects to which the rule applies. The filter can be based on a key name prefix, object tags, object size, or any combination of these.
  • A status indicating whether the rule is in effect.
  • One or more lifecycle transition and expiration actions that you want Amazon S3 to perform on the objects identified by the filter. If the state of your bucket is versioning-enabled or versioning-suspended, you can have many versions of the same object (one current version and zero or more noncurrent versions). Amazon S3 provides predefined actions that you can specify for current and noncurrent object versions.

For more information, see Object Lifecycle + * Management and Lifecycle Configuration + * Elements.

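As a concrete illustration of the rule elements above, here is a minimal TypeScript sketch that combines a key name prefix with an object-size bound in one filter; the bucket name, rule ID, prefix, and thresholds are placeholder values, not part of the generated reference:

```typescript
import { S3Client, PutBucketLifecycleConfigurationCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});

// "amzn-s3-demo-bucket" and the rule values below are placeholders.
await client.send(
  new PutBucketLifecycleConfigurationCommand({
    Bucket: "amzn-s3-demo-bucket",
    LifecycleConfiguration: {
      Rules: [
        {
          ID: "expire-large-logs",
          // Filter: key prefix AND a minimum object size, combined via And.
          Filter: {
            And: {
              Prefix: "logs/",
              ObjectSizeGreaterThan: 131072, // bytes
            },
          },
          // Status: whether the rule is in effect.
          Status: "Enabled",
          // Action: expire matching objects after 30 days.
          Expiration: { Days: 30 },
        },
      ],
    },
  })
);
```

Because a PUT replaces the whole lifecycle configuration, any existing rules you want to keep must be resubmitted alongside this one.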
Permissions

  • General purpose bucket permissions - By default, all Amazon S3 resources are private, including buckets, objects, and related subresources (for example, lifecycle configuration and website configuration). Only the resource owner (that is, the Amazon Web Services account that created it) can access the resource. The resource owner can optionally grant access permissions to others by writing an access policy. For this operation, a user must have the s3:PutLifecycleConfiguration permission.

    You can also explicitly deny permissions. An explicit deny also supersedes any other permissions. If you want to block users or accounts from removing or deleting objects from your bucket, you must deny them permissions for the following actions:
      • s3:DeleteObject
      • s3:DeleteObjectVersion
      • s3:PutLifecycleConfiguration

  • Directory bucket permissions - You must have the s3express:PutLifecycleConfiguration permission in an IAM identity-based policy to use this operation. Cross-account access to this API operation isn't supported. The resource owner can optionally grant access permissions to others by creating a role or user for them as long as they are within the same account as the owner and resource.

    For more information about directory bucket policies and permissions, see Authorizing Regional endpoint APIs with IAM in the Amazon S3 User Guide.

    Directory buckets - For directory buckets, you must make requests for this API operation to the Regional endpoint. These endpoints support path-style requests in the format https://s3express-control.region-code.amazonaws.com/bucket-name. Virtual-hosted-style requests aren't supported. For more information about endpoints in Availability Zones, see Regional and Zonal endpoints for directory buckets in Availability Zones in the Amazon S3 User Guide. For more information about endpoints in Local Zones, see Concepts for directory buckets in Local Zones in the Amazon S3 User Guide.
HTTP Host header syntax

+ * Directory buckets - The HTTP Host + * header syntax is + * s3express-control.region.amazonaws.com.

+ *

The following operations are related to + * PutBucketLifecycleConfiguration:

+ * + *
+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutBucketLifecycleConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutBucketLifecycleConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutBucketLifecycleConfigurationRequest + * Bucket: "STRING_VALUE", // required + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * LifecycleConfiguration: { // BucketLifecycleConfiguration + * Rules: [ // LifecycleRules // required + * { // LifecycleRule + * Expiration: { // LifecycleExpiration + * Date: new Date("TIMESTAMP"), + * Days: Number("int"), + * ExpiredObjectDeleteMarker: true || false, + * }, + * ID: "STRING_VALUE", + * Prefix: "STRING_VALUE", + * Filter: { // LifecycleRuleFilter + * Prefix: "STRING_VALUE", + * Tag: { // Tag + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * ObjectSizeGreaterThan: Number("long"), + * ObjectSizeLessThan: Number("long"), + * And: { // LifecycleRuleAndOperator + * Prefix: "STRING_VALUE", + * Tags: [ // TagSet + * { + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * ], + * ObjectSizeGreaterThan: Number("long"), + * ObjectSizeLessThan: Number("long"), + * }, + * }, + * Status: "Enabled" || "Disabled", // required + * Transitions: [ // TransitionList + * { // Transition + * Date: new Date("TIMESTAMP"), + * Days: Number("int"), + * StorageClass: "GLACIER" || "STANDARD_IA" || "ONEZONE_IA" || "INTELLIGENT_TIERING" || "DEEP_ARCHIVE" || "GLACIER_IR", + * }, + * ], + * NoncurrentVersionTransitions: [ // NoncurrentVersionTransitionList + * { // NoncurrentVersionTransition + * NoncurrentDays: Number("int"), + * StorageClass: "GLACIER" || "STANDARD_IA" || "ONEZONE_IA" || "INTELLIGENT_TIERING" || "DEEP_ARCHIVE" || "GLACIER_IR", + * NewerNoncurrentVersions: Number("int"), + * }, + * ], + * NoncurrentVersionExpiration: { // NoncurrentVersionExpiration + * NoncurrentDays: Number("int"), + * NewerNoncurrentVersions: Number("int"), + * }, + * AbortIncompleteMultipartUpload: { // AbortIncompleteMultipartUpload + * DaysAfterInitiation: Number("int"), + * }, + * }, + * ], + * }, + * ExpectedBucketOwner: "STRING_VALUE", + * TransitionDefaultMinimumObjectSize: "varies_by_storage_class" || "all_storage_classes_128K", + * }; + * const command = new PutBucketLifecycleConfigurationCommand(input); + * const response = await client.send(command); + * // { // PutBucketLifecycleConfigurationOutput + * // TransitionDefaultMinimumObjectSize: "varies_by_storage_class" || "all_storage_classes_128K", + * // }; + * + * ``` + * + * @param PutBucketLifecycleConfigurationCommandInput - {@link PutBucketLifecycleConfigurationCommandInput} + * @returns {@link PutBucketLifecycleConfigurationCommandOutput} + * @see {@link PutBucketLifecycleConfigurationCommandInput} for command's `input` shape. + * @see {@link PutBucketLifecycleConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @example Put bucket lifecycle + * ```javascript + * // The following example replaces existing lifecycle configuration, if any, on the specified bucket. + * const input = { + * Bucket: "examplebucket", + * LifecycleConfiguration: { + * Rules: [ + * { + * Expiration: { + * Days: 3650 + * }, + * Filter: { + * Prefix: "documents/" + * }, + * ID: "TestOnly", + * Status: "Enabled", + * Transitions: [ + * { + * Days: 365, + * StorageClass: "GLACIER" + * } + * ] + * } + * ] + * } + * }; + * const command = new PutBucketLifecycleConfigurationCommand(input); + * const response = await client.send(command); + * /* response is + * { /* metadata only *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class PutBucketLifecycleConfigurationCommand extends PutBucketLifecycleConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutBucketLifecycleConfigurationRequest; + output: PutBucketLifecycleConfigurationOutput; + }; + sdk: { + input: PutBucketLifecycleConfigurationCommandInput; + output: PutBucketLifecycleConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketLoggingCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketLoggingCommand.d.ts new file mode 100644 index 00000000..05f75e5c --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketLoggingCommand.d.ts @@ -0,0 +1,212 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketLoggingRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutBucketLoggingCommand}. + */ +export interface PutBucketLoggingCommandInput extends PutBucketLoggingRequest { +} +/** + * @public + * + * The output of {@link PutBucketLoggingCommand}. + */ +export interface PutBucketLoggingCommandOutput extends __MetadataBearer { +} +declare const PutBucketLoggingCommand_base: { + new (input: PutBucketLoggingCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutBucketLoggingCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

This operation is not supported for directory buckets.

+ *
+ *

Sets the logging parameters for a bucket and specifies permissions for who can view and modify the logging parameters. All logs are saved to buckets in the same Amazon Web Services Region as the source bucket. To set the logging status of a bucket, you must be the bucket owner.

+ *

The bucket owner is automatically granted FULL_CONTROL to all logs. You use the + * Grantee request element to grant access to other people. The + * Permissions request element specifies the kind of access the grantee has to + * the logs.

+ * + *

If the target bucket for log delivery uses the bucket owner enforced setting for S3 + * Object Ownership, you can't use the Grantee request element to grant access + * to others. Permissions can only be granted using policies. For more information, see + * Permissions for server access log delivery in the + * Amazon S3 User Guide.

+ *
+ *
+ *
Grantee Values
+ *
+ *

You can specify the person (grantee) to whom you're assigning access rights (by + * using request elements) in the following ways:

  • By the person's ID:

    <Grantee xmlns="http://www.w3.org/2001/XMLSchema-instance" xsi:type="CanonicalUser"><ID><>ID<></ID><DisplayName><>GranteesEmail<></DisplayName></Grantee>

    DisplayName is optional and ignored in the request.

  • By Email address:

    <Grantee xmlns="http://www.w3.org/2001/XMLSchema-instance" xsi:type="AmazonCustomerByEmail"><EmailAddress><>Grantees@email.com<></EmailAddress></Grantee>

    The grantee is resolved to the CanonicalUser and, in a response to a GETObjectAcl request, appears as the CanonicalUser.

  • By URI:

    <Grantee xmlns="http://www.w3.org/2001/XMLSchema-instance" xsi:type="Group"><URI><>http://acs.amazonaws.com/groups/global/AuthenticatedUsers<></URI></Grantee>
+ *
+ *
+ *

To enable logging, you use LoggingEnabled and its children request + * elements. To disable logging, you use an empty BucketLoggingStatus request + * element:

    <BucketLoggingStatus xmlns="http://doc.s3.amazonaws.com/2006-03-01" />

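For the disable case just described, a minimal TypeScript sketch; the bucket name is a placeholder. Sending an empty BucketLoggingStatus turns server access logging off:

```typescript
import { S3Client, PutBucketLoggingCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});

// "amzn-s3-demo-bucket" is a placeholder. An empty BucketLoggingStatus
// element disables server access logging for the bucket.
await client.send(
  new PutBucketLoggingCommand({
    Bucket: "amzn-s3-demo-bucket",
    BucketLoggingStatus: {},
  })
);
```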
+ *

For more information about server access logging, see Server Access Logging in the + * Amazon S3 User Guide.

+ *

For more information about creating a bucket, see CreateBucket. For more + * information about returning the logging status of a bucket, see GetBucketLogging.

+ *

The following operations are related to PutBucketLogging:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutBucketLoggingCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutBucketLoggingCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutBucketLoggingRequest + * Bucket: "STRING_VALUE", // required + * BucketLoggingStatus: { // BucketLoggingStatus + * LoggingEnabled: { // LoggingEnabled + * TargetBucket: "STRING_VALUE", // required + * TargetGrants: [ // TargetGrants + * { // TargetGrant + * Grantee: { // Grantee + * DisplayName: "STRING_VALUE", + * EmailAddress: "STRING_VALUE", + * ID: "STRING_VALUE", + * URI: "STRING_VALUE", + * Type: "CanonicalUser" || "AmazonCustomerByEmail" || "Group", // required + * }, + * Permission: "FULL_CONTROL" || "READ" || "WRITE", + * }, + * ], + * TargetPrefix: "STRING_VALUE", // required + * TargetObjectKeyFormat: { // TargetObjectKeyFormat + * SimplePrefix: {}, + * PartitionedPrefix: { // PartitionedPrefix + * PartitionDateSource: "EventTime" || "DeliveryTime", + * }, + * }, + * }, + * }, + * ContentMD5: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new PutBucketLoggingCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param PutBucketLoggingCommandInput - {@link PutBucketLoggingCommandInput} + * @returns {@link PutBucketLoggingCommandOutput} + * @see {@link PutBucketLoggingCommandInput} for command's `input` shape. + * @see {@link PutBucketLoggingCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @example Set logging configuration for a bucket + * ```javascript + * // The following example sets logging policy on a bucket. For the Log Delivery group to deliver logs to the destination bucket, it needs permission for the READ_ACP action which the policy grants. + * const input = { + * Bucket: "sourcebucket", + * BucketLoggingStatus: { + * LoggingEnabled: { + * TargetBucket: "targetbucket", + * TargetGrants: [ + * { + * Grantee: { + * Type: "Group", + * URI: "http://acs.amazonaws.com/groups/global/AllUsers" + * }, + * Permission: "READ" + * } + * ], + * TargetPrefix: "MyBucketLogs/" + * } + * } + * }; + * const command = new PutBucketLoggingCommand(input); + * const response = await client.send(command); + * /* response is + * { /* metadata only *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class PutBucketLoggingCommand extends PutBucketLoggingCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutBucketLoggingRequest; + output: {}; + }; + sdk: { + input: PutBucketLoggingCommandInput; + output: PutBucketLoggingCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketMetricsConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketMetricsConfigurationCommand.d.ts new file mode 100644 index 00000000..191c015d --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketMetricsConfigurationCommand.d.ts @@ -0,0 +1,142 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketMetricsConfigurationRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutBucketMetricsConfigurationCommand}. + */ +export interface PutBucketMetricsConfigurationCommandInput extends PutBucketMetricsConfigurationRequest { +} +/** + * @public + * + * The output of {@link PutBucketMetricsConfigurationCommand}. + */ +export interface PutBucketMetricsConfigurationCommandOutput extends __MetadataBearer { +} +declare const PutBucketMetricsConfigurationCommand_base: { + new (input: PutBucketMetricsConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutBucketMetricsConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

This operation is not supported for directory buckets.

+ *
+ *

Sets a metrics configuration (specified by the metrics configuration ID) for the bucket. + * You can have up to 1,000 metrics configurations per bucket. If you're updating an existing + * metrics configuration, note that this is a full replacement of the existing metrics + * configuration. If you don't include the elements you want to keep, they are erased.

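Since each PUT is a full replacement, a typical call resubmits the entire configuration it wants to keep. A minimal TypeScript sketch with placeholder bucket and configuration IDs:

```typescript
import { S3Client, PutBucketMetricsConfigurationCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});

// Placeholder names. This PUT fully replaces whatever is stored under
// Id "docs-metrics"; any filter elements omitted here are erased.
await client.send(
  new PutBucketMetricsConfigurationCommand({
    Bucket: "amzn-s3-demo-bucket",
    Id: "docs-metrics",
    MetricsConfiguration: {
      Id: "docs-metrics", // must match the Id above
      Filter: { Prefix: "documents/" }, // report request metrics for this prefix only
    },
  })
);
```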
+ *

To use this operation, you must have permissions to perform the + * s3:PutMetricsConfiguration action. The bucket owner has this permission by + * default. The bucket owner can grant this permission to others. For more information about + * permissions, see Permissions Related to Bucket Subresource Operations and Managing + * Access Permissions to Your Amazon S3 Resources.

+ *

For information about CloudWatch request metrics for Amazon S3, see Monitoring + * Metrics with Amazon CloudWatch.

+ *

The following operations are related to + * PutBucketMetricsConfiguration:

+ * + *

+ * PutBucketMetricsConfiguration has the following special error:

+ *
  • Error code: TooManyConfigurations
      • Description: You are attempting to create a new configuration but have already reached the 1,000-configuration limit.
      • HTTP Status Code: HTTP 400 Bad Request
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutBucketMetricsConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutBucketMetricsConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutBucketMetricsConfigurationRequest + * Bucket: "STRING_VALUE", // required + * Id: "STRING_VALUE", // required + * MetricsConfiguration: { // MetricsConfiguration + * Id: "STRING_VALUE", // required + * Filter: { // MetricsFilter Union: only one key present + * Prefix: "STRING_VALUE", + * Tag: { // Tag + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * AccessPointArn: "STRING_VALUE", + * And: { // MetricsAndOperator + * Prefix: "STRING_VALUE", + * Tags: [ // TagSet + * { + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * ], + * AccessPointArn: "STRING_VALUE", + * }, + * }, + * }, + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new PutBucketMetricsConfigurationCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param PutBucketMetricsConfigurationCommandInput - {@link PutBucketMetricsConfigurationCommandInput} + * @returns {@link PutBucketMetricsConfigurationCommandOutput} + * @see {@link PutBucketMetricsConfigurationCommandInput} for command's `input` shape. + * @see {@link PutBucketMetricsConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class PutBucketMetricsConfigurationCommand extends PutBucketMetricsConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutBucketMetricsConfigurationRequest; + output: {}; + }; + sdk: { + input: PutBucketMetricsConfigurationCommandInput; + output: PutBucketMetricsConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketNotificationConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketNotificationConfigurationCommand.d.ts new file mode 100644 index 00000000..9fa4e93a --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketNotificationConfigurationCommand.d.ts @@ -0,0 +1,209 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketNotificationConfigurationRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutBucketNotificationConfigurationCommand}. + */ +export interface PutBucketNotificationConfigurationCommandInput extends PutBucketNotificationConfigurationRequest { +} +/** + * @public + * + * The output of {@link PutBucketNotificationConfigurationCommand}. + */ +export interface PutBucketNotificationConfigurationCommandOutput extends __MetadataBearer { +} +declare const PutBucketNotificationConfigurationCommand_base: { + new (input: PutBucketNotificationConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutBucketNotificationConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

This operation is not supported for directory buckets.

+ *
+ *

Enables notifications of specified events for a bucket. For more information about event + * notifications, see Configuring Event + * Notifications.

+ *

Using this API, you can replace an existing notification configuration. The + * configuration is an XML file that defines the event types that you want Amazon S3 to publish and + * the destination where you want Amazon S3 to publish an event notification when it detects an + * event of the specified type.

+ *

By default, your bucket has no event notifications configured. That is, the notification + * configuration will be an empty NotificationConfiguration.

+ *

    <NotificationConfiguration>
    </NotificationConfiguration>

This action replaces the existing notification configuration with the configuration you + * include in the request body.

+ *

After Amazon S3 receives this request, it first verifies that any Amazon Simple Notification + * Service (Amazon SNS) or Amazon Simple Queue Service (Amazon SQS) destination exists, and + * that the bucket owner has permission to publish to it by sending a test notification. In + * the case of Lambda destinations, Amazon S3 verifies that the Lambda function permissions + * grant Amazon S3 permission to invoke the function from the Amazon S3 bucket. For more information, + * see Configuring Notifications for Amazon S3 Events.

+ *

You can disable notifications by adding the empty NotificationConfiguration + * element.

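A minimal TypeScript sketch of that disable case, with a placeholder bucket name:

```typescript
import { S3Client, PutBucketNotificationConfigurationCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});

// "amzn-s3-demo-bucket" is a placeholder. An empty NotificationConfiguration
// removes all event notifications configured on the bucket.
await client.send(
  new PutBucketNotificationConfigurationCommand({
    Bucket: "amzn-s3-demo-bucket",
    NotificationConfiguration: {},
  })
);
```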
+ *

For more information about the number of event notification configurations that you can + * create per bucket, see Amazon S3 service quotas in Amazon Web Services + * General Reference.

+ *

By default, only the bucket owner can configure notifications on a bucket. However, + * bucket owners can use a bucket policy to grant permission to other users to set this + * configuration with the required s3:PutBucketNotification permission.

+ * + *

The PUT notification is an atomic operation. For example, suppose your notification + * configuration includes SNS topic, SQS queue, and Lambda function configurations. When + * you send a PUT request with this configuration, Amazon S3 sends test messages to your SNS + * topic. If the message fails, the entire PUT action will fail, and Amazon S3 will not add the + * configuration to your bucket.

+ *
+ *

If the configuration in the request body includes only one + * TopicConfiguration specifying only the + * s3:ReducedRedundancyLostObject event type, the response will also include + * the x-amz-sns-test-message-id header containing the message ID of the test + * notification sent to the topic.

+ *

The following action is related to + * PutBucketNotificationConfiguration:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutBucketNotificationConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutBucketNotificationConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutBucketNotificationConfigurationRequest + * Bucket: "STRING_VALUE", // required + * NotificationConfiguration: { // NotificationConfiguration + * TopicConfigurations: [ // TopicConfigurationList + * { // TopicConfiguration + * Id: "STRING_VALUE", + * TopicArn: "STRING_VALUE", // required + * Events: [ // EventList // required + * "s3:ReducedRedundancyLostObject" || "s3:ObjectCreated:*" || "s3:ObjectCreated:Put" || "s3:ObjectCreated:Post" || "s3:ObjectCreated:Copy" || "s3:ObjectCreated:CompleteMultipartUpload" || "s3:ObjectRemoved:*" || "s3:ObjectRemoved:Delete" || "s3:ObjectRemoved:DeleteMarkerCreated" || "s3:ObjectRestore:*" || "s3:ObjectRestore:Post" || "s3:ObjectRestore:Completed" || "s3:Replication:*" || "s3:Replication:OperationFailedReplication" || "s3:Replication:OperationNotTracked" || "s3:Replication:OperationMissedThreshold" || "s3:Replication:OperationReplicatedAfterThreshold" || "s3:ObjectRestore:Delete" || "s3:LifecycleTransition" || "s3:IntelligentTiering" || "s3:ObjectAcl:Put" || "s3:LifecycleExpiration:*" || "s3:LifecycleExpiration:Delete" || "s3:LifecycleExpiration:DeleteMarkerCreated" || "s3:ObjectTagging:*" || "s3:ObjectTagging:Put" || "s3:ObjectTagging:Delete", + * ], + * Filter: { // NotificationConfigurationFilter + * Key: { // S3KeyFilter + * FilterRules: [ // FilterRuleList + * { // FilterRule + * Name: "prefix" || "suffix", + * Value: "STRING_VALUE", + * }, + * ], + * }, + * }, + * }, + * ], + * QueueConfigurations: [ // QueueConfigurationList + * { // QueueConfiguration + * Id: "STRING_VALUE", + * QueueArn: "STRING_VALUE", // required + * Events: [ // required + * "s3:ReducedRedundancyLostObject" || "s3:ObjectCreated:*" || "s3:ObjectCreated:Put" || "s3:ObjectCreated:Post" || "s3:ObjectCreated:Copy" || "s3:ObjectCreated:CompleteMultipartUpload" || "s3:ObjectRemoved:*" || "s3:ObjectRemoved:Delete" || "s3:ObjectRemoved:DeleteMarkerCreated" || "s3:ObjectRestore:*" || "s3:ObjectRestore:Post" || "s3:ObjectRestore:Completed" || "s3:Replication:*" || "s3:Replication:OperationFailedReplication" || "s3:Replication:OperationNotTracked" || "s3:Replication:OperationMissedThreshold" || "s3:Replication:OperationReplicatedAfterThreshold" || "s3:ObjectRestore:Delete" || "s3:LifecycleTransition" || "s3:IntelligentTiering" || "s3:ObjectAcl:Put" || "s3:LifecycleExpiration:*" || "s3:LifecycleExpiration:Delete" || "s3:LifecycleExpiration:DeleteMarkerCreated" || "s3:ObjectTagging:*" || "s3:ObjectTagging:Put" || "s3:ObjectTagging:Delete", + * ], + * Filter: { + * Key: { + * FilterRules: [ + * { + * Name: "prefix" || "suffix", + * Value: "STRING_VALUE", + * }, + * ], + * }, + * }, + * }, + * ], + * LambdaFunctionConfigurations: [ // LambdaFunctionConfigurationList + * { // LambdaFunctionConfiguration + * Id: "STRING_VALUE", + * LambdaFunctionArn: "STRING_VALUE", // required + * Events: [ // required + * "s3:ReducedRedundancyLostObject" || "s3:ObjectCreated:*" || "s3:ObjectCreated:Put" || "s3:ObjectCreated:Post" || "s3:ObjectCreated:Copy" || "s3:ObjectCreated:CompleteMultipartUpload" || "s3:ObjectRemoved:*" || "s3:ObjectRemoved:Delete" || "s3:ObjectRemoved:DeleteMarkerCreated" || 
"s3:ObjectRestore:*" || "s3:ObjectRestore:Post" || "s3:ObjectRestore:Completed" || "s3:Replication:*" || "s3:Replication:OperationFailedReplication" || "s3:Replication:OperationNotTracked" || "s3:Replication:OperationMissedThreshold" || "s3:Replication:OperationReplicatedAfterThreshold" || "s3:ObjectRestore:Delete" || "s3:LifecycleTransition" || "s3:IntelligentTiering" || "s3:ObjectAcl:Put" || "s3:LifecycleExpiration:*" || "s3:LifecycleExpiration:Delete" || "s3:LifecycleExpiration:DeleteMarkerCreated" || "s3:ObjectTagging:*" || "s3:ObjectTagging:Put" || "s3:ObjectTagging:Delete", + * ], + * Filter: { + * Key: { + * FilterRules: [ + * { + * Name: "prefix" || "suffix", + * Value: "STRING_VALUE", + * }, + * ], + * }, + * }, + * }, + * ], + * EventBridgeConfiguration: {}, + * }, + * ExpectedBucketOwner: "STRING_VALUE", + * SkipDestinationValidation: true || false, + * }; + * const command = new PutBucketNotificationConfigurationCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param PutBucketNotificationConfigurationCommandInput - {@link PutBucketNotificationConfigurationCommandInput} + * @returns {@link PutBucketNotificationConfigurationCommandOutput} + * @see {@link PutBucketNotificationConfigurationCommandInput} for command's `input` shape. + * @see {@link PutBucketNotificationConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @example Set notification configuration for a bucket + * ```javascript + * // The following example sets notification configuration on a bucket to publish the object created events to an SNS topic. + * const input = { + * Bucket: "examplebucket", + * NotificationConfiguration: { + * TopicConfigurations: [ + * { + * Events: [ + * "s3:ObjectCreated:*" + * ], + * TopicArn: "arn:aws:sns:us-west-2:123456789012:s3-notification-topic" + * } + * ] + * } + * }; + * const command = new PutBucketNotificationConfigurationCommand(input); + * const response = await client.send(command); + * /* response is + * { /* metadata only *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class PutBucketNotificationConfigurationCommand extends PutBucketNotificationConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutBucketNotificationConfigurationRequest; + output: {}; + }; + sdk: { + input: PutBucketNotificationConfigurationCommandInput; + output: PutBucketNotificationConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketOwnershipControlsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketOwnershipControlsCommand.d.ts new file mode 100644 index 00000000..b73b282d --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketOwnershipControlsCommand.d.ts @@ -0,0 +1,100 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketOwnershipControlsRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutBucketOwnershipControlsCommand}. + */ +export interface PutBucketOwnershipControlsCommandInput extends PutBucketOwnershipControlsRequest { +} +/** + * @public + * + * The output of {@link PutBucketOwnershipControlsCommand}. + */ +export interface PutBucketOwnershipControlsCommandOutput extends __MetadataBearer { +} +declare const PutBucketOwnershipControlsCommand_base: { + new (input: PutBucketOwnershipControlsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutBucketOwnershipControlsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

This operation is not supported for directory buckets.

+ *
+ *

Creates or modifies OwnershipControls for an Amazon S3 bucket. To use this + * operation, you must have the s3:PutBucketOwnershipControls permission. For + * more information about Amazon S3 permissions, see Specifying permissions in a + * policy.

+ *

For information about Amazon S3 Object Ownership, see Using object + * ownership.

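A minimal TypeScript sketch that applies the BucketOwnerEnforced setting; the bucket name is a placeholder:

```typescript
import { S3Client, PutBucketOwnershipControlsCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});

// Placeholder bucket. BucketOwnerEnforced disables ACLs and makes the
// bucket owner the owner of every object in the bucket.
await client.send(
  new PutBucketOwnershipControlsCommand({
    Bucket: "amzn-s3-demo-bucket",
    OwnershipControls: {
      Rules: [{ ObjectOwnership: "BucketOwnerEnforced" }],
    },
  })
);
```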
+ *

The following operations are related to PutBucketOwnershipControls:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutBucketOwnershipControlsCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutBucketOwnershipControlsCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutBucketOwnershipControlsRequest + * Bucket: "STRING_VALUE", // required + * ContentMD5: "STRING_VALUE", + * ExpectedBucketOwner: "STRING_VALUE", + * OwnershipControls: { // OwnershipControls + * Rules: [ // OwnershipControlsRules // required + * { // OwnershipControlsRule + * ObjectOwnership: "BucketOwnerPreferred" || "ObjectWriter" || "BucketOwnerEnforced", // required + * }, + * ], + * }, + * }; + * const command = new PutBucketOwnershipControlsCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param PutBucketOwnershipControlsCommandInput - {@link PutBucketOwnershipControlsCommandInput} + * @returns {@link PutBucketOwnershipControlsCommandOutput} + * @see {@link PutBucketOwnershipControlsCommandInput} for command's `input` shape. + * @see {@link PutBucketOwnershipControlsCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class PutBucketOwnershipControlsCommand extends PutBucketOwnershipControlsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutBucketOwnershipControlsRequest; + output: {}; + }; + sdk: { + input: PutBucketOwnershipControlsCommandInput; + output: PutBucketOwnershipControlsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketPolicyCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketPolicyCommand.d.ts new file mode 100644 index 00000000..c0a67f8b --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketPolicyCommand.d.ts @@ -0,0 +1,165 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketPolicyRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutBucketPolicyCommand}. + */ +export interface PutBucketPolicyCommandInput extends PutBucketPolicyRequest { +} +/** + * @public + * + * The output of {@link PutBucketPolicyCommand}. + */ +export interface PutBucketPolicyCommandOutput extends __MetadataBearer { +} +declare const PutBucketPolicyCommand_base: { + new (input: PutBucketPolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutBucketPolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Applies an Amazon S3 bucket policy to an Amazon S3 bucket.

+ * + *

+ * Directory buckets - For directory buckets, you must make requests for this API operation to the Regional endpoint. These endpoints support path-style requests in the format https://s3express-control.region-code.amazonaws.com/bucket-name + * . Virtual-hosted-style requests aren't supported. + * For more information about endpoints in Availability Zones, see Regional and Zonal endpoints for directory buckets in Availability Zones in the + * Amazon S3 User Guide. For more information about endpoints in Local Zones, see Concepts for directory buckets in Local Zones in the + * Amazon S3 User Guide.

+ *
+ *
+ *
Permissions
+ *
+ *

If you are using an identity other than the root user of the Amazon Web Services account that + * owns the bucket, the calling identity must both have the + * PutBucketPolicy permissions on the specified bucket and belong to + * the bucket owner's account in order to use this operation.

+ *

If you don't have PutBucketPolicy permissions, Amazon S3 returns a + * 403 Access Denied error. If you have the correct permissions, but + * you're not using an identity that belongs to the bucket owner's account, Amazon S3 + * returns a 405 Method Not Allowed error.

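One way to avoid malformed policy documents is to build the policy as an object and serialize it just before the call; a TypeScript sketch in which the account ID, bucket name, and statement are placeholder values:

```typescript
import { S3Client, PutBucketPolicyCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});

// All identifiers below are placeholders; substitute your own.
const policy = {
  Version: "2012-10-17",
  Statement: [
    {
      Sid: "AllowGetFromAccount",
      Effect: "Allow",
      Principal: { AWS: "arn:aws:iam::111122223333:root" },
      Action: ["s3:GetObject"],
      Resource: ["arn:aws:s3:::amzn-s3-demo-bucket/*"],
    },
  ],
};

await client.send(
  new PutBucketPolicyCommand({
    Bucket: "amzn-s3-demo-bucket",
    Policy: JSON.stringify(policy), // Policy must be a JSON string
  })
);
```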
+ * + *

To ensure that bucket owners don't inadvertently lock themselves out of + * their own buckets, the root principal in a bucket owner's Amazon Web Services account can + * perform the GetBucketPolicy, PutBucketPolicy, and + * DeleteBucketPolicy API actions, even if their bucket policy + * explicitly denies the root principal's access. Bucket owner root principals can + * only be blocked from performing these API actions by VPC endpoint policies and + * Amazon Web Services Organizations policies.

+ *
+ *
  • General purpose bucket permissions - The s3:PutBucketPolicy permission is required in a policy. For more information about bucket policies for general purpose buckets, see Using Bucket Policies and User Policies in the Amazon S3 User Guide.
  • Directory bucket permissions - To grant access to this API operation, you must have the s3express:PutBucketPolicy permission in an IAM identity-based policy instead of a bucket policy. Cross-account access to this API operation isn't supported. This operation can only be performed by the Amazon Web Services account that owns the resource. For more information about directory bucket policies and permissions, see Amazon Web Services Identity and Access Management (IAM) for S3 Express One Zone in the Amazon S3 User Guide.
+ *
+ *
Example bucket policies
+ *
+ *

+ * General purpose buckets example bucket policies + * - See Bucket policy + * examples in the Amazon S3 User Guide.

+ *

+ * Directory bucket example bucket policies + * - See Example bucket policies for S3 Express One Zone in the + * Amazon S3 User Guide.

+ *
+ *
HTTP Host header syntax
+ *
+ *

+ * Directory buckets - The HTTP Host header syntax is s3express-control.region-code.amazonaws.com.

+ *
+ *
+ *

The following operations are related to PutBucketPolicy:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutBucketPolicyCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutBucketPolicyCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutBucketPolicyRequest + * Bucket: "STRING_VALUE", // required + * ContentMD5: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * ConfirmRemoveSelfBucketAccess: true || false, + * Policy: "STRING_VALUE", // required + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new PutBucketPolicyCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param PutBucketPolicyCommandInput - {@link PutBucketPolicyCommandInput} + * @returns {@link PutBucketPolicyCommandOutput} + * @see {@link PutBucketPolicyCommandInput} for command's `input` shape. + * @see {@link PutBucketPolicyCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @example Set bucket policy + * ```javascript + * // The following example sets a permission policy on a bucket. + * const input = { + * Bucket: "examplebucket", + * Policy: `{"Version": "2012-10-17", "Statement": [{ "Sid": "id-1","Effect": "Allow","Principal": {"AWS": "arn:aws:iam::123456789012:root"}, "Action": [ "s3:PutObject","s3:PutObjectAcl"], "Resource": ["arn:aws:s3:::acl3/*" ] } ]}` + * }; + * const command = new PutBucketPolicyCommand(input); + * const response = await client.send(command); + * /* response is + * { /* metadata only *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class PutBucketPolicyCommand extends PutBucketPolicyCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutBucketPolicyRequest; + output: {}; + }; + sdk: { + input: PutBucketPolicyCommandInput; + output: PutBucketPolicyCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketReplicationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketReplicationCommand.d.ts new file mode 100644 index 00000000..29357877 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketReplicationCommand.d.ts @@ -0,0 +1,236 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketReplicationRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutBucketReplicationCommand}. + */ +export interface PutBucketReplicationCommandInput extends PutBucketReplicationRequest { +} +/** + * @public + * + * The output of {@link PutBucketReplicationCommand}. + */ +export interface PutBucketReplicationCommandOutput extends __MetadataBearer { +} +declare const PutBucketReplicationCommand_base: { + new (input: PutBucketReplicationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutBucketReplicationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

This operation is not supported for directory buckets.

+ *
+ *

Creates a replication configuration or replaces an existing one. For more information, + * see Replication in the Amazon S3 User Guide.

+ *

Specify the replication configuration in the request body. In the replication + * configuration, you provide the name of the destination bucket or buckets where you want + * Amazon S3 to replicate objects, the IAM role that Amazon S3 can assume to replicate objects on your + * behalf, and other relevant information. You can invoke this request for a specific Amazon Web Services + * Region by using the + * aws:RequestedRegion + * condition key.

+ *

A replication configuration must include at least one rule, and can contain a maximum of + * 1,000. Each rule identifies a subset of objects to replicate by filtering the objects in + * the source bucket. To choose additional subsets of objects to replicate, add a rule for + * each subset.

+ *

To specify a subset of the objects in the source bucket to apply a replication rule to, + * add the Filter element as a child of the Rule element. You can filter objects based on an + * object key prefix, one or more object tags, or both. When you add the Filter element in the + * configuration, you must also add the following elements: + * DeleteMarkerReplication, Status, and + * Priority.

+ * + *

If you are using an earlier version of the replication configuration, Amazon S3 handles + * replication of delete markers differently. For more information, see Backward Compatibility.

+ *
+ *

For information about enabling versioning on a bucket, see Using Versioning.

+ *
+ *
Handling Replication of Encrypted Objects
+ *
+ *

By default, Amazon S3 doesn't replicate objects that are stored at rest using + * server-side encryption with KMS keys. To replicate Amazon Web Services KMS-encrypted objects, + * add the following: SourceSelectionCriteria, + * SseKmsEncryptedObjects, Status, + * EncryptionConfiguration, and ReplicaKmsKeyID. For + * information about replication configuration, see Replicating + * Objects Created with SSE Using KMS keys.

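A hedged TypeScript sketch of such a configuration, showing where those elements sit in the request; the role ARN, bucket ARNs, and KMS key ARN are placeholders:

```typescript
import { S3Client, PutBucketReplicationCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});

// All ARNs and names below are placeholders.
await client.send(
  new PutBucketReplicationCommand({
    Bucket: "amzn-s3-demo-bucket",
    ReplicationConfiguration: {
      Role: "arn:aws:iam::111122223333:role/replication-role",
      Rules: [
        {
          // Because Filter is present, Status, Priority, and
          // DeleteMarkerReplication must also be specified.
          Status: "Enabled",
          Priority: 1,
          Filter: { Prefix: "" }, // whole bucket
          DeleteMarkerReplication: { Status: "Disabled" },
          // Opt KMS-encrypted source objects into replication.
          SourceSelectionCriteria: {
            SseKmsEncryptedObjects: { Status: "Enabled" },
          },
          Destination: {
            Bucket: "arn:aws:s3:::amzn-s3-demo-destination",
            // Key that Amazon S3 uses to encrypt the replicas.
            EncryptionConfiguration: {
              ReplicaKmsKeyID: "arn:aws:kms:us-west-2:111122223333:key/EXAMPLE-KEY-ID",
            },
          },
        },
      ],
    },
  })
);
```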
+ *

For information on PutBucketReplication errors, see List of + * replication-related error codes + *

+ *
+ *
Permissions
+ *
+ *

To create a PutBucketReplication request, you must have + * s3:PutReplicationConfiguration permissions for the bucket. + * + *

+ *

By default, a resource owner, in this case the Amazon Web Services account that created the + * bucket, can perform this operation. The resource owner can also grant others + * permissions to perform the operation. For more information about permissions, see + * Specifying Permissions in + * a Policy and Managing Access + * Permissions to Your Amazon S3 Resources.

+ * + *

To perform this operation, the user or role performing the action must have + * the iam:PassRole + * permission.

+ *
+ *
+ *
+ *

The following operations are related to PutBucketReplication:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutBucketReplicationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutBucketReplicationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutBucketReplicationRequest + * Bucket: "STRING_VALUE", // required + * ContentMD5: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * ReplicationConfiguration: { // ReplicationConfiguration + * Role: "STRING_VALUE", // required + * Rules: [ // ReplicationRules // required + * { // ReplicationRule + * ID: "STRING_VALUE", + * Priority: Number("int"), + * Prefix: "STRING_VALUE", + * Filter: { // ReplicationRuleFilter + * Prefix: "STRING_VALUE", + * Tag: { // Tag + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * And: { // ReplicationRuleAndOperator + * Prefix: "STRING_VALUE", + * Tags: [ // TagSet + * { + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * ], + * }, + * }, + * Status: "Enabled" || "Disabled", // required + * SourceSelectionCriteria: { // SourceSelectionCriteria + * SseKmsEncryptedObjects: { // SseKmsEncryptedObjects + * Status: "Enabled" || "Disabled", // required + * }, + * ReplicaModifications: { // ReplicaModifications + * Status: "Enabled" || "Disabled", // required + * }, + * }, + * ExistingObjectReplication: { // ExistingObjectReplication + * Status: "Enabled" || "Disabled", // required + * }, + * Destination: { // Destination + * Bucket: "STRING_VALUE", // required + * Account: "STRING_VALUE", + * StorageClass: "STANDARD" || "REDUCED_REDUNDANCY" || "STANDARD_IA" || "ONEZONE_IA" || "INTELLIGENT_TIERING" || "GLACIER" || "DEEP_ARCHIVE" || "OUTPOSTS" || "GLACIER_IR" || "SNOW" || "EXPRESS_ONEZONE", + * AccessControlTranslation: { // AccessControlTranslation + * Owner: "Destination", // required + * }, + * EncryptionConfiguration: { // EncryptionConfiguration + * ReplicaKmsKeyID: "STRING_VALUE", + * }, + * ReplicationTime: { // ReplicationTime + * Status: "Enabled" || "Disabled", // required + * Time: { // ReplicationTimeValue + * Minutes: Number("int"), + * }, + * }, + * Metrics: { // Metrics + * Status: "Enabled" || "Disabled", // required + * EventThreshold: { + * Minutes: Number("int"), + * }, + * }, + * }, + * DeleteMarkerReplication: { // DeleteMarkerReplication + * Status: "Enabled" || "Disabled", + * }, + * }, + * ], + * }, + * Token: "STRING_VALUE", + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new PutBucketReplicationCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param PutBucketReplicationCommandInput - {@link PutBucketReplicationCommandInput} + * @returns {@link PutBucketReplicationCommandOutput} + * @see {@link PutBucketReplicationCommandInput} for command's `input` shape. + * @see {@link PutBucketReplicationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @example Set replication configuration on a bucket + * ```javascript + * // The following example sets replication configuration on a bucket. + * const input = { + * Bucket: "examplebucket", + * ReplicationConfiguration: { + * Role: "arn:aws:iam::123456789012:role/examplerole", + * Rules: [ + * { + * Destination: { + * Bucket: "arn:aws:s3:::destinationbucket", + * StorageClass: "STANDARD" + * }, + * Prefix: "", + * Status: "Enabled" + * } + * ] + * } + * }; + * const command = new PutBucketReplicationCommand(input); + * const response = await client.send(command); + * /* response is + * { /* metadata only *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class PutBucketReplicationCommand extends PutBucketReplicationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutBucketReplicationRequest; + output: {}; + }; + sdk: { + input: PutBucketReplicationCommandInput; + output: PutBucketReplicationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketRequestPaymentCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketRequestPaymentCommand.d.ts new file mode 100644 index 00000000..28ade8b0 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketRequestPaymentCommand.d.ts @@ -0,0 +1,112 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketRequestPaymentRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutBucketRequestPaymentCommand}. + */ +export interface PutBucketRequestPaymentCommandInput extends PutBucketRequestPaymentRequest { +} +/** + * @public + * + * The output of {@link PutBucketRequestPaymentCommand}. + */ +export interface PutBucketRequestPaymentCommandOutput extends __MetadataBearer { +} +declare const PutBucketRequestPaymentCommand_base: { + new (input: PutBucketRequestPaymentCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutBucketRequestPaymentCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

This operation is not supported for directory buckets.

+ *
+ *

Sets the request payment configuration for a bucket. By default, the bucket owner pays + * for downloads from the bucket. This configuration parameter enables the bucket owner (only) + * to specify that the person requesting the download will be charged for the download. For + * more information, see Requester Pays + * Buckets.

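Conversely, a minimal TypeScript sketch that reverts a Requester Pays bucket to the default, owner-pays configuration; the bucket name is a placeholder:

```typescript
import { S3Client, PutBucketRequestPaymentCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});

// "amzn-s3-demo-bucket" is a placeholder. BucketOwner restores the
// default behavior where the bucket owner pays for downloads.
await client.send(
  new PutBucketRequestPaymentCommand({
    Bucket: "amzn-s3-demo-bucket",
    RequestPaymentConfiguration: { Payer: "BucketOwner" },
  })
);
```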
+ *

The following operations are related to PutBucketRequestPayment:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutBucketRequestPaymentCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutBucketRequestPaymentCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutBucketRequestPaymentRequest + * Bucket: "STRING_VALUE", // required + * ContentMD5: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * RequestPaymentConfiguration: { // RequestPaymentConfiguration + * Payer: "Requester" || "BucketOwner", // required + * }, + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new PutBucketRequestPaymentCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param PutBucketRequestPaymentCommandInput - {@link PutBucketRequestPaymentCommandInput} + * @returns {@link PutBucketRequestPaymentCommandOutput} + * @see {@link PutBucketRequestPaymentCommandInput} for command's `input` shape. + * @see {@link PutBucketRequestPaymentCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

Base exception class for all service exceptions from S3 service.

+ * + * + * @example Set request payment configuration on a bucket. + * ```javascript + * // The following example sets request payment configuration on a bucket so that person requesting the download is charged. + * const input = { + * Bucket: "examplebucket", + * RequestPaymentConfiguration: { + * Payer: "Requester" + * } + * }; + * const command = new PutBucketRequestPaymentCommand(input); + * const response = await client.send(command); + * /* response is + * { /* metadata only *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class PutBucketRequestPaymentCommand extends PutBucketRequestPaymentCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutBucketRequestPaymentRequest; + output: {}; + }; + sdk: { + input: PutBucketRequestPaymentCommandInput; + output: PutBucketRequestPaymentCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketTaggingCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketTaggingCommand.d.ts new file mode 100644 index 00000000..11b92d9c --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketTaggingCommand.d.ts @@ -0,0 +1,166 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketTaggingRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutBucketTaggingCommand}. + */ +export interface PutBucketTaggingCommandInput extends PutBucketTaggingRequest { +} +/** + * @public + * + * The output of {@link PutBucketTaggingCommand}. + */ +export interface PutBucketTaggingCommandOutput extends __MetadataBearer { +} +declare const PutBucketTaggingCommand_base: { + new (input: PutBucketTaggingCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutBucketTaggingCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

This operation is not supported for directory buckets.

+ *
+ *

Sets the tags for a bucket.

+ *

Use tags to organize your Amazon Web Services bill to reflect your own cost structure. To do this, + * sign up to get your Amazon Web Services account bill with tag key values included. Then, to see the cost + * of combined resources, organize your billing information according to resources with the + * same tag key values. For example, you can tag several resources with a specific application + * name, and then organize your billing information to see the total cost of that application + * across several services. For more information, see Cost Allocation and + * Tagging and Using Cost Allocation in Amazon S3 + * Bucket Tags.

+ * + *

When this operation sets the tags for a bucket, it will overwrite any current tags + * the bucket already has. You cannot use this operation to add tags to an existing list of + * tags.

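Because of this replacement behavior, safely "adding" one tag means reading the current set, merging, and writing the full set back. A TypeScript sketch with placeholder bucket and tag values:

```typescript
import { S3Client, GetBucketTaggingCommand, PutBucketTaggingCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});
const Bucket = "amzn-s3-demo-bucket"; // placeholder

// PUT replaces the whole tag set, so read, merge, then write.
// GetBucketTagging errors if the bucket has no tag set yet; treat that as empty.
const existing = await client
  .send(new GetBucketTaggingCommand({ Bucket }))
  .then((r) => r.TagSet ?? [])
  .catch(() => []);

await client.send(
  new PutBucketTaggingCommand({
    Bucket,
    Tagging: { TagSet: [...existing, { Key: "team", Value: "storage" }] },
  })
);
```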
+ *
+ *

To use this operation, you must have permissions to perform the + * s3:PutBucketTagging action. The bucket owner has this permission by default + * and can grant this permission to others. For more information about permissions, see Permissions Related to Bucket Subresource Operations and Managing + * Access Permissions to Your Amazon S3 Resources.

+ *

PutBucketTagging has the following special errors. For more Amazon S3 errors, see Error Responses.

+ *
  • InvalidTag - The tag provided was not a valid tag. This error can occur if the tag did not pass input validation. For more information, see Using Cost Allocation in Amazon S3 Bucket Tags.
  • MalformedXML - The XML provided does not match the schema.
  • OperationAborted - A conflicting conditional action is currently in progress against this resource. Please try again.
  • InternalError - The service was unable to apply the provided tag to the bucket.
+ *

The following operations are related to PutBucketTagging:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutBucketTaggingCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutBucketTaggingCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutBucketTaggingRequest + * Bucket: "STRING_VALUE", // required + * ContentMD5: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * Tagging: { // Tagging + * TagSet: [ // TagSet // required + * { // Tag + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * ], + * }, + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new PutBucketTaggingCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param PutBucketTaggingCommandInput - {@link PutBucketTaggingCommandInput} + * @returns {@link PutBucketTaggingCommandOutput} + * @see {@link PutBucketTaggingCommandInput} for command's `input` shape. + * @see {@link PutBucketTaggingCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @example Set tags on a bucket + * ```javascript + * // The following example sets tags on a bucket. Any existing tags are replaced. + * const input = { + * Bucket: "examplebucket", + * Tagging: { + * TagSet: [ + * { + * Key: "Key1", + * Value: "Value1" + * }, + * { + * Key: "Key2", + * Value: "Value2" + * } + * ] + * } + * }; + * const command = new PutBucketTaggingCommand(input); + * const response = await client.send(command); + * /* response is + * { /* metadata only *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class PutBucketTaggingCommand extends PutBucketTaggingCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutBucketTaggingRequest; + output: {}; + }; + sdk: { + input: PutBucketTaggingCommandInput; + output: PutBucketTaggingCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketVersioningCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketVersioningCommand.d.ts new file mode 100644 index 00000000..6af6cb2a --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketVersioningCommand.d.ts @@ -0,0 +1,146 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketVersioningRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutBucketVersioningCommand}. + */ +export interface PutBucketVersioningCommandInput extends PutBucketVersioningRequest { +} +/** + * @public + * + * The output of {@link PutBucketVersioningCommand}. + */ +export interface PutBucketVersioningCommandOutput extends __MetadataBearer { +} +declare const PutBucketVersioningCommand_base: { + new (input: PutBucketVersioningCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutBucketVersioningCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * Note: This operation is not supported for directory buckets.
+ *
+ * Note: When you enable versioning on a bucket for the first time, it might take a short
+ * amount of time for the change to be fully propagated. While this change is propagating, you
+ * might encounter intermittent HTTP 404 NoSuchKey errors for requests to objects created or
+ * updated after enabling versioning. We recommend that you wait for 15 minutes after enabling
+ * versioning before issuing write operations (PUT or DELETE) on objects in the bucket.
+ *
+ * Sets the versioning state of an existing bucket.
+ *
+ * You can set the versioning state with one of the following values:
+ * - Enabled - Enables versioning for the objects in the bucket. All objects added to the
+ *   bucket receive a unique version ID.
+ * - Suspended - Disables versioning for the objects in the bucket. All objects added to the
+ *   bucket receive the version ID null.
+ *
+ * If the versioning state has never been set on a bucket, it has no versioning state; a
+ * GetBucketVersioning request does not return a versioning state value.
+ *
+ * In order to enable MFA Delete, you must be the bucket owner. If you are the bucket owner
+ * and want to enable MFA Delete in the bucket versioning configuration, you must include the
+ * x-amz-mfa request header and the Status and the MfaDelete request elements in a request to
+ * set the versioning state of the bucket, as sketched below.
+ *
+ * Note: If you have an object expiration lifecycle configuration in your non-versioned bucket
+ * and you want to maintain the same permanent delete behavior when you enable versioning, you
+ * must add a noncurrent expiration policy. The noncurrent expiration lifecycle configuration
+ * will manage the deletes of the noncurrent object versions in the version-enabled bucket. (A
+ * version-enabled bucket maintains one current and zero or more noncurrent object versions.)
+ * For more information, see Lifecycle and Versioning.
+ *
+ * The following operations are related to PutBucketVersioning: CreateBucket, DeleteBucket,
+ * and GetBucketVersioning.
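+ *
+ * A minimal sketch of enabling versioning together with MFA delete, assuming `client` is an
+ * existing S3Client; the device serial and token code are placeholders:
+ * ```javascript
+ * // The MFA parameter concatenates the authentication device's serial number,
+ * // a space, and the current token code (placeholder values shown).
+ * const command = new PutBucketVersioningCommand({
+ *   Bucket: "examplebucket",
+ *   MFA: "arn:aws:iam::111122223333:mfa/example-mfa-device 123456",
+ *   VersioningConfiguration: { MFADelete: "Enabled", Status: "Enabled" },
+ * });
+ * await client.send(command);
+ * ```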

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutBucketVersioningCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutBucketVersioningCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutBucketVersioningRequest + * Bucket: "STRING_VALUE", // required + * ContentMD5: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * MFA: "STRING_VALUE", + * VersioningConfiguration: { // VersioningConfiguration + * MFADelete: "Enabled" || "Disabled", + * Status: "Enabled" || "Suspended", + * }, + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new PutBucketVersioningCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param PutBucketVersioningCommandInput - {@link PutBucketVersioningCommandInput} + * @returns {@link PutBucketVersioningCommandOutput} + * @see {@link PutBucketVersioningCommandInput} for command's `input` shape. + * @see {@link PutBucketVersioningCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @example Set versioning configuration on a bucket + * ```javascript + * // The following example sets versioning configuration on bucket. The configuration enables versioning on the bucket. + * const input = { + * Bucket: "examplebucket", + * VersioningConfiguration: { + * MFADelete: "Disabled", + * Status: "Enabled" + * } + * }; + * const command = new PutBucketVersioningCommand(input); + * const response = await client.send(command); + * /* response is + * { /* metadata only *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class PutBucketVersioningCommand extends PutBucketVersioningCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutBucketVersioningRequest; + output: {}; + }; + sdk: { + input: PutBucketVersioningCommandInput; + output: PutBucketVersioningCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketWebsiteCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketWebsiteCommand.d.ts new file mode 100644 index 00000000..2852a8c1 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutBucketWebsiteCommand.d.ts @@ -0,0 +1,247 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketWebsiteRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutBucketWebsiteCommand}. + */ +export interface PutBucketWebsiteCommandInput extends PutBucketWebsiteRequest { +} +/** + * @public + * + * The output of {@link PutBucketWebsiteCommand}. + */ +export interface PutBucketWebsiteCommandOutput extends __MetadataBearer { +} +declare const PutBucketWebsiteCommand_base: { + new (input: PutBucketWebsiteCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutBucketWebsiteCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * Note: This operation is not supported for directory buckets.
+ *
+ * Sets the configuration of the website that is specified in the website subresource. To
+ * configure a bucket as a website, you can add this subresource on the bucket with website
+ * configuration information such as the file name of the index document and any redirect
+ * rules. For more information, see Hosting Websites on Amazon S3.
+ *
+ * This PUT action requires the S3:PutBucketWebsite permission. By default, only the bucket
+ * owner can configure the website attached to a bucket; however, bucket owners can allow
+ * other users to set the website configuration by writing a bucket policy that grants them
+ * the S3:PutBucketWebsite permission.
+ *
+ * To redirect all website requests sent to the bucket's website endpoint, you add a website
+ * configuration with the following elements. Because all requests are sent to another
+ * website, you don't need to provide an index document name for the bucket.
+ * - WebsiteConfiguration
+ * - RedirectAllRequestsTo
+ * - HostName
+ * - Protocol
+ *
+ * If you want granular control over redirects, you can use the following elements to add
+ * routing rules that describe conditions for redirecting requests and information about the
+ * redirect destination. In this case, the website configuration must provide an index
+ * document for the bucket, because some requests might not be redirected.
+ * - WebsiteConfiguration
+ * - IndexDocument
+ * - Suffix
+ * - ErrorDocument
+ * - Key
+ * - RoutingRules
+ * - RoutingRule
+ * - Condition
+ * - HttpErrorCodeReturnedEquals
+ * - KeyPrefixEquals
+ * - Redirect
+ * - Protocol
+ * - HostName
+ * - ReplaceKeyPrefixWith
+ * - ReplaceKeyWith
+ * - HttpRedirectCode
+ *
+ * Amazon S3 has a limitation of 50 routing rules per website configuration. If you require
+ * more than 50 routing rules, you can use object redirect (sketched below). For more
+ * information, see Configuring an Object Redirect in the Amazon S3 User Guide.
+ *
+ * The maximum request length is limited to 128 KB.
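+ *
+ * A minimal sketch of the object-redirect alternative mentioned above, assuming `client` is
+ * an existing S3Client (bucket, key, and target path are placeholders): setting
+ * WebsiteRedirectLocation on an object redirects website-endpoint requests for that key.
+ * ```javascript
+ * import { PutObjectCommand } from "@aws-sdk/client-s3";
+ *
+ * // Requests to /old-page.html on the bucket's website endpoint are redirected
+ * // to /new-page.html without counting against the 50-routing-rule limit.
+ * await client.send(new PutObjectCommand({
+ *   Bucket: "examplebucket",
+ *   Key: "old-page.html",
+ *   Body: "",
+ *   WebsiteRedirectLocation: "/new-page.html",
+ * }));
+ * ```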

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutBucketWebsiteCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutBucketWebsiteCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutBucketWebsiteRequest + * Bucket: "STRING_VALUE", // required + * ContentMD5: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * WebsiteConfiguration: { // WebsiteConfiguration + * ErrorDocument: { // ErrorDocument + * Key: "STRING_VALUE", // required + * }, + * IndexDocument: { // IndexDocument + * Suffix: "STRING_VALUE", // required + * }, + * RedirectAllRequestsTo: { // RedirectAllRequestsTo + * HostName: "STRING_VALUE", // required + * Protocol: "http" || "https", + * }, + * RoutingRules: [ // RoutingRules + * { // RoutingRule + * Condition: { // Condition + * HttpErrorCodeReturnedEquals: "STRING_VALUE", + * KeyPrefixEquals: "STRING_VALUE", + * }, + * Redirect: { // Redirect + * HostName: "STRING_VALUE", + * HttpRedirectCode: "STRING_VALUE", + * Protocol: "http" || "https", + * ReplaceKeyPrefixWith: "STRING_VALUE", + * ReplaceKeyWith: "STRING_VALUE", + * }, + * }, + * ], + * }, + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new PutBucketWebsiteCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param PutBucketWebsiteCommandInput - {@link PutBucketWebsiteCommandInput} + * @returns {@link PutBucketWebsiteCommandOutput} + * @see {@link PutBucketWebsiteCommandInput} for command's `input` shape. + * @see {@link PutBucketWebsiteCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @example Set website configuration on a bucket + * ```javascript + * // The following example adds website configuration to a bucket. + * const input = { + * Bucket: "examplebucket", + * ContentMD5: "", + * WebsiteConfiguration: { + * ErrorDocument: { + * Key: "error.html" + * }, + * IndexDocument: { + * Suffix: "index.html" + * } + * } + * }; + * const command = new PutBucketWebsiteCommand(input); + * const response = await client.send(command); + * /* response is + * { /* metadata only *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class PutBucketWebsiteCommand extends PutBucketWebsiteCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutBucketWebsiteRequest; + output: {}; + }; + sdk: { + input: PutBucketWebsiteCommandInput; + output: PutBucketWebsiteCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutObjectAclCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutObjectAclCommand.d.ts new file mode 100644 index 00000000..895c85bf --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutObjectAclCommand.d.ts @@ -0,0 +1,310 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutObjectAclOutput, PutObjectAclRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutObjectAclCommand}. + */ +export interface PutObjectAclCommandInput extends PutObjectAclRequest { +} +/** + * @public + * + * The output of {@link PutObjectAclCommand}. + */ +export interface PutObjectAclCommandOutput extends PutObjectAclOutput, __MetadataBearer { +} +declare const PutObjectAclCommand_base: { + new (input: PutObjectAclCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutObjectAclCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * Note: This operation is not supported for directory buckets.
+ *
+ * Uses the acl subresource to set the access control list (ACL) permissions for a new or
+ * existing object in an S3 bucket. You must have the WRITE_ACP permission to set the ACL of
+ * an object. For more information, see What permissions can I grant? in the
+ * Amazon S3 User Guide.
+ *
+ * This functionality is not supported for Amazon S3 on Outposts.
+ *
+ * Depending on your application needs, you can choose to set the ACL on an object using
+ * either the request body or the headers. For example, if you have an existing application
+ * that updates a bucket ACL using the request body, you can continue to use that approach.
+ * For more information, see Access Control List (ACL) Overview in the Amazon S3 User Guide.
+ *
+ * Important: If your bucket uses the bucket owner enforced setting for S3 Object Ownership,
+ * ACLs are disabled and no longer affect permissions. You must use policies to grant access
+ * to your bucket and the objects in it. Requests to set ACLs or update ACLs fail and return
+ * the AccessControlListNotSupported error code. Requests to read ACLs are still supported.
+ * For more information, see Controlling object ownership in the Amazon S3 User Guide.
+ *
+ * Permissions:
+ * You can set access permissions using one of the following methods (a sketch of both styles
+ * follows this list):
+ * - Specify a canned ACL with the x-amz-acl request header. Amazon S3 supports a set of
+ *   predefined ACLs, known as canned ACLs. Each canned ACL has a predefined set of grantees
+ *   and permissions. Specify the canned ACL name as the value of x-amz-acl. If you use this
+ *   header, you cannot use other access control-specific headers in your request. For more
+ *   information, see Canned ACL.
+ * - Specify access permissions explicitly with the x-amz-grant-read, x-amz-grant-read-acp,
+ *   x-amz-grant-write-acp, and x-amz-grant-full-control headers. When using these headers,
+ *   you specify explicit access permissions and grantees (Amazon Web Services accounts or
+ *   Amazon S3 groups) who will receive the permission. If you use these ACL-specific headers,
+ *   you cannot use the x-amz-acl header to set a canned ACL. These parameters map to the set
+ *   of permissions that Amazon S3 supports in an ACL. For more information, see Access
+ *   Control List (ACL) Overview.
+ *
+ *   You specify each grantee as a type=value pair, where the type is one of the following:
+ *   - id - if the value specified is the canonical user ID of an Amazon Web Services account
+ *   - uri - if you are granting permissions to a predefined group
+ *   - emailAddress - if the value specified is the email address of an Amazon Web Services
+ *     account. Note: Using email addresses to specify a grantee is only supported in the
+ *     following Amazon Web Services Regions: US East (N. Virginia), US West (N. California),
+ *     US West (Oregon), Asia Pacific (Singapore), Asia Pacific (Sydney), Asia Pacific (Tokyo),
+ *     Europe (Ireland), and South America (São Paulo). For a list of all the Amazon S3
+ *     supported Regions and endpoints, see Regions and Endpoints in the
+ *     Amazon Web Services General Reference.
+ *
+ *   For example, the following x-amz-grant-read header grants list objects permission to the
+ *   two Amazon Web Services accounts identified by their email addresses:
+ *
+ *   x-amz-grant-read: emailAddress="xyz@amazon.com", emailAddress="abc@amazon.com"
+ *
+ * You can use either a canned ACL or specify access permissions explicitly. You cannot
+ * do both.
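+ *
+ * A minimal sketch of the two mutually exclusive styles, assuming `client` is an existing
+ * S3Client (bucket, key, and grantee values are placeholders):
+ * ```javascript
+ * // Style 1: canned ACL (maps to the x-amz-acl header).
+ * await client.send(new PutObjectAclCommand({
+ *   Bucket: "examplebucket",
+ *   Key: "HappyFace.jpg",
+ *   ACL: "public-read",
+ * }));
+ *
+ * // Style 2: explicit grant headers; cannot be combined with ACL above.
+ * await client.send(new PutObjectAclCommand({
+ *   Bucket: "examplebucket",
+ *   Key: "HappyFace.jpg",
+ *   GrantRead: 'uri="http://acs.amazonaws.com/groups/global/AllUsers"',
+ * }));
+ * ```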

+ *
+ * Grantee Values:
+ * You can specify the person (grantee) to whom you're assigning access rights (using request
+ * elements) in the following ways; values between the <> markers are supplied by you:
+ * - By the person's ID:
+ *   <Grantee xsi:type="CanonicalUser"><ID><>ID<></ID><DisplayName><>GranteesEmail<></DisplayName></Grantee>
+ *   DisplayName is optional and ignored in the request.
+ * - By URI:
+ *   <Grantee xsi:type="Group"><URI><>http://acs.amazonaws.com/groups/global/AuthenticatedUsers<></URI></Grantee>
+ * - By Email address:
+ *   <Grantee xsi:type="AmazonCustomerByEmail"><EmailAddress><>Grantees@email.com<></EmailAddress></Grantee>
+ *   The grantee is resolved to the CanonicalUser and, in a response to a GET Object acl
+ *   request, appears as the CanonicalUser.
+ *   Note: Using email addresses to specify a grantee is only supported in the following
+ *   Amazon Web Services Regions: US East (N. Virginia), US West (N. California),
+ *   US West (Oregon), Asia Pacific (Singapore), Asia Pacific (Sydney), Asia Pacific (Tokyo),
+ *   Europe (Ireland), and South America (São Paulo). For a list of all the Amazon S3
+ *   supported Regions and endpoints, see Regions and Endpoints in the
+ *   Amazon Web Services General Reference.
+ *
+ * Versioning:
+ * The ACL of an object is set at the object version level. By default, PUT sets the ACL of
+ * the current version of an object. To set the ACL of a different version, use the versionId
+ * subresource (see the sketch below).
+ *
+ * The following operations are related to PutObjectAcl: CopyObject and GetObject.
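+ *
+ * A minimal sketch of setting the ACL of a specific (non-current) object version, assuming
+ * `client` is an existing S3Client; the VersionId is a placeholder:
+ * ```javascript
+ * await client.send(new PutObjectAclCommand({
+ *   Bucket: "examplebucket",
+ *   Key: "HappyFace.jpg",
+ *   VersionId: "ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI", // hypothetical version ID
+ *   ACL: "private",
+ * }));
+ * ```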

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutObjectAclCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutObjectAclCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutObjectAclRequest + * ACL: "private" || "public-read" || "public-read-write" || "authenticated-read" || "aws-exec-read" || "bucket-owner-read" || "bucket-owner-full-control", + * AccessControlPolicy: { // AccessControlPolicy + * Grants: [ // Grants + * { // Grant + * Grantee: { // Grantee + * DisplayName: "STRING_VALUE", + * EmailAddress: "STRING_VALUE", + * ID: "STRING_VALUE", + * URI: "STRING_VALUE", + * Type: "CanonicalUser" || "AmazonCustomerByEmail" || "Group", // required + * }, + * Permission: "FULL_CONTROL" || "WRITE" || "WRITE_ACP" || "READ" || "READ_ACP", + * }, + * ], + * Owner: { // Owner + * DisplayName: "STRING_VALUE", + * ID: "STRING_VALUE", + * }, + * }, + * Bucket: "STRING_VALUE", // required + * ContentMD5: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * GrantFullControl: "STRING_VALUE", + * GrantRead: "STRING_VALUE", + * GrantReadACP: "STRING_VALUE", + * GrantWrite: "STRING_VALUE", + * GrantWriteACP: "STRING_VALUE", + * Key: "STRING_VALUE", // required + * RequestPayer: "requester", + * VersionId: "STRING_VALUE", + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new PutObjectAclCommand(input); + * const response = await client.send(command); + * // { // PutObjectAclOutput + * // RequestCharged: "requester", + * // }; + * + * ``` + * + * @param PutObjectAclCommandInput - {@link PutObjectAclCommandInput} + * @returns {@link PutObjectAclCommandOutput} + * @see {@link PutObjectAclCommandInput} for command's `input` shape. + * @see {@link PutObjectAclCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link NoSuchKey} (client fault) + *

+ * The specified key does not exist.
+ *
+ * @throws {@link S3ServiceException}
+ * Base exception class for all service exceptions from S3 service.

+ * + * + * @example To grant permissions using object ACL + * ```javascript + * // The following example adds grants to an object ACL. The first permission grants user1 and user2 FULL_CONTROL and the AllUsers group READ permission. + * const input = { + * AccessControlPolicy: { /* empty *\/ }, + * Bucket: "examplebucket", + * GrantFullControl: "emailaddress=user1@example.com,emailaddress=user2@example.com", + * GrantRead: "uri=http://acs.amazonaws.com/groups/global/AllUsers", + * Key: "HappyFace.jpg" + * }; + * const command = new PutObjectAclCommand(input); + * const response = await client.send(command); + * /* response is + * { /* empty *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class PutObjectAclCommand extends PutObjectAclCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutObjectAclRequest; + output: PutObjectAclOutput; + }; + sdk: { + input: PutObjectAclCommandInput; + output: PutObjectAclCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutObjectCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutObjectCommand.d.ts new file mode 100644 index 00000000..7f4cc5ae --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutObjectCommand.d.ts @@ -0,0 +1,456 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer, StreamingBlobPayloadInputTypes } from "@smithy/types"; +import { PutObjectOutput, PutObjectRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutObjectCommand}. + */ +export interface PutObjectCommandInput extends Omit { + Body?: StreamingBlobPayloadInputTypes; +} +/** + * @public + * + * The output of {@link PutObjectCommand}. + */ +export interface PutObjectCommandOutput extends PutObjectOutput, __MetadataBearer { +} +declare const PutObjectCommand_base: { + new (input: PutObjectCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutObjectCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * Adds an object to a bucket.
+ *
+ * Note:
+ * - Amazon S3 never adds partial objects; if you receive a success response, Amazon S3 added
+ *   the entire object to the bucket. You cannot use PutObject to only update a single piece
+ *   of metadata for an existing object. You must put the entire object with updated metadata
+ *   if you want to update some values.
+ * - If your bucket uses the bucket owner enforced setting for Object Ownership, ACLs are
+ *   disabled and no longer affect permissions. All objects written to the bucket by any
+ *   account will be owned by the bucket owner.
+ * - Directory buckets - For directory buckets, you must make requests for this API operation
+ *   to the Zonal endpoint. These endpoints support virtual-hosted-style requests in the
+ *   format https://amzn-s3-demo-bucket.s3express-zone-id.region-code.amazonaws.com/key-name.
+ *   Path-style requests are not supported. For more information about endpoints in
+ *   Availability Zones, see Regional and Zonal endpoints for directory buckets in
+ *   Availability Zones in the Amazon S3 User Guide. For more information about endpoints in
+ *   Local Zones, see Concepts for directory buckets in Local Zones in the
+ *   Amazon S3 User Guide.
+ *
+ * Amazon S3 is a distributed system. If it receives multiple write requests for the same
+ * object simultaneously, it overwrites all but the last object written. However, Amazon S3
+ * provides features that can modify this behavior:
+ * - S3 Object Lock - To prevent objects from being deleted or overwritten, you can use
+ *   Amazon S3 Object Lock in the Amazon S3 User Guide.
+ *   Note: This functionality is not supported for directory buckets.
+ * - If-None-Match - Uploads the object only if the object key name does not already exist in
+ *   the specified bucket. Otherwise, Amazon S3 returns a 412 Precondition Failed error. If a
+ *   conflicting operation occurs during the upload, S3 returns a 409 ConditionalRequestConflict
+ *   response. On a 409 failure, retry the upload. Expects the * character (asterisk). For
+ *   more information, see Add preconditions to S3 operations with conditional requests in
+ *   the Amazon S3 User Guide or RFC 7232. (See the sketch following this list.)
+ *   Note: This functionality is not supported for S3 on Outposts.
+ * - S3 Versioning - When you enable versioning for a bucket, if Amazon S3 receives multiple
+ *   write requests for the same object simultaneously, it stores all versions of the objects.
+ *   For each write request that is made to the same object, Amazon S3 automatically generates
+ *   a unique version ID of that object being stored in Amazon S3. You can retrieve, replace,
+ *   or delete any version of the object. For more information about versioning, see Adding
+ *   Objects to Versioning-Enabled Buckets in the Amazon S3 User Guide. For information about
+ *   returning the versioning state of a bucket, see GetBucketVersioning.
+ *   Note: This functionality is not supported for directory buckets.
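+ *
+ * A minimal sketch of the If-None-Match conditional create referenced above, assuming
+ * `client` is an existing S3Client (bucket, key, and body are placeholders):
+ * ```javascript
+ * try {
+ *   await client.send(new PutObjectCommand({
+ *     Bucket: "examplebucket",
+ *     Key: "exampleobject",
+ *     Body: "filetoupload",
+ *     IfNoneMatch: "*", // succeed only if the key does not already exist
+ *   }));
+ * } catch (err) {
+ *   if (err?.$metadata?.httpStatusCode === 412) {
+ *     // The key already exists; a 409 during upload means a conflicting
+ *     // operation was in progress, in which case the upload should be retried.
+ *   } else {
+ *     throw err;
+ *   }
+ * }
+ * ```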
+ * Permissions:
+ * - General purpose bucket permissions - The following permissions are required in your
+ *   policies when your PutObject request includes specific headers.
+ *   - s3:PutObject - To successfully complete the PutObject request, you must always have
+ *     the s3:PutObject permission on a bucket to add an object to it.
+ *   - s3:PutObjectAcl - To successfully change the object's ACL with your PutObject request,
+ *     you must have the s3:PutObjectAcl permission.
+ *   - s3:PutObjectTagging - To successfully set the tag-set with your PutObject request, you
+ *     must have the s3:PutObjectTagging permission.
+ * - Directory bucket permissions - To grant access to this API operation on a directory
+ *   bucket, we recommend that you use the CreateSession API operation for session-based
+ *   authorization. Specifically, you grant the s3express:CreateSession permission to the
+ *   directory bucket in a bucket policy or an IAM identity-based policy. Then, you make the
+ *   CreateSession API call on the bucket to obtain a session token. With the session token
+ *   in your request header, you can make API requests to this operation. After the session
+ *   token expires, you make another CreateSession API call to generate a new session token
+ *   for use. The Amazon Web Services CLI and SDKs create sessions and refresh the session
+ *   token automatically to avoid service interruptions when a session expires. For more
+ *   information about authorization, see CreateSession.
+ *   If the object is encrypted with SSE-KMS, you must also have the kms:GenerateDataKey and
+ *   kms:Decrypt permissions in IAM identity-based policies and KMS key policies for the
+ *   KMS key.
+ *
+ * Data integrity with Content-MD5:
+ * - General purpose bucket - To ensure that data is not corrupted traversing the network,
+ *   use the Content-MD5 header. When you use this header, Amazon S3 checks the object
+ *   against the provided MD5 value and, if they do not match, Amazon S3 returns an error.
+ *   Alternatively, when the object's ETag is its MD5 digest, you can calculate the MD5 while
+ *   putting the object to Amazon S3 and compare the returned ETag to the calculated MD5
+ *   value. (A sketch follows this list.)
+ * - Directory bucket - This functionality is not supported for directory buckets.
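+ *
+ * A minimal sketch of supplying Content-MD5, assuming `client` is an existing S3Client and a
+ * Node.js runtime (bucket, key, and body are placeholders):
+ * ```javascript
+ * import { createHash } from "node:crypto";
+ *
+ * const body = Buffer.from("filetoupload");
+ * await client.send(new PutObjectCommand({
+ *   Bucket: "examplebucket",
+ *   Key: "exampleobject",
+ *   Body: body,
+ *   // S3 rejects the request if the payload's MD5 does not match this base64 digest.
+ *   ContentMD5: createHash("md5").update(body).digest("base64"),
+ * }));
+ * ```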
+ *
+ * HTTP Host header syntax:
+ * Directory buckets - The HTTP Host header syntax is
+ * Bucket-name.s3express-zone-id.region-code.amazonaws.com.
+ *
+ * For more information about related Amazon S3 APIs, see the following: CopyObject and
+ * DeleteObject.

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutObjectRequest + * ACL: "private" || "public-read" || "public-read-write" || "authenticated-read" || "aws-exec-read" || "bucket-owner-read" || "bucket-owner-full-control", + * Body: "MULTIPLE_TYPES_ACCEPTED", // see \@smithy/types -> StreamingBlobPayloadInputTypes + * Bucket: "STRING_VALUE", // required + * CacheControl: "STRING_VALUE", + * ContentDisposition: "STRING_VALUE", + * ContentEncoding: "STRING_VALUE", + * ContentLanguage: "STRING_VALUE", + * ContentLength: Number("long"), + * ContentMD5: "STRING_VALUE", + * ContentType: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * ChecksumCRC32: "STRING_VALUE", + * ChecksumCRC32C: "STRING_VALUE", + * ChecksumCRC64NVME: "STRING_VALUE", + * ChecksumSHA1: "STRING_VALUE", + * ChecksumSHA256: "STRING_VALUE", + * Expires: new Date("TIMESTAMP"), + * IfMatch: "STRING_VALUE", + * IfNoneMatch: "STRING_VALUE", + * GrantFullControl: "STRING_VALUE", + * GrantRead: "STRING_VALUE", + * GrantReadACP: "STRING_VALUE", + * GrantWriteACP: "STRING_VALUE", + * Key: "STRING_VALUE", // required + * WriteOffsetBytes: Number("long"), + * Metadata: { // Metadata + * "": "STRING_VALUE", + * }, + * ServerSideEncryption: "AES256" || "aws:kms" || "aws:kms:dsse", + * StorageClass: "STANDARD" || "REDUCED_REDUNDANCY" || "STANDARD_IA" || "ONEZONE_IA" || "INTELLIGENT_TIERING" || "GLACIER" || "DEEP_ARCHIVE" || "OUTPOSTS" || "GLACIER_IR" || "SNOW" || "EXPRESS_ONEZONE", + * WebsiteRedirectLocation: "STRING_VALUE", + * SSECustomerAlgorithm: "STRING_VALUE", + * SSECustomerKey: "STRING_VALUE", + * SSECustomerKeyMD5: "STRING_VALUE", + * SSEKMSKeyId: "STRING_VALUE", + * SSEKMSEncryptionContext: "STRING_VALUE", + * BucketKeyEnabled: true || false, + * RequestPayer: "requester", + * Tagging: "STRING_VALUE", + * ObjectLockMode: "GOVERNANCE" || "COMPLIANCE", + * ObjectLockRetainUntilDate: new Date("TIMESTAMP"), + * ObjectLockLegalHoldStatus: "ON" || "OFF", + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new PutObjectCommand(input); + * const response = await client.send(command); + * // { // PutObjectOutput + * // Expiration: "STRING_VALUE", + * // ETag: "STRING_VALUE", + * // ChecksumCRC32: "STRING_VALUE", + * // ChecksumCRC32C: "STRING_VALUE", + * // ChecksumCRC64NVME: "STRING_VALUE", + * // ChecksumSHA1: "STRING_VALUE", + * // ChecksumSHA256: "STRING_VALUE", + * // ChecksumType: "COMPOSITE" || "FULL_OBJECT", + * // ServerSideEncryption: "AES256" || "aws:kms" || "aws:kms:dsse", + * // VersionId: "STRING_VALUE", + * // SSECustomerAlgorithm: "STRING_VALUE", + * // SSECustomerKeyMD5: "STRING_VALUE", + * // SSEKMSKeyId: "STRING_VALUE", + * // SSEKMSEncryptionContext: "STRING_VALUE", + * // BucketKeyEnabled: true || false, + * // Size: Number("long"), + * // RequestCharged: "requester", + * // }; + * + * ``` + * + * @param PutObjectCommandInput - {@link PutObjectCommandInput} + * @returns {@link PutObjectCommandOutput} + * @see {@link PutObjectCommandInput} for command's `input` shape. + * @see {@link PutObjectCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. 
+ * + * @throws {@link EncryptionTypeMismatch} (client fault) + *

+ * The existing object was created with a different encryption type. + * Subsequent write requests must include the appropriate encryption + * parameters in the request or while creating the session. + *

+ * + * @throws {@link InvalidRequest} (client fault) + *

+ * You may receive this error in multiple cases. Depending on the reason for the error, you
+ * may receive one of the messages below:
+ * - Cannot specify both a write offset value and user-defined object metadata for
+ *   existing objects.
+ * - Checksum Type mismatch occurred, expected checksum Type: sha1, actual checksum
+ *   Type: crc32c.
+ * - Request body cannot be empty when 'write offset' is specified.
+ * + * @throws {@link InvalidWriteOffset} (client fault) + *

+ * The write offset value that you specified does not match the current object size. + *

+ * + * @throws {@link TooManyParts} (client fault) + *

+ * You have attempted to add more parts than the maximum of 10000 + * that are allowed for this object. You can use the CopyObject operation + * to copy this object to another and then add more data to the newly copied object. + *

+ * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @example To create an object. + * ```javascript + * // The following example creates an object. If the bucket is versioning enabled, S3 returns version ID in response. + * const input = { + * Body: "filetoupload", + * Bucket: "examplebucket", + * Key: "objectkey" + * }; + * const command = new PutObjectCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ETag: `"6805f2cfc46c0f04559748bb039d69ae"`, + * VersionId: "Bvq0EDKxOcXLJXNo_Lkz37eM3R4pfzyQ" + * } + * *\/ + * ``` + * + * @example To upload an object (specify optional headers) + * ```javascript + * // The following example uploads an object. The request specifies optional request headers to directs S3 to use specific storage class and use server-side encryption. + * const input = { + * Body: "HappyFace.jpg", + * Bucket: "examplebucket", + * Key: "HappyFace.jpg", + * ServerSideEncryption: "AES256", + * StorageClass: "STANDARD_IA" + * }; + * const command = new PutObjectCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ETag: `"6805f2cfc46c0f04559748bb039d69ae"`, + * ServerSideEncryption: "AES256", + * VersionId: "CG612hodqujkf8FaaNfp8U..FIhLROcp" + * } + * *\/ + * ``` + * + * @example To upload an object + * ```javascript + * // The following example uploads an object to a versioning-enabled bucket. The source file is specified using Windows file syntax. S3 returns VersionId of the newly created object. + * const input = { + * Body: "HappyFace.jpg", + * Bucket: "examplebucket", + * Key: "HappyFace.jpg" + * }; + * const command = new PutObjectCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ETag: `"6805f2cfc46c0f04559748bb039d69ae"`, + * VersionId: "tpf3zF08nBplQK1XLOefGskR7mGDwcDk" + * } + * *\/ + * ``` + * + * @example To upload an object and specify canned ACL. + * ```javascript + * // The following example uploads and object. The request specifies optional canned ACL (access control list) to all READ access to authenticated users. If the bucket is versioning enabled, S3 returns version ID in response. + * const input = { + * ACL: "authenticated-read", + * Body: "filetoupload", + * Bucket: "examplebucket", + * Key: "exampleobject" + * }; + * const command = new PutObjectCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ETag: `"6805f2cfc46c0f04559748bb039d69ae"`, + * VersionId: "Kirh.unyZwjQ69YxcQLA8z4F5j3kJJKr" + * } + * *\/ + * ``` + * + * @example To upload an object and specify optional tags + * ```javascript + * // The following example uploads an object. The request specifies optional object tags. The bucket is versioned, therefore S3 returns version ID of the newly created object. + * const input = { + * Body: "c:\HappyFace.jpg", + * Bucket: "examplebucket", + * Key: "HappyFace.jpg", + * Tagging: "key1=value1&key2=value2" + * }; + * const command = new PutObjectCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ETag: `"6805f2cfc46c0f04559748bb039d69ae"`, + * VersionId: "psM2sYY4.o1501dSx8wMvnkOzSBB.V4a" + * } + * *\/ + * ``` + * + * @example To upload an object and specify server-side encryption and object tags + * ```javascript + * // The following example uploads an object. The request specifies the optional server-side encryption option. The request also specifies optional object tags. If the bucket is versioning enabled, S3 returns version ID in response. 
+ * const input = { + * Body: "filetoupload", + * Bucket: "examplebucket", + * Key: "exampleobject", + * ServerSideEncryption: "AES256", + * Tagging: "key1=value1&key2=value2" + * }; + * const command = new PutObjectCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ETag: `"6805f2cfc46c0f04559748bb039d69ae"`, + * ServerSideEncryption: "AES256", + * VersionId: "Ri.vC6qVlA4dEnjgRV4ZHsHoFIjqEMNt" + * } + * *\/ + * ``` + * + * @example To upload object and specify user-defined metadata + * ```javascript + * // The following example creates an object. The request also specifies optional metadata. If the bucket is versioning enabled, S3 returns version ID in response. + * const input = { + * Body: "filetoupload", + * Bucket: "examplebucket", + * Key: "exampleobject", + * Metadata: { + * metadata1: "value1", + * metadata2: "value2" + * } + * }; + * const command = new PutObjectCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ETag: `"6805f2cfc46c0f04559748bb039d69ae"`, + * VersionId: "pSKidl4pHBiNwukdbcPXAIs.sshFFOc0" + * } + * *\/ + * ``` + * + * @public + */ +export declare class PutObjectCommand extends PutObjectCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutObjectRequest; + output: PutObjectOutput; + }; + sdk: { + input: PutObjectCommandInput; + output: PutObjectCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutObjectLegalHoldCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutObjectLegalHoldCommand.d.ts new file mode 100644 index 00000000..59bb00d4 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutObjectLegalHoldCommand.d.ts @@ -0,0 +1,87 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutObjectLegalHoldOutput, PutObjectLegalHoldRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutObjectLegalHoldCommand}. + */ +export interface PutObjectLegalHoldCommandInput extends PutObjectLegalHoldRequest { +} +/** + * @public + * + * The output of {@link PutObjectLegalHoldCommand}. + */ +export interface PutObjectLegalHoldCommandOutput extends PutObjectLegalHoldOutput, __MetadataBearer { +} +declare const PutObjectLegalHoldCommand_base: { + new (input: PutObjectLegalHoldCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutObjectLegalHoldCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * Note: This operation is not supported for directory buckets.
+ *
+ * Applies a legal hold configuration to the specified object. For more information, see
+ * Locking Objects.
+ *
+ * This functionality is not supported for Amazon S3 on Outposts.
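+ *
+ * A minimal sketch of placing and later releasing a legal hold, assuming `client` is an
+ * existing S3Client (bucket and key are placeholders):
+ * ```javascript
+ * // Place the hold; the object version cannot be deleted while the hold is ON.
+ * await client.send(new PutObjectLegalHoldCommand({
+ *   Bucket: "examplebucket",
+ *   Key: "exampleobject",
+ *   LegalHold: { Status: "ON" },
+ * }));
+ *
+ * // Release the hold once it is no longer needed.
+ * await client.send(new PutObjectLegalHoldCommand({
+ *   Bucket: "examplebucket",
+ *   Key: "exampleobject",
+ *   LegalHold: { Status: "OFF" },
+ * }));
+ * ```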

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutObjectLegalHoldCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutObjectLegalHoldCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutObjectLegalHoldRequest + * Bucket: "STRING_VALUE", // required + * Key: "STRING_VALUE", // required + * LegalHold: { // ObjectLockLegalHold + * Status: "ON" || "OFF", + * }, + * RequestPayer: "requester", + * VersionId: "STRING_VALUE", + * ContentMD5: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new PutObjectLegalHoldCommand(input); + * const response = await client.send(command); + * // { // PutObjectLegalHoldOutput + * // RequestCharged: "requester", + * // }; + * + * ``` + * + * @param PutObjectLegalHoldCommandInput - {@link PutObjectLegalHoldCommandInput} + * @returns {@link PutObjectLegalHoldCommandOutput} + * @see {@link PutObjectLegalHoldCommandInput} for command's `input` shape. + * @see {@link PutObjectLegalHoldCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @public + */ +export declare class PutObjectLegalHoldCommand extends PutObjectLegalHoldCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutObjectLegalHoldRequest; + output: PutObjectLegalHoldOutput; + }; + sdk: { + input: PutObjectLegalHoldCommandInput; + output: PutObjectLegalHoldCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutObjectLockConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutObjectLockConfigurationCommand.d.ts new file mode 100644 index 00000000..4dcbb5e6 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutObjectLockConfigurationCommand.d.ts @@ -0,0 +1,110 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutObjectLockConfigurationOutput, PutObjectLockConfigurationRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutObjectLockConfigurationCommand}. + */ +export interface PutObjectLockConfigurationCommandInput extends PutObjectLockConfigurationRequest { +} +/** + * @public + * + * The output of {@link PutObjectLockConfigurationCommand}. + */ +export interface PutObjectLockConfigurationCommandOutput extends PutObjectLockConfigurationOutput, __MetadataBearer { +} +declare const PutObjectLockConfigurationCommand_base: { + new (input: PutObjectLockConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutObjectLockConfigurationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * Note: This operation is not supported for directory buckets.
+ *
+ * Places an Object Lock configuration on the specified bucket. The rule specified in the
+ * Object Lock configuration will be applied by default to every new object placed in the
+ * specified bucket. For more information, see Locking Objects.
+ *
+ * Note:
+ * - The DefaultRetention settings require both a mode and a period.
+ * - The DefaultRetention period can be either Days or Years but you must select one. You
+ *   cannot specify Days and Years at the same time.
+ * - You can enable Object Lock for new or existing buckets. For more information, see
+ *   Configuring Object Lock.
+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutObjectLockConfigurationCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutObjectLockConfigurationCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutObjectLockConfigurationRequest + * Bucket: "STRING_VALUE", // required + * ObjectLockConfiguration: { // ObjectLockConfiguration + * ObjectLockEnabled: "Enabled", + * Rule: { // ObjectLockRule + * DefaultRetention: { // DefaultRetention + * Mode: "GOVERNANCE" || "COMPLIANCE", + * Days: Number("int"), + * Years: Number("int"), + * }, + * }, + * }, + * RequestPayer: "requester", + * Token: "STRING_VALUE", + * ContentMD5: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new PutObjectLockConfigurationCommand(input); + * const response = await client.send(command); + * // { // PutObjectLockConfigurationOutput + * // RequestCharged: "requester", + * // }; + * + * ``` + * + * @param PutObjectLockConfigurationCommandInput - {@link PutObjectLockConfigurationCommandInput} + * @returns {@link PutObjectLockConfigurationCommandOutput} + * @see {@link PutObjectLockConfigurationCommandInput} for command's `input` shape. + * @see {@link PutObjectLockConfigurationCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @public + */ +export declare class PutObjectLockConfigurationCommand extends PutObjectLockConfigurationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutObjectLockConfigurationRequest; + output: PutObjectLockConfigurationOutput; + }; + sdk: { + input: PutObjectLockConfigurationCommandInput; + output: PutObjectLockConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutObjectRetentionCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutObjectRetentionCommand.d.ts new file mode 100644 index 00000000..ab2fc995 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutObjectRetentionCommand.d.ts @@ -0,0 +1,90 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutObjectRetentionOutput, PutObjectRetentionRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutObjectRetentionCommand}. + */ +export interface PutObjectRetentionCommandInput extends PutObjectRetentionRequest { +} +/** + * @public + * + * The output of {@link PutObjectRetentionCommand}. + */ +export interface PutObjectRetentionCommandOutput extends PutObjectRetentionOutput, __MetadataBearer { +} +declare const PutObjectRetentionCommand_base: { + new (input: PutObjectRetentionCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutObjectRetentionCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * Note: This operation is not supported for directory buckets.
+ *
+ * Places an Object Retention configuration on an object. For more information, see Locking
+ * Objects. Users or accounts require the s3:PutObjectRetention permission in order to place
+ * an Object Retention configuration on objects. Bypassing a Governance Retention
+ * configuration requires the s3:BypassGovernanceRetention permission.
+ *
+ * This functionality is not supported for Amazon S3 on Outposts.
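+ *
+ * A minimal sketch of applying a 30-day GOVERNANCE-mode retention period, assuming `client`
+ * is an existing S3Client (bucket and key are placeholders):
+ * ```javascript
+ * await client.send(new PutObjectRetentionCommand({
+ *   Bucket: "examplebucket",
+ *   Key: "exampleobject",
+ *   Retention: {
+ *     Mode: "GOVERNANCE",
+ *     RetainUntilDate: new Date(Date.now() + 30 * 24 * 60 * 60 * 1000),
+ *   },
+ * }));
+ * // Shortening or removing this retention later requires BypassGovernanceRetention: true
+ * // and the s3:BypassGovernanceRetention permission.
+ * ```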

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutObjectRetentionCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutObjectRetentionCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutObjectRetentionRequest + * Bucket: "STRING_VALUE", // required + * Key: "STRING_VALUE", // required + * Retention: { // ObjectLockRetention + * Mode: "GOVERNANCE" || "COMPLIANCE", + * RetainUntilDate: new Date("TIMESTAMP"), + * }, + * RequestPayer: "requester", + * VersionId: "STRING_VALUE", + * BypassGovernanceRetention: true || false, + * ContentMD5: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new PutObjectRetentionCommand(input); + * const response = await client.send(command); + * // { // PutObjectRetentionOutput + * // RequestCharged: "requester", + * // }; + * + * ``` + * + * @param PutObjectRetentionCommandInput - {@link PutObjectRetentionCommandInput} + * @returns {@link PutObjectRetentionCommandOutput} + * @see {@link PutObjectRetentionCommandInput} for command's `input` shape. + * @see {@link PutObjectRetentionCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

+ * Base exception class for all service exceptions from S3 service.
+ * + * + * @public + */ +export declare class PutObjectRetentionCommand extends PutObjectRetentionCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutObjectRetentionRequest; + output: PutObjectRetentionOutput; + }; + sdk: { + input: PutObjectRetentionCommandInput; + output: PutObjectRetentionCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutObjectTaggingCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutObjectTaggingCommand.d.ts new file mode 100644 index 00000000..e36d2e3e --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutObjectTaggingCommand.d.ts @@ -0,0 +1,169 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutObjectTaggingOutput, PutObjectTaggingRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutObjectTaggingCommand}. + */ +export interface PutObjectTaggingCommandInput extends PutObjectTaggingRequest { +} +/** + * @public + * + * The output of {@link PutObjectTaggingCommand}. + */ +export interface PutObjectTaggingCommandOutput extends PutObjectTaggingOutput, __MetadataBearer { +} +declare const PutObjectTaggingCommand_base: { + new (input: PutObjectTaggingCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutObjectTaggingCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

+ * Note: This operation is not supported for directory buckets.
+ *
+ * Sets the supplied tag-set to an object that already exists in a bucket. A tag is a
+ * key-value pair. For more information, see Object Tagging.
+ *
+ * You can associate tags with an object by sending a PUT request against the tagging
+ * subresource that is associated with the object. You can retrieve tags by sending a GET
+ * request. For more information, see GetObjectTagging.
+ *
+ * For tagging-related restrictions related to characters and encodings, see Tag
+ * Restrictions. Note that Amazon S3 limits the maximum number of tags to 10 tags per object.
+ *
+ * To use this operation, you must have permission to perform the s3:PutObjectTagging action.
+ * By default, the bucket owner has this permission and can grant this permission to others.
+ *
+ * To put tags on any other version, use the versionId query parameter. You also need
+ * permission for the s3:PutObjectVersionTagging action. (A sketch of version-targeted
+ * tagging follows below.)
+ *
+ * PutObjectTagging has the following special errors. For more Amazon S3 errors, see
+ * Error Responses.
+ * - InvalidTag - The tag provided was not a valid tag. This error can occur if the tag did
+ *   not pass input validation. For more information, see Object Tagging.
+ * - MalformedXML - The XML provided does not match the schema.
+ * - OperationAborted - A conflicting conditional action is currently in progress against
+ *   this resource. Please try again.
+ * - InternalError - The service was unable to apply the provided tag to the object.
+ *
+ * The following operations are related to PutObjectTagging: GetObjectTagging and
+ * DeleteObjectTagging.
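+ *
+ * A minimal sketch of tagging a specific (non-current) object version, assuming `client` is
+ * an existing S3Client; the VersionId and tag values are placeholders:
+ * ```javascript
+ * // Requires the s3:PutObjectVersionTagging permission.
+ * await client.send(new PutObjectTaggingCommand({
+ *   Bucket: "examplebucket",
+ *   Key: "HappyFace.jpg",
+ *   VersionId: "ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI", // hypothetical version ID
+ *   Tagging: { TagSet: [{ Key: "status", Value: "archived" }] },
+ * }));
+ * ```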

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutObjectTaggingCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutObjectTaggingCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutObjectTaggingRequest + * Bucket: "STRING_VALUE", // required + * Key: "STRING_VALUE", // required + * VersionId: "STRING_VALUE", + * ContentMD5: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * Tagging: { // Tagging + * TagSet: [ // TagSet // required + * { // Tag + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * ], + * }, + * ExpectedBucketOwner: "STRING_VALUE", + * RequestPayer: "requester", + * }; + * const command = new PutObjectTaggingCommand(input); + * const response = await client.send(command); + * // { // PutObjectTaggingOutput + * // VersionId: "STRING_VALUE", + * // }; + * + * ``` + * + * @param PutObjectTaggingCommandInput - {@link PutObjectTaggingCommandInput} + * @returns {@link PutObjectTaggingCommandOutput} + * @see {@link PutObjectTaggingCommandInput} for command's `input` shape. + * @see {@link PutObjectTaggingCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *
+ * <p>Base exception class for all service exceptions from S3 service.</p>
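As a concrete reference for this PR's storage work, here is a minimal usage sketch for this command (not part of the vendored declaration file). The endpoint wiring, `forcePathStyle`, and the tag values are assumptions for a MinIO-style deployment, not values taken from this diff.

```typescript
import { S3Client, PutObjectTaggingCommand } from "@aws-sdk/client-s3";

// Hypothetical client config; forcePathStyle suits MinIO-style endpoints.
const s3 = new S3Client({
  region: "us-east-1",
  endpoint: process.env.S3_ENDPOINT, // assumed env var
  forcePathStyle: true,
});

// PutObjectTagging replaces the whole tag set (max 10 tags per object).
export async function tagObject(bucket: string, key: string): Promise<string | undefined> {
  const { VersionId } = await s3.send(
    new PutObjectTaggingCommand({
      Bucket: bucket,
      Key: key,
      Tagging: { TagSet: [{ Key: "source", Value: "nextcloud-migration" }] },
    })
  );
  return VersionId; // undefined on unversioned buckets
}
```

Note that the command replaces the object's entire tag set rather than merging new tags into it.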
+ * + * + * @example To add tags to an existing object + * ```javascript + * // The following example adds tags to an existing object. + * const input = { + * Bucket: "examplebucket", + * Key: "HappyFace.jpg", + * Tagging: { + * TagSet: [ + * { + * Key: "Key3", + * Value: "Value3" + * }, + * { + * Key: "Key4", + * Value: "Value4" + * } + * ] + * } + * }; + * const command = new PutObjectTaggingCommand(input); + * const response = await client.send(command); + * /* response is + * { + * VersionId: "null" + * } + * *\/ + * ``` + * + * @public + */ +export declare class PutObjectTaggingCommand extends PutObjectTaggingCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutObjectTaggingRequest; + output: PutObjectTaggingOutput; + }; + sdk: { + input: PutObjectTaggingCommandInput; + output: PutObjectTaggingCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/PutPublicAccessBlockCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutPublicAccessBlockCommand.d.ts new file mode 100644 index 00000000..3d3e079f --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/PutPublicAccessBlockCommand.d.ts @@ -0,0 +1,118 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutPublicAccessBlockRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutPublicAccessBlockCommand}. + */ +export interface PutPublicAccessBlockCommandInput extends PutPublicAccessBlockRequest { +} +/** + * @public + * + * The output of {@link PutPublicAccessBlockCommand}. + */ +export interface PutPublicAccessBlockCommandOutput extends __MetadataBearer { +} +declare const PutPublicAccessBlockCommand_base: { + new (input: PutPublicAccessBlockCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutPublicAccessBlockCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *
+ * <note>
+ *   <p>This operation is not supported for directory buckets.</p>
+ * </note>
+ * <p>Creates or modifies the <code>PublicAccessBlock</code> configuration for an Amazon S3 bucket. To use this operation, you must have the <code>s3:PutBucketPublicAccessBlock</code> permission. For more information about Amazon S3 permissions, see Specifying Permissions in a Policy.</p>
+ * <important>
+ *   <p>When Amazon S3 evaluates the <code>PublicAccessBlock</code> configuration for a bucket or an object, it checks the <code>PublicAccessBlock</code> configuration for both the bucket (or the bucket that contains the object) and the bucket owner's account. If the <code>PublicAccessBlock</code> configurations are different between the bucket and the account, Amazon S3 uses the most restrictive combination of the bucket-level and account-level settings.</p>
+ * </important>
+ * <p>For more information about when Amazon S3 considers a bucket or an object public, see The Meaning of "Public".</p>
+ * <p>The following operations are related to <code>PutPublicAccessBlock</code>:</p>
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, PutPublicAccessBlockCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, PutPublicAccessBlockCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // PutPublicAccessBlockRequest + * Bucket: "STRING_VALUE", // required + * ContentMD5: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * PublicAccessBlockConfiguration: { // PublicAccessBlockConfiguration + * BlockPublicAcls: true || false, + * IgnorePublicAcls: true || false, + * BlockPublicPolicy: true || false, + * RestrictPublicBuckets: true || false, + * }, + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new PutPublicAccessBlockCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param PutPublicAccessBlockCommandInput - {@link PutPublicAccessBlockCommandInput} + * @returns {@link PutPublicAccessBlockCommandOutput} + * @see {@link PutPublicAccessBlockCommandInput} for command's `input` shape. + * @see {@link PutPublicAccessBlockCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *
+ * <p>Base exception class for all service exceptions from S3 service.</p>
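A hedged sketch of locking a bucket down with this command follows; it is not part of the vendored file, and the client config and bucket name are illustrative.

```typescript
import { S3Client, PutPublicAccessBlockCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-east-1" }); // illustrative config

// Enabling all four flags is the most restrictive combination; S3 applies
// the stricter of the bucket-level and account-level settings.
export async function blockPublicAccess(bucket: string): Promise<void> {
  await s3.send(
    new PutPublicAccessBlockCommand({
      Bucket: bucket,
      PublicAccessBlockConfiguration: {
        BlockPublicAcls: true,
        IgnorePublicAcls: true,
        BlockPublicPolicy: true,
        RestrictPublicBuckets: true,
      },
    })
  );
}
```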
+ * + * + * @public + */ +export declare class PutPublicAccessBlockCommand extends PutPublicAccessBlockCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutPublicAccessBlockRequest; + output: {}; + }; + sdk: { + input: PutPublicAccessBlockCommandInput; + output: PutPublicAccessBlockCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/RestoreObjectCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/RestoreObjectCommand.d.ts new file mode 100644 index 00000000..0a30596e --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/RestoreObjectCommand.d.ts @@ -0,0 +1,384 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { RestoreObjectOutput, RestoreObjectRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link RestoreObjectCommand}. + */ +export interface RestoreObjectCommandInput extends RestoreObjectRequest { +} +/** + * @public + * + * The output of {@link RestoreObjectCommand}. + */ +export interface RestoreObjectCommandOutput extends RestoreObjectOutput, __MetadataBearer { +} +declare const RestoreObjectCommand_base: { + new (input: RestoreObjectCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: RestoreObjectCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *
+ * <note>
+ *   <p>This operation is not supported for directory buckets.</p>
+ * </note>
+ * <p>Restores an archived copy of an object back into Amazon S3.</p>
+ * <note>
+ *   <p>This functionality is not supported for Amazon S3 on Outposts.</p>
+ * </note>
+ * <p>This action performs the following types of requests:</p>
+ * <ul>
+ *   <li><p><code>restore an archive</code> - Restore an archived object</p></li>
+ * </ul>
+ * <p>For more information about the <code>S3</code> structure in the request body, see the following:</p>
+ * <dl>
+ *   <dt>Permissions</dt>
+ *   <dd>
+ *     <p>To use this operation, you must have permissions to perform the <code>s3:RestoreObject</code> action. The bucket owner has this permission by default and can grant this permission to others. For more information about permissions, see Permissions Related to Bucket Subresource Operations and Managing Access Permissions to Your Amazon S3 Resources in the <i>Amazon S3 User Guide</i>.</p>
+ *   </dd>
+ *   <dt>Restoring objects</dt>
+ *   <dd>
+ *     <p>Objects that you archive to the S3 Glacier Flexible Retrieval or S3 Glacier Deep Archive storage class, and S3 Intelligent-Tiering Archive or S3 Intelligent-Tiering Deep Archive tiers, are not accessible in real time. For objects in the S3 Glacier Flexible Retrieval or S3 Glacier Deep Archive storage classes, you must first initiate a restore request, and then wait until a temporary copy of the object is available. If you want a permanent copy of the object, create a copy of it in the Amazon S3 Standard storage class in your S3 bucket. To access an archived object, you must restore the object for the duration (number of days) that you specify. For objects in the Archive Access or Deep Archive Access tiers of S3 Intelligent-Tiering, you must first initiate a restore request, and then wait until the object is moved into the Frequent Access tier.</p>
+ *     <p>To restore a specific object version, you can provide a version ID. If you don't provide a version ID, Amazon S3 restores the current version.</p>
+ *     <p>When restoring an archived object, you can specify one of the following data access tier options in the <code>Tier</code> element of the request body:</p>
+ *     <ul>
+ *       <li><p><code>Expedited</code> - Expedited retrievals allow you to quickly access your data stored in the S3 Glacier Flexible Retrieval storage class or S3 Intelligent-Tiering Archive tier when occasional urgent requests for restoring archives are required. For all but the largest archived objects (250 MB+), data accessed using Expedited retrievals is typically made available within 1–5 minutes. Provisioned capacity ensures that retrieval capacity for Expedited retrievals is available when you need it. Expedited retrievals and provisioned capacity are not available for objects stored in the S3 Glacier Deep Archive storage class or S3 Intelligent-Tiering Deep Archive tier.</p></li>
+ *       <li><p><code>Standard</code> - Standard retrievals allow you to access any of your archived objects within several hours. This is the default option for retrieval requests that do not specify the retrieval option. Standard retrievals typically finish within 3–5 hours for objects stored in the S3 Glacier Flexible Retrieval storage class or S3 Intelligent-Tiering Archive tier. They typically finish within 12 hours for objects stored in the S3 Glacier Deep Archive storage class or S3 Intelligent-Tiering Deep Archive tier. Standard retrievals are free for objects stored in S3 Intelligent-Tiering.</p></li>
+ *       <li><p><code>Bulk</code> - Bulk retrievals are free for objects stored in the S3 Glacier Flexible Retrieval and S3 Intelligent-Tiering storage classes, enabling you to retrieve large amounts, even petabytes, of data at no cost. Bulk retrievals typically finish within 5–12 hours for objects stored in the S3 Glacier Flexible Retrieval storage class or S3 Intelligent-Tiering Archive tier. Bulk retrievals are also the lowest-cost retrieval option when restoring objects from S3 Glacier Deep Archive. They typically finish within 48 hours for objects stored in the S3 Glacier Deep Archive storage class or S3 Intelligent-Tiering Deep Archive tier.</p></li>
+ *     </ul>
+ *     <p>For more information about archive retrieval options and provisioned capacity for <code>Expedited</code> data access, see Restoring Archived Objects in the <i>Amazon S3 User Guide</i>.</p>
+ *     <p>You can use Amazon S3 restore speed upgrade to change the restore speed to a faster speed while it is in progress. For more information, see Upgrading the speed of an in-progress restore in the <i>Amazon S3 User Guide</i>.</p>
+ *     <p>To get the status of object restoration, you can send a <code>HEAD</code> request. Operations return the <code>x-amz-restore</code> header, which provides information about the restoration status, in the response. You can use Amazon S3 event notifications to notify you when a restore is initiated or completed. For more information, see Configuring Amazon S3 Event Notifications in the <i>Amazon S3 User Guide</i>.</p>
+ *     <p>After restoring an archived object, you can update the restoration period by reissuing the request with a new period. Amazon S3 updates the restoration period relative to the current time and charges only for the request; there are no data transfer charges. You cannot update the restoration period when Amazon S3 is actively processing your current restore request for the object.</p>
+ *     <p>If your bucket has a lifecycle configuration with a rule that includes an expiration action, the object expiration overrides the life span that you specify in a restore request. For example, if you restore an object copy for 10 days, but the object is scheduled to expire in 3 days, Amazon S3 deletes the object in 3 days. For more information about lifecycle configuration, see PutBucketLifecycleConfiguration and Object Lifecycle Management in the <i>Amazon S3 User Guide</i>.</p>
+ *   </dd>
+ *   <dt>Responses</dt>
+ *   <dd>
+ *     <p>A successful action returns either the <code>200 OK</code> or <code>202 Accepted</code> status code.</p>
+ *     <ul>
+ *       <li><p>If the object is not previously restored, then Amazon S3 returns <code>202 Accepted</code> in the response.</p></li>
+ *       <li><p>If the object is previously restored, Amazon S3 returns <code>200 OK</code> in the response.</p></li>
+ *     </ul>
+ *     <p>Special errors:</p>
+ *     <ul>
+ *       <li><p>Code: RestoreAlreadyInProgress</p></li>
+ *       <li><p>Cause: Object restore is already in progress.</p></li>
+ *       <li><p>HTTP Status Code: 409 Conflict</p></li>
+ *       <li><p>SOAP Fault Code Prefix: Client</p></li>
+ *     </ul>
+ *     <ul>
+ *       <li><p>Code: GlacierExpeditedRetrievalNotAvailable</p></li>
+ *       <li><p>Cause: Expedited retrievals are currently not available. Try again later. (Returned if there is insufficient capacity to process the Expedited request. This error applies only to Expedited retrievals and not to S3 Standard or Bulk retrievals.)</p></li>
+ *       <li><p>HTTP Status Code: 503</p></li>
+ *       <li><p>SOAP Fault Code Prefix: N/A</p></li>
+ *     </ul>
+ *   </dd>
+ * </dl>
+ * <p>The following operations are related to <code>RestoreObject</code>:</p>
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, RestoreObjectCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, RestoreObjectCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // RestoreObjectRequest + * Bucket: "STRING_VALUE", // required + * Key: "STRING_VALUE", // required + * VersionId: "STRING_VALUE", + * RestoreRequest: { // RestoreRequest + * Days: Number("int"), + * GlacierJobParameters: { // GlacierJobParameters + * Tier: "Standard" || "Bulk" || "Expedited", // required + * }, + * Type: "SELECT", + * Tier: "Standard" || "Bulk" || "Expedited", + * Description: "STRING_VALUE", + * SelectParameters: { // SelectParameters + * InputSerialization: { // InputSerialization + * CSV: { // CSVInput + * FileHeaderInfo: "USE" || "IGNORE" || "NONE", + * Comments: "STRING_VALUE", + * QuoteEscapeCharacter: "STRING_VALUE", + * RecordDelimiter: "STRING_VALUE", + * FieldDelimiter: "STRING_VALUE", + * QuoteCharacter: "STRING_VALUE", + * AllowQuotedRecordDelimiter: true || false, + * }, + * CompressionType: "NONE" || "GZIP" || "BZIP2", + * JSON: { // JSONInput + * Type: "DOCUMENT" || "LINES", + * }, + * Parquet: {}, + * }, + * ExpressionType: "SQL", // required + * Expression: "STRING_VALUE", // required + * OutputSerialization: { // OutputSerialization + * CSV: { // CSVOutput + * QuoteFields: "ALWAYS" || "ASNEEDED", + * QuoteEscapeCharacter: "STRING_VALUE", + * RecordDelimiter: "STRING_VALUE", + * FieldDelimiter: "STRING_VALUE", + * QuoteCharacter: "STRING_VALUE", + * }, + * JSON: { // JSONOutput + * RecordDelimiter: "STRING_VALUE", + * }, + * }, + * }, + * OutputLocation: { // OutputLocation + * S3: { // S3Location + * BucketName: "STRING_VALUE", // required + * Prefix: "STRING_VALUE", // required + * Encryption: { // Encryption + * EncryptionType: "AES256" || "aws:kms" || "aws:kms:dsse", // required + * KMSKeyId: "STRING_VALUE", + * KMSContext: "STRING_VALUE", + * }, + * CannedACL: "private" || "public-read" || "public-read-write" || "authenticated-read" || "aws-exec-read" || "bucket-owner-read" || "bucket-owner-full-control", + * AccessControlList: [ // Grants + * { // Grant + * Grantee: { // Grantee + * DisplayName: "STRING_VALUE", + * EmailAddress: "STRING_VALUE", + * ID: "STRING_VALUE", + * URI: "STRING_VALUE", + * Type: "CanonicalUser" || "AmazonCustomerByEmail" || "Group", // required + * }, + * Permission: "FULL_CONTROL" || "WRITE" || "WRITE_ACP" || "READ" || "READ_ACP", + * }, + * ], + * Tagging: { // Tagging + * TagSet: [ // TagSet // required + * { // Tag + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * ], + * }, + * UserMetadata: [ // UserMetadata + * { // MetadataEntry + * Name: "STRING_VALUE", + * Value: "STRING_VALUE", + * }, + * ], + * StorageClass: "STANDARD" || "REDUCED_REDUNDANCY" || "STANDARD_IA" || "ONEZONE_IA" || "INTELLIGENT_TIERING" || "GLACIER" || "DEEP_ARCHIVE" || "OUTPOSTS" || "GLACIER_IR" || "SNOW" || "EXPRESS_ONEZONE", + * }, + * }, + * }, + * RequestPayer: "requester", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new RestoreObjectCommand(input); + * const response = await client.send(command); + * // { // RestoreObjectOutput + * // RequestCharged: "requester", + * // RestoreOutputPath: "STRING_VALUE", + * // }; + * + * ``` + * + * @param 
RestoreObjectCommandInput - {@link RestoreObjectCommandInput} + * @returns {@link RestoreObjectCommandOutput} + * @see {@link RestoreObjectCommandInput} for command's `input` shape. + * @see {@link RestoreObjectCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link ObjectAlreadyInActiveTierError} (client fault) + *
+ * <p>This action is not allowed against this storage tier.</p>
+ * + * @throws {@link S3ServiceException} + *
+ * <p>Base exception class for all service exceptions from S3 service.</p>
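A minimal sketch of initiating a restore with this command follows (not part of the vendored file); the bucket, key, tier, and duration are illustrative.

```typescript
import { S3Client, RestoreObjectCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-east-1" }); // illustrative config

// Ask for a temporary 2-day copy via the Standard retrieval tier.
// Re-issuing the request with a new Days value extends the window.
export async function restoreArchived(bucket: string, key: string) {
  const response = await s3.send(
    new RestoreObjectCommand({
      Bucket: bucket,
      Key: key,
      RestoreRequest: { Days: 2, GlacierJobParameters: { Tier: "Standard" } },
    })
  );
  // 202 Accepted for a newly initiated restore; 200 OK if already restored.
  return response.$metadata.httpStatusCode;
}
```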
+ * + * + * @example To restore an archived object + * ```javascript + * // The following example restores for one day an archived copy of an object back into Amazon S3 bucket. + * const input = { + * Bucket: "examplebucket", + * Key: "archivedobjectkey", + * RestoreRequest: { + * Days: 1, + * GlacierJobParameters: { + * Tier: "Expedited" + * } + * } + * }; + * const command = new RestoreObjectCommand(input); + * const response = await client.send(command); + * /* response is + * { /* empty *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class RestoreObjectCommand extends RestoreObjectCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: RestoreObjectRequest; + output: RestoreObjectOutput; + }; + sdk: { + input: RestoreObjectCommandInput; + output: RestoreObjectCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/SelectObjectContentCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/SelectObjectContentCommand.d.ts new file mode 100644 index 00000000..a3af8c63 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/SelectObjectContentCommand.d.ts @@ -0,0 +1,255 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { SelectObjectContentOutput, SelectObjectContentRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link SelectObjectContentCommand}. + */ +export interface SelectObjectContentCommandInput extends SelectObjectContentRequest { +} +/** + * @public + * + * The output of {@link SelectObjectContentCommand}. + */ +export interface SelectObjectContentCommandOutput extends SelectObjectContentOutput, __MetadataBearer { +} +declare const SelectObjectContentCommand_base: { + new (input: SelectObjectContentCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: SelectObjectContentCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *
+ * <note>
+ *   <p>This operation is not supported for directory buckets.</p>
+ * </note>
+ * <p>This action filters the contents of an Amazon S3 object based on a simple structured query language (SQL) statement. In the request, along with the SQL expression, you must also specify a data serialization format (JSON, CSV, or Apache Parquet) of the object. Amazon S3 uses this format to parse object data into records, and returns only records that match the specified SQL expression. You must also specify the data serialization format for the response.</p>
+ * <note>
+ *   <p>This functionality is not supported for Amazon S3 on Outposts.</p>
+ * </note>
+ * <p>For more information about Amazon S3 Select, see Selecting Content from Objects and SELECT Command in the <i>Amazon S3 User Guide</i>.</p>
+ * <dl>
+ *   <dt>Permissions</dt>
+ *   <dd>
+ *     <p>You must have the <code>s3:GetObject</code> permission for this operation. Amazon S3 Select does not support anonymous access. For more information about permissions, see Specifying Permissions in a Policy in the <i>Amazon S3 User Guide</i>.</p>
+ *   </dd>
+ *   <dt>Object Data Formats</dt>
+ *   <dd>
+ *     <p>You can use Amazon S3 Select to query objects that have the following format properties:</p>
+ *     <ul>
+ *       <li><p><i>CSV, JSON, and Parquet</i> - Objects must be in CSV, JSON, or Parquet format.</p></li>
+ *       <li><p><i>UTF-8</i> - UTF-8 is the only encoding type Amazon S3 Select supports.</p></li>
+ *       <li><p><i>GZIP or BZIP2</i> - CSV and JSON files can be compressed using GZIP or BZIP2. GZIP and BZIP2 are the only compression formats that Amazon S3 Select supports for CSV and JSON files. Amazon S3 Select supports columnar compression for Parquet using GZIP or Snappy. Amazon S3 Select does not support whole-object compression for Parquet objects.</p></li>
+ *       <li><p><i>Server-side encryption</i> - Amazon S3 Select supports querying objects that are protected with server-side encryption.</p>
+ *         <p>For objects that are encrypted with customer-provided encryption keys (SSE-C), you must use HTTPS, and you must use the headers that are documented in GetObject. For more information about SSE-C, see Server-Side Encryption (Using Customer-Provided Encryption Keys) in the <i>Amazon S3 User Guide</i>.</p>
+ *         <p>For objects that are encrypted with Amazon S3 managed keys (SSE-S3) and Amazon Web Services KMS keys (SSE-KMS), server-side encryption is handled transparently, so you don't need to specify anything. For more information about server-side encryption, including SSE-S3 and SSE-KMS, see Protecting Data Using Server-Side Encryption in the <i>Amazon S3 User Guide</i>.</p></li>
+ *     </ul>
+ *   </dd>
+ *   <dt>Working with the Response Body</dt>
+ *   <dd>
+ *     <p>Given that the response size is unknown, Amazon S3 Select streams the response as a series of messages and includes a <code>Transfer-Encoding</code> header with <code>chunked</code> as its value in the response. For more information, see Appendix: SelectObjectContent Response.</p>
+ *   </dd>
+ *   <dt>GetObject Support</dt>
+ *   <dd>
+ *     <p>The <code>SelectObjectContent</code> action does not support the following <code>GetObject</code> functionality. For more information, see GetObject.</p>
+ *     <ul>
+ *       <li><p><code>Range</code>: Although you can specify a scan range for an Amazon S3 Select request (see SelectObjectContentRequest - ScanRange in the request parameters), you cannot specify the range of bytes of an object to return.</p></li>
+ *       <li><p>The <code>GLACIER</code>, <code>DEEP_ARCHIVE</code>, and <code>REDUCED_REDUNDANCY</code> storage classes, or the <code>ARCHIVE_ACCESS</code> and <code>DEEP_ARCHIVE_ACCESS</code> access tiers of the <code>INTELLIGENT_TIERING</code> storage class: You cannot query objects in the <code>GLACIER</code>, <code>DEEP_ARCHIVE</code>, or <code>REDUCED_REDUNDANCY</code> storage classes, nor objects in the <code>ARCHIVE_ACCESS</code> or <code>DEEP_ARCHIVE_ACCESS</code> access tiers of the <code>INTELLIGENT_TIERING</code> storage class. For more information about storage classes, see Using Amazon S3 storage classes in the <i>Amazon S3 User Guide</i>.</p></li>
+ *     </ul>
+ *   </dd>
+ *   <dt>Special Errors</dt>
+ *   <dd>
+ *     <p>For a list of special errors for this operation, see List of SELECT Object Content Error Codes.</p>
+ *   </dd>
+ * </dl>
+ * <p>The following operations are related to <code>SelectObjectContent</code>:</p>
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, SelectObjectContentCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, SelectObjectContentCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // SelectObjectContentRequest + * Bucket: "STRING_VALUE", // required + * Key: "STRING_VALUE", // required + * SSECustomerAlgorithm: "STRING_VALUE", + * SSECustomerKey: "STRING_VALUE", + * SSECustomerKeyMD5: "STRING_VALUE", + * Expression: "STRING_VALUE", // required + * ExpressionType: "SQL", // required + * RequestProgress: { // RequestProgress + * Enabled: true || false, + * }, + * InputSerialization: { // InputSerialization + * CSV: { // CSVInput + * FileHeaderInfo: "USE" || "IGNORE" || "NONE", + * Comments: "STRING_VALUE", + * QuoteEscapeCharacter: "STRING_VALUE", + * RecordDelimiter: "STRING_VALUE", + * FieldDelimiter: "STRING_VALUE", + * QuoteCharacter: "STRING_VALUE", + * AllowQuotedRecordDelimiter: true || false, + * }, + * CompressionType: "NONE" || "GZIP" || "BZIP2", + * JSON: { // JSONInput + * Type: "DOCUMENT" || "LINES", + * }, + * Parquet: {}, + * }, + * OutputSerialization: { // OutputSerialization + * CSV: { // CSVOutput + * QuoteFields: "ALWAYS" || "ASNEEDED", + * QuoteEscapeCharacter: "STRING_VALUE", + * RecordDelimiter: "STRING_VALUE", + * FieldDelimiter: "STRING_VALUE", + * QuoteCharacter: "STRING_VALUE", + * }, + * JSON: { // JSONOutput + * RecordDelimiter: "STRING_VALUE", + * }, + * }, + * ScanRange: { // ScanRange + * Start: Number("long"), + * End: Number("long"), + * }, + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new SelectObjectContentCommand(input); + * const response = await client.send(command); + * // { // SelectObjectContentOutput + * // Payload: { // SelectObjectContentEventStream Union: only one key present + * // Records: { // RecordsEvent + * // Payload: new Uint8Array(), + * // }, + * // Stats: { // StatsEvent + * // Details: { // Stats + * // BytesScanned: Number("long"), + * // BytesProcessed: Number("long"), + * // BytesReturned: Number("long"), + * // }, + * // }, + * // Progress: { // ProgressEvent + * // Details: { // Progress + * // BytesScanned: Number("long"), + * // BytesProcessed: Number("long"), + * // BytesReturned: Number("long"), + * // }, + * // }, + * // Cont: {}, + * // End: {}, + * // }, + * // }; + * + * ``` + * + * @param SelectObjectContentCommandInput - {@link SelectObjectContentCommandInput} + * @returns {@link SelectObjectContentCommandOutput} + * @see {@link SelectObjectContentCommandInput} for command's `input` shape. + * @see {@link SelectObjectContentCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *
+ * <p>Base exception class for all service exceptions from S3 service.</p>
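Since the response here is an event stream rather than a plain body, a hedged sketch of draining it may help (not part of the vendored file); the bucket, key, and the assumed `name` CSV column are illustrative.

```typescript
import { S3Client, SelectObjectContentCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-east-1" }); // illustrative config

// Stream only the rows that match a SQL projection out of a CSV object.
// Payload is an async iterable of events; Records events carry result bytes.
export async function selectNames(bucket: string, key: string): Promise<string> {
  const { Payload } = await s3.send(
    new SelectObjectContentCommand({
      Bucket: bucket,
      Key: key,
      Expression: "SELECT s.name FROM S3Object s", // assumes a `name` column
      ExpressionType: "SQL",
      InputSerialization: { CSV: { FileHeaderInfo: "USE" } },
      OutputSerialization: { CSV: {} },
    })
  );
  const decoder = new TextDecoder();
  let out = "";
  for await (const event of Payload ?? []) {
    if (event.Records?.Payload) out += decoder.decode(event.Records.Payload);
  }
  return out;
}
```

Accumulating the Records payloads until the End event arrives is the usual pattern; the sketch simply concatenates the returned bytes.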
+ * + * + * @public + */ +export declare class SelectObjectContentCommand extends SelectObjectContentCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: SelectObjectContentRequest; + output: SelectObjectContentOutput; + }; + sdk: { + input: SelectObjectContentCommandInput; + output: SelectObjectContentCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/UploadPartCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/UploadPartCommand.d.ts new file mode 100644 index 00000000..7ae338f6 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/UploadPartCommand.d.ts @@ -0,0 +1,309 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer, StreamingBlobPayloadInputTypes } from "@smithy/types"; +import { UploadPartOutput, UploadPartRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UploadPartCommand}. + */ +export interface UploadPartCommandInput extends Omit { + Body?: StreamingBlobPayloadInputTypes; +} +/** + * @public + * + * The output of {@link UploadPartCommand}. + */ +export interface UploadPartCommandOutput extends UploadPartOutput, __MetadataBearer { +} +declare const UploadPartCommand_base: { + new (input: UploadPartCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UploadPartCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *
+ * <p>Uploads a part in a multipart upload.</p>
+ * <note>
+ *   <p>In this operation, you provide new data as a part of an object in your request. However, you have an option to specify your existing Amazon S3 object as a data source for the part you are uploading. To upload a part from an existing object, you use the UploadPartCopy operation.</p>
+ * </note>
+ * <p>You must initiate a multipart upload (see CreateMultipartUpload) before you can upload any part. In response to your initiate request, Amazon S3 returns an upload ID, a unique identifier that you must include in your upload part request.</p>
+ * <p>Part numbers can be any number from 1 to 10,000, inclusive. A part number uniquely identifies a part and also defines its position within the object being created. If you upload a new part using the same part number that was used with a previous part, the previously uploaded part is overwritten.</p>
+ * <p>For information about maximum and minimum part sizes and other multipart upload specifications, see Multipart upload limits in the <i>Amazon S3 User Guide</i>.</p>
+ * <note>
+ *   <p>After you initiate a multipart upload and upload one or more parts, you must either complete or abort the multipart upload in order to stop getting charged for storage of the uploaded parts. Only after you either complete or abort the multipart upload does Amazon S3 free up the parts storage and stop charging you for it.</p>
+ * </note>
+ * <p>For more information on multipart uploads, go to Multipart Upload Overview in the <i>Amazon S3 User Guide</i>.</p>
+ * <note>
+ *   <p><b>Directory buckets</b> - For directory buckets, you must make requests for this API operation to the Zonal endpoint. These endpoints support virtual-hosted-style requests in the format <code>https://amzn-s3-demo-bucket.s3express-zone-id.region-code.amazonaws.com/key-name</code>. Path-style requests are not supported. For more information about endpoints in Availability Zones, see Regional and Zonal endpoints for directory buckets in Availability Zones in the <i>Amazon S3 User Guide</i>. For more information about endpoints in Local Zones, see Concepts for directory buckets in Local Zones in the <i>Amazon S3 User Guide</i>.</p>
+ * </note>
+ * <dl>
+ *   <dt>Permissions</dt>
+ *   <dd>
+ *     <ul>
+ *       <li><p><b>General purpose bucket permissions</b> - To perform a multipart upload with encryption using a Key Management Service key, the requester must have permission to the <code>kms:Decrypt</code> and <code>kms:GenerateDataKey</code> actions on the key. The requester must also have permissions for the <code>kms:GenerateDataKey</code> action for the <code>CreateMultipartUpload</code> API. Then, the requester needs permissions for the <code>kms:Decrypt</code> action on the <code>UploadPart</code> and <code>UploadPartCopy</code> APIs.</p>
+ *         <p>These permissions are required because Amazon S3 must decrypt and read data from the encrypted file parts before it completes the multipart upload. For more information about KMS permissions, see Protecting data using server-side encryption with KMS in the <i>Amazon S3 User Guide</i>. For information about the permissions required to use the multipart upload API, see Multipart upload and permissions and Multipart upload API and permissions in the <i>Amazon S3 User Guide</i>.</p></li>
+ *       <li><p><b>Directory bucket permissions</b> - To grant access to this API operation on a directory bucket, we recommend that you use the <code>CreateSession</code> API operation for session-based authorization. Specifically, you grant the <code>s3express:CreateSession</code> permission to the directory bucket in a bucket policy or an IAM identity-based policy. Then, you make the <code>CreateSession</code> API call on the bucket to obtain a session token. With the session token in your request header, you can make API requests to this operation. After the session token expires, you make another <code>CreateSession</code> API call to generate a new session token for use. The Amazon Web Services CLI or SDKs create a session and refresh the session token automatically to avoid service interruptions when a session expires. For more information about authorization, see <code>CreateSession</code>.</p>
+ *         <p>If the object is encrypted with SSE-KMS, you must also have the <code>kms:GenerateDataKey</code> and <code>kms:Decrypt</code> permissions in IAM identity-based policies and KMS key policies for the KMS key.</p></li>
+ *     </ul>
+ *   </dd>
+ *   <dt>Data integrity</dt>
+ *   <dd>
+ *     <p><b>General purpose bucket</b> - To ensure that data is not corrupted traversing the network, specify the <code>Content-MD5</code> header in the upload part request. Amazon S3 checks the part data against the provided MD5 value. If they do not match, Amazon S3 returns an error. If the upload request is signed with Signature Version 4, then Amazon Web Services S3 uses the <code>x-amz-content-sha256</code> header as a checksum instead of <code>Content-MD5</code>. For more information, see Authenticating Requests: Using the Authorization Header (Amazon Web Services Signature Version 4).</p>
+ *     <note>
+ *       <p><b>Directory buckets</b> - MD5 is not supported by directory buckets. You can use checksum algorithms to check object integrity.</p>
+ *     </note>
+ *   </dd>
+ *   <dt>Encryption</dt>
+ *   <dd>
+ *     <ul>
+ *       <li><p><b>General purpose bucket</b> - Server-side encryption is for data encryption at rest. Amazon S3 encrypts your data as it writes it to disks in its data centers and decrypts it when you access it. You have mutually exclusive options to protect data using server-side encryption in Amazon S3, depending on how you choose to manage the encryption keys. Specifically, the encryption key options are Amazon S3 managed keys (SSE-S3), Amazon Web Services KMS keys (SSE-KMS), and Customer-Provided Keys (SSE-C). Amazon S3 encrypts data with server-side encryption using Amazon S3 managed keys (SSE-S3) by default. You can optionally tell Amazon S3 to encrypt data at rest using server-side encryption with other key options. The option you use depends on whether you want to use KMS keys (SSE-KMS) or provide your own encryption key (SSE-C).</p>
+ *         <p>Server-side encryption is supported by the S3 Multipart Upload operations. Unless you are using a customer-provided encryption key (SSE-C), you don't need to specify the encryption parameters in each UploadPart request. Instead, you only need to specify the server-side encryption parameters in the initial Initiate Multipart request. For more information, see CreateMultipartUpload.</p>
+ *         <p>If you request server-side encryption using a customer-provided encryption key (SSE-C) in your initiate multipart upload request, you must provide identical encryption information in each part upload using the following request headers:</p>
+ *         <ul>
+ *           <li><p>x-amz-server-side-encryption-customer-algorithm</p></li>
+ *           <li><p>x-amz-server-side-encryption-customer-key</p></li>
+ *           <li><p>x-amz-server-side-encryption-customer-key-MD5</p></li>
+ *         </ul>
+ *         <p>For more information, see Using Server-Side Encryption in the <i>Amazon S3 User Guide</i>.</p></li>
+ *       <li><p><b>Directory buckets</b> - For directory buckets, there are only two supported options for server-side encryption: server-side encryption with Amazon S3 managed keys (SSE-S3) (<code>AES256</code>) and server-side encryption with KMS keys (SSE-KMS) (<code>aws:kms</code>).</p></li>
+ *     </ul>
+ *   </dd>
+ *   <dt>Special errors</dt>
+ *   <dd>
+ *     <ul>
+ *       <li><p>Error Code: <code>NoSuchUpload</code></p>
+ *         <ul>
+ *           <li><p>Description: The specified multipart upload does not exist. The upload ID might be invalid, or the multipart upload might have been aborted or completed.</p></li>
+ *           <li><p>HTTP Status Code: 404 Not Found</p></li>
+ *           <li><p>SOAP Fault Code Prefix: Client</p></li>
+ *         </ul></li>
+ *     </ul>
+ *   </dd>
+ *   <dt>HTTP Host header syntax</dt>
+ *   <dd>
+ *     <p><b>Directory buckets</b> - The HTTP Host header syntax is <code>Bucket-name.s3express-zone-id.region-code.amazonaws.com</code>.</p>
+ *   </dd>
+ * </dl>
+ * <p>The following operations are related to <code>UploadPart</code>:</p>
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, UploadPartCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, UploadPartCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // UploadPartRequest + * Body: "MULTIPLE_TYPES_ACCEPTED", // see \@smithy/types -> StreamingBlobPayloadInputTypes + * Bucket: "STRING_VALUE", // required + * ContentLength: Number("long"), + * ContentMD5: "STRING_VALUE", + * ChecksumAlgorithm: "CRC32" || "CRC32C" || "SHA1" || "SHA256" || "CRC64NVME", + * ChecksumCRC32: "STRING_VALUE", + * ChecksumCRC32C: "STRING_VALUE", + * ChecksumCRC64NVME: "STRING_VALUE", + * ChecksumSHA1: "STRING_VALUE", + * ChecksumSHA256: "STRING_VALUE", + * Key: "STRING_VALUE", // required + * PartNumber: Number("int"), // required + * UploadId: "STRING_VALUE", // required + * SSECustomerAlgorithm: "STRING_VALUE", + * SSECustomerKey: "STRING_VALUE", + * SSECustomerKeyMD5: "STRING_VALUE", + * RequestPayer: "requester", + * ExpectedBucketOwner: "STRING_VALUE", + * }; + * const command = new UploadPartCommand(input); + * const response = await client.send(command); + * // { // UploadPartOutput + * // ServerSideEncryption: "AES256" || "aws:kms" || "aws:kms:dsse", + * // ETag: "STRING_VALUE", + * // ChecksumCRC32: "STRING_VALUE", + * // ChecksumCRC32C: "STRING_VALUE", + * // ChecksumCRC64NVME: "STRING_VALUE", + * // ChecksumSHA1: "STRING_VALUE", + * // ChecksumSHA256: "STRING_VALUE", + * // SSECustomerAlgorithm: "STRING_VALUE", + * // SSECustomerKeyMD5: "STRING_VALUE", + * // SSEKMSKeyId: "STRING_VALUE", + * // BucketKeyEnabled: true || false, + * // RequestCharged: "requester", + * // }; + * + * ``` + * + * @param UploadPartCommandInput - {@link UploadPartCommandInput} + * @returns {@link UploadPartCommandOutput} + * @see {@link UploadPartCommandInput} for command's `input` shape. + * @see {@link UploadPartCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *
+ * <p>Base exception class for all service exceptions from S3 service.</p>
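Because this command only makes sense inside an initiate/upload/complete flow, here is a hedged end-to-end sketch (not part of the vendored file); part sizing, names, and error handling are illustrative.

```typescript
import {
  S3Client,
  CreateMultipartUploadCommand,
  UploadPartCommand,
  CompleteMultipartUploadCommand,
} from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-east-1" }); // illustrative config

// Initiate, upload each part (5 MiB minimum except the last), then
// complete with the collected ETags in part-number order.
export async function uploadInParts(bucket: string, key: string, parts: Uint8Array[]) {
  const { UploadId } = await s3.send(
    new CreateMultipartUploadCommand({ Bucket: bucket, Key: key })
  );
  if (!UploadId) throw new Error("no upload ID returned");
  const completed: { ETag?: string; PartNumber: number }[] = [];
  for (let i = 0; i < parts.length; i++) {
    const { ETag } = await s3.send(
      new UploadPartCommand({
        Bucket: bucket,
        Key: key,
        UploadId,
        PartNumber: i + 1, // part numbers are 1-based
        Body: parts[i],
      })
    );
    completed.push({ ETag, PartNumber: i + 1 });
  }
  return s3.send(
    new CompleteMultipartUploadCommand({
      Bucket: bucket,
      Key: key,
      UploadId,
      MultipartUpload: { Parts: completed },
    })
  );
}
```

A production version would also abort the upload on failure so the parts stop accruing storage charges, per the note above.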
+ * + * + * @example To upload a part + * ```javascript + * // The following example uploads part 1 of a multipart upload. The example specifies a file name for the part data. The Upload ID is same that is returned by the initiate multipart upload. + * const input = { + * Body: "fileToUpload", + * Bucket: "examplebucket", + * Key: "examplelargeobject", + * PartNumber: 1, + * UploadId: "xadcOB_7YPBOJuoFiQ9cz4P3Pe6FIZwO4f7wN93uHsNBEw97pl5eNwzExg0LAT2dUN91cOmrEQHDsP3WA60CEg--" + * }; + * const command = new UploadPartCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ETag: `"d8c2eafd90c266e19ab9dcacc479f8af"` + * } + * *\/ + * ``` + * + * @public + */ +export declare class UploadPartCommand extends UploadPartCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UploadPartRequest; + output: UploadPartOutput; + }; + sdk: { + input: UploadPartCommandInput; + output: UploadPartCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/UploadPartCopyCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/UploadPartCopyCommand.d.ts new file mode 100644 index 00000000..39ad7156 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/UploadPartCopyCommand.d.ts @@ -0,0 +1,375 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { UploadPartCopyOutput, UploadPartCopyRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UploadPartCopyCommand}. + */ +export interface UploadPartCopyCommandInput extends UploadPartCopyRequest { +} +/** + * @public + * + * The output of {@link UploadPartCopyCommand}. + */ +export interface UploadPartCopyCommandOutput extends UploadPartCopyOutput, __MetadataBearer { +} +declare const UploadPartCopyCommand_base: { + new (input: UploadPartCopyCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UploadPartCopyCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *
+ * <p>Uploads a part by copying data from an existing object as data source. To specify the data source, you add the request header <code>x-amz-copy-source</code> in your request. To specify a byte range, you add the request header <code>x-amz-copy-source-range</code> in your request.</p>
+ * <p>For information about maximum and minimum part sizes and other multipart upload specifications, see Multipart upload limits in the <i>Amazon S3 User Guide</i>.</p>
+ * <note>
+ *   <p>Instead of copying data from an existing object as part data, you might use the UploadPart action to upload new data as a part of an object in your request.</p>
+ * </note>
+ * <p>You must initiate a multipart upload before you can upload any part. In response to your initiate request, Amazon S3 returns the upload ID, a unique identifier that you must include in your upload part request.</p>
+ * <p>For conceptual information about multipart uploads, see Uploading Objects Using Multipart Upload in the <i>Amazon S3 User Guide</i>. For information about copying objects using a single atomic action vs. a multipart upload, see Operations on Objects in the <i>Amazon S3 User Guide</i>.</p>
+ * <note>
+ *   <p><b>Directory buckets</b> - For directory buckets, you must make requests for this API operation to the Zonal endpoint. These endpoints support virtual-hosted-style requests in the format <code>https://amzn-s3-demo-bucket.s3express-zone-id.region-code.amazonaws.com/key-name</code>. Path-style requests are not supported. For more information about endpoints in Availability Zones, see Regional and Zonal endpoints for directory buckets in Availability Zones in the <i>Amazon S3 User Guide</i>. For more information about endpoints in Local Zones, see Concepts for directory buckets in Local Zones in the <i>Amazon S3 User Guide</i>.</p>
+ * </note>
+ * <dl>
+ *   <dt>Authentication and authorization</dt>
+ *   <dd>
+ *     <p>All <code>UploadPartCopy</code> requests must be authenticated and signed by using IAM credentials (access key ID and secret access key for the IAM identities). All headers with the <code>x-amz-</code> prefix, including <code>x-amz-copy-source</code>, must be signed. For more information, see REST Authentication.</p>
+ *     <p><b>Directory buckets</b> - You must use IAM credentials to authenticate and authorize your access to the <code>UploadPartCopy</code> API operation, instead of using the temporary security credentials through the <code>CreateSession</code> API operation.</p>
+ *     <p>The Amazon Web Services CLI or SDKs handle authentication and authorization on your behalf.</p>
+ *   </dd>
+ *   <dt>Permissions</dt>
+ *   <dd>
+ *     <p>You must have <code>READ</code> access to the source object and <code>WRITE</code> access to the destination bucket.</p>
+ *     <ul>
+ *       <li><p><b>General purpose bucket permissions</b> - You must have the permissions in a policy based on the bucket types of your source bucket and destination bucket in an <code>UploadPartCopy</code> operation.</p>
+ *         <ul>
+ *           <li><p>If the source object is in a general purpose bucket, you must have the <code>s3:GetObject</code> permission to read the source object that is being copied.</p></li>
+ *           <li><p>If the destination bucket is a general purpose bucket, you must have the <code>s3:PutObject</code> permission to write the object copy to the destination bucket.</p></li>
+ *           <li><p>To perform a multipart upload with encryption using a Key Management Service key, the requester must have permission to the <code>kms:Decrypt</code> and <code>kms:GenerateDataKey</code> actions on the key. The requester must also have permissions for the <code>kms:GenerateDataKey</code> action for the <code>CreateMultipartUpload</code> API. Then, the requester needs permissions for the <code>kms:Decrypt</code> action on the <code>UploadPart</code> and <code>UploadPartCopy</code> APIs. These permissions are required because Amazon S3 must decrypt and read data from the encrypted file parts before it completes the multipart upload. For more information about KMS permissions, see Protecting data using server-side encryption with KMS in the <i>Amazon S3 User Guide</i>. For information about the permissions required to use the multipart upload API, see Multipart upload and permissions and Multipart upload API and permissions in the <i>Amazon S3 User Guide</i>.</p></li>
+ *         </ul></li>
+ *       <li><p><b>Directory bucket permissions</b> - You must have permissions in a bucket policy or an IAM identity-based policy based on the source and destination bucket types in an <code>UploadPartCopy</code> operation.</p>
+ *         <ul>
+ *           <li><p>If the source object that you want to copy is in a directory bucket, you must have the <code>s3express:CreateSession</code> permission in the <code>Action</code> element of a policy to read the object. By default, the session is in the <code>ReadWrite</code> mode. If you want to restrict the access, you can explicitly set the <code>s3express:SessionMode</code> condition key to <code>ReadOnly</code> on the copy source bucket.</p></li>
+ *           <li><p>If the copy destination is a directory bucket, you must have the <code>s3express:CreateSession</code> permission in the <code>Action</code> element of a policy to write the object to the destination. The <code>s3express:SessionMode</code> condition key cannot be set to <code>ReadOnly</code> on the copy destination.</p></li>
+ *         </ul>
+ *         <p>If the object is encrypted with SSE-KMS, you must also have the <code>kms:GenerateDataKey</code> and <code>kms:Decrypt</code> permissions in IAM identity-based policies and KMS key policies for the KMS key.</p>
+ *         <p>For example policies, see Example bucket policies for S3 Express One Zone and Amazon Web Services Identity and Access Management (IAM) identity-based policies for S3 Express One Zone in the <i>Amazon S3 User Guide</i>.</p></li>
+ *     </ul>
+ *   </dd>
+ *   <dt>Encryption</dt>
+ *   <dd>
+ *     <ul>
+ *       <li><p><b>General purpose buckets</b> - For information about using server-side encryption with customer-provided encryption keys with the <code>UploadPartCopy</code> operation, see CopyObject and UploadPart.</p></li>
+ *       <li><p><b>Directory buckets</b> - For directory buckets, there are only two supported options for server-side encryption: server-side encryption with Amazon S3 managed keys (SSE-S3) (<code>AES256</code>) and server-side encryption with KMS keys (SSE-KMS) (<code>aws:kms</code>). For more information, see Protecting data with server-side encryption in the <i>Amazon S3 User Guide</i>.</p>
+ *         <note>
+ *           <p>For directory buckets, when you perform a <code>CreateMultipartUpload</code> operation and an <code>UploadPartCopy</code> operation, the request headers you provide in the <code>CreateMultipartUpload</code> request must match the default encryption configuration of the destination bucket.</p>
+ *         </note>
+ *         <p>S3 Bucket Keys aren't supported when you copy SSE-KMS encrypted objects from general purpose buckets to directory buckets, from directory buckets to general purpose buckets, or between directory buckets, through <code>UploadPartCopy</code>. In this case, Amazon S3 makes a call to KMS every time a copy request is made for a KMS-encrypted object.</p></li>
+ *     </ul>
+ *   </dd>
+ *   <dt>Special errors</dt>
+ *   <dd>
+ *     <ul>
+ *       <li><p>Error Code: <code>NoSuchUpload</code></p>
+ *         <ul>
+ *           <li><p>Description: The specified multipart upload does not exist. The upload ID might be invalid, or the multipart upload might have been aborted or completed.</p></li>
+ *           <li><p>HTTP Status Code: 404 Not Found</p></li>
+ *         </ul></li>
+ *       <li><p>Error Code: <code>InvalidRequest</code></p>
+ *         <ul>
+ *           <li><p>Description: The specified copy source is not supported as a byte-range copy source.</p></li>
+ *           <li><p>HTTP Status Code: 400 Bad Request</p></li>
+ *         </ul></li>
+ *     </ul>
+ *   </dd>
+ *   <dt>HTTP Host header syntax</dt>
+ *   <dd>
+ *     <p><b>Directory buckets</b> - The HTTP Host header syntax is <code>Bucket-name.s3express-zone-id.region-code.amazonaws.com</code>.</p>
+ *   </dd>
+ * </dl>
+ * <p>The following operations are related to <code>UploadPartCopy</code>:</p>
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, UploadPartCopyCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, UploadPartCopyCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // UploadPartCopyRequest + * Bucket: "STRING_VALUE", // required + * CopySource: "STRING_VALUE", // required + * CopySourceIfMatch: "STRING_VALUE", + * CopySourceIfModifiedSince: new Date("TIMESTAMP"), + * CopySourceIfNoneMatch: "STRING_VALUE", + * CopySourceIfUnmodifiedSince: new Date("TIMESTAMP"), + * CopySourceRange: "STRING_VALUE", + * Key: "STRING_VALUE", // required + * PartNumber: Number("int"), // required + * UploadId: "STRING_VALUE", // required + * SSECustomerAlgorithm: "STRING_VALUE", + * SSECustomerKey: "STRING_VALUE", + * SSECustomerKeyMD5: "STRING_VALUE", + * CopySourceSSECustomerAlgorithm: "STRING_VALUE", + * CopySourceSSECustomerKey: "STRING_VALUE", + * CopySourceSSECustomerKeyMD5: "STRING_VALUE", + * RequestPayer: "requester", + * ExpectedBucketOwner: "STRING_VALUE", + * ExpectedSourceBucketOwner: "STRING_VALUE", + * }; + * const command = new UploadPartCopyCommand(input); + * const response = await client.send(command); + * // { // UploadPartCopyOutput + * // CopySourceVersionId: "STRING_VALUE", + * // CopyPartResult: { // CopyPartResult + * // ETag: "STRING_VALUE", + * // LastModified: new Date("TIMESTAMP"), + * // ChecksumCRC32: "STRING_VALUE", + * // ChecksumCRC32C: "STRING_VALUE", + * // ChecksumCRC64NVME: "STRING_VALUE", + * // ChecksumSHA1: "STRING_VALUE", + * // ChecksumSHA256: "STRING_VALUE", + * // }, + * // ServerSideEncryption: "AES256" || "aws:kms" || "aws:kms:dsse", + * // SSECustomerAlgorithm: "STRING_VALUE", + * // SSECustomerKeyMD5: "STRING_VALUE", + * // SSEKMSKeyId: "STRING_VALUE", + * // BucketKeyEnabled: true || false, + * // RequestCharged: "requester", + * // }; + * + * ``` + * + * @param UploadPartCopyCommandInput - {@link UploadPartCopyCommandInput} + * @returns {@link UploadPartCopyCommandOutput} + * @see {@link UploadPartCopyCommandInput} for command's `input` shape. + * @see {@link UploadPartCopyCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *
+ * <p>Base exception class for all service exceptions from S3 service.</p>
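A minimal sketch of copying a byte range in as one part, assuming an upload ID obtained from CreateMultipartUpload elsewhere (not part of the vendored file; the source path and range are illustrative):

```typescript
import { S3Client, UploadPartCopyCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-east-1" }); // illustrative config

// Copy a byte range of an existing object in as part 1 of an
// already-initiated multipart upload.
export async function copyRangeAsPart(
  destBucket: string,
  destKey: string,
  uploadId: string
) {
  const { CopyPartResult } = await s3.send(
    new UploadPartCopyCommand({
      Bucket: destBucket,
      Key: destKey,
      UploadId: uploadId,
      PartNumber: 1,
      CopySource: "/examplebucket/sourceobjectkey", // "/bucket/key" form, illustrative
      CopySourceRange: "bytes=0-102399", // first 100 KiB
    })
  );
  return CopyPartResult?.ETag; // needed later by CompleteMultipartUpload
}
```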
+ * + * + * @example To upload a part by copying byte range from an existing object as data source + * ```javascript + * // The following example uploads a part of a multipart upload by copying a specified byte range from an existing object as data source. + * const input = { + * Bucket: "examplebucket", + * CopySource: "/bucketname/sourceobjectkey", + * CopySourceRange: "bytes=1-100000", + * Key: "examplelargeobject", + * PartNumber: 2, + * UploadId: "exampleuoh_10OhKhT7YukE9bjzTPRiuaCotmZM_pFngJFir9OZNrSr5cWa3cq3LZSUsfjI4FI7PkP91We7Nrw--" + * }; + * const command = new UploadPartCopyCommand(input); + * const response = await client.send(command); + * /* response is + * { + * CopyPartResult: { + * ETag: `"65d16d19e65a7508a51f043180edcc36"`, + * LastModified: "2016-12-29T21:44:28.000Z" + * } + * } + * *\/ + * ``` + * + * @example To upload a part by copying data from an existing object as data source + * ```javascript + * // The following example uploads a part of a multipart upload by copying data from an existing object as data source. + * const input = { + * Bucket: "examplebucket", + * CopySource: "/bucketname/sourceobjectkey", + * Key: "examplelargeobject", + * PartNumber: 1, + * UploadId: "exampleuoh_10OhKhT7YukE9bjzTPRiuaCotmZM_pFngJFir9OZNrSr5cWa3cq3LZSUsfjI4FI7PkP91We7Nrw--" + * }; + * const command = new UploadPartCopyCommand(input); + * const response = await client.send(command); + * /* response is + * { + * CopyPartResult: { + * ETag: `"b0c6f0e7e054ab8fa2536a2677f8734d"`, + * LastModified: "2016-12-29T21:24:43.000Z" + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class UploadPartCopyCommand extends UploadPartCopyCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UploadPartCopyRequest; + output: UploadPartCopyOutput; + }; + sdk: { + input: UploadPartCopyCommandInput; + output: UploadPartCopyCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/WriteGetObjectResponseCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/WriteGetObjectResponseCommand.d.ts new file mode 100644 index 00000000..005e214e --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/WriteGetObjectResponseCommand.d.ts @@ -0,0 +1,149 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer, StreamingBlobPayloadInputTypes } from "@smithy/types"; +import { WriteGetObjectResponseRequest } from "../models/models_1"; +import { S3ClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../S3Client"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link WriteGetObjectResponseCommand}. + */ +export interface WriteGetObjectResponseCommandInput extends Omit { + Body?: StreamingBlobPayloadInputTypes; +} +/** + * @public + * + * The output of {@link WriteGetObjectResponseCommand}. + */ +export interface WriteGetObjectResponseCommandOutput extends __MetadataBearer { +} +declare const WriteGetObjectResponseCommand_base: { + new (input: WriteGetObjectResponseCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: WriteGetObjectResponseCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + * + *

 + * Note: This operation is not supported for directory buckets.
 + *
 + * Passes transformed objects to a GetObject operation when using Object Lambda access points. For
 + * information about Object Lambda access points, see Transforming objects with
 + * Object Lambda access points in the Amazon S3 User Guide.
 + *
 + * This operation supports metadata that can be returned by GetObject, in addition to
 + * RequestRoute, RequestToken, StatusCode,
 + * ErrorCode, and ErrorMessage. The GetObject
 + * response metadata is supported so that the WriteGetObjectResponse caller,
 + * typically a Lambda function, can provide the same metadata when it internally invokes
 + * GetObject. When WriteGetObjectResponse is called by a
 + * customer-owned Lambda function, the metadata returned to the end user
 + * GetObject call might differ from what Amazon S3 would normally return.
 + *
 + * You can include any number of metadata headers. When including a metadata header, it
 + * should be prefaced with x-amz-meta. For example,
 + * x-amz-meta-my-custom-header: MyCustomValue. The primary use case for this
 + * is to forward GetObject metadata.
 + *
 + * Amazon Web Services provides some prebuilt Lambda functions that you can use with S3 Object Lambda to
 + * detect and redact personally identifiable information (PII) and decompress S3 objects.
 + * These Lambda functions are available in the Amazon Web Services Serverless Application Repository, and
 + * can be selected through the Amazon Web Services Management Console when you create your Object Lambda access point.
 + *
 + * Example 1: PII Access Control - This Lambda function uses Amazon Comprehend, a
 + * natural language processing (NLP) service using machine learning to find insights and
 + * relationships in text. It automatically detects personally identifiable information (PII)
 + * such as names, addresses, dates, credit card numbers, and social security numbers from
 + * documents in your Amazon S3 bucket.
 + *
 + * Example 2: PII Redaction - This Lambda function uses Amazon Comprehend, a natural
 + * language processing (NLP) service using machine learning to find insights and relationships
 + * in text. It automatically redacts personally identifiable information (PII) such as names,
 + * addresses, dates, credit card numbers, and social security numbers from documents in your
 + * Amazon S3 bucket.
 + *
 + * Example 3: Decompression - The Lambda function S3ObjectLambdaDecompression is
 + * equipped to decompress objects stored in S3 in one of six compressed file formats including
 + * bzip2, gzip, snappy, zlib, zstandard and ZIP.
 + *
 + * For information on how to view and use these functions, see Using Amazon Web Services built Lambda
 + * functions in the Amazon S3 User Guide.
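 + *
 + * As an editorial sketch (not from the upstream docs): a minimal Object Lambda handler that
 + * fetches the original object, transforms it, and returns it through this command. The event
 + * fields getObjectContext.outputRoute, outputToken, and inputS3Url follow the standard
 + * S3 Object Lambda event shape; the transformation itself is an assumed placeholder.
 + * ```javascript
 + * import { S3Client, WriteGetObjectResponseCommand } from "@aws-sdk/client-s3";
 + * const s3 = new S3Client({});
 + * export const handler = async (event) => {
 + *   const { outputRoute, outputToken, inputS3Url } = event.getObjectContext;
 + *   const original = await fetch(inputS3Url);                  // presigned URL for the untransformed object
 + *   const transformed = (await original.text()).toUpperCase(); // placeholder transformation
 + *   await s3.send(new WriteGetObjectResponseCommand({
 + *     RequestRoute: outputRoute,
 + *     RequestToken: outputToken,
 + *     Body: transformed,
 + *     Metadata: { "my-custom-header": "MyCustomValue" },       // surfaced as x-amz-meta-my-custom-header
 + *   }));
 + *   return { statusCode: 200 };
 + * };
 + * ```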

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { S3Client, WriteGetObjectResponseCommand } from "@aws-sdk/client-s3"; // ES Modules import + * // const { S3Client, WriteGetObjectResponseCommand } = require("@aws-sdk/client-s3"); // CommonJS import + * const client = new S3Client(config); + * const input = { // WriteGetObjectResponseRequest + * RequestRoute: "STRING_VALUE", // required + * RequestToken: "STRING_VALUE", // required + * Body: "MULTIPLE_TYPES_ACCEPTED", // see \@smithy/types -> StreamingBlobPayloadInputTypes + * StatusCode: Number("int"), + * ErrorCode: "STRING_VALUE", + * ErrorMessage: "STRING_VALUE", + * AcceptRanges: "STRING_VALUE", + * CacheControl: "STRING_VALUE", + * ContentDisposition: "STRING_VALUE", + * ContentEncoding: "STRING_VALUE", + * ContentLanguage: "STRING_VALUE", + * ContentLength: Number("long"), + * ContentRange: "STRING_VALUE", + * ContentType: "STRING_VALUE", + * ChecksumCRC32: "STRING_VALUE", + * ChecksumCRC32C: "STRING_VALUE", + * ChecksumCRC64NVME: "STRING_VALUE", + * ChecksumSHA1: "STRING_VALUE", + * ChecksumSHA256: "STRING_VALUE", + * DeleteMarker: true || false, + * ETag: "STRING_VALUE", + * Expires: new Date("TIMESTAMP"), + * Expiration: "STRING_VALUE", + * LastModified: new Date("TIMESTAMP"), + * MissingMeta: Number("int"), + * Metadata: { // Metadata + * "": "STRING_VALUE", + * }, + * ObjectLockMode: "GOVERNANCE" || "COMPLIANCE", + * ObjectLockLegalHoldStatus: "ON" || "OFF", + * ObjectLockRetainUntilDate: new Date("TIMESTAMP"), + * PartsCount: Number("int"), + * ReplicationStatus: "COMPLETE" || "PENDING" || "FAILED" || "REPLICA" || "COMPLETED", + * RequestCharged: "requester", + * Restore: "STRING_VALUE", + * ServerSideEncryption: "AES256" || "aws:kms" || "aws:kms:dsse", + * SSECustomerAlgorithm: "STRING_VALUE", + * SSEKMSKeyId: "STRING_VALUE", + * SSECustomerKeyMD5: "STRING_VALUE", + * StorageClass: "STANDARD" || "REDUCED_REDUNDANCY" || "STANDARD_IA" || "ONEZONE_IA" || "INTELLIGENT_TIERING" || "GLACIER" || "DEEP_ARCHIVE" || "OUTPOSTS" || "GLACIER_IR" || "SNOW" || "EXPRESS_ONEZONE", + * TagCount: Number("int"), + * VersionId: "STRING_VALUE", + * BucketKeyEnabled: true || false, + * }; + * const command = new WriteGetObjectResponseCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param WriteGetObjectResponseCommandInput - {@link WriteGetObjectResponseCommandInput} + * @returns {@link WriteGetObjectResponseCommandOutput} + * @see {@link WriteGetObjectResponseCommandInput} for command's `input` shape. + * @see {@link WriteGetObjectResponseCommandOutput} for command's `response` shape. + * @see {@link S3ClientResolvedConfig | config} for S3Client's `config` shape. + * + * @throws {@link S3ServiceException} + *

 + * Base exception class for all service exceptions from S3 service.

+ * + * + * @public + */ +export declare class WriteGetObjectResponseCommand extends WriteGetObjectResponseCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: WriteGetObjectResponseRequest; + output: {}; + }; + sdk: { + input: WriteGetObjectResponseCommandInput; + output: WriteGetObjectResponseCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/commands/index.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/commands/index.d.ts new file mode 100644 index 00000000..a25a95d9 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/commands/index.d.ts @@ -0,0 +1,98 @@ +export * from "./AbortMultipartUploadCommand"; +export * from "./CompleteMultipartUploadCommand"; +export * from "./CopyObjectCommand"; +export * from "./CreateBucketCommand"; +export * from "./CreateBucketMetadataTableConfigurationCommand"; +export * from "./CreateMultipartUploadCommand"; +export * from "./CreateSessionCommand"; +export * from "./DeleteBucketAnalyticsConfigurationCommand"; +export * from "./DeleteBucketCommand"; +export * from "./DeleteBucketCorsCommand"; +export * from "./DeleteBucketEncryptionCommand"; +export * from "./DeleteBucketIntelligentTieringConfigurationCommand"; +export * from "./DeleteBucketInventoryConfigurationCommand"; +export * from "./DeleteBucketLifecycleCommand"; +export * from "./DeleteBucketMetadataTableConfigurationCommand"; +export * from "./DeleteBucketMetricsConfigurationCommand"; +export * from "./DeleteBucketOwnershipControlsCommand"; +export * from "./DeleteBucketPolicyCommand"; +export * from "./DeleteBucketReplicationCommand"; +export * from "./DeleteBucketTaggingCommand"; +export * from "./DeleteBucketWebsiteCommand"; +export * from "./DeleteObjectCommand"; +export * from "./DeleteObjectTaggingCommand"; +export * from "./DeleteObjectsCommand"; +export * from "./DeletePublicAccessBlockCommand"; +export * from "./GetBucketAccelerateConfigurationCommand"; +export * from "./GetBucketAclCommand"; +export * from "./GetBucketAnalyticsConfigurationCommand"; +export * from "./GetBucketCorsCommand"; +export * from "./GetBucketEncryptionCommand"; +export * from "./GetBucketIntelligentTieringConfigurationCommand"; +export * from "./GetBucketInventoryConfigurationCommand"; +export * from "./GetBucketLifecycleConfigurationCommand"; +export * from "./GetBucketLocationCommand"; +export * from "./GetBucketLoggingCommand"; +export * from "./GetBucketMetadataTableConfigurationCommand"; +export * from "./GetBucketMetricsConfigurationCommand"; +export * from "./GetBucketNotificationConfigurationCommand"; +export * from "./GetBucketOwnershipControlsCommand"; +export * from "./GetBucketPolicyCommand"; +export * from "./GetBucketPolicyStatusCommand"; +export * from "./GetBucketReplicationCommand"; +export * from "./GetBucketRequestPaymentCommand"; +export * from "./GetBucketTaggingCommand"; +export * from "./GetBucketVersioningCommand"; +export * from "./GetBucketWebsiteCommand"; +export * from "./GetObjectAclCommand"; +export * from "./GetObjectAttributesCommand"; +export * from "./GetObjectCommand"; +export * from "./GetObjectLegalHoldCommand"; +export * from "./GetObjectLockConfigurationCommand"; +export * from "./GetObjectRetentionCommand"; +export * from "./GetObjectTaggingCommand"; +export * from "./GetObjectTorrentCommand"; +export * from "./GetPublicAccessBlockCommand"; +export * from "./HeadBucketCommand"; +export * from "./HeadObjectCommand"; +export * from 
"./ListBucketAnalyticsConfigurationsCommand"; +export * from "./ListBucketIntelligentTieringConfigurationsCommand"; +export * from "./ListBucketInventoryConfigurationsCommand"; +export * from "./ListBucketMetricsConfigurationsCommand"; +export * from "./ListBucketsCommand"; +export * from "./ListDirectoryBucketsCommand"; +export * from "./ListMultipartUploadsCommand"; +export * from "./ListObjectVersionsCommand"; +export * from "./ListObjectsCommand"; +export * from "./ListObjectsV2Command"; +export * from "./ListPartsCommand"; +export * from "./PutBucketAccelerateConfigurationCommand"; +export * from "./PutBucketAclCommand"; +export * from "./PutBucketAnalyticsConfigurationCommand"; +export * from "./PutBucketCorsCommand"; +export * from "./PutBucketEncryptionCommand"; +export * from "./PutBucketIntelligentTieringConfigurationCommand"; +export * from "./PutBucketInventoryConfigurationCommand"; +export * from "./PutBucketLifecycleConfigurationCommand"; +export * from "./PutBucketLoggingCommand"; +export * from "./PutBucketMetricsConfigurationCommand"; +export * from "./PutBucketNotificationConfigurationCommand"; +export * from "./PutBucketOwnershipControlsCommand"; +export * from "./PutBucketPolicyCommand"; +export * from "./PutBucketReplicationCommand"; +export * from "./PutBucketRequestPaymentCommand"; +export * from "./PutBucketTaggingCommand"; +export * from "./PutBucketVersioningCommand"; +export * from "./PutBucketWebsiteCommand"; +export * from "./PutObjectAclCommand"; +export * from "./PutObjectCommand"; +export * from "./PutObjectLegalHoldCommand"; +export * from "./PutObjectLockConfigurationCommand"; +export * from "./PutObjectRetentionCommand"; +export * from "./PutObjectTaggingCommand"; +export * from "./PutPublicAccessBlockCommand"; +export * from "./RestoreObjectCommand"; +export * from "./SelectObjectContentCommand"; +export * from "./UploadPartCommand"; +export * from "./UploadPartCopyCommand"; +export * from "./WriteGetObjectResponseCommand"; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/endpoint/EndpointParameters.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/endpoint/EndpointParameters.d.ts new file mode 100644 index 00000000..55a5b3ad --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/endpoint/EndpointParameters.d.ts @@ -0,0 +1,83 @@ +import { Endpoint, EndpointParameters as __EndpointParameters, EndpointV2, Provider } from "@smithy/types"; +/** + * @public + */ +export interface ClientInputEndpointParameters { + region?: string | Provider; + useFipsEndpoint?: boolean | Provider; + useDualstackEndpoint?: boolean | Provider; + endpoint?: string | Provider | Endpoint | Provider | EndpointV2 | Provider; + forcePathStyle?: boolean | Provider; + useAccelerateEndpoint?: boolean | Provider; + useGlobalEndpoint?: boolean | Provider; + disableMultiregionAccessPoints?: boolean | Provider; + useArnRegion?: boolean | Provider; + disableS3ExpressSessionAuth?: boolean | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: (options: T & ClientInputEndpointParameters) => T & ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const commonParams: { + readonly ForcePathStyle: { + readonly type: "clientContextParams"; + readonly name: "forcePathStyle"; + }; + readonly UseArnRegion: { + readonly type: "clientContextParams"; + readonly name: "useArnRegion"; + }; + readonly DisableMultiRegionAccessPoints: 
{ + readonly type: "clientContextParams"; + readonly name: "disableMultiregionAccessPoints"; + }; + readonly Accelerate: { + readonly type: "clientContextParams"; + readonly name: "useAccelerateEndpoint"; + }; + readonly DisableS3ExpressSessionAuth: { + readonly type: "clientContextParams"; + readonly name: "disableS3ExpressSessionAuth"; + }; + readonly UseGlobalEndpoint: { + readonly type: "builtInParams"; + readonly name: "useGlobalEndpoint"; + }; + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Bucket?: string; + Region?: string; + UseFIPS?: boolean; + UseDualStack?: boolean; + Endpoint?: string; + ForcePathStyle?: boolean; + Accelerate?: boolean; + UseGlobalEndpoint?: boolean; + UseObjectLambdaEndpoint?: boolean; + Key?: string; + Prefix?: string; + CopySource?: string; + DisableAccessPoints?: boolean; + DisableMultiRegionAccessPoints?: boolean; + UseArnRegion?: boolean; + UseS3ExpressControlEndpoint?: boolean; + DisableS3ExpressSessionAuth?: boolean; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/endpoint/endpointResolver.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/endpoint/endpointResolver.d.ts new file mode 100644 index 00000000..70a8eaec --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/endpoint/endpointResolver.d.ts @@ -0,0 +1,5 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: (endpointParams: EndpointParameters, context?: { + logger?: Logger; +}) => EndpointV2; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/endpoint/ruleset.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/endpoint/ruleset.d.ts new file mode 100644 index 00000000..4b238994 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/extensionConfiguration.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/extensionConfiguration.d.ts new file mode 100644 index 00000000..9932c444 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +/** + * @internal + */ +export interface S3ExtensionConfiguration extends HttpHandlerExtensionConfiguration, DefaultExtensionConfiguration, AwsRegionExtensionConfiguration, HttpAuthExtensionConfiguration { +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/index.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/index.d.ts new file mode 100644 index 00000000..8ad0422f --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/index.d.ts @@ -0,0 +1,15 @@ +/** + *

+ * + * @packageDocumentation + */ +export * from "./S3Client"; +export * from "./S3"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export type { RuntimeExtension } from "./runtimeExtensions"; +export type { S3ExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./pagination"; +export * from "./waiters"; +export * from "./models"; +export { S3ServiceException } from "./models/S3ServiceException"; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/models/S3ServiceException.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/models/S3ServiceException.d.ts new file mode 100644 index 00000000..c8499f74 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/models/S3ServiceException.d.ts @@ -0,0 +1,14 @@ +import { ServiceException as __ServiceException, ServiceExceptionOptions as __ServiceExceptionOptions } from "@smithy/smithy-client"; +export type { __ServiceExceptionOptions }; +export { __ServiceException }; +/** + * @public + * + * Base exception class for all service exceptions from S3 service. + */ +export declare class S3ServiceException extends __ServiceException { + /** + * @internal + */ + constructor(options: __ServiceExceptionOptions); +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/models/index.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/models/index.d.ts new file mode 100644 index 00000000..ae1cfffa --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/models/index.d.ts @@ -0,0 +1,2 @@ +export * from "./models_0"; +export * from "./models_1"; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/models/models_0.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/models/models_0.d.ts new file mode 100644 index 00000000..f9b2e6aa --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/models/models_0.d.ts @@ -0,0 +1,13408 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { StreamingBlobTypes } from "@smithy/types"; +import { S3ServiceException as __BaseException } from "./S3ServiceException"; +/** + *

 + * Specifies the days since the initiation of an incomplete multipart upload that Amazon S3 will
 + * wait before permanently removing all parts of the upload. For more information, see
 + * Aborting Incomplete Multipart Uploads Using a Bucket Lifecycle Configuration in
 + * the Amazon S3 User Guide.
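 + *
 + * For instance, as part of a bucket lifecycle rule (a hedged sketch; the bucket name and
 + * rule ID are assumed, not from the upstream docs):
 + * ```javascript
 + * import { S3Client, PutBucketLifecycleConfigurationCommand } from "@aws-sdk/client-s3";
 + * const client = new S3Client({});
 + * await client.send(new PutBucketLifecycleConfigurationCommand({
 + *   Bucket: "amzn-s3-demo-bucket", // assumed bucket name
 + *   LifecycleConfiguration: {
 + *     Rules: [{
 + *       ID: "abort-incomplete-mpu", // assumed rule ID
 + *       Status: "Enabled",
 + *       Filter: { Prefix: "" },     // apply to the whole bucket
 + *       AbortIncompleteMultipartUpload: { DaysAfterInitiation: 7 },
 + *     }],
 + *   },
 + * }));
 + * ```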

+ * @public + */ +export interface AbortIncompleteMultipartUpload { + /** + *

Specifies the number of days after which Amazon S3 aborts an incomplete multipart + * upload.

+ * @public + */ + DaysAfterInitiation?: number | undefined; +} +/** + * @public + * @enum + */ +export declare const RequestCharged: { + readonly requester: "requester"; +}; +/** + * @public + */ +export type RequestCharged = (typeof RequestCharged)[keyof typeof RequestCharged]; +/** + * @public + */ +export interface AbortMultipartUploadOutput { + /** + *

If present, indicates that the requester was successfully charged for the + * request.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestCharged?: RequestCharged | undefined; +} +/** + * @public + * @enum + */ +export declare const RequestPayer: { + readonly requester: "requester"; +}; +/** + * @public + */ +export type RequestPayer = (typeof RequestPayer)[keyof typeof RequestPayer]; +/** + * @public + */ +export interface AbortMultipartUploadRequest { + /** + *

The bucket name to which the upload was taking place.

+ *

+ * Directory buckets - + * When you use this operation with a directory bucket, you must use virtual-hosted-style requests in the format + * Bucket-name.s3express-zone-id.region-code.amazonaws.com. Path-style requests are not supported. Directory bucket names must be unique in the chosen Zone (Availability Zone or Local Zone). Bucket names must follow the format + * bucket-base-name--zone-id--x-s3 (for example, + * amzn-s3-demo-bucket--usw2-az1--x-s3). For information about bucket naming + * restrictions, see Directory bucket naming + * rules in the Amazon S3 User Guide.

+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ * + *

Object Lambda access points are not supported by directory buckets.

+ *
+ *

+ * S3 on Outposts - When you use this action with S3 on Outposts, you must direct requests to the S3 on Outposts hostname. The S3 on Outposts hostname takes the + * form + * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use this action with S3 on Outposts, the destination bucket must be the Outposts access point ARN or the access point alias. For more information about S3 on Outposts, see What is S3 on Outposts? in the Amazon S3 User Guide.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

Key of the object for which the multipart upload was initiated.

+ * @public + */ + Key: string | undefined; + /** + *

Upload ID that identifies the multipart upload.

+ * @public + */ + UploadId: string | undefined; + /** + *

Confirms that the requester knows that they will be charged for the request. Bucket + * owners need not specify this parameter in their requests. If either the source or + * destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding + * charges to copy the object. For information about downloading objects from Requester Pays + * buckets, see Downloading Objects in + * Requester Pays Buckets in the Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestPayer?: RequestPayer | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; + /** + *

 + * If present, this header aborts an in-progress multipart upload only if it was initiated on the provided timestamp.
 + * If the initiated timestamp of the multipart upload does not match the provided value, the operation returns a 412 Precondition Failed error.
 + * If the initiated timestamp matches or if the multipart upload doesn’t exist, the operation returns a 204 Success (No Content) response.

+ * + *

This functionality is only supported for directory buckets.

+ *
+ * @public + */ + IfMatchInitiatedTime?: Date | undefined; +} +/** + *

 + * The specified multipart upload does not exist.
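 + *
 + * Because the SDK models this as a concrete class, it can be handled by type (a sketch;
 + * the surrounding client and command are assumed):
 + * ```javascript
 + * import { NoSuchUpload } from "@aws-sdk/client-s3";
 + * try {
 + *   await client.send(command); // e.g. an AbortMultipartUploadCommand
 + * } catch (err) {
 + *   if (err instanceof NoSuchUpload) {
 + *     // upload ID is unknown, or the upload was already aborted or completed
 + *   } else {
 + *     throw err;
 + *   }
 + * }
 + * ```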

+ * @public + */ +export declare class NoSuchUpload extends __BaseException { + readonly name: "NoSuchUpload"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + * @enum + */ +export declare const BucketAccelerateStatus: { + readonly Enabled: "Enabled"; + readonly Suspended: "Suspended"; +}; +/** + * @public + */ +export type BucketAccelerateStatus = (typeof BucketAccelerateStatus)[keyof typeof BucketAccelerateStatus]; +/** + *

 + * Configures the transfer acceleration state for an Amazon S3 bucket. For more information, see
 + * Amazon S3 Transfer Acceleration in the Amazon S3 User Guide.
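 + *
 + * For example, enabling acceleration on a bucket (a sketch; the bucket name is assumed):
 + * ```javascript
 + * import { S3Client, PutBucketAccelerateConfigurationCommand } from "@aws-sdk/client-s3";
 + * const client = new S3Client({});
 + * await client.send(new PutBucketAccelerateConfigurationCommand({
 + *   Bucket: "amzn-s3-demo-bucket",                  // assumed
 + *   AccelerateConfiguration: { Status: "Enabled" }, // or "Suspended"
 + * }));
 + * ```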

+ * @public + */ +export interface AccelerateConfiguration { + /** + *

Specifies the transfer acceleration status of the bucket.

+ * @public + */ + Status?: BucketAccelerateStatus | undefined; +} +/** + * @public + * @enum + */ +export declare const Type: { + readonly AmazonCustomerByEmail: "AmazonCustomerByEmail"; + readonly CanonicalUser: "CanonicalUser"; + readonly Group: "Group"; +}; +/** + * @public + */ +export type Type = (typeof Type)[keyof typeof Type]; +/** + *

Container for the person being granted permissions.

+ * @public + */ +export interface Grantee { + /** + *

Screen name of the grantee.

+ * @public + */ + DisplayName?: string | undefined; + /** + *

 + * Email address of the grantee.
 + *
 + * Note: Using email addresses to specify a grantee is only supported in the following Amazon Web Services Regions:
 + * - US East (N. Virginia)
 + * - US West (N. California)
 + * - US West (Oregon)
 + * - Asia Pacific (Singapore)
 + * - Asia Pacific (Sydney)
 + * - Asia Pacific (Tokyo)
 + * - Europe (Ireland)
 + * - South America (São Paulo)
 + *
 + * For a list of all the Amazon S3 supported Regions and endpoints, see Regions and Endpoints in the Amazon Web Services General Reference.
+ * @public + */ + EmailAddress?: string | undefined; + /** + *

The canonical user ID of the grantee.

+ * @public + */ + ID?: string | undefined; + /** + *

URI of the grantee group.

+ * @public + */ + URI?: string | undefined; + /** + *

Type of grantee

+ * @public + */ + Type: Type | undefined; +} +/** + * @public + * @enum + */ +export declare const Permission: { + readonly FULL_CONTROL: "FULL_CONTROL"; + readonly READ: "READ"; + readonly READ_ACP: "READ_ACP"; + readonly WRITE: "WRITE"; + readonly WRITE_ACP: "WRITE_ACP"; +}; +/** + * @public + */ +export type Permission = (typeof Permission)[keyof typeof Permission]; +/** + *

Container for grant information.

+ * @public + */ +export interface Grant { + /** + *

The person being granted permissions.

+ * @public + */ + Grantee?: Grantee | undefined; + /** + *

Specifies the permission given to the grantee.

+ * @public + */ + Permission?: Permission | undefined; +} +/** + *

Container for the owner's display name and ID.

+ * @public + */ +export interface Owner { + /** + *

 + * Container for the display name of the owner. This value is only supported in the
 + * following Amazon Web Services Regions:
 + * - US East (N. Virginia)
 + * - US West (N. California)
 + * - US West (Oregon)
 + * - Asia Pacific (Singapore)
 + * - Asia Pacific (Sydney)
 + * - Asia Pacific (Tokyo)
 + * - Europe (Ireland)
 + * - South America (São Paulo)
 + *
 + * Note: This functionality is not supported for directory buckets.
+ * @public + */ + DisplayName?: string | undefined; + /** + *

Container for the ID of the owner.

+ * @public + */ + ID?: string | undefined; +} +/** + *

 + * Contains the elements that set the ACL permissions for an object per grantee.
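 + *
 + * A sketch of applying such a policy with PutObjectAclCommand (bucket, key, and canonical
 + * user IDs are assumed placeholders):
 + * ```javascript
 + * import { S3Client, PutObjectAclCommand } from "@aws-sdk/client-s3";
 + * const client = new S3Client({});
 + * await client.send(new PutObjectAclCommand({
 + *   Bucket: "amzn-s3-demo-bucket", // assumed
 + *   Key: "report.pdf",             // assumed
 + *   AccessControlPolicy: {
 + *     Owner: { ID: "owner-canonical-user-id" }, // assumed
 + *     Grants: [{
 + *       Grantee: { Type: "CanonicalUser", ID: "grantee-canonical-user-id" }, // assumed
 + *       Permission: "READ",
 + *     }],
 + *   },
 + * }));
 + * ```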

+ * @public + */ +export interface AccessControlPolicy { + /** + *

A list of grants.

+ * @public + */ + Grants?: Grant[] | undefined; + /** + *

Container for the bucket owner's display name and ID.

+ * @public + */ + Owner?: Owner | undefined; +} +/** + * @public + * @enum + */ +export declare const OwnerOverride: { + readonly Destination: "Destination"; +}; +/** + * @public + */ +export type OwnerOverride = (typeof OwnerOverride)[keyof typeof OwnerOverride]; +/** + *

A container for information about access control for replicas.

+ * @public + */ +export interface AccessControlTranslation { + /** + *

Specifies the replica ownership. For default and valid values, see PUT bucket + * replication in the Amazon S3 API Reference.

+ * @public + */ + Owner: OwnerOverride | undefined; +} +/** + * @public + * @enum + */ +export declare const ChecksumType: { + readonly COMPOSITE: "COMPOSITE"; + readonly FULL_OBJECT: "FULL_OBJECT"; +}; +/** + * @public + */ +export type ChecksumType = (typeof ChecksumType)[keyof typeof ChecksumType]; +/** + * @public + * @enum + */ +export declare const ServerSideEncryption: { + readonly AES256: "AES256"; + readonly aws_kms: "aws:kms"; + readonly aws_kms_dsse: "aws:kms:dsse"; +}; +/** + * @public + */ +export type ServerSideEncryption = (typeof ServerSideEncryption)[keyof typeof ServerSideEncryption]; +/** + * @public + */ +export interface CompleteMultipartUploadOutput { + /** + *

The URI that identifies the newly created object.

+ * @public + */ + Location?: string | undefined; + /** + *

The name of the bucket that contains the newly created object. Does not return the access point + * ARN or access point alias if used.

+ * + *

Access points are not supported by directory buckets.

+ *
+ * @public + */ + Bucket?: string | undefined; + /** + *

The object key of the newly created object.

+ * @public + */ + Key?: string | undefined; + /** + *

If the object expiration is configured, this will contain the expiration date + * (expiry-date) and rule ID (rule-id). The value of + * rule-id is URL-encoded.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + Expiration?: string | undefined; + /** + *

Entity tag that identifies the newly created object's data. Objects with different + * object data will have different entity tags. The entity tag is an opaque string. The entity + * tag may or may not be an MD5 digest of the object data. If the entity tag is not an MD5 + * digest of the object data, it will contain one or more nonhexadecimal characters and/or + * will consist of less than 32 or more than 32 hexadecimal digits. For more information about + * how the entity tag is calculated, see Checking object + * integrity in the Amazon S3 User Guide.

+ * @public + */ + ETag?: string | undefined; + /** + *

 + * The Base64 encoded, 32-bit CRC32 checksum of the object. This checksum is only present if the checksum was uploaded
 + * with the object. When you use an API operation on an object that was uploaded using multipart uploads, this value may not be a direct checksum value of the full object. Instead, it's a calculation based on the checksum values of each individual part. For more information about how checksums are calculated
 + * with multipart uploads, see
 + * Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32?: string | undefined; + /** + *

The Base64 encoded, 32-bit CRC32C checksum of the object. This checksum is only present if the checksum was uploaded + * with the object. When you use an API operation on an object that was uploaded using multipart uploads, this value may not be a direct checksum value of the full object. Instead, it's a calculation based on the checksum values of each individual part. For more information about how checksums are calculated + * with multipart uploads, see + * Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32C?: string | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is + * the same data that was originally sent. This header specifies the Base64 encoded, 64-bit + * CRC64NVME checksum of the object. The CRC64NVME checksum is + * always a full object checksum. For more information, see Checking object integrity + * in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC64NVME?: string | undefined; + /** + *

 + * The Base64 encoded, 160-bit SHA1 digest of the object. This will only be present if the checksum was uploaded
 + * with the object. When you use the API operation on an object that was uploaded using multipart uploads, this value may not be a direct checksum value of the full object. Instead, it's a calculation based on the checksum values of each individual part. For more information about how checksums are calculated
 + * with multipart uploads, see
 + * Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA1?: string | undefined; + /** + *

 + * The Base64 encoded, 256-bit SHA256 digest of the object. This will only be present if the checksum was uploaded
 + * with the object. When you use an API operation on an object that was uploaded using multipart uploads, this value may not be a direct checksum value of the full object. Instead, it's a calculation based on the checksum values of each individual part. For more information about how checksums are calculated
 + * with multipart uploads, see
 + * Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA256?: string | undefined; + /** + *

The checksum type, which determines how part-level checksums are combined to create an + * object-level checksum for multipart objects. You can use this header as a data integrity + * check to verify that the checksum type that is received is the same checksum type that was + * specified during the CreateMultipartUpload request. For more information, see + * Checking object integrity + * in the Amazon S3 User Guide.

+ * @public + */ + ChecksumType?: ChecksumType | undefined; + /** + *

The server-side encryption algorithm used when storing this object in Amazon S3 (for example, + * AES256, aws:kms).

+ * @public + */ + ServerSideEncryption?: ServerSideEncryption | undefined; + /** + *

Version ID of the newly created object, in case the bucket has versioning turned + * on.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + VersionId?: string | undefined; + /** + *

If present, indicates the ID of the KMS key that was used for object encryption.

+ * @public + */ + SSEKMSKeyId?: string | undefined; + /** + *

Indicates whether the multipart upload uses an S3 Bucket Key for server-side encryption + * with Key Management Service (KMS) keys (SSE-KMS).

+ * @public + */ + BucketKeyEnabled?: boolean | undefined; + /** + *

If present, indicates that the requester was successfully charged for the + * request.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestCharged?: RequestCharged | undefined; +} +/** + *

Details of the parts that were uploaded.

+ * @public + */ +export interface CompletedPart { + /** + *

Entity tag returned when the part was uploaded.

+ * @public + */ + ETag?: string | undefined; + /** + *

The Base64 encoded, 32-bit CRC32 checksum of the part. This checksum is present + * if the multipart upload request was created with the CRC32 checksum algorithm. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32?: string | undefined; + /** + *

The Base64 encoded, 32-bit CRC32C checksum of the part. This checksum is present + * if the multipart upload request was created with the CRC32C checksum algorithm. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32C?: string | undefined; + /** + *

 + * The Base64 encoded, 64-bit CRC64NVME checksum of the part. This checksum is present
 + * if the multipart upload request was created with the CRC64NVME checksum algorithm, or if the object was uploaded without a
 + * checksum (and Amazon S3 added the default checksum, CRC64NVME, to the uploaded object). For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC64NVME?: string | undefined; + /** + *

The Base64 encoded, 160-bit SHA1 checksum of the part. This checksum is present + * if the multipart upload request was created with the SHA1 checksum algorithm. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA1?: string | undefined; + /** + *

The Base64 encoded, 256-bit SHA256 checksum of the part. This checksum is present + * if the multipart upload request was created with the SHA256 checksum algorithm. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA256?: string | undefined; + /** + *

Part number that identifies the part. This is a positive integer between 1 and + * 10,000.

+ * + *
    + *
  • + *

    + * General purpose buckets - In + * CompleteMultipartUpload, when a additional checksum (including + * x-amz-checksum-crc32, x-amz-checksum-crc32c, + * x-amz-checksum-sha1, or x-amz-checksum-sha256) is + * applied to each part, the PartNumber must start at 1 and the part + * numbers must be consecutive. Otherwise, Amazon S3 generates an HTTP 400 Bad + * Request status code and an InvalidPartOrder error + * code.

    + *
  • + *
  • + *

    + * Directory buckets - In + * CompleteMultipartUpload, the PartNumber must start at + * 1 and the part numbers must be consecutive.

    + *
  • + *
+ *
+ * @public + */ + PartNumber?: number | undefined; +} +/** + *

The container for the completed multipart upload details.

+ * @public + */ +export interface CompletedMultipartUpload { + /** + *

Array of CompletedPart data types.

+ *

 + * If you do not supply a valid Part with your request, the service sends back
 + * an HTTP 400 response.
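 + *
 + * For example, completing an upload with the ETags returned by each UploadPart call
 + * (a sketch; the bucket, key, and identifier values are assumed):
 + * ```javascript
 + * import { S3Client, CompleteMultipartUploadCommand } from "@aws-sdk/client-s3";
 + * const client = new S3Client({});
 + * await client.send(new CompleteMultipartUploadCommand({
 + *   Bucket: "amzn-s3-demo-bucket", // assumed
 + *   Key: "examplelargeobject",     // assumed
 + *   UploadId: uploadId,            // returned by CreateMultipartUpload
 + *   MultipartUpload: {
 + *     Parts: [
 + *       { PartNumber: 1, ETag: etag1 }, // ETags returned by the UploadPart calls
 + *       { PartNumber: 2, ETag: etag2 },
 + *     ],
 + *   },
 + * }));
 + * ```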

+ * @public + */ + Parts?: CompletedPart[] | undefined; +} +/** + * @public + */ +export interface CompleteMultipartUploadRequest { + /** + *

Name of the bucket to which the multipart upload was initiated.

+ *

+ * Directory buckets - + * When you use this operation with a directory bucket, you must use virtual-hosted-style requests in the format + * Bucket-name.s3express-zone-id.region-code.amazonaws.com. Path-style requests are not supported. Directory bucket names must be unique in the chosen Zone (Availability Zone or Local Zone). Bucket names must follow the format + * bucket-base-name--zone-id--x-s3 (for example, + * amzn-s3-demo-bucket--usw2-az1--x-s3). For information about bucket naming + * restrictions, see Directory bucket naming + * rules in the Amazon S3 User Guide.

+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ * + *

Object Lambda access points are not supported by directory buckets.

+ *
+ *

+ * S3 on Outposts - When you use this action with S3 on Outposts, you must direct requests to the S3 on Outposts hostname. The S3 on Outposts hostname takes the + * form + * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use this action with S3 on Outposts, the destination bucket must be the Outposts access point ARN or the access point alias. For more information about S3 on Outposts, see What is S3 on Outposts? in the Amazon S3 User Guide.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

Object key for which the multipart upload was initiated.

+ * @public + */ + Key: string | undefined; + /** + *

The container for the multipart upload request information.

+ * @public + */ + MultipartUpload?: CompletedMultipartUpload | undefined; + /** + *

ID for the initiated multipart upload.

+ * @public + */ + UploadId: string | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is the same data that was originally sent. + * This header specifies the Base64 encoded, 32-bit CRC32 checksum of the object. For more information, see + * Checking object integrity in the + * Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32?: string | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is the same data that was originally sent. + * This header specifies the Base64 encoded, 32-bit CRC32C checksum of the object. For more information, see + * Checking object integrity in the + * Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32C?: string | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is + * the same data that was originally sent. This header specifies the Base64 encoded, 64-bit + * CRC64NVME checksum of the object. The CRC64NVME checksum is + * always a full object checksum. For more information, see Checking object integrity + * in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC64NVME?: string | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is the same data that was originally sent. + * This header specifies the Base64 encoded, 160-bit SHA1 digest of the object. For more information, see + * Checking object integrity in the + * Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA1?: string | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is the same data that was originally sent. + * This header specifies the Base64 encoded, 256-bit SHA256 digest of the object. For more information, see + * Checking object integrity in the + * Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA256?: string | undefined; + /** + *

This header specifies the checksum type of the object, which determines how part-level + * checksums are combined to create an object-level checksum for multipart objects. You can + * use this header as a data integrity check to verify that the checksum type that is received + * is the same checksum that was specified. If the checksum type doesn’t match the checksum + * type that was specified for the object during the CreateMultipartUpload + * request, it’ll result in a BadDigest error. For more information, see Checking + * object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumType?: ChecksumType | undefined; + /** + *

The expected total object size of the multipart upload request. If there’s a mismatch + * between the specified object size value and the actual object size value, it results in an + * HTTP 400 InvalidRequest error.

+ * @public + */ + MpuObjectSize?: number | undefined; + /** + *

Confirms that the requester knows that they will be charged for the request. Bucket + * owners need not specify this parameter in their requests. If either the source or + * destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding + * charges to copy the object. For information about downloading objects from Requester Pays + * buckets, see Downloading Objects in + * Requester Pays Buckets in the Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestPayer?: RequestPayer | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; + /** + *

Uploads the object only if the ETag (entity tag) value provided during the WRITE + * operation matches the ETag of the object in S3. If the ETag values do not match, the + * operation returns a 412 Precondition Failed error.

+ *

 + * If a conflicting operation occurs during the upload, S3 returns a 409
 + * ConditionalRequestConflict response. On a 409 failure, you should fetch the object's ETag, re-initiate the
 + * multipart upload with CreateMultipartUpload, and re-upload each part.

+ *

Expects the ETag value as a string.

+ *

For more information about conditional requests, see RFC 7232, or Conditional requests in the Amazon S3 User Guide.

+ * @public + */ + IfMatch?: string | undefined; + /** + *

Uploads the object only if the object key name does not already exist in the bucket + * specified. Otherwise, Amazon S3 returns a 412 Precondition Failed error.

+ *

 + * If a conflicting operation occurs during the upload, S3 returns a 409
 + * ConditionalRequestConflict response. On a 409 failure, you should re-initiate the
 + * multipart upload with CreateMultipartUpload and re-upload each part.

+ *

Expects the '*' (asterisk) character.

+ *

 + * For more information about conditional requests, see RFC 7232, or Conditional requests in the Amazon S3 User Guide.
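 + *
 + * For instance, to complete the upload only if the key does not already exist (a sketch;
 + * the input values are assumed):
 + * ```javascript
 + * const command = new CompleteMultipartUploadCommand({
 + *   Bucket: "amzn-s3-demo-bucket",     // assumed
 + *   Key: "examplelargeobject",         // assumed
 + *   UploadId: uploadId,                // from CreateMultipartUpload
 + *   MultipartUpload: { Parts: parts }, // the uploaded parts
 + *   IfNoneMatch: "*",                  // fail with 412 Precondition Failed if the key exists
 + * });
 + * ```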

+ * @public + */ + IfNoneMatch?: string | undefined; + /** + *

The server-side encryption (SSE) algorithm used to encrypt the object. This parameter is + * required only when the object was created using a checksum algorithm or if your bucket + * policy requires the use of SSE-C. For more information, see Protecting data using SSE-C keys in the Amazon S3 User + * Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerAlgorithm?: string | undefined; + /** + *

The server-side encryption (SSE) customer managed key. This parameter is needed only when the object was created using a checksum algorithm. + * For more information, see + * Protecting data using SSE-C keys in the + * Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerKey?: string | undefined; + /** + *

The MD5 server-side encryption (SSE) customer managed key. This parameter is needed only when the object was created using a checksum + * algorithm. For more information, + * see Protecting data using SSE-C keys in the + * Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerKeyMD5?: string | undefined; +} +/** + *

Container for all response elements.

+ * @public + */ +export interface CopyObjectResult { + /** + *

Returns the ETag of the new object. The ETag reflects only changes to the contents of an + * object, not its metadata.

+ * @public + */ + ETag?: string | undefined; + /** + *

Creation date of the object.

+ * @public + */ + LastModified?: Date | undefined; + /** + *

The checksum type that is used to calculate the object’s + * checksum value. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumType?: ChecksumType | undefined; + /** + *

 + * The Base64 encoded, 32-bit CRC32 checksum of the object. This checksum is only present if the checksum was uploaded
 + * with the object. For more information, see
 + * Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32?: string | undefined; + /** + *

 + * The Base64 encoded, 32-bit CRC32C checksum of the object. This will only be present if the checksum was uploaded
 + * with the object. For more information, see
 + * Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32C?: string | undefined; + /** + *

The Base64 encoded, 64-bit CRC64NVME checksum of the object. This checksum is present + * if the object being copied was uploaded with the CRC64NVME checksum algorithm, or if the object was uploaded without a + * checksum (and Amazon S3 added the default checksum, CRC64NVME, to the uploaded object). For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC64NVME?: string | undefined; + /** + *

 + * The Base64 encoded, 160-bit SHA1 digest of the object. This will only be present if the checksum was uploaded
 + * with the object. For more information, see
 + * Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA1?: string | undefined; + /** + *

 + * The Base64 encoded, 256-bit SHA256 digest of the object. This will only be present if the checksum was uploaded
 + * with the object. For more information, see
 + * Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA256?: string | undefined; +} +/** + * @public + */ +export interface CopyObjectOutput { + /** + *

Container for all response elements.

+ * @public + */ + CopyObjectResult?: CopyObjectResult | undefined; + /** + *

If the object expiration is configured, the response includes this header.

+ * + *

Object expiration information is not returned in directory buckets and this header returns the value "NotImplemented" in all responses for directory buckets.

+ *
+ * @public + */ + Expiration?: string | undefined; + /** + *

Version ID of the source object that was copied.

+ * + *

This functionality is not supported when the source object is in a directory bucket.

+ *
+ * @public + */ + CopySourceVersionId?: string | undefined; + /** + *

Version ID of the newly created copy.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + VersionId?: string | undefined; + /** + *

The server-side encryption algorithm used when you store this object in Amazon S3 (for + * example, AES256, aws:kms, aws:kms:dsse).

+ * @public + */ + ServerSideEncryption?: ServerSideEncryption | undefined; + /** + *

If server-side encryption with a customer-provided encryption key was requested, the + * response will include this header to confirm the encryption algorithm that's used.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerAlgorithm?: string | undefined; + /** + *

If server-side encryption with a customer-provided encryption key was requested, the + * response will include this header to provide the round-trip message integrity verification + * of the customer-provided encryption key.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerKeyMD5?: string | undefined; + /** + *

If present, indicates the ID of the KMS key that was used for object encryption.

+ * @public + */ + SSEKMSKeyId?: string | undefined; + /** + *

If present, indicates the Amazon Web Services KMS Encryption Context to use for object encryption. The + * value of this header is a Base64 encoded UTF-8 string holding JSON with the encryption + * context key-value pairs.

+ * @public + */ + SSEKMSEncryptionContext?: string | undefined; + /** + *

Indicates whether the copied object uses an S3 Bucket Key for server-side encryption + * with Key Management Service (KMS) keys (SSE-KMS).

+ * @public + */ + BucketKeyEnabled?: boolean | undefined; + /** + *

If present, indicates that the requester was successfully charged for the + * request.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestCharged?: RequestCharged | undefined; +} +/** + * @public + * @enum + */ +export declare const ObjectCannedACL: { + readonly authenticated_read: "authenticated-read"; + readonly aws_exec_read: "aws-exec-read"; + readonly bucket_owner_full_control: "bucket-owner-full-control"; + readonly bucket_owner_read: "bucket-owner-read"; + readonly private: "private"; + readonly public_read: "public-read"; + readonly public_read_write: "public-read-write"; +}; +/** + * @public + */ +export type ObjectCannedACL = (typeof ObjectCannedACL)[keyof typeof ObjectCannedACL]; +/** + * @public + * @enum + */ +export declare const ChecksumAlgorithm: { + readonly CRC32: "CRC32"; + readonly CRC32C: "CRC32C"; + readonly CRC64NVME: "CRC64NVME"; + readonly SHA1: "SHA1"; + readonly SHA256: "SHA256"; +}; +/** + * @public + */ +export type ChecksumAlgorithm = (typeof ChecksumAlgorithm)[keyof typeof ChecksumAlgorithm]; +/** + * @public + * @enum + */ +export declare const MetadataDirective: { + readonly COPY: "COPY"; + readonly REPLACE: "REPLACE"; +}; +/** + * @public + */ +export type MetadataDirective = (typeof MetadataDirective)[keyof typeof MetadataDirective]; +/** + * @public + * @enum + */ +export declare const ObjectLockLegalHoldStatus: { + readonly OFF: "OFF"; + readonly ON: "ON"; +}; +/** + * @public + */ +export type ObjectLockLegalHoldStatus = (typeof ObjectLockLegalHoldStatus)[keyof typeof ObjectLockLegalHoldStatus]; +/** + * @public + * @enum + */ +export declare const ObjectLockMode: { + readonly COMPLIANCE: "COMPLIANCE"; + readonly GOVERNANCE: "GOVERNANCE"; +}; +/** + * @public + */ +export type ObjectLockMode = (typeof ObjectLockMode)[keyof typeof ObjectLockMode]; +/** + * @public + * @enum + */ +export declare const StorageClass: { + readonly DEEP_ARCHIVE: "DEEP_ARCHIVE"; + readonly EXPRESS_ONEZONE: "EXPRESS_ONEZONE"; + readonly GLACIER: "GLACIER"; + readonly GLACIER_IR: "GLACIER_IR"; + readonly INTELLIGENT_TIERING: "INTELLIGENT_TIERING"; + readonly ONEZONE_IA: "ONEZONE_IA"; + readonly OUTPOSTS: "OUTPOSTS"; + readonly REDUCED_REDUNDANCY: "REDUCED_REDUNDANCY"; + readonly SNOW: "SNOW"; + readonly STANDARD: "STANDARD"; + readonly STANDARD_IA: "STANDARD_IA"; +}; +/** + * @public + */ +export type StorageClass = (typeof StorageClass)[keyof typeof StorageClass]; +/** + * @public + * @enum + */ +export declare const TaggingDirective: { + readonly COPY: "COPY"; + readonly REPLACE: "REPLACE"; +}; +/** + * @public + */ +export type TaggingDirective = (typeof TaggingDirective)[keyof typeof TaggingDirective]; +/** + * @public + */ +export interface CopyObjectRequest { + /** + *

The canned access control list (ACL) to apply to the object.

+ *

When you copy an object, the ACL metadata is not preserved and is set to + * private by default. Only the owner has full access control. To override the + * default ACL setting, specify a new ACL when you generate a copy request. For more + * information, see Using ACLs.

+ *

If the destination bucket that you're copying objects to uses the bucket owner enforced + * setting for S3 Object Ownership, ACLs are disabled and no longer affect permissions. + * Buckets that use this setting only accept PUT requests that don't specify an + * ACL or PUT requests that specify bucket owner full control ACLs, such as the + * bucket-owner-full-control canned ACL or an equivalent form of this ACL + * expressed in the XML format. For more information, see Controlling ownership of + * objects and disabling ACLs in the Amazon S3 User Guide.

+ * + *
 + * Note:
 + * - If your destination bucket uses the bucket owner enforced setting for Object
 + *   Ownership, all objects written to the bucket by any account will be owned by the
 + *   bucket owner.
 + * - This functionality is not supported for directory buckets.
 + * - This functionality is not supported for Amazon S3 on Outposts.
+ * @public + */ + ACL?: ObjectCannedACL | undefined; + /** + *

The name of the destination bucket.

+ *

+ * Directory buckets - When you use this operation with a directory bucket, you must use virtual-hosted-style requests in the format + * Bucket-name.s3express-zone-id.region-code.amazonaws.com. Path-style requests are not supported. Directory bucket names must be unique in the chosen Zone (Availability Zone or Local Zone). Bucket names must follow the format + * bucket-base-name--zone-id--x-s3 (for example, + * amzn-s3-demo-bucket--usw2-az1--x-s3). For information about bucket naming + * restrictions, see Directory bucket naming + * rules in the Amazon S3 User Guide.

+ * + *

Copying objects across different Amazon Web Services Regions isn't supported when the source or destination bucket is in Amazon Web Services Local Zones. The source and destination buckets must have the same parent Amazon Web Services Region. Otherwise, + * you get an HTTP 400 Bad Request error with the error code InvalidRequest.

+ *
+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ * + *

Object Lambda access points are not supported by directory buckets.

+ *
+ *

 + * S3 on Outposts - When you use this action with S3 on Outposts, you must use the Outpost bucket access point ARN or the access point alias for the destination bucket.
 + * You can only copy objects within the same Outpost bucket. Copying objects across different Amazon Web Services Outposts, between buckets on the same Outpost, or between Outposts buckets and any other bucket types is not supported.
 + * For more information about S3 on Outposts, see What is S3 on Outposts? in the S3 on Outposts guide.
 + * When you use this action with S3 on Outposts through the REST API, you must direct requests to the S3 on Outposts hostname, in the format
 + * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. The hostname isn't required when you use the Amazon Web Services CLI or SDKs.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

Specifies the caching behavior along the request/reply chain.

+ * @public + */ + CacheControl?: string | undefined; + /** + *

Indicates the algorithm that you want Amazon S3 to use to create the checksum for the object. For more information, see + * Checking object integrity in + * the Amazon S3 User Guide.

+ *

When you copy an object, if the source object has a checksum, that checksum value will + * be copied to the new object by default. If the CopyObject request does not + * include this x-amz-checksum-algorithm header, the checksum algorithm will be + * copied from the source object to the destination object (if it's present on the source + * object). You can optionally specify a different checksum algorithm to use with the + * x-amz-checksum-algorithm header. Unrecognized or unsupported values will + * respond with the HTTP status code 400 Bad Request.

+ * + *

For directory buckets, when you use Amazon Web Services SDKs, CRC32 is the default checksum algorithm that's used for performance.

+ *
+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

Specifies presentational information for the object. Indicates whether an object should + * be displayed in a web browser or downloaded as a file. It allows specifying the desired + * filename for the downloaded file.

+ * @public + */ + ContentDisposition?: string | undefined; + /** + *

Specifies what content encodings have been applied to the object and thus what decoding + * mechanisms must be applied to obtain the media-type referenced by the Content-Type header + * field.

+ * + *

For directory buckets, only the aws-chunked value is supported in this header field.

+ *
+ * @public + */ + ContentEncoding?: string | undefined; + /** + *

The language the content is in.

+ * @public + */ + ContentLanguage?: string | undefined; + /** + *

A standard MIME type that describes the format of the object data.

+ * @public + */ + ContentType?: string | undefined; + /** + *

Specifies the source object for the copy operation. The source object can be up to 5 GB. + * If the source object is an object that was uploaded by using a multipart upload, the object + * copy will be a single part object after the source object is copied to the destination + * bucket.

+ *

You specify the value of the copy source in one of two formats, depending on whether you + * want to access the source object through an access point:

+ *
    + *
  • + *

    For objects not accessed through an access point, specify the name of the source bucket + * and the key of the source object, separated by a slash (/). For example, to copy the + * object reports/january.pdf from the general purpose bucket + * awsexamplebucket, use + * awsexamplebucket/reports/january.pdf. The value must be URL-encoded. + * To copy the object reports/january.pdf from the directory bucket + * awsexamplebucket--use1-az5--x-s3, use + * awsexamplebucket--use1-az5--x-s3/reports/january.pdf. The value must + * be URL-encoded.

    + *
  • + *
  • + *

    For objects accessed through access points, specify the Amazon Resource Name (ARN) of the object as accessed through the access point, in the format arn:aws:s3:::accesspoint//object/. For example, to copy the object reports/january.pdf through access point my-access-point owned by account 123456789012 in Region us-west-2, use the URL encoding of arn:aws:s3:us-west-2:123456789012:accesspoint/my-access-point/object/reports/january.pdf. The value must be URL encoded.

    + * + *
      + *
    • + *

      Amazon S3 supports copy operations using Access points only when the source and destination buckets are in the same Amazon Web Services Region.

      + *
    • + *
    • + *

      Access points are not supported by directory buckets.

      + *
    • + *
    + *
    + *

    Alternatively, for objects accessed through Amazon S3 on Outposts, specify the ARN of the object as accessed in the format arn:aws:s3-outposts:::outpost//object/. For example, to copy the object reports/january.pdf through outpost my-outpost owned by account 123456789012 in Region us-west-2, use the URL encoding of arn:aws:s3-outposts:us-west-2:123456789012:outpost/my-outpost/object/reports/january.pdf. The value must be URL-encoded.

    + *
  • + *
+ *

If your source bucket versioning is enabled, the x-amz-copy-source header + * by default identifies the current version of an object to copy. If the current version is a + * delete marker, Amazon S3 behaves as if the object was deleted. To copy a different version, use + * the versionId query parameter. Specifically, append + * ?versionId= to the value (for example, + * awsexamplebucket/reports/january.pdf?versionId=QUpfdndhfd8438MNFDN93jdnJFkdmqnh893). + * If you don't specify a version ID, Amazon S3 copies the latest version of the source + * object.

+ *

If you enable versioning on the destination bucket, Amazon S3 generates a unique version ID + * for the copied object. This version ID is different from the version ID of the source + * object. Amazon S3 returns the version ID of the copied object in the + * x-amz-version-id response header in the response.

+ *

If you do not enable versioning or suspend it on the destination bucket, the version ID + * that Amazon S3 generates in the x-amz-version-id response header is always + * null.

+ * + *

+ * Directory buckets - + * S3 Versioning isn't enabled and supported for directory buckets.

+ *
+ * @public + */ + CopySource: string | undefined; + /** + *
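For illustration, a hedged sketch of assembling CopySource values in the formats described above; the buckets, keys, ARN, and version ID are taken from the doc's own examples:

// Sketch: CopySource formats from the documentation above.
const byBucketAndKey = "awsexamplebucket/reports/january.pdf"; // bucket/key form
const byAccessPointArn =
  "arn:aws:s3:us-west-2:123456789012:accesspoint/my-access-point/object/reports/january.pdf";
const specificVersion =
  "awsexamplebucket/reports/january.pdf?versionId=QUpfdndhfd8438MNFDN93jdnJFkdmqnh893";
// Keys containing special characters should be URL-encoded, e.g. with encodeURIComponent.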

+  /**
+   * Copies the object if its entity tag (ETag) matches the specified tag.
+   *
+   * If both the x-amz-copy-source-if-match and x-amz-copy-source-if-unmodified-since headers are present in the request and evaluate as follows, Amazon S3 returns 200 OK and copies the data:
+   * - x-amz-copy-source-if-match condition evaluates to true
+   * - x-amz-copy-source-if-unmodified-since condition evaluates to false
+   * @public
+   */
+  CopySourceIfMatch?: string | undefined;
+  /**
+   * Copies the object if it has been modified since the specified time.
+   *
+   * If both the x-amz-copy-source-if-none-match and x-amz-copy-source-if-modified-since headers are present in the request and evaluate as follows, Amazon S3 returns the 412 Precondition Failed response code:
+   * - x-amz-copy-source-if-none-match condition evaluates to false
+   * - x-amz-copy-source-if-modified-since condition evaluates to true
+   * @public
+   */
+  CopySourceIfModifiedSince?: Date | undefined;
+  /**
+   * Copies the object if its entity tag (ETag) is different than the specified ETag.
+   *
+   * If both the x-amz-copy-source-if-none-match and x-amz-copy-source-if-modified-since headers are present in the request and evaluate as follows, Amazon S3 returns the 412 Precondition Failed response code:
+   * - x-amz-copy-source-if-none-match condition evaluates to false
+   * - x-amz-copy-source-if-modified-since condition evaluates to true
+   * @public
+   */
+  CopySourceIfNoneMatch?: string | undefined;
+  /**
+   * Copies the object if it hasn't been modified since the specified time.
+   *
+   * If both the x-amz-copy-source-if-match and x-amz-copy-source-if-unmodified-since headers are present in the request and evaluate as follows, Amazon S3 returns 200 OK and copies the data:
+   * - x-amz-copy-source-if-match condition evaluates to true
+   * - x-amz-copy-source-if-unmodified-since condition evaluates to false
+   * @public
+   */
+  CopySourceIfUnmodifiedSince?: Date | undefined;
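As a sketch of the precondition pairing described above (the ETag and bucket names are hypothetical), a copy that proceeds only while the source is unchanged might look like:

import { S3Client, CopyObjectCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-west-2" });
// Per the rules above: 200 OK when the if-match condition is true
// and the if-unmodified-since condition is false.
await s3.send(new CopyObjectCommand({
  Bucket: "amzn-s3-demo-destination-bucket",              // hypothetical
  Key: "reports/january.pdf",
  CopySource: "awsexamplebucket/reports/january.pdf",
  CopySourceIfMatch: '"9b2cf535f27731c974343645a3985328"', // hypothetical ETag
  CopySourceIfUnmodifiedSince: new Date("2024-01-01T00:00:00Z"),
}));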

+  /**
+   * The date and time at which the object is no longer cacheable.
+   * @public
+   */
+  Expires?: Date | undefined;
+  /**
+   * Gives the grantee READ, READ_ACP, and WRITE_ACP permissions on the object.
+   *
+   * Note:
+   * - This functionality is not supported for directory buckets.
+   * - This functionality is not supported for Amazon S3 on Outposts.
+   * @public
+   */
+  GrantFullControl?: string | undefined;
+  /**
+   * Allows grantee to read the object data and its metadata.
+   *
+   * Note:
+   * - This functionality is not supported for directory buckets.
+   * - This functionality is not supported for Amazon S3 on Outposts.
+   * @public
+   */
+  GrantRead?: string | undefined;
+  /**
+   * Allows grantee to read the object ACL.
+   *
+   * Note:
+   * - This functionality is not supported for directory buckets.
+   * - This functionality is not supported for Amazon S3 on Outposts.
+   * @public
+   */
+  GrantReadACP?: string | undefined;
+  /**
+   * Allows grantee to write the ACL for the applicable object.
+   *
+   * Note:
+   * - This functionality is not supported for directory buckets.
+   * - This functionality is not supported for Amazon S3 on Outposts.
+   * @public
+   */
+  GrantWriteACP?: string | undefined;
+  /**
+   * The key of the destination object.
+   * @public
+   */
+  Key: string | undefined;
+  /**
+   * A map of metadata to store with the object in S3.
+   * @public
+   */
+  Metadata?: Record<string, string> | undefined;
+  /**
+   * Specifies whether the metadata is copied from the source object or replaced with metadata that's provided in the request. When copying an object, you can preserve all metadata (the default) or specify new metadata. If this header isn't specified, COPY is the default behavior.
+   *
+   * General purpose bucket - For general purpose buckets, when you grant permissions, you can use the s3:x-amz-metadata-directive condition key to enforce certain metadata behavior when objects are uploaded. For more information, see Amazon S3 condition key examples in the Amazon S3 User Guide.
+   *
+   * Note: x-amz-website-redirect-location is unique to each object and is not copied when using the x-amz-metadata-directive header. To copy the value, you must specify x-amz-website-redirect-location in the request header.
+   * @public
+   */
+  MetadataDirective?: MetadataDirective | undefined;
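A minimal sketch of the REPLACE behavior described above, assuming hypothetical bucket and key names; the copy writes new metadata instead of carrying the source metadata over:

import { S3Client, CopyObjectCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-west-2" });
await s3.send(new CopyObjectCommand({
  Bucket: "amzn-s3-demo-destination-bucket",           // hypothetical
  Key: "reports/january.pdf",
  CopySource: "awsexamplebucket/reports/january.pdf",
  MetadataDirective: "REPLACE",                        // default is COPY
  Metadata: { department: "finance" },                 // replaces source metadata
  ContentType: "application/pdf",
}));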

+  /**
+   * Specifies whether the object tag-set is copied from the source object or replaced with the tag-set that's provided in the request.
+   *
+   * The default value is COPY.
+   *
+   * Note: Directory buckets - For directory buckets in a CopyObject operation, only the empty tag-set is supported. Any requests that attempt to write non-empty tags into directory buckets will receive a 501 Not Implemented status code. When the destination bucket is a directory bucket, you will receive a 501 Not Implemented response in any of the following situations:
+   * - When you attempt to COPY the tag-set from an S3 source object that has non-empty tags.
+   * - When you attempt to REPLACE the tag-set of a source object and set a non-empty value to x-amz-tagging.
+   * - When you don't set the x-amz-tagging-directive header and the source object has non-empty tags. This is because the default value of x-amz-tagging-directive is COPY.
+   *
+   * Because only the empty tag-set is supported for directory buckets in a CopyObject operation, the following situations are allowed:
+   * - When you attempt to COPY the tag-set from a directory bucket source object that has no tags to a general purpose bucket. It copies an empty tag-set to the destination object.
+   * - When you attempt to REPLACE the tag-set of a directory bucket source object and set the x-amz-tagging value of the directory bucket destination object to empty.
+   * - When you attempt to REPLACE the tag-set of a general purpose bucket source object that has non-empty tags and set the x-amz-tagging value of the directory bucket destination object to empty.
+   * - When you attempt to REPLACE the tag-set of a directory bucket source object and don't set the x-amz-tagging value of the directory bucket destination object. This is because the default value of x-amz-tagging is the empty value.
+   * @public
+   */
+  TaggingDirective?: TaggingDirective | undefined;

+  /**
+   * The server-side encryption algorithm used when storing this object in Amazon S3. Unrecognized or unsupported values won't write a destination object and will receive a 400 Bad Request response.
+   *
+   * Amazon S3 automatically encrypts all new objects that are copied to an S3 bucket. When copying an object, if you don't specify encryption information in your copy request, the encryption setting of the target object is set to the default encryption configuration of the destination bucket. By default, all buckets have a base level of encryption configuration that uses server-side encryption with Amazon S3 managed keys (SSE-S3). If the destination bucket has a different default encryption configuration, Amazon S3 uses the corresponding encryption key to encrypt the target object copy.
+   *
+   * With server-side encryption, Amazon S3 encrypts your data as it writes your data to disks in its data centers and decrypts the data when you access it. For more information about server-side encryption, see Using Server-Side Encryption in the Amazon S3 User Guide.
+   *
+   * General purpose buckets
+   * - For general purpose buckets, there are the following supported options for server-side encryption: server-side encryption with Key Management Service (KMS) keys (SSE-KMS), dual-layer server-side encryption with Amazon Web Services KMS keys (DSSE-KMS), and server-side encryption with customer-provided encryption keys (SSE-C). Amazon S3 uses the corresponding KMS key, or a customer-provided key to encrypt the target object copy.
+   * - When you perform a CopyObject operation, if you want to use a different type of encryption setting for the target object, you can specify appropriate encryption-related headers to encrypt the target object with an Amazon S3 managed key, a KMS key, or a customer-provided key. If the encryption setting in your request is different from the default encryption configuration of the destination bucket, the encryption setting in your request takes precedence.
+   *
+   * Directory buckets
+   * - For directory buckets, there are only two supported options for server-side encryption: server-side encryption with Amazon S3 managed keys (SSE-S3) (AES256) and server-side encryption with KMS keys (SSE-KMS) (aws:kms). We recommend that the bucket's default encryption uses the desired encryption configuration and you don't override the bucket default encryption in your CreateSession requests or PUT object requests. Then, new objects are automatically encrypted with the desired encryption settings. For more information, see Protecting data with server-side encryption in the Amazon S3 User Guide. For more information about the encryption overriding behaviors in directory buckets, see Specifying server-side encryption with KMS for new object uploads.
+   * - To encrypt new object copies to a directory bucket with SSE-KMS, we recommend you specify SSE-KMS as the directory bucket's default encryption configuration with a KMS key (specifically, a customer managed key). The Amazon Web Services managed key (aws/s3) isn't supported. Your SSE-KMS configuration can only support 1 customer managed key per directory bucket for the lifetime of the bucket. After you specify a customer managed key for SSE-KMS, you can't override the customer managed key for the bucket's SSE-KMS configuration. Then, when you perform a CopyObject operation and want to specify server-side encryption settings for new object copies with SSE-KMS in the encryption-related request headers, you must ensure the encryption key is the same customer managed key that you specified for the directory bucket's default encryption configuration.
+   * @public
+   */
+  ServerSideEncryption?: ServerSideEncryption | undefined;
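A hedged sketch of overriding the destination's default encryption during the copy, per the precedence rules above; the KMS key ARN and bucket names are hypothetical:

import { S3Client, CopyObjectCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-west-2" });
await s3.send(new CopyObjectCommand({
  Bucket: "amzn-s3-demo-destination-bucket",
  Key: "reports/january.pdf",
  CopySource: "awsexamplebucket/reports/january.pdf",
  ServerSideEncryption: "aws:kms",
  // Hypothetical customer managed key:
  SSEKMSKeyId: "arn:aws:kms:us-west-2:123456789012:key/11111111-2222-3333-4444-555555555555",
  BucketKeyEnabled: true, // general purpose buckets only; see BucketKeyEnabled below
}));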

+  /**
+   * If the x-amz-storage-class header is not used, the copied object will be stored in the STANDARD Storage Class by default. The STANDARD storage class provides high durability and high availability. Depending on performance needs, you can specify a different Storage Class.
+   *
+   * Note:
+   * - Directory buckets - Directory buckets only support EXPRESS_ONEZONE (the S3 Express One Zone storage class) in Availability Zones and ONEZONE_IA (the S3 One Zone-Infrequent Access storage class) in Dedicated Local Zones. Unsupported storage class values won't write a destination object and will respond with the HTTP status code 400 Bad Request.
+   * - Amazon S3 on Outposts - S3 on Outposts only uses the OUTPOSTS Storage Class.
+   *
+   * You can use the CopyObject action to change the storage class of an object that is already stored in Amazon S3 by using the x-amz-storage-class header. For more information, see Storage Classes in the Amazon S3 User Guide.
+   *
+   * Before using an object as a source object for the copy operation, you must restore a copy of it if it meets any of the following conditions:
+   * - The storage class of the source object is GLACIER or DEEP_ARCHIVE.
+   * - The storage class of the source object is INTELLIGENT_TIERING and its S3 Intelligent-Tiering access tier is Archive Access or Deep Archive Access.
+   *
+   * For more information, see RestoreObject and Copying Objects in the Amazon S3 User Guide.
+   * @public
+   */
+  StorageClass?: StorageClass | undefined;
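As a sketch of the storage-class change mentioned above: copying an object onto itself with a new x-amz-storage-class (bucket and key are hypothetical):

import { S3Client, CopyObjectCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-west-2" });
// Copy the object onto itself, changing only its storage class.
await s3.send(new CopyObjectCommand({
  Bucket: "awsexamplebucket",
  Key: "reports/january.pdf",
  CopySource: "awsexamplebucket/reports/january.pdf",
  StorageClass: "STANDARD_IA",
  MetadataDirective: "COPY",
}));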

+  /**
+   * If the destination bucket is configured as a website, redirects requests for this object copy to another object in the same bucket or to an external URL. Amazon S3 stores the value of this header in the object metadata. This value is unique to each object and is not copied when using the x-amz-metadata-directive header. Instead, you may opt to provide this header in combination with the x-amz-metadata-directive header.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  WebsiteRedirectLocation?: string | undefined;
+  /**
+   * Specifies the algorithm to use when encrypting the object (for example, AES256).
+   *
+   * When you perform a CopyObject operation, if you want to use a different type of encryption setting for the target object, you can specify appropriate encryption-related headers to encrypt the target object with an Amazon S3 managed key, a KMS key, or a customer-provided key. If the encryption setting in your request is different from the default encryption configuration of the destination bucket, the encryption setting in your request takes precedence.
+   *
+   * Note: This functionality is not supported when the destination bucket is a directory bucket.
+   * @public
+   */
+  SSECustomerAlgorithm?: string | undefined;
+  /**
+   * Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded. Amazon S3 does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side-encryption-customer-algorithm header.
+   *
+   * Note: This functionality is not supported when the destination bucket is a directory bucket.
+   * @public
+   */
+  SSECustomerKey?: string | undefined;
+  /**
+   * Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure that the encryption key was transmitted without error.
+   *
+   * Note: This functionality is not supported when the destination bucket is a directory bucket.
+   * @public
+   */
+  SSECustomerKeyMD5?: string | undefined;
+  /**
+   * Specifies the KMS key ID (Key ID, Key ARN, or Key Alias) to use for object encryption. All GET and PUT requests for an object protected by KMS will fail if they're not made via SSL or using SigV4. For information about configuring any of the officially supported Amazon Web Services SDKs and Amazon Web Services CLI, see Specifying the Signature Version in Request Authentication in the Amazon S3 User Guide.
+   *
+   * Directory buckets - To encrypt data using SSE-KMS, it's recommended to specify the x-amz-server-side-encryption header to aws:kms. Then, the x-amz-server-side-encryption-aws-kms-key-id header implicitly uses the bucket's default KMS customer managed key ID. If you want to explicitly set the x-amz-server-side-encryption-aws-kms-key-id header, it must match the bucket's default customer managed key (using key ID or ARN, not alias). Your SSE-KMS configuration can only support 1 customer managed key per directory bucket's lifetime. The Amazon Web Services managed key (aws/s3) isn't supported. Incorrect key specification results in an HTTP 400 Bad Request error.
+   * @public
+   */
+  SSEKMSKeyId?: string | undefined;

+  /**
+   * Specifies the Amazon Web Services KMS Encryption Context as an additional encryption context to use for the destination object encryption. The value of this header is a base64-encoded UTF-8 string holding JSON with the encryption context key-value pairs.
+   *
+   * General purpose buckets - This value must be explicitly added to specify encryption context for CopyObject requests if you want an additional encryption context for your destination object. The additional encryption context of the source object won't be copied to the destination object. For more information, see Encryption context in the Amazon S3 User Guide.
+   *
+   * Directory buckets - You can optionally provide an explicit encryption context value. The value must match the default encryption context - the bucket Amazon Resource Name (ARN). An additional encryption context value is not supported.
+   * @public
+   */
+  SSEKMSEncryptionContext?: string | undefined;
+  /**
+   * Specifies whether Amazon S3 should use an S3 Bucket Key for object encryption with server-side encryption using Key Management Service (KMS) keys (SSE-KMS). If a target object uses SSE-KMS, you can enable an S3 Bucket Key for the object.
+   *
+   * Setting this header to true causes Amazon S3 to use an S3 Bucket Key for object encryption with SSE-KMS. Specifying this header with a COPY action doesn't affect bucket-level settings for S3 Bucket Key.
+   *
+   * For more information, see Amazon S3 Bucket Keys in the Amazon S3 User Guide.
+   *
+   * Note: Directory buckets - S3 Bucket Keys aren't supported, when you copy SSE-KMS encrypted objects from general purpose buckets to directory buckets, from directory buckets to general purpose buckets, or between directory buckets, through CopyObject. In this case, Amazon S3 makes a call to KMS every time a copy request is made for a KMS-encrypted object.
+   * @public
+   */
+  BucketKeyEnabled?: boolean | undefined;
+  /**
+   * Specifies the algorithm to use when decrypting the source object (for example, AES256).
+   *
+   * If the source object for the copy is stored in Amazon S3 using SSE-C, you must provide the necessary encryption information in your request so that Amazon S3 can decrypt the object for copying.
+   *
+   * Note: This functionality is not supported when the source object is in a directory bucket.
+   * @public
+   */
+  CopySourceSSECustomerAlgorithm?: string | undefined;
+  /**
+   * Specifies the customer-provided encryption key for Amazon S3 to use to decrypt the source object. The encryption key provided in this header must be the same one that was used when the source object was created.
+   *
+   * If the source object for the copy is stored in Amazon S3 using SSE-C, you must provide the necessary encryption information in your request so that Amazon S3 can decrypt the object for copying.
+   *
+   * Note: This functionality is not supported when the source object is in a directory bucket.
+   * @public
+   */
+  CopySourceSSECustomerKey?: string | undefined;
+  /**
+   * Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure that the encryption key was transmitted without error.
+   *
+   * If the source object for the copy is stored in Amazon S3 using SSE-C, you must provide the necessary encryption information in your request so that Amazon S3 can decrypt the object for copying.
+   *
+   * Note: This functionality is not supported when the source object is in a directory bucket.
+   * @public
+   */
+  CopySourceSSECustomerKeyMD5?: string | undefined;
+  /**
+   * Confirms that the requester knows that they will be charged for the request. Bucket owners need not specify this parameter in their requests. If either the source or destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding charges to copy the object. For information about downloading objects from Requester Pays buckets, see Downloading Objects in Requester Pays Buckets in the Amazon S3 User Guide.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  RequestPayer?: RequestPayer | undefined;

+  /**
+   * The tag-set for the object copy in the destination bucket. This value must be used in conjunction with the x-amz-tagging-directive if you choose REPLACE for the x-amz-tagging-directive. If you choose COPY for the x-amz-tagging-directive, you don't need to set the x-amz-tagging header, because the tag-set will be copied from the source object directly. The tag-set must be encoded as URL Query parameters.
+   *
+   * The default value is the empty value.
+   *
+   * Note: Directory buckets - For directory buckets in a CopyObject operation, only the empty tag-set is supported. Any requests that attempt to write non-empty tags into directory buckets will receive a 501 Not Implemented status code. When the destination bucket is a directory bucket, you will receive a 501 Not Implemented response in any of the following situations:
+   * - When you attempt to COPY the tag-set from an S3 source object that has non-empty tags.
+   * - When you attempt to REPLACE the tag-set of a source object and set a non-empty value to x-amz-tagging.
+   * - When you don't set the x-amz-tagging-directive header and the source object has non-empty tags. This is because the default value of x-amz-tagging-directive is COPY.
+   *
+   * Because only the empty tag-set is supported for directory buckets in a CopyObject operation, the following situations are allowed:
+   * - When you attempt to COPY the tag-set from a directory bucket source object that has no tags to a general purpose bucket. It copies an empty tag-set to the destination object.
+   * - When you attempt to REPLACE the tag-set of a directory bucket source object and set the x-amz-tagging value of the directory bucket destination object to empty.
+   * - When you attempt to REPLACE the tag-set of a general purpose bucket source object that has non-empty tags and set the x-amz-tagging value of the directory bucket destination object to empty.
+   * - When you attempt to REPLACE the tag-set of a directory bucket source object and don't set the x-amz-tagging value of the directory bucket destination object. This is because the default value of x-amz-tagging is the empty value.
+   * @public
+   */
+  Tagging?: string | undefined;
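A short sketch of replacing the tag-set on the copy, using the URL-query encoding noted above (tag keys, values, and names are hypothetical):

import { S3Client, CopyObjectCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-west-2" });
await s3.send(new CopyObjectCommand({
  Bucket: "amzn-s3-demo-destination-bucket",
  Key: "reports/january.pdf",
  CopySource: "awsexamplebucket/reports/january.pdf",
  TaggingDirective: "REPLACE",
  Tagging: "project=alpha&status=final", // URL query encoding
}));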

+  /**
+   * The Object Lock mode that you want to apply to the object copy.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  ObjectLockMode?: ObjectLockMode | undefined;
+  /**
+   * The date and time when you want the Object Lock of the object copy to expire.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  ObjectLockRetainUntilDate?: Date | undefined;
+  /**
+   * Specifies whether you want to apply a legal hold to the object copy.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  ObjectLockLegalHoldStatus?: ObjectLockLegalHoldStatus | undefined;
+  /**
+   * The account ID of the expected destination bucket owner. If the account ID that you provide does not match the actual owner of the destination bucket, the request fails with the HTTP status code 403 Forbidden (access denied).
+   * @public
+   */
+  ExpectedBucketOwner?: string | undefined;
+  /**
+   * The account ID of the expected source bucket owner. If the account ID that you provide does not match the actual owner of the source bucket, the request fails with the HTTP status code 403 Forbidden (access denied).
+   * @public
+   */
+  ExpectedSourceBucketOwner?: string | undefined;
+}
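Putting the request shape together, a hedged end-to-end sketch of a copy (all names hypothetical), handling the archived-source failure declared just below:

import { S3Client, CopyObjectCommand, ObjectNotInActiveTierError } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-west-2" });
try {
  const out = await s3.send(new CopyObjectCommand({
    Bucket: "amzn-s3-demo-destination-bucket",
    Key: "reports/january-copy.pdf",
    CopySource: "awsexamplebucket/reports/january.pdf",
    ExpectedBucketOwner: "111122223333",       // destination owner, hypothetical
    ExpectedSourceBucketOwner: "111122223333", // source owner, hypothetical
  }));
  console.log("Copied; new ETag:", out.CopyObjectResult?.ETag);
} catch (err) {
  if (err instanceof ObjectNotInActiveTierError) {
    // Archived source (see ObjectNotInActiveTierError below): restore it first.
    console.error("Restore the source object before copying.");
  } else {
    throw err;
  }
}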

+/**
+ * The source object of the COPY action is not in the active tier and is only stored in Amazon S3 Glacier.
+ * @public
+ */
+export declare class ObjectNotInActiveTierError extends __BaseException {
+  readonly name: "ObjectNotInActiveTierError";
+  readonly $fault: "client";
+  /**
+   * @internal
+   */
+  constructor(opts: __ExceptionOptionType<ObjectNotInActiveTierError, "name" | "$fault">);
+}
+/**
+ * The requested bucket name is not available. The bucket namespace is shared by all users of the system. Select a different name and try again.
+ * @public
+ */
+export declare class BucketAlreadyExists extends __BaseException {
+  readonly name: "BucketAlreadyExists";
+  readonly $fault: "client";
+  /**
+   * @internal
+   */
+  constructor(opts: __ExceptionOptionType<BucketAlreadyExists, "name" | "$fault">);
+}
+/**
+ * The bucket you tried to create already exists, and you own it. Amazon S3 returns this error in all Amazon Web Services Regions except in the North Virginia Region. For legacy compatibility, if you re-create an existing bucket that you already own in the North Virginia Region, Amazon S3 returns 200 OK and resets the bucket access control lists (ACLs).
+ * @public
+ */
+export declare class BucketAlreadyOwnedByYou extends __BaseException {
+  readonly name: "BucketAlreadyOwnedByYou";
+  readonly $fault: "client";
+  /**
+   * @internal
+   */
+  constructor(opts: __ExceptionOptionType<BucketAlreadyOwnedByYou, "name" | "$fault">);
+}
+/**
+ * @public
+ */
+export interface CreateBucketOutput {
+  /**
+   * A forward slash followed by the name of the bucket.
+   * @public
+   */
+  Location?: string | undefined;
+}
+/**
+ * @public
+ * @enum
+ */
+export declare const BucketCannedACL: {
+  readonly authenticated_read: "authenticated-read";
+  readonly private: "private";
+  readonly public_read: "public-read";
+  readonly public_read_write: "public-read-write";
+};
+/**
+ * @public
+ */
+export type BucketCannedACL = (typeof BucketCannedACL)[keyof typeof BucketCannedACL];
+/**
+ * @public
+ * @enum
+ */
+export declare const DataRedundancy: {
+  readonly SingleAvailabilityZone: "SingleAvailabilityZone";
+  readonly SingleLocalZone: "SingleLocalZone";
+};
+/**
+ * @public
+ */
+export type DataRedundancy = (typeof DataRedundancy)[keyof typeof DataRedundancy];
+/**
+ * @public
+ * @enum
+ */
+export declare const BucketType: {
+  readonly Directory: "Directory";
+};
+/**
+ * @public
+ */
+export type BucketType = (typeof BucketType)[keyof typeof BucketType];

+/**
+ * Specifies the information about the bucket that will be created. For more information about directory buckets, see Directory buckets in the Amazon S3 User Guide.
+ *
+ * Note: This functionality is only supported by directory buckets.
+ * @public
+ */
+export interface BucketInfo {
+  /**
+   * The number of Zones (Availability Zones or Local Zones) that's used for redundancy for the bucket.
+   * @public
+   */
+  DataRedundancy?: DataRedundancy | undefined;
+  /**
+   * The type of bucket.
+   * @public
+   */
+  Type?: BucketType | undefined;
+}
+/**
+ * @public
+ * @enum
+ */
+export declare const LocationType: {
+  readonly AvailabilityZone: "AvailabilityZone";
+  readonly LocalZone: "LocalZone";
+};
+/**
+ * @public
+ */
+export type LocationType = (typeof LocationType)[keyof typeof LocationType];
+/**
+ * Specifies the location where the bucket will be created.
+ *
+ * For directory buckets, the location type is Availability Zone or Local Zone. For more information about directory buckets, see Working with directory buckets in the Amazon S3 User Guide.
+ *
+ * Note: This functionality is only supported by directory buckets.
+ * @public
+ */
+export interface LocationInfo {
+  /**
+   * The type of location where the bucket will be created.
+   * @public
+   */
+  Type?: LocationType | undefined;
+  /**
+   * The name of the location where the bucket will be created.
+   *
+   * For directory buckets, the name of the location is the Zone ID of the Availability Zone (AZ) or Local Zone (LZ) where the bucket will be created. An example AZ ID value is usw2-az1.
+   * @public
+   */
+  Name?: string | undefined;
+}
+/**
+ * @public
+ * @enum
+ */
+export declare const BucketLocationConstraint: {
+  readonly EU: "EU";
+  readonly af_south_1: "af-south-1";
+  readonly ap_east_1: "ap-east-1";
+  readonly ap_northeast_1: "ap-northeast-1";
+  readonly ap_northeast_2: "ap-northeast-2";
+  readonly ap_northeast_3: "ap-northeast-3";
+  readonly ap_south_1: "ap-south-1";
+  readonly ap_south_2: "ap-south-2";
+  readonly ap_southeast_1: "ap-southeast-1";
+  readonly ap_southeast_2: "ap-southeast-2";
+  readonly ap_southeast_3: "ap-southeast-3";
+  readonly ap_southeast_4: "ap-southeast-4";
+  readonly ap_southeast_5: "ap-southeast-5";
+  readonly ca_central_1: "ca-central-1";
+  readonly cn_north_1: "cn-north-1";
+  readonly cn_northwest_1: "cn-northwest-1";
+  readonly eu_central_1: "eu-central-1";
+  readonly eu_central_2: "eu-central-2";
+  readonly eu_north_1: "eu-north-1";
+  readonly eu_south_1: "eu-south-1";
+  readonly eu_south_2: "eu-south-2";
+  readonly eu_west_1: "eu-west-1";
+  readonly eu_west_2: "eu-west-2";
+  readonly eu_west_3: "eu-west-3";
+  readonly il_central_1: "il-central-1";
+  readonly me_central_1: "me-central-1";
+  readonly me_south_1: "me-south-1";
+  readonly sa_east_1: "sa-east-1";
+  readonly us_east_2: "us-east-2";
+  readonly us_gov_east_1: "us-gov-east-1";
+  readonly us_gov_west_1: "us-gov-west-1";
+  readonly us_west_1: "us-west-1";
+  readonly us_west_2: "us-west-2";
+};
+/**
+ * @public
+ */
+export type BucketLocationConstraint = (typeof BucketLocationConstraint)[keyof typeof BucketLocationConstraint];

+/**
+ * The configuration information for the bucket.
+ * @public
+ */
+export interface CreateBucketConfiguration {
+  /**
+   * Specifies the Region where the bucket will be created. You might choose a Region to optimize latency, minimize costs, or address regulatory requirements. For example, if you reside in Europe, you will probably find it advantageous to create buckets in the Europe (Ireland) Region.
+   *
+   * If you don't specify a Region, the bucket is created in the US East (N. Virginia) Region (us-east-1) by default. Configurations using the value EU will create a bucket in eu-west-1.
+   *
+   * For a list of the valid values for all of the Amazon Web Services Regions, see Regions and Endpoints.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  LocationConstraint?: BucketLocationConstraint | undefined;
+  /**
+   * Specifies the location where the bucket will be created.
+   *
+   * Directory buckets - The location type is Availability Zone or Local Zone. To use the Local Zone location type, your account must be enabled for Dedicated Local Zones. Otherwise, you get an HTTP 403 Forbidden error with the error code AccessDenied. To learn more, see Enable accounts for Dedicated Local Zones in the Amazon S3 User Guide.
+   *
+   * Note: This functionality is only supported by directory buckets.
+   * @public
+   */
+  Location?: LocationInfo | undefined;
+  /**
+   * Specifies the information about the bucket that will be created.
+   *
+   * Note: This functionality is only supported by directory buckets.
+   * @public
+   */
+  Bucket?: BucketInfo | undefined;
+}
+/**
+ * @public
+ * @enum
+ */
+export declare const ObjectOwnership: {
+  readonly BucketOwnerEnforced: "BucketOwnerEnforced";
+  readonly BucketOwnerPreferred: "BucketOwnerPreferred";
+  readonly ObjectWriter: "ObjectWriter";
+};
+/**
+ * @public
+ */
+export type ObjectOwnership = (typeof ObjectOwnership)[keyof typeof ObjectOwnership];
+/**
+ * @public
+ */
+export interface CreateBucketRequest {

+  /**
+   * The canned ACL to apply to the bucket.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  ACL?: BucketCannedACL | undefined;
+  /**
+   * The name of the bucket to create.
+   *
+   * General purpose buckets - For information about bucket naming restrictions, see Bucket naming rules in the Amazon S3 User Guide.
+   *
+   * Directory buckets - When you use this operation with a directory bucket, you must use path-style requests in the format https://s3express-control.region-code.amazonaws.com/bucket-name. Virtual-hosted-style requests aren't supported. Directory bucket names must be unique in the chosen Zone (Availability Zone or Local Zone). Bucket names must also follow the format bucket-base-name--zone-id--x-s3 (for example, DOC-EXAMPLE-BUCKET--usw2-az1--x-s3). For information about bucket naming restrictions, see Directory bucket naming rules in the Amazon S3 User Guide.
+   * @public
+   */
+  Bucket: string | undefined;
+  /**
+   * The configuration information for the bucket.
+   * @public
+   */
+  CreateBucketConfiguration?: CreateBucketConfiguration | undefined;
+  /**
+   * Allows grantee the read, write, read ACP, and write ACP permissions on the bucket.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  GrantFullControl?: string | undefined;
+  /**
+   * Allows grantee to list the objects in the bucket.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  GrantRead?: string | undefined;
+  /**
+   * Allows grantee to read the bucket ACL.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  GrantReadACP?: string | undefined;
+  /**
+   * Allows grantee to create new objects in the bucket.
+   *
+   * For the bucket and object owners of existing objects, also allows deletions and overwrites of those objects.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  GrantWrite?: string | undefined;
+  /**
+   * Allows grantee to write the ACL for the applicable bucket.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  GrantWriteACP?: string | undefined;
+  /**
+   * Specifies whether you want S3 Object Lock to be enabled for the new bucket.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  ObjectLockEnabledForBucket?: boolean | undefined;
+  /**
+   * The container element for object ownership for a bucket's ownership controls.
+   *
+   * BucketOwnerPreferred - Objects uploaded to the bucket change ownership to the bucket owner if the objects are uploaded with the bucket-owner-full-control canned ACL.
+   *
+   * ObjectWriter - The uploading account will own the object if the object is uploaded with the bucket-owner-full-control canned ACL.
+   *
+   * BucketOwnerEnforced - Access control lists (ACLs) are disabled and no longer affect permissions. The bucket owner automatically owns and has full control over every object in the bucket. The bucket only accepts PUT requests that don't specify an ACL or specify bucket owner full control ACLs (such as the predefined bucket-owner-full-control canned ACL or a custom ACL in XML format that grants the same permissions).
+   *
+   * By default, ObjectOwnership is set to BucketOwnerEnforced and ACLs are disabled. We recommend keeping ACLs disabled, except in uncommon use cases where you must control access for each object individually. For more information about S3 Object Ownership, see Controlling ownership of objects and disabling ACLs for your bucket in the Amazon S3 User Guide.
+   *
+   * Note: This functionality is not supported for directory buckets. Directory buckets use the bucket owner enforced setting for S3 Object Ownership.
+   * @public
+   */
+  ObjectOwnership?: ObjectOwnership | undefined;
+}
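A hedged sketch of creating a bucket with this request shape, treating the ownership errors declared earlier in this file (the bucket name is hypothetical):

import { S3Client, CreateBucketCommand, BucketAlreadyOwnedByYou, BucketAlreadyExists } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "eu-west-1" });
try {
  const out = await s3.send(new CreateBucketCommand({
    Bucket: "amzn-s3-demo-bucket",            // hypothetical
    CreateBucketConfiguration: { LocationConstraint: "eu-west-1" },
    ObjectOwnership: "BucketOwnerEnforced",   // the default; ACLs disabled
  }));
  console.log("Created at", out.Location);    // a forward slash plus the bucket name
} catch (err) {
  if (err instanceof BucketAlreadyOwnedByYou) {
    // Already ours; per the semantics above this can be treated as success.
  } else if (err instanceof BucketAlreadyExists) {
    console.error("Name taken in the shared namespace; pick another.");
  } else {
    throw err;
  }
}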

+/**
+ * The destination information for the metadata table configuration. The destination table bucket must be in the same Region and Amazon Web Services account as the general purpose bucket. The specified metadata table name must be unique within the aws_s3_metadata namespace in the destination table bucket.
+ * @public
+ */
+export interface S3TablesDestination {
+  /**
+   * The Amazon Resource Name (ARN) for the table bucket that's specified as the destination in the metadata table configuration. The destination table bucket must be in the same Region and Amazon Web Services account as the general purpose bucket.
+   * @public
+   */
+  TableBucketArn: string | undefined;
+  /**
+   * The name for the metadata table in your metadata table configuration. The specified metadata table name must be unique within the aws_s3_metadata namespace in the destination table bucket.
+   * @public
+   */
+  TableName: string | undefined;
+}
+/**
+ * The metadata table configuration for a general purpose bucket.
+ * @public
+ */
+export interface MetadataTableConfiguration {
+  /**
+   * The destination information for the metadata table configuration. The destination table bucket must be in the same Region and Amazon Web Services account as the general purpose bucket. The specified metadata table name must be unique within the aws_s3_metadata namespace in the destination table bucket.
+   * @public
+   */
+  S3TablesDestination: S3TablesDestination | undefined;
+}
+/**
+ * @public
+ */
+export interface CreateBucketMetadataTableConfigurationRequest {
+  /**
+   * The general purpose bucket that you want to create the metadata table configuration in.
+   *
+   * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+   * @public
+   */
+  Bucket: string | undefined;
+  /**
+   * The Content-MD5 header for the metadata table configuration.
+   * @public
+   */
+  ContentMD5?: string | undefined;
+  /**
+   * The checksum algorithm to use with your metadata table configuration.
+   * @public
+   */
+  ChecksumAlgorithm?: ChecksumAlgorithm | undefined;
+  /**
+   * The contents of your metadata table configuration.
+   * @public
+   */
+  MetadataTableConfiguration: MetadataTableConfiguration | undefined;
+  /**
+   * The expected owner of the general purpose bucket that contains your metadata table configuration.
+   * @public
+   */
+  ExpectedBucketOwner?: string | undefined;
+}
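For illustration only, a request object of this shape might be assembled as follows; the table bucket ARN format and all names are assumptions, not taken from this file:

import type { CreateBucketMetadataTableConfigurationRequest } from "@aws-sdk/client-s3";

const request: CreateBucketMetadataTableConfigurationRequest = {
  Bucket: "amzn-s3-demo-bucket", // general purpose bucket, hypothetical
  MetadataTableConfiguration: {
    S3TablesDestination: {
      // Same Region and account as the bucket; name unique in aws_s3_metadata.
      TableBucketArn: "arn:aws:s3tables:us-west-2:123456789012:bucket/amzn-s3-demo-table-bucket", // assumed ARN format
      TableName: "demo_metadata_table",
    },
  },
};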

+/**
+ * @public
+ */
+export interface CreateMultipartUploadOutput {
+  /**
+   * If the bucket has a lifecycle rule configured with an action to abort incomplete multipart uploads and the prefix in the lifecycle rule matches the object name in the request, the response includes this header. The header indicates when the initiated multipart upload becomes eligible for an abort operation. For more information, see Aborting Incomplete Multipart Uploads Using a Bucket Lifecycle Configuration in the Amazon S3 User Guide.
+   *
+   * The response also includes the x-amz-abort-rule-id header that provides the ID of the lifecycle configuration rule that defines the abort action.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  AbortDate?: Date | undefined;
+  /**
+   * This header is returned along with the x-amz-abort-date header. It identifies the applicable lifecycle configuration rule that defines the action to abort incomplete multipart uploads.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  AbortRuleId?: string | undefined;
+  /**
+   * The name of the bucket to which the multipart upload was initiated. Does not return the access point ARN or access point alias if used.
+   *
+   * Note: Access points are not supported by directory buckets.
+   * @public
+   */
+  Bucket?: string | undefined;
+  /**
+   * Object key for which the multipart upload was initiated.
+   * @public
+   */
+  Key?: string | undefined;
+  /**
+   * ID for the initiated multipart upload.
+   * @public
+   */
+  UploadId?: string | undefined;
+  /**
+   * The server-side encryption algorithm used when you store this object in Amazon S3 (for example, AES256, aws:kms).
+   * @public
+   */
+  ServerSideEncryption?: ServerSideEncryption | undefined;
+  /**
+   * If server-side encryption with a customer-provided encryption key was requested, the response will include this header to confirm the encryption algorithm that's used.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  SSECustomerAlgorithm?: string | undefined;
+  /**
+   * If server-side encryption with a customer-provided encryption key was requested, the response will include this header to provide the round-trip message integrity verification of the customer-provided encryption key.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  SSECustomerKeyMD5?: string | undefined;
+  /**
+   * If present, indicates the ID of the KMS key that was used for object encryption.
+   * @public
+   */
+  SSEKMSKeyId?: string | undefined;
+  /**
+   * If present, indicates the Amazon Web Services KMS Encryption Context to use for object encryption. The value of this header is a Base64 encoded string of a UTF-8 encoded JSON, which contains the encryption context as key-value pairs.
+   * @public
+   */
+  SSEKMSEncryptionContext?: string | undefined;
+  /**
+   * Indicates whether the multipart upload uses an S3 Bucket Key for server-side encryption with Key Management Service (KMS) keys (SSE-KMS).
+   * @public
+   */
+  BucketKeyEnabled?: boolean | undefined;
+  /**
+   * If present, indicates that the requester was successfully charged for the request.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  RequestCharged?: RequestCharged | undefined;
+  /**
+   * The algorithm that was used to create a checksum of the object.
+   * @public
+   */
+  ChecksumAlgorithm?: ChecksumAlgorithm | undefined;
+  /**
+   * Indicates the checksum type that you want Amazon S3 to use to calculate the object's checksum value. For more information, see Checking object integrity in the Amazon S3 User Guide.
+   * @public
+   */
+  ChecksumType?: ChecksumType | undefined;
+}
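A minimal sketch of the multipart lifecycle these shapes describe: initiate, upload one part, complete (names and sizes hypothetical; parts other than the last must generally be at least 5 MiB):

import { S3Client, CreateMultipartUploadCommand, UploadPartCommand, CompleteMultipartUploadCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-west-2" });
const { UploadId } = await s3.send(new CreateMultipartUploadCommand({
  Bucket: "amzn-s3-demo-bucket",          // hypothetical
  Key: "large-object.bin",
  ContentType: "application/octet-stream",
}));

const part = await s3.send(new UploadPartCommand({
  Bucket: "amzn-s3-demo-bucket",
  Key: "large-object.bin",
  UploadId,
  PartNumber: 1,
  Body: Buffer.alloc(5 * 1024 * 1024),    // placeholder payload
}));

await s3.send(new CompleteMultipartUploadCommand({
  Bucket: "amzn-s3-demo-bucket",
  Key: "large-object.bin",
  UploadId,
  MultipartUpload: { Parts: [{ ETag: part.ETag, PartNumber: 1 }] },
}));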

+/**
+ * @public
+ */
+export interface CreateMultipartUploadRequest {
+  /**
+   * The canned ACL to apply to the object. Amazon S3 supports a set of predefined ACLs, known as canned ACLs. Each canned ACL has a predefined set of grantees and permissions. For more information, see Canned ACL in the Amazon S3 User Guide.
+   *
+   * By default, all objects are private. Only the owner has full access control. When uploading an object, you can grant access permissions to individual Amazon Web Services accounts or to predefined groups defined by Amazon S3. These permissions are then added to the access control list (ACL) on the new object. For more information, see Using ACLs. One way to grant the permissions using the request headers is to specify a canned ACL with the x-amz-acl request header.
+   *
+   * Note:
+   * - This functionality is not supported for directory buckets.
+   * - This functionality is not supported for Amazon S3 on Outposts.
+   * @public
+   */
+  ACL?: ObjectCannedACL | undefined;
+  /**
+   * The name of the bucket where the multipart upload is initiated and where the object is uploaded.
+   *
+   * Directory buckets - When you use this operation with a directory bucket, you must use virtual-hosted-style requests in the format Bucket-name.s3express-zone-id.region-code.amazonaws.com. Path-style requests are not supported. Directory bucket names must be unique in the chosen Zone (Availability Zone or Local Zone). Bucket names must follow the format bucket-base-name--zone-id--x-s3 (for example, amzn-s3-demo-bucket--usw2-az1--x-s3). For information about bucket naming restrictions, see Directory bucket naming rules in the Amazon S3 User Guide.
+   *
+   * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.
+   *
+   * Note: Object Lambda access points are not supported by directory buckets.
+   *
+   * S3 on Outposts - When you use this action with S3 on Outposts, you must direct requests to the S3 on Outposts hostname. The S3 on Outposts hostname takes the form AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use this action with S3 on Outposts, the destination bucket must be the Outposts access point ARN or the access point alias. For more information about S3 on Outposts, see What is S3 on Outposts? in the Amazon S3 User Guide.
+   *
+   * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+   * @public
+   */
+  Bucket: string | undefined;
+  /**
+   * Specifies caching behavior along the request/reply chain.
+   * @public
+   */
+  CacheControl?: string | undefined;
+  /**
+   * Specifies presentational information for the object.
+   * @public
+   */
+  ContentDisposition?: string | undefined;
+  /**
+   * Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field.
+   *
+   * Note: For directory buckets, only the aws-chunked value is supported in this header field.
+   * @public
+   */
+  ContentEncoding?: string | undefined;
+  /**
+   * The language that the content is in.
+   * @public
+   */
+  ContentLanguage?: string | undefined;
+  /**
+   * A standard MIME type describing the format of the object data.
+   * @public
+   */
+  ContentType?: string | undefined;
+  /**
+   * The date and time at which the object is no longer cacheable.
+   * @public
+   */
+  Expires?: Date | undefined;

+  /**
+   * Specify access permissions explicitly to give the grantee READ, READ_ACP, and WRITE_ACP permissions on the object.
+   *
+   * By default, all objects are private. Only the owner has full access control. When uploading an object, you can use this header to explicitly grant access permissions to specific Amazon Web Services accounts or groups. This header maps to specific permissions that Amazon S3 supports in an ACL. For more information, see Access Control List (ACL) Overview in the Amazon S3 User Guide.
+   *
+   * You specify each grantee as a type=value pair, where the type is one of the following:
+   * - id – if the value specified is the canonical user ID of an Amazon Web Services account
+   * - uri – if you are granting permissions to a predefined group
+   * - emailAddress – if the value specified is the email address of an Amazon Web Services account
+   *   Note: Using email addresses to specify a grantee is only supported in the following Amazon Web Services Regions: US East (N. Virginia), US West (N. California), US West (Oregon), Asia Pacific (Singapore), Asia Pacific (Sydney), Asia Pacific (Tokyo), Europe (Ireland), and South America (São Paulo). For a list of all the Amazon S3 supported Regions and endpoints, see Regions and Endpoints in the Amazon Web Services General Reference.
+   *
+   * For example, the following x-amz-grant-read header grants the Amazon Web Services accounts identified by account IDs permissions to read object data and its metadata:
+   *
+   * x-amz-grant-read: id="11112222333", id="444455556666"
+   *
+   * Note:
+   * - This functionality is not supported for directory buckets.
+   * - This functionality is not supported for Amazon S3 on Outposts.
+   * @public
+   */
+  GrantFullControl?: string | undefined;
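A sketch of the grant-header format shown above applied to this request; the account IDs come from the doc's own example, everything else is hypothetical:

import { S3Client, CreateMultipartUploadCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-west-2" });
await s3.send(new CreateMultipartUploadCommand({
  Bucket: "amzn-s3-demo-bucket", // hypothetical
  Key: "shared-report.pdf",
  // Grantees as type=value pairs, per the list above.
  GrantRead: 'id="11112222333", id="444455556666"',
  GrantFullControl: 'id="11112222333"',
}));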

+  /**
+   * Specify access permissions explicitly to allow grantee to read the object data and its metadata.
+   *
+   * By default, all objects are private. Only the owner has full access control. When uploading an object, you can use this header to explicitly grant access permissions to specific Amazon Web Services accounts or groups. This header maps to specific permissions that Amazon S3 supports in an ACL. For more information, see Access Control List (ACL) Overview in the Amazon S3 User Guide.
+   *
+   * You specify each grantee as a type=value pair, where the type is one of the following:
+   * - id – if the value specified is the canonical user ID of an Amazon Web Services account
+   * - uri – if you are granting permissions to a predefined group
+   * - emailAddress – if the value specified is the email address of an Amazon Web Services account
+   *   Note: Using email addresses to specify a grantee is only supported in the following Amazon Web Services Regions: US East (N. Virginia), US West (N. California), US West (Oregon), Asia Pacific (Singapore), Asia Pacific (Sydney), Asia Pacific (Tokyo), Europe (Ireland), and South America (São Paulo). For a list of all the Amazon S3 supported Regions and endpoints, see Regions and Endpoints in the Amazon Web Services General Reference.
+   *
+   * For example, the following x-amz-grant-read header grants the Amazon Web Services accounts identified by account IDs permissions to read object data and its metadata:
+   *
+   * x-amz-grant-read: id="11112222333", id="444455556666"
+   *
+   * Note:
+   * - This functionality is not supported for directory buckets.
+   * - This functionality is not supported for Amazon S3 on Outposts.
+   * @public
+   */
+  GrantRead?: string | undefined;

+  /**
+   * Specify access permissions explicitly to allow the grantee to read the object ACL.
+   *
+   * By default, all objects are private. Only the owner has full access control. When uploading an object, you can use this header to explicitly grant access permissions to specific Amazon Web Services accounts or groups. This header maps to specific permissions that Amazon S3 supports in an ACL. For more information, see Access Control List (ACL) Overview in the Amazon S3 User Guide.
+   *
+   * You specify each grantee as a type=value pair, where the type is one of the following:
+   * - id – if the value specified is the canonical user ID of an Amazon Web Services account
+   * - uri – if you are granting permissions to a predefined group
+   * - emailAddress – if the value specified is the email address of an Amazon Web Services account
+   *   Note: Using email addresses to specify a grantee is only supported in the following Amazon Web Services Regions: US East (N. Virginia), US West (N. California), US West (Oregon), Asia Pacific (Singapore), Asia Pacific (Sydney), Asia Pacific (Tokyo), Europe (Ireland), and South America (São Paulo). For a list of all the Amazon S3 supported Regions and endpoints, see Regions and Endpoints in the Amazon Web Services General Reference.
+   *
+   * For example, the following x-amz-grant-read header grants the Amazon Web Services accounts identified by account IDs permissions to read object data and its metadata:
+   *
+   * x-amz-grant-read: id="11112222333", id="444455556666"
+   *
+   * Note:
+   * - This functionality is not supported for directory buckets.
+   * - This functionality is not supported for Amazon S3 on Outposts.
+   * @public
+   */
+  GrantReadACP?: string | undefined;

Specify access permissions explicitly to allows grantee to allow grantee to write the + * ACL for the applicable object.

+ *

By default, all objects are private. Only the owner has full access control. When + * uploading an object, you can use this header to explicitly grant access permissions to + * specific Amazon Web Services accounts or groups. This header maps to specific permissions that Amazon S3 + * supports in an ACL. For more information, see Access Control List (ACL) Overview + * in the Amazon S3 User Guide.

+ *

You specify each grantee as a type=value pair, where the type is one of the + * following:

+ *
    + *
  • + *

    + * id – if the value specified is the canonical user ID of an + * Amazon Web Services account

    + *
  • + *
  • + *

    + * uri – if you are granting permissions to a predefined group

    + *
  • + *
  • + *

    + * emailAddress – if the value specified is the email address of an + * Amazon Web Services account

    + * + *

    Using email addresses to specify a grantee is only supported in the following Amazon Web Services Regions:

    + *
      + *
    • + *

      US East (N. Virginia)

      + *
    • + *
    • + *

      US West (N. California)

      + *
    • + *
    • + *

      US West (Oregon)

      + *
    • + *
    • + *

      Asia Pacific (Singapore)

      + *
    • + *
    • + *

      Asia Pacific (Sydney)

      + *
    • + *
    • + *

      Asia Pacific (Tokyo)

      + *
    • + *
    • + *

      Europe (Ireland)

      + *
    • + *
    • + *

      South America (São Paulo)

      + *
    • + *
    + *

    For a list of all the Amazon S3 supported Regions and endpoints, see Regions and Endpoints in the Amazon Web Services General Reference.

    + *
    + *
  • + *
+ *

For example, the following x-amz-grant-read header grants the Amazon Web Services accounts identified by account IDs permissions to read object data and its metadata:

+ *

+ * x-amz-grant-read: id="11112222333", id="444455556666" + *

+ * + *
    + *
  • + *

    This functionality is not supported for directory buckets.

    + *
  • + *
  • + *

    This functionality is not supported for Amazon S3 on Outposts.

    + *
  • + *
+ *
+ * @public + */ + GrantWriteACP?: string | undefined; + /** + *

+    /**
+     * Object key for which the multipart upload is to be initiated.
+     * @public
+     */
+    Key: string | undefined;
+    /**
+     * A map of metadata to store with the object in S3.
+     * @public
+     */
+    Metadata?: Record<string, string> | undefined;
+    /**
+     * The server-side encryption algorithm used when you store this object in Amazon S3 (for
+     * example, AES256, aws:kms).
+     *
+     *   - Directory buckets - For directory buckets, there are only two supported options for
+     *     server-side encryption: server-side encryption with Amazon S3 managed keys (SSE-S3)
+     *     (AES256) and server-side encryption with KMS keys (SSE-KMS) (aws:kms). We recommend
+     *     that the bucket's default encryption uses the desired encryption configuration and
+     *     you don't override the bucket default encryption in your CreateSession requests or
+     *     PUT object requests. Then, new objects are automatically encrypted with the desired
+     *     encryption settings. For more information, see Protecting data with server-side
+     *     encryption in the Amazon S3 User Guide. For more information about the encryption
+     *     overriding behaviors in directory buckets, see Specifying server-side encryption
+     *     with KMS for new object uploads.
+     *
+     *     In the Zonal endpoint API calls (except CopyObject and UploadPartCopy) using the
+     *     REST API, the encryption request headers must match the encryption settings that are
+     *     specified in the CreateSession request. You can't override the values of the
+     *     encryption settings (x-amz-server-side-encryption,
+     *     x-amz-server-side-encryption-aws-kms-key-id, x-amz-server-side-encryption-context,
+     *     and x-amz-server-side-encryption-bucket-key-enabled) that are specified in the
+     *     CreateSession request. You don't need to explicitly specify these encryption settings
+     *     values in Zonal endpoint API calls, and Amazon S3 will use the encryption settings
+     *     values from the CreateSession request to protect new objects in the directory bucket.
+     *
+     *     When you use the CLI or the Amazon Web Services SDKs, for CreateSession, the session
+     *     token refreshes automatically to avoid service interruptions when a session expires.
+     *     The CLI or the Amazon Web Services SDKs use the bucket's default encryption
+     *     configuration for the CreateSession request. It's not supported to override the
+     *     encryption settings values in the CreateSession request. So in the Zonal endpoint API
+     *     calls (except CopyObject and UploadPartCopy), the encryption request headers must
+     *     match the default encryption configuration of the directory bucket.
+     * @public
+     */
+    ServerSideEncryption?: ServerSideEncryption | undefined;
+    /**
+     * By default, Amazon S3 uses the STANDARD Storage Class to store newly created objects. The
+     * STANDARD storage class provides high durability and high availability. Depending on
+     * performance needs, you can specify a different Storage Class. For more information, see
+     * Storage Classes in the Amazon S3 User Guide.
+     *
+     * Note:
+     *   - Directory buckets only support EXPRESS_ONEZONE (the S3 Express One Zone storage
+     *     class) in Availability Zones and ONEZONE_IA (the S3 One Zone-Infrequent Access
+     *     storage class) in Dedicated Local Zones.
+     *   - Amazon S3 on Outposts only uses the OUTPOSTS Storage Class.
+     * @public
+     */
+    StorageClass?: StorageClass | undefined;
+    /**
+     * If the bucket is configured as a website, redirects requests for this object to another
+     * object in the same bucket or to an external URL. Amazon S3 stores the value of this
+     * header in the object metadata.
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    WebsiteRedirectLocation?: string | undefined;
+    /**
+     * Specifies the algorithm to use when encrypting the object (for example, AES256).
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    SSECustomerAlgorithm?: string | undefined;
+    /**
+     * Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data.
+     * This value is used to store the object and then it is discarded; Amazon S3 does not store
+     * the encryption key. The key must be appropriate for use with the algorithm specified in
+     * the x-amz-server-side-encryption-customer-algorithm header.
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    SSECustomerKey?: string | undefined;
+    /**
+     * Specifies the 128-bit MD5 digest of the customer-provided encryption key according to
+     * RFC 1321. Amazon S3 uses this header for a message integrity check to ensure that the
+     * encryption key was transmitted without error.
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    SSECustomerKeyMD5?: string | undefined;
+    /**
+     * Specifies the KMS key ID (Key ID, Key ARN, or Key Alias) to use for object encryption.
+     * If the KMS key doesn't exist in the same account that's issuing the command, you must use
+     * the full Key ARN, not the Key ID.
+     *
+     * General purpose buckets - If you specify x-amz-server-side-encryption with aws:kms or
+     * aws:kms:dsse, this header specifies the ID (Key ID, Key ARN, or Key Alias) of the KMS key
+     * to use. If you specify x-amz-server-side-encryption:aws:kms or
+     * x-amz-server-side-encryption:aws:kms:dsse, but do not provide
+     * x-amz-server-side-encryption-aws-kms-key-id, Amazon S3 uses the Amazon Web Services
+     * managed key (aws/s3) to protect the data.
+     *
+     * Directory buckets - To encrypt data using SSE-KMS, it's recommended to specify the
+     * x-amz-server-side-encryption header to aws:kms. Then, the
+     * x-amz-server-side-encryption-aws-kms-key-id header implicitly uses the bucket's default
+     * KMS customer managed key ID. If you want to explicitly set the
+     * x-amz-server-side-encryption-aws-kms-key-id header, it must match the bucket's default
+     * customer managed key (using key ID or ARN, not alias). Your SSE-KMS configuration can
+     * only support 1 customer managed key per directory bucket's lifetime. The
+     * Amazon Web Services managed key (aws/s3) isn't supported. Incorrect key specification
+     * results in an HTTP 400 Bad Request error.
+     * @public
+     */
+    SSEKMSKeyId?: string | undefined;
+    /**
+     * Specifies the Amazon Web Services KMS Encryption Context to use for object encryption.
+     * The value of this header is a Base64 encoded string of a UTF-8 encoded JSON, which
+     * contains the encryption context as key-value pairs.
+     *
+     * Directory buckets - You can optionally provide an explicit encryption context value. The
+     * value must match the default encryption context - the bucket Amazon Resource Name (ARN).
+     * An additional encryption context value is not supported.
+     * @public
+     */
+    SSEKMSEncryptionContext?: string | undefined;
+    /**
+     * Specifies whether Amazon S3 should use an S3 Bucket Key for object encryption with
+     * server-side encryption using Key Management Service (KMS) keys (SSE-KMS).
+     *
+     * General purpose buckets - Setting this header to true causes Amazon S3 to use an S3
+     * Bucket Key for object encryption with SSE-KMS. Also, specifying this header with a PUT
+     * action doesn't affect bucket-level settings for S3 Bucket Key.
+     *
+     * Directory buckets - S3 Bucket Keys are always enabled for GET and PUT operations in a
+     * directory bucket and can't be disabled. S3 Bucket Keys aren't supported, when you copy
+     * SSE-KMS encrypted objects from general purpose buckets to directory buckets, from
+     * directory buckets to general purpose buckets, or between directory buckets, through
+     * CopyObject, UploadPartCopy, the Copy operation in Batch Operations, or the import jobs.
+     * In this case, Amazon S3 makes a call to KMS every time a copy request is made for a
+     * KMS-encrypted object.
+     * @public
+     */
+    BucketKeyEnabled?: boolean | undefined;
+    /**
+     * Confirms that the requester knows that they will be charged for the request. Bucket
+     * owners need not specify this parameter in their requests. If either the source or
+     * destination S3 bucket has Requester Pays enabled, the requester will pay for
+     * corresponding charges to copy the object. For information about downloading objects from
+     * Requester Pays buckets, see Downloading Objects in Requester Pays Buckets in the
+     * Amazon S3 User Guide.
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    RequestPayer?: RequestPayer | undefined;
+    /**
+     * The tag-set for the object. The tag-set must be encoded as URL Query parameters.
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    Tagging?: string | undefined;
+    /**
+     * Specifies the Object Lock mode that you want to apply to the uploaded object.
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    ObjectLockMode?: ObjectLockMode | undefined;
+    /**
+     * Specifies the date and time when you want the Object Lock to expire.
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    ObjectLockRetainUntilDate?: Date | undefined;
+    /**
+     * Specifies whether you want to apply a legal hold to the uploaded object.
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    ObjectLockLegalHoldStatus?: ObjectLockLegalHoldStatus | undefined;
+    /**
+     * The account ID of the expected bucket owner. If the account ID that you provide does not
+     * match the actual owner of the bucket, the request fails with the HTTP status code
+     * 403 Forbidden (access denied).
+     * @public
+     */
+    ExpectedBucketOwner?: string | undefined;
+    /**
+     * Indicates the algorithm that you want Amazon S3 to use to create the checksum for the
+     * object. For more information, see Checking object integrity in the Amazon S3 User Guide.
+     * @public
+     */
+    ChecksumAlgorithm?: ChecksumAlgorithm | undefined;
+    /**
+     * Indicates the checksum type that you want Amazon S3 to use to calculate the object's
+     * checksum value. For more information, see Checking object integrity in the
+     * Amazon S3 User Guide.
+     * @public
+     */
+    ChecksumType?: ChecksumType | undefined;
+}
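The fields above make up the CreateMultipartUploadRequest shape. As a minimal sketch of kicking off a multipart upload with a few of these options through the v3 client (the bucket name, key, and KMS key ARN below are hypothetical placeholders, not values from this diff):

import { S3Client, CreateMultipartUploadCommand } from "@aws-sdk/client-s3";

async function startUpload() {
  const client = new S3Client({ region: "us-east-1" });
  const out = await client.send(
    new CreateMultipartUploadCommand({
      Bucket: "amzn-s3-demo-bucket",       // hypothetical bucket name
      Key: "reports/2024.csv",             // hypothetical object key
      Metadata: { department: "finance" }, // stored with the object, per Metadata above
      GrantRead: 'id="111122223333"',      // canonical-ID grantee; hypothetical account
      ServerSideEncryption: "aws:kms",
      // Hypothetical key ARN; omitting it falls back to the aws/s3 managed key.
      SSEKMSKeyId: "arn:aws:kms:us-east-1:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab",
      StorageClass: "INTELLIGENT_TIERING",
      ChecksumAlgorithm: "SHA256",
    })
  );
  // The returned UploadId is what subsequent UploadPart and
  // CompleteMultipartUpload calls are keyed on.
  return out.UploadId;
}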

+/**
+ * The established temporary security credentials of the session.
+ *
+ * Directory buckets - These session credentials are only supported for the authentication and
+ * authorization of Zonal endpoint API operations on directory buckets.
+ * @public
+ */
+export interface SessionCredentials {
+    /**
+     * A unique identifier that's associated with a secret access key. The access key ID and
+     * the secret access key are used together to sign programmatic Amazon Web Services
+     * requests cryptographically.
+     * @public
+     */
+    AccessKeyId: string | undefined;
+    /**
+     * A key that's used with the access key ID to cryptographically sign programmatic
+     * Amazon Web Services requests. Signing a request identifies the sender and prevents the
+     * request from being altered.
+     * @public
+     */
+    SecretAccessKey: string | undefined;
+    /**
+     * A part of the temporary security credentials. The session token is used to validate the
+     * temporary security credentials.
+     * @public
+     */
+    SessionToken: string | undefined;
+    /**
+     * Temporary security credentials expire after a specified interval. After temporary
+     * credentials expire, any calls that you make with those credentials will fail. So you
+     * must generate a new set of temporary credentials. Temporary credentials cannot be
+     * extended or refreshed beyond the original specified interval.
+     * @public
+     */
+    Expiration: Date | undefined;
+}

+/**
+ * @public
+ */
+export interface CreateSessionOutput {
+    /**
+     * The server-side encryption algorithm used when you store objects in the directory
+     * bucket.
+     * @public
+     */
+    ServerSideEncryption?: ServerSideEncryption | undefined;
+    /**
+     * If you specify x-amz-server-side-encryption with aws:kms, this header indicates the ID
+     * of the KMS symmetric encryption customer managed key that was used for object
+     * encryption.
+     * @public
+     */
+    SSEKMSKeyId?: string | undefined;
+    /**
+     * If present, indicates the Amazon Web Services KMS Encryption Context to use for object
+     * encryption. The value of this header is a Base64 encoded string of a UTF-8 encoded JSON,
+     * which contains the encryption context as key-value pairs. This value is stored as object
+     * metadata and automatically gets passed on to Amazon Web Services KMS for future
+     * GetObject operations on this object.
+     * @public
+     */
+    SSEKMSEncryptionContext?: string | undefined;
+    /**
+     * Indicates whether to use an S3 Bucket Key for server-side encryption with KMS keys
+     * (SSE-KMS).
+     * @public
+     */
+    BucketKeyEnabled?: boolean | undefined;
+    /**
+     * The established temporary security credentials for the created session.
+     * @public
+     */
+    Credentials: SessionCredentials | undefined;
+}
+/**
+ * @public
+ * @enum
+ */
+export declare const SessionMode: {
+    readonly ReadOnly: "ReadOnly";
+    readonly ReadWrite: "ReadWrite";
+};
+/**
+ * @public
+ */
+export type SessionMode = (typeof SessionMode)[keyof typeof SessionMode];
+/**
+ * @public
+ */
+export interface CreateSessionRequest {

+    /**
+     * Specifies the mode of the session that will be created, either ReadWrite or ReadOnly.
+     * By default, a ReadWrite session is created. A ReadWrite session is capable of executing
+     * all the Zonal endpoint API operations on a directory bucket. A ReadOnly session is
+     * constrained to execute the following Zonal endpoint API operations: GetObject,
+     * HeadObject, ListObjectsV2, GetObjectAttributes, ListParts, and ListMultipartUploads.
+     * @public
+     */
+    SessionMode?: SessionMode | undefined;
+    /**
+     * The name of the bucket that you create a session for.
+     *
+     * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+     * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+     * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+     * @public
+     */
+    Bucket: string | undefined;
+    /**
+     * The server-side encryption algorithm to use when you store objects in the directory
+     * bucket.
+     *
+     * For directory buckets, there are only two supported options for server-side encryption:
+     * server-side encryption with Amazon S3 managed keys (SSE-S3) (AES256) and server-side
+     * encryption with KMS keys (SSE-KMS) (aws:kms). By default, Amazon S3 encrypts data with
+     * SSE-S3. For more information, see Protecting data with server-side encryption in the
+     * Amazon S3 User Guide.
+     * @public
+     */
+    ServerSideEncryption?: ServerSideEncryption | undefined;
+    /**
+     * If you specify x-amz-server-side-encryption with aws:kms, you must specify the
+     * x-amz-server-side-encryption-aws-kms-key-id header with the ID (Key ID or Key ARN) of
+     * the KMS symmetric encryption customer managed key to use. Otherwise, you get an HTTP
+     * 400 Bad Request error. Only use the key ID or key ARN. The key alias format of the KMS
+     * key isn't supported. Also, if the KMS key doesn't exist in the same account that's
+     * issuing the command, you must use the full Key ARN, not the Key ID.
+     *
+     * Your SSE-KMS configuration can only support 1 customer managed key per directory
+     * bucket's lifetime. The Amazon Web Services managed key (aws/s3) isn't supported.
+     * @public
+     */
+    SSEKMSKeyId?: string | undefined;
+    /**
+     * Specifies the Amazon Web Services KMS Encryption Context as an additional encryption
+     * context to use for object encryption. The value of this header is a Base64 encoded
+     * string of a UTF-8 encoded JSON, which contains the encryption context as key-value
+     * pairs. This value is stored as object metadata and automatically gets passed on to
+     * Amazon Web Services KMS for future GetObject operations on this object.
+     *
+     * General purpose buckets - This value must be explicitly added during CopyObject
+     * operations if you want an additional encryption context for your object. For more
+     * information, see Encryption context in the Amazon S3 User Guide.
+     *
+     * Directory buckets - You can optionally provide an explicit encryption context value.
+     * The value must match the default encryption context - the bucket Amazon Resource Name
+     * (ARN). An additional encryption context value is not supported.
+     * @public
+     */
+    SSEKMSEncryptionContext?: string | undefined;
+    /**
+     * Specifies whether Amazon S3 should use an S3 Bucket Key for object encryption with
+     * server-side encryption using KMS keys (SSE-KMS).
+     *
+     * S3 Bucket Keys are always enabled for GET and PUT operations in a directory bucket and
+     * can't be disabled. S3 Bucket Keys aren't supported, when you copy SSE-KMS encrypted
+     * objects from general purpose buckets to directory buckets, from directory buckets to
+     * general purpose buckets, or between directory buckets, through CopyObject,
+     * UploadPartCopy, the Copy operation in Batch Operations, or the import jobs. In this
+     * case, Amazon S3 makes a call to KMS every time a copy request is made for a
+     * KMS-encrypted object.
+     * @public
+     */
+    BucketKeyEnabled?: boolean | undefined;
+}
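For reference, a sketch of requesting a directory bucket session directly with these types. In normal use the SDK's S3 Express integration calls CreateSession and refreshes the session automatically, as the comments above note, so calling it by hand is mainly illustrative; the bucket name is a hypothetical placeholder:

import { S3Client, CreateSessionCommand } from "@aws-sdk/client-s3";

async function createReadOnlySession() {
  const client = new S3Client({ region: "us-west-2" });
  const out = await client.send(
    new CreateSessionCommand({
      // Hypothetical directory bucket following the bucket-base-name--zone-id--x-s3 format.
      Bucket: "amzn-s3-demo-bucket--usw2-az1--x-s3",
      SessionMode: "ReadOnly", // omit to get the default ReadWrite session
    })
  );
  // CreateSessionOutput.Credentials is the SessionCredentials shape declared above.
  const creds = out.Credentials!;
  console.log("session expires at", creds.Expiration);
  return creds;
}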

+/**
+ * The specified bucket does not exist.
+ * @public
+ */
+export declare class NoSuchBucket extends __BaseException {
+    readonly name: "NoSuchBucket";
+    readonly $fault: "client";
+    /**
+     * @internal
+     */
+    constructor(opts: __ExceptionOptionType<NoSuchBucket, __BaseException>);
+}

+/**
+ * @public
+ */
+export interface DeleteBucketRequest {
+    /**
+     * Specifies the bucket being deleted.
+     *
+     * Directory buckets - When you use this operation with a directory bucket, you must use
+     * path-style requests in the format
+     * https://s3express-control.region-code.amazonaws.com/bucket-name. Virtual-hosted-style
+     * requests aren't supported. Directory bucket names must be unique in the chosen Zone
+     * (Availability Zone or Local Zone). Bucket names must also follow the format
+     * bucket-base-name--zone-id--x-s3 (for example, DOC-EXAMPLE-BUCKET--usw2-az1--x-s3). For
+     * information about bucket naming restrictions, see Directory bucket naming rules in the
+     * Amazon S3 User Guide.
+     * @public
+     */
+    Bucket: string | undefined;
+    /**
+     * The account ID of the expected bucket owner. If the account ID that you provide does not
+     * match the actual owner of the bucket, the request fails with the HTTP status code
+     * 403 Forbidden (access denied).
+     *
+     * Note: For directory buckets, this header is not supported in this API operation. If you
+     * specify this header, the request fails with the HTTP status code 501 Not Implemented.
+     * @public
+     */
+    ExpectedBucketOwner?: string | undefined;
+}
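A sketch of how NoSuchBucket and DeleteBucketRequest fit together at a call site. The name check alongside instanceof hedges the case where the service error is not deserialized to the modeled class; bucket and owner values are caller-supplied:

import { S3Client, DeleteBucketCommand, NoSuchBucket } from "@aws-sdk/client-s3";

async function deleteBucketIfPresent(bucket: string, expectedOwner?: string) {
  const client = new S3Client({ region: "us-east-1" });
  try {
    await client.send(
      new DeleteBucketCommand({
        Bucket: bucket,
        // General purpose buckets only; directory buckets reject this header
        // with 501 Not Implemented, per the doc comment above.
        ExpectedBucketOwner: expectedOwner,
      })
    );
  } catch (err) {
    // Treat an already-missing bucket as success; other errors propagate.
    if (err instanceof NoSuchBucket || (err as Error)?.name === "NoSuchBucket") {
      return;
    }
    throw err;
  }
}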

+/**
+ * @public
+ */
+export interface DeleteBucketAnalyticsConfigurationRequest {
+    /**
+     * The name of the bucket from which an analytics configuration is deleted.
+     *
+     * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+     * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+     * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+     * @public
+     */
+    Bucket: string | undefined;
+    /**
+     * The ID that identifies the analytics configuration.
+     * @public
+     */
+    Id: string | undefined;
+    /**
+     * The account ID of the expected bucket owner. If the account ID that you provide does not
+     * match the actual owner of the bucket, the request fails with the HTTP status code
+     * 403 Forbidden (access denied).
+     * @public
+     */
+    ExpectedBucketOwner?: string | undefined;
+}
+/**
+ * @public
+ */
+export interface DeleteBucketCorsRequest {
+    /**
+     * Specifies the bucket whose cors configuration is being deleted.
+     *
+     * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+     * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+     * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+     * @public
+     */
+    Bucket: string | undefined;
+    /**
+     * The account ID of the expected bucket owner. If the account ID that you provide does not
+     * match the actual owner of the bucket, the request fails with the HTTP status code
+     * 403 Forbidden (access denied).
+     * @public
+     */
+    ExpectedBucketOwner?: string | undefined;
+}
+/**
+ * @public
+ */
+export interface DeleteBucketEncryptionRequest {
+    /**
+     * The name of the bucket containing the server-side encryption configuration to delete.
+     *
+     * Directory buckets - When you use this operation with a directory bucket, you must use
+     * path-style requests in the format
+     * https://s3express-control.region-code.amazonaws.com/bucket-name. Virtual-hosted-style
+     * requests aren't supported. Directory bucket names must be unique in the chosen Zone
+     * (Availability Zone or Local Zone). Bucket names must also follow the format
+     * bucket-base-name--zone-id--x-s3 (for example, DOC-EXAMPLE-BUCKET--usw2-az1--x-s3). For
+     * information about bucket naming restrictions, see Directory bucket naming rules in the
+     * Amazon S3 User Guide.
+     *
+     * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+     * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+     * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+     * @public
+     */
+    Bucket: string | undefined;
+    /**
+     * The account ID of the expected bucket owner. If the account ID that you provide does not
+     * match the actual owner of the bucket, the request fails with the HTTP status code
+     * 403 Forbidden (access denied).
+     *
+     * Note: For directory buckets, this header is not supported in this API operation. If you
+     * specify this header, the request fails with the HTTP status code 501 Not Implemented.
+     * @public
+     */
+    ExpectedBucketOwner?: string | undefined;
+}
+/**
+ * @public
+ */
+export interface DeleteBucketIntelligentTieringConfigurationRequest {
+    /**
+     * The name of the Amazon S3 bucket whose configuration you want to modify or retrieve.
+     *
+     * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+     * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+     * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+     * @public
+     */
+    Bucket: string | undefined;
+    /**
+     * The ID used to identify the S3 Intelligent-Tiering configuration.
+     * @public
+     */
+    Id: string | undefined;
+}
+/**
+ * @public
+ */
+export interface DeleteBucketInventoryConfigurationRequest {
+    /**
+     * The name of the bucket containing the inventory configuration to delete.
+     *
+     * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+     * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+     * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+     * @public
+     */
+    Bucket: string | undefined;
+    /**
+     * The ID used to identify the inventory configuration.
+     * @public
+     */
+    Id: string | undefined;
+    /**
+     * The account ID of the expected bucket owner. If the account ID that you provide does not
+     * match the actual owner of the bucket, the request fails with the HTTP status code
+     * 403 Forbidden (access denied).
+     * @public
+     */
+    ExpectedBucketOwner?: string | undefined;
+}
+/**
+ * @public
+ */
+export interface DeleteBucketLifecycleRequest {
+    /**
+     * The bucket name of the lifecycle to delete.
+     *
+     * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+     * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+     * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+     * @public
+     */
+    Bucket: string | undefined;
+    /**
+     * The account ID of the expected bucket owner. If the account ID that you provide does not
+     * match the actual owner of the bucket, the request fails with the HTTP status code
+     * 403 Forbidden (access denied).
+     *
+     * Note: This parameter applies to general purpose buckets only. It is not supported for
+     * directory bucket lifecycle configurations.
+     * @public
+     */
+    ExpectedBucketOwner?: string | undefined;
+}
+/**
+ * @public
+ */
+export interface DeleteBucketMetadataTableConfigurationRequest {
+    /**
+     * The general purpose bucket that you want to remove the metadata table configuration
+     * from.
+     *
+     * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+     * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+     * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+     * @public
+     */
+    Bucket: string | undefined;
+    /**
+     * The expected bucket owner of the general purpose bucket that you want to remove the
+     * metadata table configuration from.
+     * @public
+     */
+    ExpectedBucketOwner?: string | undefined;
+}
+/**
+ * @public
+ */
+export interface DeleteBucketMetricsConfigurationRequest {
+    /**
+     * The name of the bucket containing the metrics configuration to delete.
+     *
+     * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+     * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+     * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+     * @public
+     */
+    Bucket: string | undefined;
+    /**
+     * The ID used to identify the metrics configuration. The ID has a 64 character limit and
+     * can only contain letters, numbers, periods, dashes, and underscores.
+     * @public
+     */
+    Id: string | undefined;
+    /**
+     * The account ID of the expected bucket owner. If the account ID that you provide does not
+     * match the actual owner of the bucket, the request fails with the HTTP status code
+     * 403 Forbidden (access denied).
+     * @public
+     */
+    ExpectedBucketOwner?: string | undefined;
+}
+/**
+ * @public
+ */
+export interface DeleteBucketOwnershipControlsRequest {
+    /**
+     * The Amazon S3 bucket whose OwnershipControls you want to delete.
+     *
+     * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+     * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+     * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+     * @public
+     */
+    Bucket: string | undefined;
+    /**
+     * The account ID of the expected bucket owner. If the account ID that you provide does not
+     * match the actual owner of the bucket, the request fails with the HTTP status code
+     * 403 Forbidden (access denied).
+     * @public
+     */
+    ExpectedBucketOwner?: string | undefined;
+}
+/**
+ * @public
+ */
+export interface DeleteBucketPolicyRequest {
+    /**
+     * The bucket name.
+     *
+     * Directory buckets - When you use this operation with a directory bucket, you must use
+     * path-style requests in the format
+     * https://s3express-control.region-code.amazonaws.com/bucket-name. Virtual-hosted-style
+     * requests aren't supported. Directory bucket names must be unique in the chosen Zone
+     * (Availability Zone or Local Zone). Bucket names must also follow the format
+     * bucket-base-name--zone-id--x-s3 (for example, DOC-EXAMPLE-BUCKET--usw2-az1--x-s3). For
+     * information about bucket naming restrictions, see Directory bucket naming rules in the
+     * Amazon S3 User Guide.
+     *
+     * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+     * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+     * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+     * @public
+     */
+    Bucket: string | undefined;
+    /**
+     * The account ID of the expected bucket owner. If the account ID that you provide does not
+     * match the actual owner of the bucket, the request fails with the HTTP status code
+     * 403 Forbidden (access denied).
+     *
+     * Note: For directory buckets, this header is not supported in this API operation. If you
+     * specify this header, the request fails with the HTTP status code 501 Not Implemented.
+     * @public
+     */
+    ExpectedBucketOwner?: string | undefined;
+}
+/**
+ * @public
+ */
+export interface DeleteBucketReplicationRequest {
+    /**
+     * The bucket name.
+     *
+     * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+     * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+     * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+     * @public
+     */
+    Bucket: string | undefined;
+    /**
+     * The account ID of the expected bucket owner. If the account ID that you provide does not
+     * match the actual owner of the bucket, the request fails with the HTTP status code
+     * 403 Forbidden (access denied).
+     * @public
+     */
+    ExpectedBucketOwner?: string | undefined;
+}
+/**
+ * @public
+ */
+export interface DeleteBucketTaggingRequest {
+    /**
+     * The bucket that has the tag set to be removed.
+     *
+     * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+     * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+     * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+     * @public
+     */
+    Bucket: string | undefined;
+    /**
+     * The account ID of the expected bucket owner. If the account ID that you provide does not
+     * match the actual owner of the bucket, the request fails with the HTTP status code
+     * 403 Forbidden (access denied).
+     * @public
+     */
+    ExpectedBucketOwner?: string | undefined;
+}
+/**
+ * @public
+ */
+export interface DeleteBucketWebsiteRequest {
+    /**
+     * The bucket name for which you want to remove the website configuration.
+     *
+     * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+     * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+     * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+     * @public
+     */
+    Bucket: string | undefined;
+    /**
+     * The account ID of the expected bucket owner. If the account ID that you provide does not
+     * match the actual owner of the bucket, the request fails with the HTTP status code
+     * 403 Forbidden (access denied).
+     * @public
+     */
+    ExpectedBucketOwner?: string | undefined;
+}
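All of the configuration-deletion request shapes above follow the same Bucket plus ExpectedBucketOwner (and sometimes Id) pattern, so their call sites look alike. As one representative sketch, removing a bucket's tag set, with hypothetical bucket name and account ID:

import { S3Client, DeleteBucketTaggingCommand } from "@aws-sdk/client-s3";

async function clearBucketTags() {
  const client = new S3Client({ region: "us-east-1" });
  await client.send(
    new DeleteBucketTaggingCommand({
      Bucket: "amzn-s3-demo-bucket",       // hypothetical general purpose bucket
      ExpectedBucketOwner: "111122223333", // hypothetical account ID guard
    })
  );
}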

+/**
+ * @public
+ */
+export interface DeleteObjectOutput {
+    /**
+     * Indicates whether the specified object version that was permanently deleted was (true)
+     * or was not (false) a delete marker before deletion. In a simple DELETE, this header
+     * indicates whether (true) or not (false) the current version of the object is a delete
+     * marker. To learn more about delete markers, see Working with delete markers.
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    DeleteMarker?: boolean | undefined;
+    /**
+     * Returns the version ID of the delete marker created as a result of the DELETE
+     * operation.
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    VersionId?: string | undefined;
+    /**
+     * If present, indicates that the requester was successfully charged for the request.
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    RequestCharged?: RequestCharged | undefined;
+}

+/**
+ * @public
+ */
+export interface DeleteObjectRequest {
+    /**
+     * The bucket name of the bucket containing the object.
+     *
+     * Directory buckets - When you use this operation with a directory bucket, you must use
+     * virtual-hosted-style requests in the format
+     * Bucket-name.s3express-zone-id.region-code.amazonaws.com. Path-style requests are not
+     * supported. Directory bucket names must be unique in the chosen Zone (Availability Zone
+     * or Local Zone). Bucket names must follow the format bucket-base-name--zone-id--x-s3
+     * (for example, amzn-s3-demo-bucket--usw2-az1--x-s3). For information about bucket naming
+     * restrictions, see Directory bucket naming rules in the Amazon S3 User Guide.
+     *
+     * Access points - When you use this action with an access point for general purpose
+     * buckets, you must provide the alias of the access point in place of the bucket name or
+     * specify the access point ARN. When you use this action with an access point for
+     * directory buckets, you must provide the access point name in place of the bucket name.
+     * When using the access point ARN, you must direct requests to the access point hostname.
+     * The access point hostname takes the form
+     * AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action
+     * with an access point through the Amazon Web Services SDKs, you provide the access point
+     * ARN in place of the bucket name. For more information about access point ARNs, see
+     * Using access points in the Amazon S3 User Guide.
+     *
+     * Note: Object Lambda access points are not supported by directory buckets.
+     *
+     * S3 on Outposts - When you use this action with S3 on Outposts, you must direct requests
+     * to the S3 on Outposts hostname. The S3 on Outposts hostname takes the form
+     * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use this
+     * action with S3 on Outposts, the destination bucket must be the Outposts access point ARN
+     * or the access point alias. For more information about S3 on Outposts, see What is S3 on
+     * Outposts? in the Amazon S3 User Guide.
+     *
+     * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+     * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+     * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+     * @public
+     */
+    Bucket: string | undefined;
+    /**
+     * Key name of the object to delete.
+     * @public
+     */
+    Key: string | undefined;
+    /**
+     * The concatenation of the authentication device's serial number, a space, and the value
+     * that is displayed on your authentication device. Required to permanently delete a
+     * versioned object if versioning is configured with MFA delete enabled.
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    MFA?: string | undefined;
+    /**
+     * Version ID used to reference a specific version of the object.
+     *
+     * Note: For directory buckets in this API operation, only the null value of the version
+     * ID is supported.
+     * @public
+     */
+    VersionId?: string | undefined;
+    /**
+     * Confirms that the requester knows that they will be charged for the request. Bucket
+     * owners need not specify this parameter in their requests. If either the source or
+     * destination S3 bucket has Requester Pays enabled, the requester will pay for
+     * corresponding charges to copy the object. For information about downloading objects
+     * from Requester Pays buckets, see Downloading Objects in Requester Pays Buckets in the
+     * Amazon S3 User Guide.
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    RequestPayer?: RequestPayer | undefined;
+    /**
+     * Indicates whether S3 Object Lock should bypass Governance-mode restrictions to process
+     * this operation. To use this header, you must have the s3:BypassGovernanceRetention
+     * permission.
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    BypassGovernanceRetention?: boolean | undefined;
+    /**
+     * The account ID of the expected bucket owner. If the account ID that you provide does not
+     * match the actual owner of the bucket, the request fails with the HTTP status code
+     * 403 Forbidden (access denied).
+     * @public
+     */
+    ExpectedBucketOwner?: string | undefined;
+    /**
+     * The If-Match header field makes the request method conditional on ETags. If the ETag
+     * value does not match, the operation returns a 412 Precondition Failed error. If the
+     * ETag matches or if the object doesn't exist, the operation will return a
+     * 204 Success (No Content) response.
+     *
+     * For more information about conditional requests, see RFC 7232.
+     *
+     * Note: This functionality is only supported for directory buckets.
+     * @public
+     */
+    IfMatch?: string | undefined;
+    /**
+     * If present, the object is deleted only if its last modified time matches the provided
+     * Timestamp. If the Timestamp values do not match, the operation returns a
+     * 412 Precondition Failed error. If the Timestamp matches or if the object doesn't exist,
+     * the operation returns a 204 Success (No Content) response.
+     *
+     * Note: This functionality is only supported for directory buckets.
+     * @public
+     */
+    IfMatchLastModifiedTime?: Date | undefined;
+    /**
+     * If present, the object is deleted only if its size matches the provided size in bytes.
+     * If the Size value does not match, the operation returns a 412 Precondition Failed
+     * error. If the Size matches or if the object doesn't exist, the operation returns a
+     * 204 Success (No Content) response.
+     *
+     * Note: This functionality is only supported for directory buckets. You can use the
+     * If-Match, x-amz-if-match-last-modified-time and x-amz-if-match-size conditional headers
+     * in conjunction with each other or individually.
+     * @public
+     */
+    IfMatchSize?: number | undefined;
+}
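The If-Match trio above enables compare-and-delete semantics on directory buckets. A sketch with hypothetical bucket, key, ETag, and size values; any mismatch surfaces as a 412 Precondition Failed error:

import { S3Client, DeleteObjectCommand } from "@aws-sdk/client-s3";

async function deleteExactObject() {
  const client = new S3Client({ region: "us-west-2" });
  const out = await client.send(
    new DeleteObjectCommand({
      Bucket: "amzn-s3-demo-bucket--usw2-az1--x-s3", // hypothetical directory bucket
      Key: "logs/app.log",                           // hypothetical key
      IfMatch: '"3858f62230ac3c915f300c664312c63f"', // expected ETag (hypothetical)
      IfMatchSize: 1048576,                          // expected object size in bytes
    })
  );
  // On general purpose buckets with versioning, DeleteObjectOutput.DeleteMarker and
  // VersionId would describe the delete marker; neither applies to directory buckets.
  return out;
}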

+/**
+ * Information about the deleted object.
+ * @public
+ */
+export interface DeletedObject {
+    /**
+     * The name of the deleted object.
+     * @public
+     */
+    Key?: string | undefined;
+    /**
+     * The version ID of the deleted object.
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    VersionId?: string | undefined;
+    /**
+     * Indicates whether the specified object version that was permanently deleted was (true)
+     * or was not (false) a delete marker before deletion. In a simple DELETE, this header
+     * indicates whether (true) or not (false) the current version of the object is a delete
+     * marker. To learn more about delete markers, see Working with delete markers.
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    DeleteMarker?: boolean | undefined;
+    /**
+     * The version ID of the delete marker created as a result of the DELETE operation. If you
+     * delete a specific object version, the value returned by this header is the version ID
+     * of the object version deleted.
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    DeleteMarkerVersionId?: string | undefined;
+}
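DeletedObject, together with the _Error shape declared next, is how DeleteObjects reports per-key results. A sketch of splitting a batch delete's output into successes and failures (bucket and keys are caller-supplied):

import { S3Client, DeleteObjectsCommand } from "@aws-sdk/client-s3";

async function deleteBatch(bucket: string, keys: string[]) {
  const client = new S3Client({ region: "us-east-1" });
  const out = await client.send(
    new DeleteObjectsCommand({
      Bucket: bucket,
      Delete: { Objects: keys.map((Key) => ({ Key })), Quiet: false },
    })
  );
  // Successes come back as DeletedObject entries, failures as _Error entries.
  for (const d of out.Deleted ?? []) {
    console.log("deleted:", d.Key, d.DeleteMarker ? "(delete marker)" : "");
  }
  for (const e of out.Errors ?? []) {
    console.error("failed:", e.Key, e.Code, e.Message);
  }
}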

+/**
+ * Container for all error elements.
+ * @public
+ */
+export interface _Error {
+    /**
+     * The error key.
+     * @public
+     */
+    Key?: string | undefined;
+    /**
+     * The version ID of the error.
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    VersionId?: string | undefined;
+    /**
+     * The error code is a string that uniquely identifies an error condition. It is meant to
+     * be read and understood by programs that detect and handle errors by type. The following
+     * is a list of Amazon S3 error codes; each entry gives the code, its description, the
+     * HTTP status code, and the SOAP fault code prefix. For more information, see Error
+     * responses.
+     *
+     *   - AccessDenied: Access Denied. (403 Forbidden; Client)
+     *   - AccountProblem: There is a problem with your Amazon Web Services account that
+     *     prevents the action from completing successfully. Contact Amazon Web Services
+     *     Support for further assistance. (403 Forbidden; Client)
+     *   - AllAccessDisabled: All access to this Amazon S3 resource has been disabled. Contact
+     *     Amazon Web Services Support for further assistance. (403 Forbidden; Client)
+     *   - AmbiguousGrantByEmailAddress: The email address you provided is associated with
+     *     more than one account. (400 Bad Request; Client)
+     *   - AuthorizationHeaderMalformed: The authorization header you provided is invalid.
+     *     (400 Bad Request; N/A)
+     *   - BadDigest: The Content-MD5 you specified did not match what we received.
+     *     (400 Bad Request; Client)
+     *   - BucketAlreadyExists: The requested bucket name is not available. The bucket
+     *     namespace is shared by all users of the system. Please select a different name and
+     *     try again. (409 Conflict; Client)
+     *   - BucketAlreadyOwnedByYou: The bucket you tried to create already exists, and you own
+     *     it. Amazon S3 returns this error in all Amazon Web Services Regions except in the
+     *     North Virginia Region. For legacy compatibility, if you re-create an existing
+     *     bucket that you already own in the North Virginia Region, Amazon S3 returns 200 OK
+     *     and resets the bucket access control lists (ACLs). (409 Conflict in all Regions
+     *     except the North Virginia Region; Client)
+     *   - BucketNotEmpty: The bucket you tried to delete is not empty. (409 Conflict; Client)
+     *   - CredentialsNotSupported: This request does not support credentials.
+     *     (400 Bad Request; Client)
+     *   - CrossLocationLoggingProhibited: Cross-location logging not allowed. Buckets in one
+     *     geographic location cannot log information to a bucket in another location.
+     *     (403 Forbidden; Client)
+     *   - EntityTooSmall: Your proposed upload is smaller than the minimum allowed object
+     *     size. (400 Bad Request; Client)
+     *   - EntityTooLarge: Your proposed upload exceeds the maximum allowed object size.
+     *     (400 Bad Request; Client)
+     *   - ExpiredToken: The provided token has expired. (400 Bad Request; Client)
+     *   - IllegalVersioningConfigurationException: Indicates that the versioning
+     *     configuration specified in the request is invalid. (400 Bad Request; Client)
+     *   - IncompleteBody: You did not provide the number of bytes specified by the
+     *     Content-Length HTTP header. (400 Bad Request; Client)
+     *   - IncorrectNumberOfFilesInPostRequest: POST requires exactly one file upload per
+     *     request. (400 Bad Request; Client)
+     *   - InlineDataTooLarge: Inline data exceeds the maximum allowed size.
+     *     (400 Bad Request; Client)
+     *   - InternalError: We encountered an internal error. Please try again.
+     *     (500 Internal Server Error; Server)
+     *   - InvalidAccessKeyId: The Amazon Web Services access key ID you provided does not
+     *     exist in our records. (403 Forbidden; Client)
+     *   - InvalidAddressingHeader: You must specify the Anonymous role. (N/A; Client)
+     *   - InvalidArgument: Invalid Argument. (400 Bad Request; Client)
+     *   - InvalidBucketName: The specified bucket is not valid. (400 Bad Request; Client)
+     *   - InvalidBucketState: The request is not valid with the current state of the bucket.
+     *     (409 Conflict; Client)
+     *   - InvalidDigest: The Content-MD5 you specified is not valid. (400 Bad Request; Client)
+     *   - InvalidEncryptionAlgorithmError: The encryption request you specified is not valid.
+     *     The valid value is AES256. (400 Bad Request; Client)
+     *   - InvalidLocationConstraint: The specified location constraint is not valid. For more
+     *     information about Regions, see How to Select a Region for Your Buckets.
+     *     (400 Bad Request; Client)
+     *   - InvalidObjectState: The action is not valid for the current state of the object.
+     *     (403 Forbidden; Client)
+     *   - InvalidPart: One or more of the specified parts could not be found. The part might
+     *     not have been uploaded, or the specified entity tag might not have matched the
+     *     part's entity tag. (400 Bad Request; Client)
+     *   - InvalidPartOrder: The list of parts was not in ascending order. Parts list must be
+     *     specified in order by part number. (400 Bad Request; Client)
+     *   - InvalidPayer: All access to this object has been disabled. Please contact
+     *     Amazon Web Services Support for further assistance. (403 Forbidden; Client)
+     *   - InvalidPolicyDocument: The content of the form does not meet the conditions
+     *     specified in the policy document. (400 Bad Request; Client)
+     *   - InvalidRange: The requested range cannot be satisfied.
+     *     (416 Requested Range Not Satisfiable; Client)
+     *   - InvalidRequest: Please use AWS4-HMAC-SHA256. (400 Bad Request; N/A)
+     *   - InvalidRequest: SOAP requests must be made over an HTTPS connection.
+     *     (400 Bad Request; Client)
+     *   - InvalidRequest: Amazon S3 Transfer Acceleration is not supported for buckets with
+     *     non-DNS compliant names. (400 Bad Request; N/A)
+     *   - InvalidRequest: Amazon S3 Transfer Acceleration is not supported for buckets with
+     *     periods (.) in their names. (400 Bad Request; N/A)
+     *   - InvalidRequest: Amazon S3 Transfer Accelerate endpoint only supports virtual style
+     *     requests. (400 Bad Request; N/A)
+     *   - InvalidRequest: Amazon S3 Transfer Accelerate is not configured on this bucket.
+     *     (400 Bad Request; N/A)
+     *   - InvalidRequest: Amazon S3 Transfer Accelerate is disabled on this bucket.
+     *     (400 Bad Request; N/A)
+     *   - InvalidRequest: Amazon S3 Transfer Acceleration is not supported on this bucket.
+     *     Contact Amazon Web Services Support for more information. (400 Bad Request; N/A)
+     *   - InvalidRequest: Amazon S3 Transfer Acceleration cannot be enabled on this bucket.
+     *     Contact Amazon Web Services Support for more information. (400 Bad Request; N/A)
+     *   - InvalidSecurity: The provided security credentials are not valid.
+     *     (403 Forbidden; Client)
+     *   - InvalidSOAPRequest: The SOAP request body is invalid. (400 Bad Request; Client)
+     *   - InvalidStorageClass: The storage class you specified is not valid.
+     *     (400 Bad Request; Client)
+     *   - InvalidTargetBucketForLogging: The target bucket for logging does not exist, is not
+     *     owned by you, or does not have the appropriate grants for the log-delivery group.
+     *     (400 Bad Request; Client)
+     *   - InvalidToken: The provided token is malformed or otherwise invalid.

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: InvalidURI

      + *
    • + *
    • + *

      + * Description: Couldn't parse the specified URI.

      + *
    • + *
    • + *

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: KeyTooLongError

      + *
    • + *
    • + *

      + * Description: Your key is too long.

      + *
    • + *
    • + *

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: MalformedACLError

      + *
    • + *
    • + *

      + * Description: The XML you provided was not well-formed + * or did not validate against our published schema.

      + *
    • + *
    • + *

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: MalformedPOSTRequest

      + *
    • + *
    • + *

      + * Description: The body of your POST request is not + * well-formed multipart/form-data.

      + *
    • + *
    • + *

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: MalformedXML

      + *
    • + *
    • + *

      + * Description: This happens when the user sends malformed + * XML (XML that doesn't conform to the published XSD) for the configuration. The + * error message is, "The XML you provided was not well-formed or did not validate + * against our published schema."

      + *
    • + *
    • + *

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: MaxMessageLengthExceeded

      + *
    • + *
    • + *

      + * Description: Your request was too big.

      + *
    • + *
    • + *

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: MaxPostPreDataLengthExceededError

      + *
    • + *
    • + *

      + * Description: Your POST request fields preceding the + * upload file were too large.

      + *
    • + *
    • + *

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: MetadataTooLarge

      + *
    • + *
    • + *

      + * Description: Your metadata headers exceed the maximum + * allowed metadata size.

      + *
    • + *
    • + *

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: MethodNotAllowed

      + *
    • + *
    • + *

      + * Description: The specified method is not allowed + * against this resource.

      + *
    • + *
    • + *

      + * HTTP Status Code: 405 Method Not Allowed

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: MissingAttachment

      + *
    • + *
    • + *

      + * Description: A SOAP attachment was expected, but none + * were found.

      + *
    • + *
    • + *

      + * HTTP Status Code: N/A

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: MissingContentLength

      + *
    • + *
    • + *

      + * Description: You must provide the Content-Length HTTP + * header.

      + *
    • + *
    • + *

      + * HTTP Status Code: 411 Length Required

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: MissingRequestBodyError

      + *
    • + *
    • + *

      + * Description: This happens when the user sends an empty + * XML document as a request. The error message is, "Request body is empty." + *

      + *
    • + *
    • + *

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: MissingSecurityElement

      + *
    • + *
    • + *

      + * Description: The SOAP 1.1 request is missing a security + * element.

      + *
    • + *
    • + *

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: MissingSecurityHeader

      + *
    • + *
    • + *

      + * Description: Your request is missing a required + * header.

      + *
    • + *
    • + *

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: NoLoggingStatusForKey

      + *
    • + *
    • + *

      + * Description: There is no such thing as a logging status + * subresource for a key.

      + *
    • + *
    • + *

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: NoSuchBucket

      + *
    • + *
    • + *

      + * Description: The specified bucket does not + * exist.

      + *
    • + *
    • + *

      + * HTTP Status Code: 404 Not Found

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: NoSuchBucketPolicy

      + *
    • + *
    • + *

      + * Description: The specified bucket does not have a + * bucket policy.

      + *
    • + *
    • + *

      + * HTTP Status Code: 404 Not Found

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: NoSuchKey

      + *
    • + *
    • + *

      + * Description: The specified key does not exist.

      + *
    • + *
    • + *

      + * HTTP Status Code: 404 Not Found

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: NoSuchLifecycleConfiguration

      + *
    • + *
    • + *

      + * Description: The lifecycle configuration does not + * exist.

      + *
    • + *
    • + *

      + * HTTP Status Code: 404 Not Found

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: NoSuchUpload

      + *
    • + *
    • + *

      + * Description: The specified multipart upload does not + * exist. The upload ID might be invalid, or the multipart upload might have been + * aborted or completed.

      + *
    • + *
    • + *

      + * HTTP Status Code: 404 Not Found

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: NoSuchVersion

      + *
    • + *
    • + *

      + * Description: Indicates that the version ID specified in + * the request does not match an existing version.

      + *
    • + *
    • + *

      + * HTTP Status Code: 404 Not Found

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: NotImplemented

      + *
    • + *
    • + *

      + * Description: A header you provided implies + * functionality that is not implemented.

      + *
    • + *
    • + *

      + * HTTP Status Code: 501 Not Implemented

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Server

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: NotSignedUp

      + *
    • + *
    • + *

      + * Description: Your account is not signed up for the Amazon S3 + * service. You must sign up before you can use Amazon S3. You can sign up at the + * following URL: Amazon S3 + *

      + *
    • + *
    • + *

      + * HTTP Status Code: 403 Forbidden

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: OperationAborted

      + *
    • + *
    • + *

      + * Description: A conflicting conditional action is + * currently in progress against this resource. Try again.

      + *
    • + *
    • + *

      + * HTTP Status Code: 409 Conflict

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: PermanentRedirect

      + *
    • + *
    • + *

      + * Description: The bucket you are attempting to access + * must be addressed using the specified endpoint. Send all future requests to + * this endpoint.

      + *
    • + *
    • + *

      + * HTTP Status Code: 301 Moved Permanently

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: PreconditionFailed

      + *
    • + *
    • + *

      + * Description: At least one of the preconditions you + * specified did not hold.

      + *
    • + *
    • + *

      + * HTTP Status Code: 412 Precondition Failed

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: Redirect

      + *
    • + *
    • + *

      + * Description: Temporary redirect.

      + *
    • + *
    • + *

      + * HTTP Status Code: 307 Moved Temporarily

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: RestoreAlreadyInProgress

      + *
    • + *
    • + *

      + * Description: Object restore is already in + * progress.

      + *
    • + *
    • + *

      + * HTTP Status Code: 409 Conflict

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: RequestIsNotMultiPartContent

      + *
    • + *
    • + *

      + * Description: Bucket POST must be of the enclosure-type + * multipart/form-data.

      + *
    • + *
    • + *

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: RequestTimeout

      + *
    • + *
    • + *

      + * Description: Your socket connection to the server was + * not read from or written to within the timeout period.

      + *
    • + *
    • + *

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: RequestTimeTooSkewed

      + *
    • + *
    • + *

      + * Description: The difference between the request time + * and the server's time is too large.

      + *
    • + *
    • + *

      + * HTTP Status Code: 403 Forbidden

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: RequestTorrentOfBucketError

      + *
    • + *
    • + *

      + * Description: Requesting the torrent file of a bucket is + * not permitted.

      + *
    • + *
    • + *

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: SignatureDoesNotMatch

      + *
    • + *
    • + *

      + * Description: The request signature we calculated does + * not match the signature you provided. Check your Amazon Web Services secret access key and + * signing method. For more information, see REST + * Authentication and SOAP + * Authentication for details.

      + *
    • + *
    • + *

      + * HTTP Status Code: 403 Forbidden

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: ServiceUnavailable

      + *
    • + *
    • + *

      + * Description: Service is unable to handle + * request.

      + *
    • + *
    • + *

      + * HTTP Status Code: 503 Service Unavailable

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Server

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: SlowDown

      + *
    • + *
    • + *

      + * Description: Reduce your request rate.

      + *
    • + *
    • + *

      + * HTTP Status Code: 503 Slow Down

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Server

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: TemporaryRedirect

      + *
    • + *
    • + *

      + * Description: You are being redirected to the bucket + * while DNS updates.

      + *
    • + *
    • + *

      + * HTTP Status Code: 307 Moved Temporarily

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: TokenRefreshRequired

      + *
    • + *
    • + *

      + * Description: The provided token must be + * refreshed.

      + *
    • + *
    • + *

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: TooManyBuckets

      + *
    • + *
    • + *

      + * Description: You have attempted to create more buckets + * than allowed.

      + *
    • + *
    • + *

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: UnexpectedContent

      + *
    • + *
    • + *

      + * Description: This request does not support + * content.

      + *
    • + *
    • + *

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: UnresolvableGrantByEmailAddress

      + *
    • + *
    • + *

      + * Description: The email address you provided does not + * match any account on record.

      + *
    • + *
    • + *

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
  • + *
      + *
    • + *

      + * Code: UserKeyMustBeSpecified

      + *
    • + *
    • + *

      + * Description: The bucket POST must contain the specified + * field name. If it is specified, check the order of the fields.

      + *
    • + *
    • + *

      + * HTTP Status Code: 400 Bad Request

      + *
    • + *
    • + *

      + * SOAP Fault Code Prefix: Client

      + *
    • + *
    + *
  • + *
+ *

+   *
+   * @public
+   */
+  Code?: string | undefined;
+  /**
+   * The error message contains a generic description of the error condition in English. It is
+   * intended for a human audience. Simple programs display the message directly to the end
+   * user if they encounter an error condition they don't know how or don't care to handle.
+   * Sophisticated programs with more exhaustive error handling and proper internationalization
+   * are more likely to ignore the error message.
+   * @public
+   */
+  Message?: string | undefined;
+}
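The _Error shape above carries these codes verbatim. As a minimal, illustrative sketch (assuming @aws-sdk/client-s3 is installed; the region, bucket, and key below are placeholders), the same codes also surface as the name of exceptions thrown by the v3 client:

import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-east-1" });

// Service errors carry the code (e.g. NoSuchKey, NoSuchBucket) as `name`
// and the HTTP status under `$metadata.httpStatusCode`.
export async function fetchIfPresent(bucket: string, key: string) {
  try {
    return await s3.send(new GetObjectCommand({ Bucket: bucket, Key: key }));
  } catch (err) {
    if (err instanceof Error && err.name === "NoSuchKey") {
      return undefined; // 404 Not Found: the specified key does not exist
    }
    throw err;
  }
}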
+/**
+ * @public
+ */
+export interface DeleteObjectsOutput {
+  /**
+   * Container element for a successful delete. It identifies the object that was
+   * successfully deleted.
+   * @public
+   */
+  Deleted?: DeletedObject[] | undefined;
+  /**
+   * If present, indicates that the requester was successfully charged for the request.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  RequestCharged?: RequestCharged | undefined;
+  /**
+   * Container for a failed delete action that describes the object that Amazon S3 attempted
+   * to delete and the error it encountered.
+   * @public
+   */
+  Errors?: _Error[] | undefined;
+}
+/**
+ * Object Identifier is unique value to identify objects.
+ * @public
+ */
+export interface ObjectIdentifier {
+  /**
+   * Key name of the object.
+   *
+   * Note: Replacement must be made for object keys containing special characters (such as
+   * carriage returns) when using XML requests. For more information, see XML related object
+   * key constraints.
+   * @public
+   */
+  Key: string | undefined;
+  /**
+   * Version ID for the specific version of the object to delete.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  VersionId?: string | undefined;
+  /**
+   * An entity tag (ETag) is an identifier assigned by a web server to a specific version of a
+   * resource found at a URL. This header field makes the request method conditional on ETags.
+   *
+   * Note: Entity tags (ETags) for S3 Express One Zone are random alphanumeric strings unique
+   * to the object.
+   * @public
+   */
+  ETag?: string | undefined;
+  /**
+   * If present, the objects are deleted only if its modification times matches the provided
+   * Timestamp.
+   *
+   * Note: This functionality is only supported for directory buckets.
+   * @public
+   */
+  LastModifiedTime?: Date | undefined;
+  /**
+   * If present, the objects are deleted only if its size matches the provided size in bytes.
+   *
+   * Note: This functionality is only supported for directory buckets.
+   * @public
+   */
+  Size?: number | undefined;
+}
+/**
+ * Container for the objects to delete.
+ * @public
+ */
+export interface Delete {
+  /**
+   * The object to delete.
+   *
+   * Note: Directory buckets - For directory buckets, an object that's composed entirely of
+   * whitespace characters is not supported by the DeleteObjects API operation. The request
+   * will receive a 400 Bad Request error and none of the objects in the request will be
+   * deleted.
+   * @public
+   */
+  Objects: ObjectIdentifier[] | undefined;
+  /**
+   * Element to enable quiet mode for the request. When you add this element, you must set
+   * its value to true.
+   * @public
+   */
+  Quiet?: boolean | undefined;
+}
+/**
+ * @public
+ */
+export interface DeleteObjectsRequest {
+  /**
+   * The bucket name containing the objects to delete.
+   *
+   * Directory buckets - When you use this operation with a directory bucket, you must use
+   * virtual-hosted-style requests in the format
+   * Bucket-name.s3express-zone-id.region-code.amazonaws.com. Path-style requests are not
+   * supported. Directory bucket names must be unique in the chosen Zone (Availability Zone or
+   * Local Zone). Bucket names must follow the format bucket-base-name--zone-id--x-s3 (for
+   * example, amzn-s3-demo-bucket--usw2-az1--x-s3). For information about bucket naming
+   * restrictions, see Directory bucket naming rules in the Amazon S3 User Guide.
+   *
+   * Access points - When you use this action with an access point for general purpose
+   * buckets, you must provide the alias of the access point in place of the bucket name or
+   * specify the access point ARN. When you use this action with an access point for directory
+   * buckets, you must provide the access point name in place of the bucket name. When using
+   * the access point ARN, you must direct requests to the access point hostname. The access
+   * point hostname takes the form
+   * AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action
+   * with an access point through the Amazon Web Services SDKs, you provide the access point
+   * ARN in place of the bucket name. For more information about access point ARNs, see Using
+   * access points in the Amazon S3 User Guide.
+   *
+   * Note: Object Lambda access points are not supported by directory buckets.
+   *
+   * S3 on Outposts - When you use this action with S3 on Outposts, you must direct requests
+   * to the S3 on Outposts hostname. The S3 on Outposts hostname takes the form
+   * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use this
+   * action with S3 on Outposts, the destination bucket must be the Outposts access point ARN
+   * or the access point alias. For more information about S3 on Outposts, see What is S3 on
+   * Outposts? in the Amazon S3 User Guide.
+   *
+   * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+   * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+   * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+   * @public
+   */
+  Bucket: string | undefined;
+  /**
+   * Container for the request.
+   * @public
+   */
+  Delete: Delete | undefined;
+  /**
+   * The concatenation of the authentication device's serial number, a space, and the value
+   * that is displayed on your authentication device. Required to permanently delete a
+   * versioned object if versioning is configured with MFA delete enabled.
+   *
+   * When performing the DeleteObjects operation on an MFA delete enabled bucket, which
+   * attempts to delete the specified versioned objects, you must include an MFA token. If you
+   * don't provide an MFA token, the entire request will fail, even if there are non-versioned
+   * objects that you are trying to delete. If you provide an invalid token, whether there are
+   * versioned object keys in the request or not, the entire Multi-Object Delete request will
+   * fail. For information about MFA Delete, see MFA Delete in the Amazon S3 User Guide.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  MFA?: string | undefined;
+  /**
+   * Confirms that the requester knows that they will be charged for the request. Bucket
+   * owners need not specify this parameter in their requests. If either the source or
+   * destination S3 bucket has Requester Pays enabled, the requester will pay for
+   * corresponding charges to copy the object. For information about downloading objects from
+   * Requester Pays buckets, see Downloading Objects in Requester Pays Buckets in the
+   * Amazon S3 User Guide.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  RequestPayer?: RequestPayer | undefined;
+  /**
+   * Specifies whether you want to delete this object even if it has a Governance-type Object
+   * Lock in place. To use this header, you must have the s3:BypassGovernanceRetention
+   * permission.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  BypassGovernanceRetention?: boolean | undefined;
+  /**
+   * The account ID of the expected bucket owner. If the account ID that you provide does not
+   * match the actual owner of the bucket, the request fails with the HTTP status code
+   * 403 Forbidden (access denied).
+   * @public
+   */
+  ExpectedBucketOwner?: string | undefined;
+  /**
+   * Indicates the algorithm used to create the checksum for the object when you use the SDK.
+   * This header will not provide any additional functionality if you don't use the SDK. When
+   * you send this header, there must be a corresponding x-amz-checksum-algorithm or
+   * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status
+   * code 400 Bad Request.
+   *
+   * For the x-amz-checksum-algorithm header, replace algorithm with the supported algorithm
+   * from the following list: CRC32, CRC32C, CRC64NVME, SHA1, SHA256.
+   *
+   * For more information, see Checking object integrity in the Amazon S3 User Guide.
+   *
+   * If the individual checksum value you provide through x-amz-checksum-algorithm doesn't
+   * match the checksum algorithm you set through x-amz-sdk-checksum-algorithm, Amazon S3
+   * fails the request with a BadDigest error.
+   *
+   * If you provide an individual checksum, Amazon S3 ignores any provided ChecksumAlgorithm
+   * parameter.
+   * @public
+   */
+  ChecksumAlgorithm?: ChecksumAlgorithm | undefined;
+}
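A minimal sketch of how the Delete, ObjectIdentifier, and DeleteObjectsOutput shapes above fit together in a DeleteObjectsCommand call (the region and function name are illustrative placeholders):

import { S3Client, DeleteObjectsCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-east-1" });

// Batch-deletes up to 1,000 keys per request; per-object failures come back
// in `Errors` as the _Error shape (Key, VersionId, Code, Message).
export async function deleteKeys(bucket: string, keys: string[]) {
  const { Deleted, Errors } = await s3.send(
    new DeleteObjectsCommand({
      Bucket: bucket,
      Delete: { Objects: keys.map((Key) => ({ Key })) }, // ObjectIdentifier[]
    })
  );
  for (const e of Errors ?? []) {
    console.error(`delete failed for ${e.Key}: ${e.Code} - ${e.Message}`);
  }
  return Deleted ?? [];
}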
+/**
+ * @public
+ */
+export interface DeleteObjectTaggingOutput {
+  /**
+   * The versionId of the object the tag-set was removed from.
+   * @public
+   */
+  VersionId?: string | undefined;
+}
+/**
+ * @public
+ */
+export interface DeleteObjectTaggingRequest {
+  /**
+   * The bucket name containing the objects from which to remove the tags.
+   *
+   * Access points - When you use this action with an access point for general purpose
+   * buckets, you must provide the alias of the access point in place of the bucket name or
+   * specify the access point ARN. When you use this action with an access point for directory
+   * buckets, you must provide the access point name in place of the bucket name. When using
+   * the access point ARN, you must direct requests to the access point hostname. The access
+   * point hostname takes the form
+   * AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action
+   * with an access point through the Amazon Web Services SDKs, you provide the access point
+   * ARN in place of the bucket name. For more information about access point ARNs, see Using
+   * access points in the Amazon S3 User Guide.
+   *
+   * S3 on Outposts - When you use this action with S3 on Outposts, you must direct requests
+   * to the S3 on Outposts hostname. The S3 on Outposts hostname takes the form
+   * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use this
+   * action with S3 on Outposts, the destination bucket must be the Outposts access point ARN
+   * or the access point alias. For more information about S3 on Outposts, see What is S3 on
+   * Outposts? in the Amazon S3 User Guide.
+   *
+   * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+   * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+   * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+   * @public
+   */
+  Bucket: string | undefined;
+  /**
+   * The key that identifies the object in the bucket from which to remove all tags.
+   * @public
+   */
+  Key: string | undefined;
+  /**
+   * The versionId of the object that the tag-set will be removed from.
+   * @public
+   */
+  VersionId?: string | undefined;
+  /**
+   * The account ID of the expected bucket owner. If the account ID that you provide does not
+   * match the actual owner of the bucket, the request fails with the HTTP status code
+   * 403 Forbidden (access denied).
+   * @public
+   */
+  ExpectedBucketOwner?: string | undefined;
+}
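A short usage sketch for the tagging shapes above (names are placeholders); DeleteObjectTaggingCommand pairs the request and output interfaces directly:

import { S3Client, DeleteObjectTaggingCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-east-1" });

// Removes the entire tag set from one object; pass a versionId to target a
// specific version in a versioning-enabled bucket.
export async function clearObjectTags(bucket: string, key: string, versionId?: string) {
  const { VersionId } = await s3.send(
    new DeleteObjectTaggingCommand({ Bucket: bucket, Key: key, VersionId: versionId })
  );
  return VersionId; // the version the tag set was removed from, if versioned
}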
+/**
+ * @public
+ */
+export interface DeletePublicAccessBlockRequest {
+  /**
+   * The Amazon S3 bucket whose PublicAccessBlock configuration you want to delete.
+   *
+   * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+   * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+   * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+   * @public
+   */
+  Bucket: string | undefined;
+  /**
+   * The account ID of the expected bucket owner. If the account ID that you provide does not
+   * match the actual owner of the bucket, the request fails with the HTTP status code
+   * 403 Forbidden (access denied).
+   * @public
+   */
+  ExpectedBucketOwner?: string | undefined;
+}
+/**
+ * @public
+ */
+export interface GetBucketAccelerateConfigurationOutput {
+  /**
+   * The accelerate configuration of the bucket.
+   * @public
+   */
+  Status?: BucketAccelerateStatus | undefined;
+  /**
+   * If present, indicates that the requester was successfully charged for the request.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  RequestCharged?: RequestCharged | undefined;
+}
+/**
+ * @public
+ */
+export interface GetBucketAccelerateConfigurationRequest {
+  /**
+   * The name of the bucket for which the accelerate configuration is retrieved.
+   *
+   * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+   * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+   * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+   * @public
+   */
+  Bucket: string | undefined;
+  /**
+   * The account ID of the expected bucket owner. If the account ID that you provide does not
+   * match the actual owner of the bucket, the request fails with the HTTP status code
+   * 403 Forbidden (access denied).
+   * @public
+   */
+  ExpectedBucketOwner?: string | undefined;
+  /**
+   * Confirms that the requester knows that they will be charged for the request. Bucket
+   * owners need not specify this parameter in their requests. If either the source or
+   * destination S3 bucket has Requester Pays enabled, the requester will pay for
+   * corresponding charges to copy the object. For information about downloading objects from
+   * Requester Pays buckets, see Downloading Objects in Requester Pays Buckets in the
+   * Amazon S3 User Guide.
+   *
+   * Note: This functionality is not supported for directory buckets.
+   * @public
+   */
+  RequestPayer?: RequestPayer | undefined;
+}
+/**
+ * @public
+ */
+export interface GetBucketAclOutput {
+  /**
+   * Container for the bucket owner's display name and ID.
+   * @public
+   */
+  Owner?: Owner | undefined;
+  /**
+   * A list of grants.
+   * @public
+   */
+  Grants?: Grant[] | undefined;
+}
+/**
+ * @public
+ */
+export interface GetBucketAclRequest {
+  /**
+   * Specifies the S3 bucket whose ACL is being requested.
+   *
+   * When you use this API operation with an access point, provide the alias of the access
+   * point in place of the bucket name.
+   *
+   * When you use this API operation with an Object Lambda access point, provide the alias of
+   * the Object Lambda access point in place of the bucket name. If the Object Lambda access
+   * point alias in a request is not valid, the error code InvalidAccessPointAliasError is
+   * returned. For more information about InvalidAccessPointAliasError, see List of Error
+   * Codes.
+   *
+   * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+   * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+   * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+   * @public
+   */
+  Bucket: string | undefined;
+  /**
+   * The account ID of the expected bucket owner. If the account ID that you provide does not
+   * match the actual owner of the bucket, the request fails with the HTTP status code
+   * 403 Forbidden (access denied).
+   * @public
+   */
+  ExpectedBucketOwner?: string | undefined;
+}
+/**
+ * A container of a key value name pair.
+ * @public
+ */
+export interface Tag {
+  /**
+   * Name of the object key.
+   * @public
+   */
+  Key: string | undefined;
+  /**
+   * Value of the tag.
+   * @public
+   */
+  Value: string | undefined;
+}
+/**
+ * A conjunction (logical AND) of predicates, which is used in evaluating a metrics filter.
+ * The operator must have at least two predicates in any combination, and an object must match
+ * all of the predicates for the filter to apply.
+ * @public
+ */
+export interface AnalyticsAndOperator {
+  /**
+   * The prefix to use when evaluating an AND predicate: The prefix that an object must have
+   * to be included in the metrics results.
+   * @public
+   */
+  Prefix?: string | undefined;
+  /**
+   * The list of tags to use when evaluating an AND predicate.
+   * @public
+   */
+  Tags?: Tag[] | undefined;
+}
+/**
+ * The filter used to describe a set of objects for analyses. A filter must have exactly
+ * one prefix, one tag, or one conjunction (AnalyticsAndOperator). If no filter is provided,
+ * all objects will be considered in any analysis.
+ * @public
+ */
+export type AnalyticsFilter = AnalyticsFilter.AndMember | AnalyticsFilter.PrefixMember | AnalyticsFilter.TagMember | AnalyticsFilter.$UnknownMember;
+/**
+ * @public
+ */
+export declare namespace AnalyticsFilter {
+  /**
+   * The prefix to use when evaluating an analytics filter.
+   * @public
+   */
+  interface PrefixMember {
+    Prefix: string;
+    Tag?: never;
+    And?: never;
+    $unknown?: never;
+  }
+  /**
+   * The tag to use when evaluating an analytics filter.
+   * @public
+   */
+  interface TagMember {
+    Prefix?: never;
+    Tag: Tag;
+    And?: never;
+    $unknown?: never;
+  }
+  /**
+   * A conjunction (logical AND) of predicates, which is used in evaluating an analytics
+   * filter. The operator must have at least two predicates.
+   * @public
+   */
+  interface AndMember {
+    Prefix?: never;
+    Tag?: never;
+    And: AnalyticsAndOperator;
+    $unknown?: never;
+  }
+  /**
+   * @public
+   */
+  interface $UnknownMember {
+    Prefix?: never;
+    Tag?: never;
+    And?: never;
+    $unknown: [string, any];
+  }
+  interface Visitor<T> {
+    Prefix: (value: string) => T;
+    Tag: (value: Tag) => T;
+    And: (value: AnalyticsAndOperator) => T;
+    _: (name: string, value: any) => T;
+  }
+  const visit: <T>(value: AnalyticsFilter, visitor: Visitor<T>) => T;
+}
+/**
+ * @public
+ * @enum
+ */
+export declare const AnalyticsS3ExportFileFormat: {
+  readonly CSV: "CSV";
+};
+/**
+ * @public
+ */
+export type AnalyticsS3ExportFileFormat = (typeof AnalyticsS3ExportFileFormat)[keyof typeof AnalyticsS3ExportFileFormat];
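The AnalyticsFilter union above follows the SDK's tagged-union convention: exactly one member is set, and the generated visit helper dispatches on whichever member that is. A minimal sketch (the function name is illustrative):

import { AnalyticsFilter } from "@aws-sdk/client-s3";

// The `_` branch covers union members added to the service after this
// client was generated ($UnknownMember).
export function describeFilter(filter: AnalyticsFilter): string {
  return AnalyticsFilter.visit(filter, {
    Prefix: (prefix) => `prefix filter: ${prefix}`,
    Tag: (tag) => `tag filter: ${tag.Key}=${tag.Value}`,
    And: (and) => `AND filter with ${and.Tags?.length ?? 0} tag(s)`,
    _: (name) => `unknown filter member: ${name}`,
  });
}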
+/**
+ * Contains information about where to publish the analytics results.
+ * @public
+ */
+export interface AnalyticsS3BucketDestination {
+  /**
+   * Specifies the file format used when exporting data to Amazon S3.
+   * @public
+   */
+  Format: AnalyticsS3ExportFileFormat | undefined;
+  /**
+   * The account ID that owns the destination S3 bucket. If no account ID is provided, the
+   * owner is not validated before exporting data.
+   *
+   * Note: Although this value is optional, we strongly recommend that you set it to help
+   * prevent problems if the destination bucket ownership changes.
+   * @public
+   */
+  BucketAccountId?: string | undefined;
+  /**
+   * The Amazon Resource Name (ARN) of the bucket to which data is exported.
+   * @public
+   */
+  Bucket: string | undefined;
+  /**
+   * The prefix to use when exporting data. The prefix is prepended to all results.
+   * @public
+   */
+  Prefix?: string | undefined;
+}
+/**
+ * Where to publish the analytics results.
+ * @public
+ */
+export interface AnalyticsExportDestination {
+  /**
+   * A destination signifying output to an S3 bucket.
+   * @public
+   */
+  S3BucketDestination: AnalyticsS3BucketDestination | undefined;
+}
+/**
+ * @public
+ * @enum
+ */
+export declare const StorageClassAnalysisSchemaVersion: {
+  readonly V_1: "V_1";
+};
+/**
+ * @public
+ */
+export type StorageClassAnalysisSchemaVersion = (typeof StorageClassAnalysisSchemaVersion)[keyof typeof StorageClassAnalysisSchemaVersion];
+/**
+ * Container for data related to the storage class analysis for an Amazon S3 bucket for
+ * export.
+ * @public
+ */
+export interface StorageClassAnalysisDataExport {
+  /**
+   * The version of the output schema to use when exporting data. Must be V_1.
+   * @public
+   */
+  OutputSchemaVersion: StorageClassAnalysisSchemaVersion | undefined;
+  /**
+   * The place to store the data for an analysis.
+   * @public
+   */
+  Destination: AnalyticsExportDestination | undefined;
+}
+/**
+ * Specifies data related to access patterns to be collected and made available to analyze
+ * the tradeoffs between different storage classes for an Amazon S3 bucket.
+ * @public
+ */
+export interface StorageClassAnalysis {
+  /**
+   * Specifies how data related to the storage class analysis for an Amazon S3 bucket should
+   * be exported.
+   * @public
+   */
+  DataExport?: StorageClassAnalysisDataExport | undefined;
+}
+/**
+ * Specifies the configuration and any analyses for the analytics filter of an Amazon S3
+ * bucket.
+ * @public
+ */
+export interface AnalyticsConfiguration {
+  /**
+   * The ID that identifies the analytics configuration.
+   * @public
+   */
+  Id: string | undefined;
+  /**
+   * The filter used to describe a set of objects for analyses. A filter must have exactly
+   * one prefix, one tag, or one conjunction (AnalyticsAndOperator). If no filter is provided,
+   * all objects will be considered in any analysis.
+   * @public
+   */
+  Filter?: AnalyticsFilter | undefined;
+  /**
+   * Contains data related to access patterns to be collected and made available to analyze
+   * the tradeoffs between different storage classes.
+   * @public
+   */
+  StorageClassAnalysis: StorageClassAnalysis | undefined;
+}
+/**
+ * @public
+ */
+export interface GetBucketAnalyticsConfigurationOutput {
+  /**
+   * The configuration and any analyses for the analytics filter.
+   * @public
+   */
+  AnalyticsConfiguration?: AnalyticsConfiguration | undefined;
+}
+/**
+ * @public
+ */
+export interface GetBucketAnalyticsConfigurationRequest {
+  /**
+   * The name of the bucket from which an analytics configuration is retrieved.
+   *
+   * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+   * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+   * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+   * @public
+   */
+  Bucket: string | undefined;
+  /**
+   * The ID that identifies the analytics configuration.
+   * @public
+   */
+  Id: string | undefined;
+  /**
+   * The account ID of the expected bucket owner. If the account ID that you provide does not
+   * match the actual owner of the bucket, the request fails with the HTTP status code
+   * 403 Forbidden (access denied).
+   * @public
+   */
+  ExpectedBucketOwner?: string | undefined;
+}
+/**
+ * Specifies a cross-origin access rule for an Amazon S3 bucket.
+ * @public
+ */
+export interface CORSRule {
+  /**
+   * Unique identifier for the rule. The value cannot be longer than 255 characters.
+   * @public
+   */
+  ID?: string | undefined;
+  /**
+   * Headers that are specified in the Access-Control-Request-Headers header. These headers
+   * are allowed in a preflight OPTIONS request. In response to any preflight OPTIONS request,
+   * Amazon S3 returns any requested headers that are allowed.
+   * @public
+   */
+  AllowedHeaders?: string[] | undefined;
+  /**
+   * An HTTP method that you allow the origin to execute. Valid values are GET, PUT, HEAD,
+   * POST, and DELETE.
+   * @public
+   */
+  AllowedMethods: string[] | undefined;
+  /**
+   * One or more origins you want customers to be able to access the bucket from.
+   * @public
+   */
+  AllowedOrigins: string[] | undefined;
+  /**
+   * One or more headers in the response that you want customers to be able to access from
+   * their applications (for example, from a JavaScript XMLHttpRequest object).
+   * @public
+   */
+  ExposeHeaders?: string[] | undefined;
+  /**
+   * The time in seconds that your browser is to cache the preflight response for the
+   * specified resource.
+   * @public
+   */
+  MaxAgeSeconds?: number | undefined;
+}
+/**
+ * @public
+ */
+export interface GetBucketCorsOutput {
+  /**
+   * A set of origins and methods (cross-origin access that you want to allow). You can add
+   * up to 100 rules to the configuration.
+   * @public
+   */
+  CORSRules?: CORSRule[] | undefined;
+}
+/**
+ * @public
+ */
+export interface GetBucketCorsRequest {
+  /**
+   * The bucket name for which to get the cors configuration.
+   *
+   * When you use this API operation with an access point, provide the alias of the access
+   * point in place of the bucket name.
+   *
+   * When you use this API operation with an Object Lambda access point, provide the alias of
+   * the Object Lambda access point in place of the bucket name. If the Object Lambda access
+   * point alias in a request is not valid, the error code InvalidAccessPointAliasError is
+   * returned. For more information about InvalidAccessPointAliasError, see List of Error
+   * Codes.
+   *
+   * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+   * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+   * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+   * @public
+   */
+  Bucket: string | undefined;
+  /**
+   * The account ID of the expected bucket owner. If the account ID that you provide does not
+   * match the actual owner of the bucket, the request fails with the HTTP status code
+   * 403 Forbidden (access denied).
+   * @public
+   */
+  ExpectedBucketOwner?: string | undefined;
+}
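A minimal sketch reading the CORSRule list back with GetBucketCorsCommand (region and function name are placeholders):

import { S3Client, GetBucketCorsCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-east-1" });

// Note: a bucket with no CORS configuration makes this call throw
// NoSuchCORSConfiguration rather than return an empty rule list.
export async function printCorsRules(bucket: string) {
  const { CORSRules } = await s3.send(new GetBucketCorsCommand({ Bucket: bucket }));
  for (const rule of CORSRules ?? []) {
    console.log(
      `${rule.ID ?? "(no id)"}: ${rule.AllowedMethods?.join(", ")} ` +
        `from ${rule.AllowedOrigins?.join(", ")}`
    );
  }
}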
+/**
+ * Describes the default server-side encryption to apply to new objects in the bucket. If a
+ * PUT Object request doesn't specify any server-side encryption, this default encryption will
+ * be applied. For more information, see PutBucketEncryption.
+ *
+ * - General purpose buckets - If you don't specify a customer managed key at configuration,
+ *   Amazon S3 automatically creates an Amazon Web Services KMS key (aws/s3) in your Amazon
+ *   Web Services account the first time that you add an object encrypted with SSE-KMS to a
+ *   bucket. By default, Amazon S3 uses this KMS key for SSE-KMS.
+ * - Directory buckets - Your SSE-KMS configuration can only support 1 customer managed key
+ *   per directory bucket's lifetime. The Amazon Web Services managed key (aws/s3) isn't
+ *   supported.
+ * - Directory buckets - For directory buckets, there are only two supported options for
+ *   server-side encryption: SSE-S3 and SSE-KMS.
+ * @public
+ */
+export interface ServerSideEncryptionByDefault {
+  /**
+   * Server-side encryption algorithm to use for the default encryption.
+   *
+   * Note: For directory buckets, there are only two supported values for server-side
+   * encryption: AES256 and aws:kms.
+   * @public
+   */
+  SSEAlgorithm: ServerSideEncryption | undefined;
+  /**
+   * Amazon Web Services Key Management Service (KMS) customer managed key ID to use for the
+   * default encryption.
+   *
+   * - General purpose buckets - This parameter is allowed if and only if SSEAlgorithm is set
+   *   to aws:kms or aws:kms:dsse.
+   * - Directory buckets - This parameter is allowed if and only if SSEAlgorithm is set to
+   *   aws:kms.
+   *
+   * You can specify the key ID, key alias, or the Amazon Resource Name (ARN) of the KMS key.
+   *
+   * - Key ID: 1234abcd-12ab-34cd-56ef-1234567890ab
+   * - Key ARN: arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab
+   * - Key Alias: alias/alias-name
+   *
+   * If you are using encryption with cross-account or Amazon Web Services service operations,
+   * you must use a fully qualified KMS key ARN. For more information, see Using encryption
+   * for cross-account operations.
+   *
+   * - General purpose buckets - If you're specifying a customer managed KMS key, we recommend
+   *   using a fully qualified KMS key ARN. If you use a KMS key alias instead, then KMS
+   *   resolves the key within the requester's account. This behavior can result in data
+   *   that's encrypted with a KMS key that belongs to the requester, and not the bucket
+   *   owner. Also, if you use a key ID, you can run into a LogDestination undeliverable error
+   *   when creating a VPC flow log.
+   * - Directory buckets - When you specify an KMS customer managed key for encryption in your
+   *   directory bucket, only use the key ID or key ARN. The key alias format of the KMS key
+   *   isn't supported.
+   *
+   * Note: Amazon S3 only supports symmetric encryption KMS keys. For more information, see
+   * Asymmetric keys in Amazon Web Services KMS in the Amazon Web Services Key Management
+   * Service Developer Guide.
+   * @public
+   */
+  KMSMasterKeyID?: string | undefined;
+}
+/**
+ * Specifies the default server-side encryption configuration.
+ *
+ * - General purpose buckets - If you're specifying a customer managed KMS key, we recommend
+ *   using a fully qualified KMS key ARN. If you use a KMS key alias instead, then KMS
+ *   resolves the key within the requester's account. This behavior can result in data that's
+ *   encrypted with a KMS key that belongs to the requester, and not the bucket owner.
+ * - Directory buckets - When you specify an KMS customer managed key for encryption in your
+ *   directory bucket, only use the key ID or key ARN. The key alias format of the KMS key
+ *   isn't supported.
+ * @public
+ */
+export interface ServerSideEncryptionRule {
+  /**
+   * Specifies the default server-side encryption to apply to new objects in the bucket. If a
+   * PUT Object request doesn't specify any server-side encryption, this default encryption
+   * will be applied.
+   * @public
+   */
+  ApplyServerSideEncryptionByDefault?: ServerSideEncryptionByDefault | undefined;
+  /**
+   * Specifies whether Amazon S3 should use an S3 Bucket Key with server-side encryption using
+   * KMS (SSE-KMS) for new objects in the bucket. Existing objects are not affected. Setting
+   * the BucketKeyEnabled element to true causes Amazon S3 to use an S3 Bucket Key.
+   *
+   * - General purpose buckets - By default, S3 Bucket Key is not enabled. For more
+   *   information, see Amazon S3 Bucket Keys in the Amazon S3 User Guide.
+   * - Directory buckets - S3 Bucket Keys are always enabled for GET and PUT operations in a
+   *   directory bucket and can't be disabled. S3 Bucket Keys aren't supported, when you copy
+   *   SSE-KMS encrypted objects from general purpose buckets to directory buckets, from
+   *   directory buckets to general purpose buckets, or between directory buckets, through
+   *   CopyObject, UploadPartCopy, the Copy operation in Batch Operations, or the import jobs.
+   *   In this case, Amazon S3 makes a call to KMS every time a copy request is made for a
+   *   KMS-encrypted object.
+   * @public
+   */
+  BucketKeyEnabled?: boolean | undefined;
+}
+/**
+ * Specifies the default server-side-encryption configuration.
+ * @public
+ */
+export interface ServerSideEncryptionConfiguration {
+  /**
+   * Container for information about a particular server-side encryption configuration rule.
+   * @public
+   */
+  Rules: ServerSideEncryptionRule[] | undefined;
+}
+/**
+ * @public
+ */
+export interface GetBucketEncryptionOutput {
+  /**
+   * Specifies the default server-side-encryption configuration.
+   * @public
+   */
+  ServerSideEncryptionConfiguration?: ServerSideEncryptionConfiguration | undefined;
+}
+/**
+ * @public
+ */
+export interface GetBucketEncryptionRequest {
+  /**
+   * The name of the bucket from which the server-side encryption configuration is retrieved.
+   *
+   * Directory buckets - When you use this operation with a directory bucket, you must use
+   * path-style requests in the format
+   * https://s3express-control.region-code.amazonaws.com/bucket-name. Virtual-hosted-style
+   * requests aren't supported. Directory bucket names must be unique in the chosen Zone
+   * (Availability Zone or Local Zone). Bucket names must also follow the format
+   * bucket-base-name--zone-id--x-s3 (for example, DOC-EXAMPLE-BUCKET--usw2-az1--x-s3). For
+   * information about bucket naming restrictions, see Directory bucket naming rules in the
+   * Amazon S3 User Guide.
+   *
+   * Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the
+   * "@aws-sdk/signature-v4-crt" package to your project dependencies. For more information,
+   * please go to https://github.com/aws/aws-sdk-js-v3#known-issues
+   * @public
+   */
+  Bucket: string | undefined;
+  /**
+   * The account ID of the expected bucket owner. If the account ID that you provide does not
+   * match the actual owner of the bucket, the request fails with the HTTP status code
+   * 403 Forbidden (access denied).
+   *
+   * Note: For directory buckets, this header is not supported in this API operation. If you
+   * specify this header, the request fails with the HTTP status code 501 Not Implemented.
+   * @public
+   */
+  ExpectedBucketOwner?: string | undefined;
+}
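A minimal sketch reading the default-encryption rules above via GetBucketEncryptionCommand (the region and function name are placeholders):

import { S3Client, GetBucketEncryptionCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-east-1" });

export async function printDefaultEncryption(bucket: string) {
  const { ServerSideEncryptionConfiguration } = await s3.send(
    new GetBucketEncryptionCommand({ Bucket: bucket })
  );
  for (const rule of ServerSideEncryptionConfiguration?.Rules ?? []) {
    const d = rule.ApplyServerSideEncryptionByDefault;
    console.log(
      `algorithm=${d?.SSEAlgorithm} kmsKey=${d?.KMSMasterKeyID ?? "(none)"} ` +
        `bucketKeyEnabled=${rule.BucketKeyEnabled ?? false}`
    );
  }
}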
A container for specifying S3 Intelligent-Tiering filters. The filters determine the + * subset of objects to which the rule applies.

+ * @public + */ +export interface IntelligentTieringAndOperator { + /** + *

An object key name prefix that identifies the subset of objects to which the + * configuration applies.

+ * @public + */ + Prefix?: string | undefined; + /** + *

All of these tags must exist in the object's tag set in order for the configuration to + * apply.

+ * @public + */ + Tags?: Tag[] | undefined; +} +/** + *

The Filter is used to identify objects that the S3 Intelligent-Tiering + * configuration applies to.

+ * @public + */ +export interface IntelligentTieringFilter { + /** + *

An object key name prefix that identifies the subset of objects to which the rule + * applies.

+ * + *

Replacement must be made for object keys containing special characters (such as carriage returns) when using + * XML requests. For more information, see + * XML related object key constraints.

+ *
+ * @public + */ + Prefix?: string | undefined; + /** + *

A container of a key value name pair.

+ * @public + */ + Tag?: Tag | undefined; + /** + *

A conjunction (logical AND) of predicates, which is used in evaluating a metrics filter. + * The operator must have at least two predicates, and an object must match all of the + * predicates in order for the filter to apply.

+ * @public + */ + And?: IntelligentTieringAndOperator | undefined; +} +/** + * @public + * @enum + */ +export declare const IntelligentTieringStatus: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +/** + * @public + */ +export type IntelligentTieringStatus = (typeof IntelligentTieringStatus)[keyof typeof IntelligentTieringStatus]; +/** + * @public + * @enum + */ +export declare const IntelligentTieringAccessTier: { + readonly ARCHIVE_ACCESS: "ARCHIVE_ACCESS"; + readonly DEEP_ARCHIVE_ACCESS: "DEEP_ARCHIVE_ACCESS"; +}; +/** + * @public + */ +export type IntelligentTieringAccessTier = (typeof IntelligentTieringAccessTier)[keyof typeof IntelligentTieringAccessTier]; +/** + *

The S3 Intelligent-Tiering storage class is designed to optimize storage costs by + * automatically moving data to the most cost-effective storage access tier, without + * additional operational overhead.

+ * @public + */ +export interface Tiering { + /** + *

The number of consecutive days of no access after which an object will be eligible to be + * transitioned to the corresponding tier. The minimum number of days specified for + * Archive Access tier must be at least 90 days and Deep Archive Access tier must be at least + * 180 days. The maximum can be up to 2 years (730 days).

+ * @public + */ + Days: number | undefined; + /** + *

S3 Intelligent-Tiering access tier. See Storage class + * for automatically optimizing frequently and infrequently accessed objects for a + * list of access tiers in the S3 Intelligent-Tiering storage class.

+ * @public + */ + AccessTier: IntelligentTieringAccessTier | undefined; +} +/** + *

Specifies the S3 Intelligent-Tiering configuration for an Amazon S3 bucket.

+ *

For information about the S3 Intelligent-Tiering storage class, see Storage class + * for automatically optimizing frequently and infrequently accessed + * objects.

+ * @public + */ +export interface IntelligentTieringConfiguration { + /** + *

The ID used to identify the S3 Intelligent-Tiering configuration.

+ * @public + */ + Id: string | undefined; + /** + *

Specifies a bucket filter. The configuration only includes objects that meet the + * filter's criteria.

+ * @public + */ + Filter?: IntelligentTieringFilter | undefined; + /** + *

Specifies the status of the configuration.

+ * @public + */ + Status: IntelligentTieringStatus | undefined; + /** + *

Specifies the S3 Intelligent-Tiering storage class tier of the configuration.

+ * @public + */ + Tierings: Tiering[] | undefined; +} +/** + * @public + */ +export interface GetBucketIntelligentTieringConfigurationOutput { + /** + *

Container for S3 Intelligent-Tiering configuration.

+ * @public + */ + IntelligentTieringConfiguration?: IntelligentTieringConfiguration | undefined; +} +/** + * @public + */ +export interface GetBucketIntelligentTieringConfigurationRequest { + /** + *

The name of the Amazon S3 bucket whose configuration you want to modify or retrieve.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The ID used to identify the S3 Intelligent-Tiering configuration.

+ * @public + */ + Id: string | undefined; +} +/** + *
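+// Illustrative usage sketch (not part of the generated SDK file): reading one
+// S3 Intelligent-Tiering configuration with the v3 client. The bucket name and
+// configuration ID below are hypothetical placeholders.
+//
+//   import { S3Client, GetBucketIntelligentTieringConfigurationCommand } from "@aws-sdk/client-s3";
+//   const s3 = new S3Client({});
+//   const { IntelligentTieringConfiguration } = await s3.send(
+//     new GetBucketIntelligentTieringConfigurationCommand({ Bucket: "example-bucket", Id: "archive-tiering" }),
+//   );
+//   // Each Tiering pairs an access tier with its days-without-access threshold.
+//   IntelligentTieringConfiguration?.Tierings?.forEach((t) => console.log(t.AccessTier, t.Days));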

Specifies the use of SSE-KMS to encrypt delivered inventory reports.

+ * @public + */ +export interface SSEKMS { + /** + *

Specifies the ID of the Key Management Service (KMS) symmetric encryption customer managed key to use for + * encrypting inventory reports.

+ * @public + */ + KeyId: string | undefined; +} +/** + *

Specifies the use of SSE-S3 to encrypt delivered inventory reports.

+ * @public + */ +export interface SSES3 { +} +/** + *

Contains the type of server-side encryption used to encrypt the inventory + * results.

+ * @public + */ +export interface InventoryEncryption { + /** + *

Specifies the use of SSE-S3 to encrypt delivered inventory reports.

+ * @public + */ + SSES3?: SSES3 | undefined; + /** + *

Specifies the use of SSE-KMS to encrypt delivered inventory reports.

+ * @public + */ + SSEKMS?: SSEKMS | undefined; +} +/** + * @public + * @enum + */ +export declare const InventoryFormat: { + readonly CSV: "CSV"; + readonly ORC: "ORC"; + readonly Parquet: "Parquet"; +}; +/** + * @public + */ +export type InventoryFormat = (typeof InventoryFormat)[keyof typeof InventoryFormat]; +/** + *

Contains the bucket name, file format, bucket owner (optional), and prefix (optional) + * where inventory results are published.

+ * @public + */ +export interface InventoryS3BucketDestination { + /** + *

The account ID that owns the destination S3 bucket. If no account ID is provided, the + * owner is not validated before exporting data.

+ * + *

Although this value is optional, we strongly recommend that you set it to help + * prevent problems if the destination bucket ownership changes.

+ *
+ * @public + */ + AccountId?: string | undefined; + /** + *

The Amazon Resource Name (ARN) of the bucket where inventory results will be + * published.

+ * @public + */ + Bucket: string | undefined; + /** + *

Specifies the output format of the inventory results.

+ * @public + */ + Format: InventoryFormat | undefined; + /** + *

The prefix that is prepended to all inventory results.

+ * @public + */ + Prefix?: string | undefined; + /** + *

Contains the type of server-side encryption used to encrypt the inventory + * results.

+ * @public + */ + Encryption?: InventoryEncryption | undefined; +} +/** + *

Specifies the inventory configuration for an Amazon S3 bucket.

+ * @public + */ +export interface InventoryDestination { + /** + *

Contains the bucket name, file format, bucket owner (optional), and prefix (optional) + * where inventory results are published.

+ * @public + */ + S3BucketDestination: InventoryS3BucketDestination | undefined; +} +/** + *

Specifies an inventory filter. The inventory only includes objects that meet the + * filter's criteria.

+ * @public + */ +export interface InventoryFilter { + /** + *

The prefix that an object must have to be included in the inventory results.

+ * @public + */ + Prefix: string | undefined; +} +/** + * @public + * @enum + */ +export declare const InventoryIncludedObjectVersions: { + readonly All: "All"; + readonly Current: "Current"; +}; +/** + * @public + */ +export type InventoryIncludedObjectVersions = (typeof InventoryIncludedObjectVersions)[keyof typeof InventoryIncludedObjectVersions]; +/** + * @public + * @enum + */ +export declare const InventoryOptionalField: { + readonly BucketKeyStatus: "BucketKeyStatus"; + readonly ChecksumAlgorithm: "ChecksumAlgorithm"; + readonly ETag: "ETag"; + readonly EncryptionStatus: "EncryptionStatus"; + readonly IntelligentTieringAccessTier: "IntelligentTieringAccessTier"; + readonly IsMultipartUploaded: "IsMultipartUploaded"; + readonly LastModifiedDate: "LastModifiedDate"; + readonly ObjectAccessControlList: "ObjectAccessControlList"; + readonly ObjectLockLegalHoldStatus: "ObjectLockLegalHoldStatus"; + readonly ObjectLockMode: "ObjectLockMode"; + readonly ObjectLockRetainUntilDate: "ObjectLockRetainUntilDate"; + readonly ObjectOwner: "ObjectOwner"; + readonly ReplicationStatus: "ReplicationStatus"; + readonly Size: "Size"; + readonly StorageClass: "StorageClass"; +}; +/** + * @public + */ +export type InventoryOptionalField = (typeof InventoryOptionalField)[keyof typeof InventoryOptionalField]; +/** + * @public + * @enum + */ +export declare const InventoryFrequency: { + readonly Daily: "Daily"; + readonly Weekly: "Weekly"; +}; +/** + * @public + */ +export type InventoryFrequency = (typeof InventoryFrequency)[keyof typeof InventoryFrequency]; +/** + *

Specifies the schedule for generating inventory results.

+ * @public + */ +export interface InventorySchedule { + /** + *

Specifies how frequently inventory results are produced.

+ * @public + */ + Frequency: InventoryFrequency | undefined; +} +/** + *

Specifies the inventory configuration for an Amazon S3 bucket. For more information, see + * GET Bucket inventory in the Amazon S3 API Reference.

+ * @public + */ +export interface InventoryConfiguration { + /** + *

Contains information about where to publish the inventory results.

+ * @public + */ + Destination: InventoryDestination | undefined; + /** + *

Specifies whether the inventory is enabled or disabled. If set to True, an + * inventory list is generated. If set to False, no inventory list is + * generated.

+ * @public + */ + IsEnabled: boolean | undefined; + /** + *

Specifies an inventory filter. The inventory only includes objects that meet the + * filter's criteria.

+ * @public + */ + Filter?: InventoryFilter | undefined; + /** + *

The ID used to identify the inventory configuration.

+ * @public + */ + Id: string | undefined; + /** + *

Object versions to include in the inventory list. If set to All, the list + * includes all the object versions, which adds the version-related fields + * VersionId, IsLatest, and DeleteMarker to the + * list. If set to Current, the list does not contain these version-related + * fields.

+ * @public + */ + IncludedObjectVersions: InventoryIncludedObjectVersions | undefined; + /** + *

Contains the optional fields that are included in the inventory results.

+ * @public + */ + OptionalFields?: InventoryOptionalField[] | undefined; + /** + *

Specifies the schedule for generating inventory results.

+ * @public + */ + Schedule: InventorySchedule | undefined; +} +/** + * @public + */ +export interface GetBucketInventoryConfigurationOutput { + /** + *

Specifies the inventory configuration.

+ * @public + */ + InventoryConfiguration?: InventoryConfiguration | undefined; +} +/** + * @public + */ +export interface GetBucketInventoryConfigurationRequest { + /** + *

The name of the bucket containing the inventory configuration to retrieve.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The ID used to identify the inventory configuration.

+ * @public + */ + Id: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + *
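+// Illustrative usage sketch (not part of the generated SDK file): retrieving an
+// inventory configuration and inspecting its destination. Names are hypothetical.
+//
+//   import { S3Client, GetBucketInventoryConfigurationCommand } from "@aws-sdk/client-s3";
+//   const s3 = new S3Client({});
+//   const { InventoryConfiguration } = await s3.send(
+//     new GetBucketInventoryConfigurationCommand({ Bucket: "example-bucket", Id: "daily-inventory" }),
+//   );
+//   const dest = InventoryConfiguration?.Destination?.S3BucketDestination;
+//   console.log(dest?.Bucket, dest?.Format, InventoryConfiguration?.Schedule?.Frequency);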

Container for the expiration for the lifecycle of the object.

+ *

For more information, see Managing your storage lifecycle in the Amazon S3 User Guide.

+ * @public + */ +export interface LifecycleExpiration { + /** + *

Indicates at what date the object is to be moved or deleted. The date value must conform + * to the ISO 8601 format. The time is always midnight UTC.

+ * + *

This parameter applies to general purpose buckets only. It is not supported for + * directory bucket lifecycle configurations.

+ *
+ * @public + */ + Date?: Date | undefined; + /** + *

Indicates the lifetime, in days, of the objects that are subject to the rule. The value + * must be a non-zero positive integer.

+ * @public + */ + Days?: number | undefined; + /** + *

Indicates whether Amazon S3 will remove a delete marker with no noncurrent versions. If set to true, the delete marker will be expired; if set to false, the policy takes no action. This cannot be specified with Days or Date in a Lifecycle Expiration Policy.

+ * + *

This parameter applies to general purpose buckets only. It is not supported for + * directory bucket lifecycle configurations.

+ *
+ * @public + */ + ExpiredObjectDeleteMarker?: boolean | undefined; +} +/** + *

This is used in a Lifecycle Rule Filter to apply a logical AND to two or more + * predicates. The Lifecycle Rule will apply to any object matching all of the predicates + * configured inside the And operator.

+ * @public + */ +export interface LifecycleRuleAndOperator { + /** + *

Prefix identifying one or more objects to which the rule applies.

+ * @public + */ + Prefix?: string | undefined; + /** + *

All of these tags must exist in the object's tag set in order for the rule to + * apply.

+ * @public + */ + Tags?: Tag[] | undefined; + /** + *

Minimum object size to which the rule applies.

+ * @public + */ + ObjectSizeGreaterThan?: number | undefined; + /** + *

Maximum object size to which the rule applies.

+ * @public + */ + ObjectSizeLessThan?: number | undefined; +} +/** + *

The Filter is used to identify objects that a Lifecycle Rule applies to. A + * Filter can have exactly one of Prefix, Tag, + * ObjectSizeGreaterThan, ObjectSizeLessThan, or And + * specified. If the Filter element is left empty, the Lifecycle Rule applies to + * all objects in the bucket.

+ * @public + */ +export interface LifecycleRuleFilter { + /** + *

Prefix identifying one or more objects to which the rule applies.

+ * + *

Replacement must be made for object keys containing special characters (such as carriage returns) when using + * XML requests. For more information, see + * XML related object key constraints.

+ *
+ * @public + */ + Prefix?: string | undefined; + /** + *

This tag must exist in the object's tag set in order for the rule to apply.

+ * + *

This parameter applies to general purpose buckets only. It is not supported for + * directory bucket lifecycle configurations.

+ *
+ * @public + */ + Tag?: Tag | undefined; + /** + *

Minimum object size to which the rule applies.

+ * @public + */ + ObjectSizeGreaterThan?: number | undefined; + /** + *

Maximum object size to which the rule applies.

+ * @public + */ + ObjectSizeLessThan?: number | undefined; + /** + *

This is used in a Lifecycle Rule Filter to apply a logical AND to two or more + * predicates. The Lifecycle Rule will apply to any object matching all of the predicates + * configured inside the And operator.

+ * @public + */ + And?: LifecycleRuleAndOperator | undefined; +} +/** + *
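+// Illustrative sketch (not part of the generated SDK file): a LifecycleRuleFilter
+// that combines a prefix with object-size bounds via the And operator, per the
+// "exactly one of Prefix, Tag, ObjectSizeGreaterThan, ObjectSizeLessThan, or And"
+// rule described above. The prefix and byte values are hypothetical.
+//
+//   const filter: LifecycleRuleFilter = {
+//     And: { Prefix: "logs/", ObjectSizeGreaterThan: 1024, ObjectSizeLessThan: 10_485_760 },
+//   };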

Specifies when noncurrent object versions expire. Upon expiration, Amazon S3 permanently + * deletes the noncurrent object versions. You set this lifecycle configuration action on a + * bucket that has versioning enabled (or suspended) to request that Amazon S3 delete noncurrent + * object versions at a specific period in the object's lifetime.

+ * + *

This parameter applies to general purpose buckets only. It is not supported for + * directory bucket lifecycle configurations.

+ *
+ * @public + */ +export interface NoncurrentVersionExpiration { + /** + *

Specifies the number of days an object is noncurrent before Amazon S3 can perform the + * associated action. The value must be a non-zero positive integer. For information about the + * noncurrent days calculations, see How + * Amazon S3 Calculates When an Object Became Noncurrent in the + * Amazon S3 User Guide.

+ * + *

This parameter applies to general purpose buckets only. It is not supported for + * directory bucket lifecycle configurations.

+ *
+ * @public + */ + NoncurrentDays?: number | undefined; + /** + *

Specifies how many noncurrent versions Amazon S3 will retain. You can specify up to 100 + * noncurrent versions to retain. Amazon S3 will permanently delete any additional noncurrent + * versions beyond the specified number to retain. For more information about noncurrent + * versions, see Lifecycle configuration + * elements in the Amazon S3 User Guide.

+ * + *

This parameter applies to general purpose buckets only. It is not supported for + * directory bucket lifecycle configurations.

+ *
+ * @public + */ + NewerNoncurrentVersions?: number | undefined; +} +/** + * @public + * @enum + */ +export declare const TransitionStorageClass: { + readonly DEEP_ARCHIVE: "DEEP_ARCHIVE"; + readonly GLACIER: "GLACIER"; + readonly GLACIER_IR: "GLACIER_IR"; + readonly INTELLIGENT_TIERING: "INTELLIGENT_TIERING"; + readonly ONEZONE_IA: "ONEZONE_IA"; + readonly STANDARD_IA: "STANDARD_IA"; +}; +/** + * @public + */ +export type TransitionStorageClass = (typeof TransitionStorageClass)[keyof typeof TransitionStorageClass]; +/** + *

Container for the transition rule that describes when noncurrent objects transition to + * the STANDARD_IA, ONEZONE_IA, INTELLIGENT_TIERING, + * GLACIER_IR, GLACIER, or DEEP_ARCHIVE storage + * class. If your bucket is versioning-enabled (or versioning is suspended), you can set this + * action to request that Amazon S3 transition noncurrent object versions to the + * STANDARD_IA, ONEZONE_IA, INTELLIGENT_TIERING, + * GLACIER_IR, GLACIER, or DEEP_ARCHIVE storage + * class at a specific period in the object's lifetime.

+ * @public + */ +export interface NoncurrentVersionTransition { + /** + *

Specifies the number of days an object is noncurrent before Amazon S3 can perform the + * associated action. For information about the noncurrent days calculations, see How + * Amazon S3 Calculates How Long an Object Has Been Noncurrent in the + * Amazon S3 User Guide.

+ * @public + */ + NoncurrentDays?: number | undefined; + /** + *

The class of storage used to store the object.

+ * @public + */ + StorageClass?: TransitionStorageClass | undefined; + /** + *

Specifies how many noncurrent versions Amazon S3 will retain in the same storage class before + * transitioning objects. You can specify up to 100 noncurrent versions to retain. Amazon S3 will + * transition any additional noncurrent versions beyond the specified number to retain. For + * more information about noncurrent versions, see Lifecycle configuration + * elements in the Amazon S3 User Guide.

+ * @public + */ + NewerNoncurrentVersions?: number | undefined; +} +/** + * @public + * @enum + */ +export declare const ExpirationStatus: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +/** + * @public + */ +export type ExpirationStatus = (typeof ExpirationStatus)[keyof typeof ExpirationStatus]; +/** + *

Specifies when an object transitions to a specified storage class. For more information + * about Amazon S3 lifecycle configuration rules, see Transitioning + * Objects Using Amazon S3 Lifecycle in the Amazon S3 User Guide.

+ * @public + */ +export interface Transition { + /** + *

Indicates when objects are transitioned to the specified storage class. The date value + * must be in ISO 8601 format. The time is always midnight UTC.

+ * @public + */ + Date?: Date | undefined; + /** + *

Indicates the number of days after creation when objects are transitioned to the + * specified storage class. If the specified storage class is INTELLIGENT_TIERING, + * GLACIER_IR, GLACIER, or DEEP_ARCHIVE, valid values are + * 0 or positive integers. If the specified storage class is STANDARD_IA + * or ONEZONE_IA, valid values are positive integers greater than 30. Be + * aware that some storage classes have a minimum storage duration and that you're charged for + * transitioning objects before their minimum storage duration. For more information, see + * + * Constraints and considerations for transitions in the + * Amazon S3 User Guide.

+ * @public + */ + Days?: number | undefined; + /** + *

The storage class to which you want the object to transition.

+ * @public + */ + StorageClass?: TransitionStorageClass | undefined; +} +/** + *

A lifecycle rule for individual objects in an Amazon S3 bucket.

+ *

For more information, see Managing your storage lifecycle in the Amazon S3 User Guide.

+ * @public + */ +export interface LifecycleRule { + /** + *

Specifies the expiration for the lifecycle of the object in the form of date, days and, + * whether the object has a delete marker.

+ * @public + */ + Expiration?: LifecycleExpiration | undefined; + /** + *

Unique identifier for the rule. The value cannot be longer than 255 characters.

+ * @public + */ + ID?: string | undefined; + /** + *

Prefix identifying one or more objects to which the rule applies. This is + * no longer used; use Filter instead.

+ * + *

Replacement must be made for object keys containing special characters (such as carriage returns) when using + * XML requests. For more information, see + * XML related object key constraints.

+ *
+ * + * @deprecated + * @public + */ + Prefix?: string | undefined; + /** + *

The Filter is used to identify objects that a Lifecycle Rule applies to. A + * Filter must have exactly one of Prefix, Tag, or + * And specified. Filter is required if the + * LifecycleRule does not contain a Prefix element.

+ * + *

+ * Tag filters are not supported for directory buckets.

+ *
+ * @public + */ + Filter?: LifecycleRuleFilter | undefined; + /** + *

If 'Enabled', the rule is currently being applied. If 'Disabled', the rule is not + * currently being applied.

+ * @public + */ + Status: ExpirationStatus | undefined; + /** + *

Specifies when an Amazon S3 object transitions to a specified storage class.

+ * + *

This parameter applies to general purpose buckets only. It is not supported for + * directory bucket lifecycle configurations.

+ *
+ * @public + */ + Transitions?: Transition[] | undefined; + /** + *

Specifies the transition rule for the lifecycle rule that describes when noncurrent + * objects transition to a specific storage class. If your bucket is versioning-enabled (or + * versioning is suspended), you can set this action to request that Amazon S3 transition + * noncurrent object versions to a specific storage class at a set period in the object's + * lifetime.

+ * + *

This parameter applies to general purpose buckets only. It is not supported for + * directory bucket lifecycle configurations.

+ *
+ * @public + */ + NoncurrentVersionTransitions?: NoncurrentVersionTransition[] | undefined; + /** + *

Specifies when noncurrent object versions expire. Upon expiration, Amazon S3 permanently + * deletes the noncurrent object versions. You set this lifecycle configuration action on a + * bucket that has versioning enabled (or suspended) to request that Amazon S3 delete noncurrent + * object versions at a specific period in the object's lifetime.

+ * + *

This parameter applies to general purpose buckets only. It is not supported for + * directory bucket lifecycle configurations.

+ *
+ * @public + */ + NoncurrentVersionExpiration?: NoncurrentVersionExpiration | undefined; + /** + *

Specifies the days since the initiation of an incomplete multipart upload that Amazon S3 will + * wait before permanently removing all parts of the upload. For more information, see + * Aborting Incomplete Multipart Uploads Using a Bucket Lifecycle Configuration in + * the Amazon S3 User Guide.

+ * @public + */ + AbortIncompleteMultipartUpload?: AbortIncompleteMultipartUpload | undefined; +} +/** + * @public + * @enum + */ +export declare const TransitionDefaultMinimumObjectSize: { + readonly all_storage_classes_128K: "all_storage_classes_128K"; + readonly varies_by_storage_class: "varies_by_storage_class"; +}; +/** + * @public + */ +export type TransitionDefaultMinimumObjectSize = (typeof TransitionDefaultMinimumObjectSize)[keyof typeof TransitionDefaultMinimumObjectSize]; +/** + * @public + */ +export interface GetBucketLifecycleConfigurationOutput { + /** + *

Container for a lifecycle rule.

+ * @public + */ + Rules?: LifecycleRule[] | undefined; + /** + *

Indicates which default minimum object size behavior is applied to the lifecycle + * configuration.

+ * + *

This parameter applies to general purpose buckets only. It isn't supported for + * directory bucket lifecycle configurations.

+ *
+ *
+ * - all_storage_classes_128K - Objects smaller than 128 KB will not transition
+ *   to any storage class by default.
+ * - varies_by_storage_class - Objects smaller than 128 KB will transition to
+ *   Glacier Flexible Retrieval or Glacier Deep Archive storage classes. By
+ *   default, all other storage classes will prevent transitions smaller than
+ *   128 KB.
+ *

To customize the minimum object size for any transition, you can add a filter that specifies a custom ObjectSizeGreaterThan or ObjectSizeLessThan in the body of your transition rule. Custom filters always take precedence over the default transition behavior.

+ * @public + */ + TransitionDefaultMinimumObjectSize?: TransitionDefaultMinimumObjectSize | undefined; +} +/** + * @public + */ +export interface GetBucketLifecycleConfigurationRequest { + /** + *

The name of the bucket for which to get the lifecycle information.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * + *

This parameter applies to general purpose buckets only. It is not supported for + * directory bucket lifecycle configurations.

+ *
+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + */ +export interface GetBucketLocationOutput { + /** + *
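+// Illustrative usage sketch (not part of the generated SDK file): listing a
+// bucket's lifecycle rules and the default minimum-object-size behavior.
+// The bucket name is a hypothetical placeholder.
+//
+//   import { S3Client, GetBucketLifecycleConfigurationCommand } from "@aws-sdk/client-s3";
+//   const s3 = new S3Client({});
+//   const out = await s3.send(new GetBucketLifecycleConfigurationCommand({ Bucket: "example-bucket" }));
+//   for (const rule of out.Rules ?? []) console.log(rule.ID, rule.Status, rule.Transitions);
+//   console.log(out.TransitionDefaultMinimumObjectSize);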

Specifies the Region where the bucket resides. For a list of all the Amazon S3 supported + * location constraints by Region, see Regions and Endpoints.

+ *

Buckets in Region us-east-1 have a LocationConstraint of + * null. Buckets with a LocationConstraint of EU reside in eu-west-1.

+ * @public + */ + LocationConstraint?: BucketLocationConstraint | undefined; +} +/** + * @public + */ +export interface GetBucketLocationRequest { + /** + *

The name of the bucket for which to get the location.

+ *

When you use this API operation with an access point, provide the alias of the access point in place of the bucket name.

+ *

When you use this API operation with an Object Lambda access point, provide the alias of the Object Lambda access point in place of the bucket name. + * If the Object Lambda access point alias in a request is not valid, the error code InvalidAccessPointAliasError is returned. + * For more information about InvalidAccessPointAliasError, see List of + * Error Codes.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + * @enum + */ +export declare const BucketLogsPermission: { + readonly FULL_CONTROL: "FULL_CONTROL"; + readonly READ: "READ"; + readonly WRITE: "WRITE"; +}; +/** + * @public + */ +export type BucketLogsPermission = (typeof BucketLogsPermission)[keyof typeof BucketLogsPermission]; +/** + *
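+// Illustrative usage sketch (not part of the generated SDK file): resolving a
+// bucket's Region, normalizing the null LocationConstraint that buckets in
+// us-east-1 return, as noted above. The bucket name is hypothetical.
+//
+//   import { S3Client, GetBucketLocationCommand } from "@aws-sdk/client-s3";
+//   const s3 = new S3Client({});
+//   const { LocationConstraint } = await s3.send(new GetBucketLocationCommand({ Bucket: "example-bucket" }));
+//   const region = LocationConstraint ?? "us-east-1";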

Container for granting information.

+ *

Buckets that use the bucket owner enforced setting for Object Ownership don't support + * target grants. For more information, see Permissions server access log delivery in the + * Amazon S3 User Guide.

+ * @public + */ +export interface TargetGrant { + /** + *

Container for the person being granted permissions.

+ * @public + */ + Grantee?: Grantee | undefined; + /** + *

Logging permissions assigned to the grantee for the bucket.

+ * @public + */ + Permission?: BucketLogsPermission | undefined; +} +/** + * @public + * @enum + */ +export declare const PartitionDateSource: { + readonly DeliveryTime: "DeliveryTime"; + readonly EventTime: "EventTime"; +}; +/** + * @public + */ +export type PartitionDateSource = (typeof PartitionDateSource)[keyof typeof PartitionDateSource]; +/** + *

Amazon S3 keys for log objects are partitioned in the following format:

+ *

+ * [DestinationPrefix][SourceAccountId]/[SourceRegion]/[SourceBucket]/[YYYY]/[MM]/[DD]/[YYYY]-[MM]-[DD]-[hh]-[mm]-[ss]-[UniqueString] + *

+ *

PartitionedPrefix defaults to EventTime delivery when server access logs are + * delivered.

+ * @public + */ +export interface PartitionedPrefix { + /** + *

Specifies the partition date source for the partitioned prefix. + * PartitionDateSource can be EventTime or + * DeliveryTime.

+ *

For DeliveryTime, the time in the log file names corresponds to the + * delivery time for the log files.

+ *

For EventTime, the logs delivered are for a specific day only. The year, month, and day correspond to the day on which the event occurred, and the hour, minutes, and seconds are set to 00 in the key.

+ * @public + */ + PartitionDateSource?: PartitionDateSource | undefined; +} +/** + *

To use simple format for S3 keys for log objects, set SimplePrefix to an empty + * object.

+ *

+ * [DestinationPrefix][YYYY]-[MM]-[DD]-[hh]-[mm]-[ss]-[UniqueString] + *

+ * @public + */ +export interface SimplePrefix { +} +/** + *

Amazon S3 key format for log objects. Only one format, PartitionedPrefix or + * SimplePrefix, is allowed.

+ * @public + */ +export interface TargetObjectKeyFormat { + /** + *

To use the simple format for S3 keys for log objects. To specify SimplePrefix format, + * set SimplePrefix to \{\}.

+ * @public + */ + SimplePrefix?: SimplePrefix | undefined; + /** + *

Partitioned S3 key for log objects.

+ * @public + */ + PartitionedPrefix?: PartitionedPrefix | undefined; +} +/** + *

Describes where logs are stored and the prefix that Amazon S3 assigns to all log object keys + * for a bucket. For more information, see PUT Bucket logging in the + * Amazon S3 API Reference.

+ * @public + */ +export interface LoggingEnabled { + /** + *

Specifies the bucket where you want Amazon S3 to store server access logs. You can have your + * logs delivered to any bucket that you own, including the same bucket that is being logged. + * You can also configure multiple buckets to deliver their logs to the same target bucket. In + * this case, you should choose a different TargetPrefix for each source bucket + * so that the delivered log files can be distinguished by key.

+ * @public + */ + TargetBucket: string | undefined; + /** + *

Container for granting information.

+ *

Buckets that use the bucket owner enforced setting for Object Ownership don't support + * target grants. For more information, see Permissions for server access log delivery in the + * Amazon S3 User Guide.

+ * @public + */ + TargetGrants?: TargetGrant[] | undefined; + /** + *

A prefix for all log object keys. If you store log files from multiple Amazon S3 buckets in a + * single bucket, you can use a prefix to distinguish which log files came from which + * bucket.

+ * @public + */ + TargetPrefix: string | undefined; + /** + *

Amazon S3 key format for log objects.

+ * @public + */ + TargetObjectKeyFormat?: TargetObjectKeyFormat | undefined; +} +/** + * @public + */ +export interface GetBucketLoggingOutput { + /** + *

Describes where logs are stored and the prefix that Amazon S3 assigns to all log object keys + * for a bucket. For more information, see PUT Bucket logging in the + * Amazon S3 API Reference.

+ * @public + */ + LoggingEnabled?: LoggingEnabled | undefined; +} +/** + * @public + */ +export interface GetBucketLoggingRequest { + /** + *

The bucket name for which to get the logging information.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + *
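+// Illustrative usage sketch (not part of the generated SDK file): checking
+// whether server access logging is enabled and where logs are delivered.
+// The bucket name is hypothetical.
+//
+//   import { S3Client, GetBucketLoggingCommand } from "@aws-sdk/client-s3";
+//   const s3 = new S3Client({});
+//   const { LoggingEnabled } = await s3.send(new GetBucketLoggingCommand({ Bucket: "example-bucket" }));
+//   if (LoggingEnabled) console.log(LoggingEnabled.TargetBucket, LoggingEnabled.TargetPrefix);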

+ * If the CreateBucketMetadataTableConfiguration request succeeds, but S3 Metadata was + * unable to create the table, this structure contains the error code and error message. + *

+ * @public + */ +export interface ErrorDetails { + /** + *

+ * If the CreateBucketMetadataTableConfiguration request succeeds, but S3 Metadata was + * unable to create the table, this structure contains the error code. The possible error codes and + * error messages are as follows: + *

+ *
+ * - AccessDeniedCreatingResources - You don't have sufficient permissions to
+ *   create the required resources. Make sure that you have s3tables:CreateNamespace,
+ *   s3tables:CreateTable, s3tables:GetTable and s3tables:PutTablePolicy
+ *   permissions, and then try again. To create a new metadata table, you must
+ *   delete the metadata configuration for this bucket, and then create a new
+ *   metadata configuration.
+ * - AccessDeniedWritingToTable - Unable to write to the metadata table because
+ *   of missing resource permissions. To fix the resource policy, Amazon S3 needs
+ *   to create a new metadata table. To create a new metadata table, you must
+ *   delete the metadata configuration for this bucket, and then create a new
+ *   metadata configuration.
+ * - DestinationTableNotFound - The destination table doesn't exist. To create a
+ *   new metadata table, you must delete the metadata configuration for this
+ *   bucket, and then create a new metadata configuration.
+ * - ServerInternalError - An internal error has occurred. To create a new
+ *   metadata table, you must delete the metadata configuration for this bucket,
+ *   and then create a new metadata configuration.
+ * - TableAlreadyExists - The table that you specified already exists in the
+ *   table bucket's namespace. Specify a different table name. To create a new
+ *   metadata table, you must delete the metadata configuration for this bucket,
+ *   and then create a new metadata configuration.
+ * - TableBucketNotFound - The table bucket that you specified doesn't exist in
+ *   this Amazon Web Services Region and account. Create or choose a different
+ *   table bucket. To create a new metadata table, you must delete the metadata
+ *   configuration for this bucket, and then create a new metadata configuration.
+ * @public + */ + ErrorCode?: string | undefined; + /** + *

+ * If the CreateBucketMetadataTableConfiguration request succeeds, but S3 Metadata was + * unable to create the table, this structure contains the error message. The possible error codes and + * error messages are as follows: + *

+ *
+ * - AccessDeniedCreatingResources - You don't have sufficient permissions to
+ *   create the required resources. Make sure that you have s3tables:CreateNamespace,
+ *   s3tables:CreateTable, s3tables:GetTable and s3tables:PutTablePolicy
+ *   permissions, and then try again. To create a new metadata table, you must
+ *   delete the metadata configuration for this bucket, and then create a new
+ *   metadata configuration.
+ * - AccessDeniedWritingToTable - Unable to write to the metadata table because
+ *   of missing resource permissions. To fix the resource policy, Amazon S3 needs
+ *   to create a new metadata table. To create a new metadata table, you must
+ *   delete the metadata configuration for this bucket, and then create a new
+ *   metadata configuration.
+ * - DestinationTableNotFound - The destination table doesn't exist. To create a
+ *   new metadata table, you must delete the metadata configuration for this
+ *   bucket, and then create a new metadata configuration.
+ * - ServerInternalError - An internal error has occurred. To create a new
+ *   metadata table, you must delete the metadata configuration for this bucket,
+ *   and then create a new metadata configuration.
+ * - TableAlreadyExists - The table that you specified already exists in the
+ *   table bucket's namespace. Specify a different table name. To create a new
+ *   metadata table, you must delete the metadata configuration for this bucket,
+ *   and then create a new metadata configuration.
+ * - TableBucketNotFound - The table bucket that you specified doesn't exist in
+ *   this Amazon Web Services Region and account. Create or choose a different
+ *   table bucket. To create a new metadata table, you must delete the metadata
+ *   configuration for this bucket, and then create a new metadata configuration.
+ * @public + */ + ErrorMessage?: string | undefined; +} +/** + *

+ * The destination information for the metadata table configuration. The destination table bucket + * must be in the same Region and Amazon Web Services account as the general purpose bucket. The specified metadata + * table name must be unique within the aws_s3_metadata namespace in the destination + * table bucket. + *

+ * @public + */ +export interface S3TablesDestinationResult { + /** + *

+ * The Amazon Resource Name (ARN) for the table bucket that's specified as the + * destination in the metadata table configuration. The destination table bucket + * must be in the same Region and Amazon Web Services account as the general purpose bucket. + *

+ * @public + */ + TableBucketArn: string | undefined; + /** + *

+ * The name for the metadata table in your metadata table configuration. The specified metadata + * table name must be unique within the aws_s3_metadata namespace in the destination + * table bucket. + *

+ * @public + */ + TableName: string | undefined; + /** + *

+ * The Amazon Resource Name (ARN) for the metadata table in the metadata table configuration. The + * specified metadata table name must be unique within the aws_s3_metadata namespace + * in the destination table bucket. + *

+ * @public + */ + TableArn: string | undefined; + /** + *

+ * The table bucket namespace for the metadata table in your metadata table configuration. This value + * is always aws_s3_metadata. + *

+ * @public + */ + TableNamespace: string | undefined; +} +/** + *

+ * The metadata table configuration for a general purpose bucket. The destination table bucket + * must be in the same Region and Amazon Web Services account as the general purpose bucket. The specified metadata + * table name must be unique within the aws_s3_metadata namespace in the destination + * table bucket. + *

+ * @public + */ +export interface MetadataTableConfigurationResult { + /** + *

+ * The destination information for the metadata table configuration. The destination table bucket + * must be in the same Region and Amazon Web Services account as the general purpose bucket. The specified metadata + * table name must be unique within the aws_s3_metadata namespace in the destination + * table bucket. + *

+ * @public + */ + S3TablesDestinationResult: S3TablesDestinationResult | undefined; +} +/** + *

+ * The metadata table configuration for a general purpose bucket. + *

+ * @public + */ +export interface GetBucketMetadataTableConfigurationResult { + /** + *

+ * The metadata table configuration for a general purpose bucket. + *

+ * @public + */ + MetadataTableConfigurationResult: MetadataTableConfigurationResult | undefined; + /** + *

+ * The status of the metadata table. The status values are: + *

+ *
+ * - CREATING - The metadata table is in the process of being created in the
+ *   specified table bucket.
+ * - ACTIVE - The metadata table has been created successfully and records are
+ *   being delivered to the table.
+ * - FAILED - Amazon S3 is unable to create the metadata table, or Amazon S3 is
+ *   unable to deliver records. See ErrorDetails for details.
+ * @public + */ + Status: string | undefined; + /** + *

+ * If the CreateBucketMetadataTableConfiguration request succeeds, but S3 Metadata was + * unable to create the table, this structure contains the error code and error message. + *

+ * @public + */ + Error?: ErrorDetails | undefined; +} +/** + * @public + */ +export interface GetBucketMetadataTableConfigurationOutput { + /** + *

+ * The metadata table configuration for the general purpose bucket. + *

+ * @public + */ + GetBucketMetadataTableConfigurationResult?: GetBucketMetadataTableConfigurationResult | undefined; +} +/** + * @public + */ +export interface GetBucketMetadataTableConfigurationRequest { + /** + *

+ * The general purpose bucket that contains the metadata table configuration that you want to retrieve. + *

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

+ * The expected owner of the general purpose bucket that you want to retrieve the metadata table configuration from. + *

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + *
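+// Illustrative usage sketch (not part of the generated SDK file): reading a
+// metadata table configuration, assuming this client version also exports a
+// matching GetBucketMetadataTableConfigurationCommand. The bucket name is
+// hypothetical.
+//
+//   import { S3Client, GetBucketMetadataTableConfigurationCommand } from "@aws-sdk/client-s3";
+//   const s3 = new S3Client({});
+//   const out = await s3.send(new GetBucketMetadataTableConfigurationCommand({ Bucket: "example-bucket" }));
+//   const result = out.GetBucketMetadataTableConfigurationResult;
+//   if (result?.Status === "FAILED") console.error(result.Error?.ErrorCode, result.Error?.ErrorMessage);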

A conjunction (logical AND) of predicates, which is used in evaluating a metrics filter. + * The operator must have at least two predicates, and an object must match all of the + * predicates in order for the filter to apply.

+ * @public + */ +export interface MetricsAndOperator { + /** + *

The prefix used when evaluating an AND predicate.

+ * @public + */ + Prefix?: string | undefined; + /** + *

The list of tags used when evaluating an AND predicate.

+ * @public + */ + Tags?: Tag[] | undefined; + /** + *

The access point ARN used when evaluating an AND predicate.

+ * @public + */ + AccessPointArn?: string | undefined; +} +/** + *

Specifies a metrics configuration filter. The metrics configuration only includes + * objects that meet the filter's criteria. A filter must be a prefix, an object tag, an + * access point ARN, or a conjunction (MetricsAndOperator). For more information, see PutBucketMetricsConfiguration.

+ * @public + */ +export type MetricsFilter = MetricsFilter.AccessPointArnMember | MetricsFilter.AndMember | MetricsFilter.PrefixMember | MetricsFilter.TagMember | MetricsFilter.$UnknownMember; +/** + * @public + */ +export declare namespace MetricsFilter { + /** + *

The prefix used when evaluating a metrics filter.

+ * @public + */ + interface PrefixMember { + Prefix: string; + Tag?: never; + AccessPointArn?: never; + And?: never; + $unknown?: never; + } + /** + *

The tag used when evaluating a metrics filter.

+ * @public + */ + interface TagMember { + Prefix?: never; + Tag: Tag; + AccessPointArn?: never; + And?: never; + $unknown?: never; + } + /** + *

The access point ARN used when evaluating a metrics filter.

+ * @public + */ + interface AccessPointArnMember { + Prefix?: never; + Tag?: never; + AccessPointArn: string; + And?: never; + $unknown?: never; + } + /** + *

A conjunction (logical AND) of predicates, which is used in evaluating a metrics filter. + * The operator must have at least two predicates, and an object must match all of the + * predicates in order for the filter to apply.

+ * @public + */ + interface AndMember { + Prefix?: never; + Tag?: never; + AccessPointArn?: never; + And: MetricsAndOperator; + $unknown?: never; + } + /** + * @public + */ + interface $UnknownMember { + Prefix?: never; + Tag?: never; + AccessPointArn?: never; + And?: never; + $unknown: [string, any]; + } + interface Visitor { + Prefix: (value: string) => T; + Tag: (value: Tag) => T; + AccessPointArn: (value: string) => T; + And: (value: MetricsAndOperator) => T; + _: (name: string, value: any) => T; + } + const visit: (value: MetricsFilter, visitor: Visitor) => T; +} +/** + *
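+// Illustrative sketch (not part of the generated SDK file): handling the
+// MetricsFilter union with the visitor declared above. The filter value is
+// hypothetical; exactly one member of the union may be set.
+//
+//   const filter: MetricsFilter = { Prefix: "photos/" };
+//   const label = MetricsFilter.visit(filter, {
+//     Prefix: (p) => `prefix:${p}`,
+//     Tag: (t) => `tag:${t.Key}=${t.Value}`,
+//     AccessPointArn: (arn) => `access-point:${arn}`,
+//     And: (and) => `and:${and.Prefix ?? ""}`,
+//     _: () => "unknown",
+//   });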

Specifies a metrics configuration for the CloudWatch request metrics (specified by the + * metrics configuration ID) from an Amazon S3 bucket. If you're updating an existing metrics + * configuration, note that this is a full replacement of the existing metrics configuration. + * If you don't include the elements you want to keep, they are erased. For more information, + * see PutBucketMetricsConfiguration.

+ * @public + */ +export interface MetricsConfiguration { + /** + *

The ID used to identify the metrics configuration. The ID has a 64 character limit and + * can only contain letters, numbers, periods, dashes, and underscores.

+ * @public + */ + Id: string | undefined; + /** + *

Specifies a metrics configuration filter. The metrics configuration will only include + * objects that meet the filter's criteria. A filter must be a prefix, an object tag, an + * access point ARN, or a conjunction (MetricsAndOperator).

+ * @public + */ + Filter?: MetricsFilter | undefined; +} +/** + * @public + */ +export interface GetBucketMetricsConfigurationOutput { + /** + *

Specifies the metrics configuration.

+ * @public + */ + MetricsConfiguration?: MetricsConfiguration | undefined; +} +/** + * @public + */ +export interface GetBucketMetricsConfigurationRequest { + /** + *

The name of the bucket containing the metrics configuration to retrieve.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The ID used to identify the metrics configuration. The ID has a 64 character limit and + * can only contain letters, numbers, periods, dashes, and underscores.

+ * @public + */ + Id: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + */ +export interface GetBucketNotificationConfigurationRequest { + /** + *
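+// Illustrative usage sketch (not part of the generated SDK file): fetching a
+// metrics configuration by its ID. The bucket name and ID are hypothetical.
+//
+//   import { S3Client, GetBucketMetricsConfigurationCommand } from "@aws-sdk/client-s3";
+//   const s3 = new S3Client({});
+//   const { MetricsConfiguration } = await s3.send(
+//     new GetBucketMetricsConfigurationCommand({ Bucket: "example-bucket", Id: "EntireBucket" }),
+//   );
+//   console.log(MetricsConfiguration?.Id, MetricsConfiguration?.Filter);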

The name of the bucket for which to get the notification configuration.

+ *

When you use this API operation with an access point, provide the alias of the access point in place of the bucket name.

+ *

When you use this API operation with an Object Lambda access point, provide the alias of the Object Lambda access point in place of the bucket name. + * If the Object Lambda access point alias in a request is not valid, the error code InvalidAccessPointAliasError is returned. + * For more information about InvalidAccessPointAliasError, see List of + * Error Codes.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + *

A container for specifying the configuration for Amazon EventBridge.

+ * @public + */ +export interface EventBridgeConfiguration { +} +/** + * @public + * @enum + */ +export declare const Event: { + readonly s3_IntelligentTiering: "s3:IntelligentTiering"; + readonly s3_LifecycleExpiration_: "s3:LifecycleExpiration:*"; + readonly s3_LifecycleExpiration_Delete: "s3:LifecycleExpiration:Delete"; + readonly s3_LifecycleExpiration_DeleteMarkerCreated: "s3:LifecycleExpiration:DeleteMarkerCreated"; + readonly s3_LifecycleTransition: "s3:LifecycleTransition"; + readonly s3_ObjectAcl_Put: "s3:ObjectAcl:Put"; + readonly s3_ObjectCreated_: "s3:ObjectCreated:*"; + readonly s3_ObjectCreated_CompleteMultipartUpload: "s3:ObjectCreated:CompleteMultipartUpload"; + readonly s3_ObjectCreated_Copy: "s3:ObjectCreated:Copy"; + readonly s3_ObjectCreated_Post: "s3:ObjectCreated:Post"; + readonly s3_ObjectCreated_Put: "s3:ObjectCreated:Put"; + readonly s3_ObjectRemoved_: "s3:ObjectRemoved:*"; + readonly s3_ObjectRemoved_Delete: "s3:ObjectRemoved:Delete"; + readonly s3_ObjectRemoved_DeleteMarkerCreated: "s3:ObjectRemoved:DeleteMarkerCreated"; + readonly s3_ObjectRestore_: "s3:ObjectRestore:*"; + readonly s3_ObjectRestore_Completed: "s3:ObjectRestore:Completed"; + readonly s3_ObjectRestore_Delete: "s3:ObjectRestore:Delete"; + readonly s3_ObjectRestore_Post: "s3:ObjectRestore:Post"; + readonly s3_ObjectTagging_: "s3:ObjectTagging:*"; + readonly s3_ObjectTagging_Delete: "s3:ObjectTagging:Delete"; + readonly s3_ObjectTagging_Put: "s3:ObjectTagging:Put"; + readonly s3_ReducedRedundancyLostObject: "s3:ReducedRedundancyLostObject"; + readonly s3_Replication_: "s3:Replication:*"; + readonly s3_Replication_OperationFailedReplication: "s3:Replication:OperationFailedReplication"; + readonly s3_Replication_OperationMissedThreshold: "s3:Replication:OperationMissedThreshold"; + readonly s3_Replication_OperationNotTracked: "s3:Replication:OperationNotTracked"; + readonly s3_Replication_OperationReplicatedAfterThreshold: "s3:Replication:OperationReplicatedAfterThreshold"; +}; +/** + * @public + */ +export type Event = (typeof Event)[keyof typeof Event]; +/** + * @public + * @enum + */ +export declare const FilterRuleName: { + readonly prefix: "prefix"; + readonly suffix: "suffix"; +}; +/** + * @public + */ +export type FilterRuleName = (typeof FilterRuleName)[keyof typeof FilterRuleName]; +/** + *

Specifies the Amazon S3 object key name to filter on. An object key name is the name assigned + * to an object in your Amazon S3 bucket. You specify whether to filter on the suffix or prefix of + * the object key name. A prefix is a specific string of characters at the beginning of an + * object key name, which you can use to organize objects. For example, you can start the key + * names of related objects with a prefix, such as 2023- or + * engineering/. Then, you can use FilterRule to find objects in + * a bucket with key names that have the same prefix. A suffix is similar to a prefix, but it + * is at the end of the object key name instead of at the beginning.

+ * @public + */ +export interface FilterRule { + /** + *

The object key name prefix or suffix identifying one or more objects to which the + * filtering rule applies. The maximum length is 1,024 characters. Overlapping prefixes and + * suffixes are not supported. For more information, see Configuring Event Notifications + * in the Amazon S3 User Guide.

+ * @public + */ + Name?: FilterRuleName | undefined; + /** + *

The value that the filter searches for in object key names.

+ * @public + */ + Value?: string | undefined; +} +/** + *

A container for object key name prefix and suffix filtering rules.

+ * @public + */ +export interface S3KeyFilter { + /** + *

A list of containers for the key-value pair that defines the criteria for the filter + * rule.

+ * @public + */ + FilterRules?: FilterRule[] | undefined; +} +/** + *

Specifies object key name filtering rules. For information about key name filtering, see + * Configuring event + * notifications using object key name filtering in the + * Amazon S3 User Guide.

+ * @public + */ +export interface NotificationConfigurationFilter { + /** + *

A container for object key name prefix and suffix filtering rules.

+ * @public + */ + Key?: S3KeyFilter | undefined; +} +/** + *

A container for specifying the configuration for Lambda notifications.

+ * @public + */ +export interface LambdaFunctionConfiguration { + /** + *

An optional unique identifier for configurations in a notification configuration. If you + * don't provide one, Amazon S3 will assign an ID.

+ * @public + */ + Id?: string | undefined; + /** + *

The Amazon Resource Name (ARN) of the Lambda function that Amazon S3 invokes when the + * specified event type occurs.

+ * @public + */ + LambdaFunctionArn: string | undefined; + /** + *

The Amazon S3 bucket event for which to invoke the Lambda function. For more information, + * see Supported + * Event Types in the Amazon S3 User Guide.

+ * @public + */ + Events: Event[] | undefined; + /** + *

Specifies object key name filtering rules. For information about key name filtering, see + * Configuring event + * notifications using object key name filtering in the + * Amazon S3 User Guide.

+ * @public + */ + Filter?: NotificationConfigurationFilter | undefined; +} +/** + *

Specifies the configuration for publishing messages to an Amazon Simple Queue Service + * (Amazon SQS) queue when Amazon S3 detects specified events.

+ * @public + */ +export interface QueueConfiguration { + /** + *

An optional unique identifier for configurations in a notification configuration. If you + * don't provide one, Amazon S3 will assign an ID.

+ * @public + */ + Id?: string | undefined; + /** + *

The Amazon Resource Name (ARN) of the Amazon SQS queue to which Amazon S3 publishes a message + * when it detects events of the specified type.

+ * @public + */ + QueueArn: string | undefined; + /** + *

A collection of bucket events for which to send notifications.

+ * @public + */ + Events: Event[] | undefined; + /** + *

Specifies object key name filtering rules. For information about key name filtering, see + * Configuring event + * notifications using object key name filtering in the + * Amazon S3 User Guide.

+ * @public + */ + Filter?: NotificationConfigurationFilter | undefined; +} +/** + *

A container for specifying the configuration for publication of messages to an Amazon + * Simple Notification Service (Amazon SNS) topic when Amazon S3 detects specified events.

+ * @public + */ +export interface TopicConfiguration { + /** + *

An optional unique identifier for configurations in a notification configuration. If you + * don't provide one, Amazon S3 will assign an ID.

+ * @public + */ + Id?: string | undefined; + /** + *

The Amazon Resource Name (ARN) of the Amazon SNS topic to which Amazon S3 publishes a message + * when it detects events of the specified type.

+ * @public + */ + TopicArn: string | undefined; + /** + *

The Amazon S3 bucket event about which to send notifications. For more information, see + * Supported + * Event Types in the Amazon S3 User Guide.

+ * @public + */ + Events: Event[] | undefined; + /** + *

Specifies object key name filtering rules. For information about key name filtering, see + * Configuring event + * notifications using object key name filtering in the + * Amazon S3 User Guide.

+ * @public + */ + Filter?: NotificationConfigurationFilter | undefined; +} +/** + *

A container for specifying the notification configuration of the bucket. If this element + * is empty, notifications are turned off for the bucket.

+ * @public + */ +export interface NotificationConfiguration { + /** + *

The topic to which notifications are sent and the events for which notifications are + * generated.

+ * @public + */ + TopicConfigurations?: TopicConfiguration[] | undefined; + /** + *

The Amazon Simple Queue Service queues to publish messages to and the events for which + * to publish messages.

+ * @public + */ + QueueConfigurations?: QueueConfiguration[] | undefined; + /** + *

Describes the Lambda functions to invoke and the events for which to invoke + * them.

+ * @public + */ + LambdaFunctionConfigurations?: LambdaFunctionConfiguration[] | undefined; + /** + *

Enables delivery of events to Amazon EventBridge.

+ * @public + */ + EventBridgeConfiguration?: EventBridgeConfiguration | undefined; +} +/** + *
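+// Illustrative sketch (not part of the generated SDK file): a
+// NotificationConfiguration that routes ObjectCreated events for keys under
+// "uploads/" to an SQS queue. The queue ARN and prefix are hypothetical.
+//
+//   const notification: NotificationConfiguration = {
+//     QueueConfigurations: [{
+//       QueueArn: "arn:aws:sqs:us-east-1:111122223333:example-queue",
+//       Events: ["s3:ObjectCreated:*"],
+//       Filter: { Key: { FilterRules: [{ Name: "prefix", Value: "uploads/" }] } },
+//     }],
+//   };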

The container element for an ownership control rule.

+ * @public + */ +export interface OwnershipControlsRule { + /** + *

The container element for object ownership for a bucket's ownership controls.

+ *

+ * BucketOwnerPreferred - Objects uploaded to the bucket change ownership to + * the bucket owner if the objects are uploaded with the + * bucket-owner-full-control canned ACL.

+ *

+ * ObjectWriter - The uploading account will own the object if the object is + * uploaded with the bucket-owner-full-control canned ACL.

+ *

+ * BucketOwnerEnforced - Access control lists (ACLs) are disabled and no + * longer affect permissions. The bucket owner automatically owns and has full control over + * every object in the bucket. The bucket only accepts PUT requests that don't specify an ACL + * or specify bucket owner full control ACLs (such as the predefined + * bucket-owner-full-control canned ACL or a custom ACL in XML format that + * grants the same permissions).

+ *

By default, ObjectOwnership is set to BucketOwnerEnforced and + * ACLs are disabled. We recommend keeping ACLs disabled, except in uncommon use cases where + * you must control access for each object individually. For more information about S3 Object + * Ownership, see Controlling ownership of + * objects and disabling ACLs for your bucket in the + * Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets. Directory buckets use the bucket owner enforced setting for S3 Object Ownership.

+ *
+ * @public + */ + ObjectOwnership: ObjectOwnership | undefined; +} +/** + *

The container element for a bucket's ownership controls.

+ * @public + */ +export interface OwnershipControls { + /** + *

The container element for an ownership control rule.

+ * @public + */ + Rules: OwnershipControlsRule[] | undefined; +} +/** + * @public + */ +export interface GetBucketOwnershipControlsOutput { + /** + *

The OwnershipControls (BucketOwnerEnforced, BucketOwnerPreferred, or + * ObjectWriter) currently in effect for this Amazon S3 bucket.

+ * @public + */ + OwnershipControls?: OwnershipControls | undefined; +} +/** + * @public + */ +export interface GetBucketOwnershipControlsRequest { + /** + *

The name of the Amazon S3 bucket whose OwnershipControls you want to retrieve. + *

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + */ +export interface GetBucketPolicyOutput { + /** + *
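+// Illustrative usage sketch (not part of the generated SDK file): reading the
+// ObjectOwnership setting currently in effect. The bucket name is hypothetical.
+//
+//   import { S3Client, GetBucketOwnershipControlsCommand } from "@aws-sdk/client-s3";
+//   const s3 = new S3Client({});
+//   const { OwnershipControls } = await s3.send(
+//     new GetBucketOwnershipControlsCommand({ Bucket: "example-bucket" }),
+//   );
+//   console.log(OwnershipControls?.Rules?.[0]?.ObjectOwnership); // e.g. "BucketOwnerEnforced"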

The bucket policy as a JSON document.

+ * @public + */ + Policy?: string | undefined; +} +/** + * @public + */ +export interface GetBucketPolicyRequest { + /** + *

The bucket name to get the bucket policy for.

+ *

+ * Directory buckets - When you use this operation with a directory bucket, you must use path-style requests in the format https://s3express-control.region-code.amazonaws.com/bucket-name + * . Virtual-hosted-style requests aren't supported. Directory bucket names must be unique in the chosen Zone (Availability Zone or Local Zone). Bucket names must also follow the format + * bucket-base-name--zone-id--x-s3 (for example, + * DOC-EXAMPLE-BUCKET--usw2-az1--x-s3). For information about bucket naming restrictions, see Directory bucket naming rules in the Amazon S3 User Guide + *

+ *

+ * Access points - When you use this API operation with an access point, provide the alias of the access point in place of the bucket name.

+ *

+ * Object Lambda access points - When you use this API operation with an Object Lambda access point, provide the alias of the Object Lambda access point in place of the bucket name. + * If the Object Lambda access point alias in a request is not valid, the error code InvalidAccessPointAliasError is returned. + * For more information about InvalidAccessPointAliasError, see List of + * Error Codes.

+ * + *

Object Lambda access points are not supported by directory buckets.

+ *
+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues
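+ *
+ * @example
+ * A minimal sketch of retrieving and parsing the policy; the bucket name is a
+ * placeholder. GetBucketPolicyOutput.Policy arrives as a JSON document string.
+ * ```ts
+ * import { S3Client, GetBucketPolicyCommand } from "@aws-sdk/client-s3";
+ *
+ * const s3 = new S3Client({});
+ * const { Policy } = await s3.send(
+ *   new GetBucketPolicyCommand({ Bucket: "amzn-s3-demo-bucket" }) // placeholder
+ * );
+ * if (Policy) console.log(JSON.parse(Policy).Statement);
+ * ```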

+ * @public + */ + Bucket: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * + *

For directory buckets, this header is not supported in this API operation. If you specify this header, the request fails with the HTTP status code + * 501 Not Implemented.

+ *
+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + *

The container element for a bucket's policy status.

+ * @public + */ +export interface PolicyStatus { + /** + *

The policy status for this bucket. TRUE indicates that this bucket is + * public. FALSE indicates that the bucket is not public.

+ * @public + */ + IsPublic?: boolean | undefined; +} +/** + * @public + */ +export interface GetBucketPolicyStatusOutput { + /** + *

The policy status for the specified bucket.

+ * @public + */ + PolicyStatus?: PolicyStatus | undefined; +} +/** + * @public + */ +export interface GetBucketPolicyStatusRequest { + /** + *

The name of the Amazon S3 bucket whose policy status you want to retrieve.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues
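+ *
+ * @example
+ * A minimal sketch; the bucket name is a placeholder. PolicyStatus.IsPublic
+ * reports whether the bucket policy makes the bucket public.
+ * ```ts
+ * import { S3Client, GetBucketPolicyStatusCommand } from "@aws-sdk/client-s3";
+ *
+ * const s3 = new S3Client({});
+ * const { PolicyStatus } = await s3.send(
+ *   new GetBucketPolicyStatusCommand({ Bucket: "amzn-s3-demo-bucket" }) // placeholder
+ * );
+ * console.log(PolicyStatus?.IsPublic === true ? "public" : "not public");
+ * ```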

+ * @public + */ + Bucket: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + * @enum + */ +export declare const DeleteMarkerReplicationStatus: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +/** + * @public + */ +export type DeleteMarkerReplicationStatus = (typeof DeleteMarkerReplicationStatus)[keyof typeof DeleteMarkerReplicationStatus]; +/** + *

Specifies whether Amazon S3 replicates delete markers. If you specify a Filter + * in your replication configuration, you must also include a + * DeleteMarkerReplication element. If your Filter includes a + * Tag element, the DeleteMarkerReplication + * Status must be set to Disabled, because Amazon S3 does not support replicating + * delete markers for tag-based rules. For an example configuration, see Basic Rule Configuration.

+ *

For more information about delete marker replication, see Basic Rule + * Configuration.

+ * + *

If you are using an earlier version of the replication configuration, Amazon S3 handles + * replication of delete markers differently. For more information, see Backward Compatibility.

+ *
+ * @public + */ +export interface DeleteMarkerReplication { + /** + *

Indicates whether to replicate delete markers.

+ *
+ * @public + */ + Status?: DeleteMarkerReplicationStatus | undefined; +} +/** + *

Specifies encryption-related information for an Amazon S3 bucket that is a destination for + * replicated objects.

+ * + *

If you're specifying a customer managed KMS key, we recommend using a fully + * qualified KMS key ARN. If you use a KMS key alias instead, then KMS resolves the + * key within the requester’s account. This behavior can result in data that's encrypted + * with a KMS key that belongs to the requester, and not the bucket owner.

+ *
+ * @public + */ +export interface EncryptionConfiguration { + /** + *

Specifies the ID (Key ARN or Alias ARN) of the customer managed Amazon Web Services KMS key stored in + * Amazon Web Services Key Management Service (KMS) for the destination bucket. Amazon S3 uses this key to + * encrypt replica objects. Amazon S3 only supports symmetric encryption KMS keys. For more + * information, see Asymmetric keys in Amazon Web Services + * KMS in the Amazon Web Services Key Management Service Developer + * Guide.

+ * @public + */ + ReplicaKmsKeyID?: string | undefined; +} +/** + *

A container specifying the time value for S3 Replication Time Control (S3 RTC) and replication metrics + * EventThreshold.

+ * @public + */ +export interface ReplicationTimeValue { + /** + *

Contains an integer specifying time in minutes.

+ *

Valid value: 15

+ * @public + */ + Minutes?: number | undefined; +} +/** + * @public + * @enum + */ +export declare const MetricsStatus: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +/** + * @public + */ +export type MetricsStatus = (typeof MetricsStatus)[keyof typeof MetricsStatus]; +/** + *

A container specifying replication metrics-related settings enabling replication + * metrics and events.

+ * @public + */ +export interface Metrics { + /** + *

Specifies whether the replication metrics are enabled.

+ * @public + */ + Status: MetricsStatus | undefined; + /** + *

A container specifying the time threshold for emitting the + * s3:Replication:OperationMissedThreshold event.

+ * @public + */ + EventThreshold?: ReplicationTimeValue | undefined; +} +/** + * @public + * @enum + */ +export declare const ReplicationTimeStatus: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +/** + * @public + */ +export type ReplicationTimeStatus = (typeof ReplicationTimeStatus)[keyof typeof ReplicationTimeStatus]; +/** + *

A container specifying S3 Replication Time Control (S3 RTC) related information, including whether S3 RTC is + * enabled and the time when all objects and operations on objects must be replicated. Must be + * specified together with a Metrics block.

+ * @public + */ +export interface ReplicationTime { + /** + *

Specifies whether the replication time is enabled.

+ * @public + */ + Status: ReplicationTimeStatus | undefined; + /** + *

A container specifying the time by which replication should be complete for all objects + * and operations on objects.

+ * @public + */ + Time: ReplicationTimeValue | undefined; +} +/** + *

Specifies information about where to publish analysis or configuration results for an + * Amazon S3 bucket and S3 Replication Time Control (S3 RTC).

+ * @public + */ +export interface Destination { + /** + *

The Amazon Resource Name (ARN) of the bucket where you want Amazon S3 to store the + * results.

+ * @public + */ + Bucket: string | undefined; + /** + *

Destination bucket owner account ID. In a cross-account scenario, if you direct Amazon S3 to + * change replica ownership to the Amazon Web Services account that owns the destination bucket by + * specifying the AccessControlTranslation property, this is the account ID of + * the destination bucket owner. For more information, see Replication Additional + * Configuration: Changing the Replica Owner in the + * Amazon S3 User Guide.

+ * @public + */ + Account?: string | undefined; + /** + *

The storage class to use when replicating objects, such as S3 Standard or reduced + * redundancy. By default, Amazon S3 uses the storage class of the source object to create the + * object replica.

+ *

For valid values, see the StorageClass element of the PUT Bucket + * replication action in the Amazon S3 API Reference.

+ * @public + */ + StorageClass?: StorageClass | undefined; + /** + *

Specify this only in a cross-account scenario (where source and destination bucket + * owners are not the same), and you want to change replica ownership to the Amazon Web Services account + * that owns the destination bucket. If this is not specified in the replication + * configuration, the replicas are owned by same Amazon Web Services account that owns the source + * object.

+ * @public + */ + AccessControlTranslation?: AccessControlTranslation | undefined; + /** + *

A container that provides information about encryption. If + * SourceSelectionCriteria is specified, you must specify this element.

+ * @public + */ + EncryptionConfiguration?: EncryptionConfiguration | undefined; + /** + *

A container specifying S3 Replication Time Control (S3 RTC), including whether S3 RTC is enabled and the time + * when all objects and operations on objects must be replicated. Must be specified together + * with a Metrics block.

+ * @public + */ + ReplicationTime?: ReplicationTime | undefined; + /** + *

A container specifying replication metrics-related settings enabling replication + * metrics and events.

+ * @public + */ + Metrics?: Metrics | undefined; +} +/** + * @public + * @enum + */ +export declare const ExistingObjectReplicationStatus: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +/** + * @public + */ +export type ExistingObjectReplicationStatus = (typeof ExistingObjectReplicationStatus)[keyof typeof ExistingObjectReplicationStatus]; +/** + *

Optional configuration to replicate existing source bucket objects.

+ * + *

This parameter is no longer supported. To replicate existing objects, see Replicating existing objects with S3 Batch Replication in the + * Amazon S3 User Guide.

+ *
+ * @public + */ +export interface ExistingObjectReplication { + /** + *

Specifies whether Amazon S3 replicates existing source bucket objects.

+ * @public + */ + Status: ExistingObjectReplicationStatus | undefined; +} +/** + *

A container for specifying rule filters. The filters determine the subset of objects to + * which the rule applies. This element is required only if you specify more than one filter.

+ *

For example:

+ *
  - If you specify both a Prefix and a Tag filter, wrap these filters in an And tag.
  - If you specify a filter based on multiple tags, wrap the Tag elements in an And tag.
+ * @public + */ +export interface ReplicationRuleAndOperator { + /** + *

An object key name prefix that identifies the subset of objects to which the rule + * applies.

+ * @public + */ + Prefix?: string | undefined; + /** + *

An array of tags containing key and value pairs.

+ * @public + */ + Tags?: Tag[] | undefined; +} +/** + *

A filter that identifies the subset of objects to which the replication rule applies. A + * Filter must specify exactly one Prefix, Tag, or + * an And child element.

+ * @public + */ +export interface ReplicationRuleFilter { + /** + *

An object key name prefix that identifies the subset of objects to which the rule + * applies.

+ * + *

Replacement must be made for object keys containing special characters (such as carriage returns) when using + * XML requests. For more information, see + * XML related object key constraints.

+ *
+ * @public + */ + Prefix?: string | undefined; + /** + *

A container for specifying a tag key and value.

+ *

The rule applies only to objects that have the tag in their tag set.

+ * @public + */ + Tag?: Tag | undefined; + /** + *

A container for specifying rule filters. The filters determine the subset of objects to + * which the rule applies. This element is required only if you specify more than one filter. + * For example:

+ *
  - If you specify both a Prefix and a Tag filter, wrap these filters in an And tag.
  - If you specify a filter based on multiple tags, wrap the Tag elements in an And tag.
+ * @public + */ + And?: ReplicationRuleAndOperator | undefined; +} +/** + * @public + * @enum + */ +export declare const ReplicaModificationsStatus: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +/** + * @public + */ +export type ReplicaModificationsStatus = (typeof ReplicaModificationsStatus)[keyof typeof ReplicaModificationsStatus]; +/** + *

A filter that you can specify for selection for modifications on replicas. Amazon S3 doesn't + * replicate replica modifications by default. In the latest version of replication + * configuration (when Filter is specified), you can specify this element and set + * the status to Enabled to replicate modifications on replicas.

+ * + *

If you don't specify the Filter element, Amazon S3 assumes that the + * replication configuration is the earlier version, V1. In the earlier version, this + * element is not allowed.

+ *
+ * @public + */ +export interface ReplicaModifications { + /** + *

Specifies whether Amazon S3 replicates modifications on replicas.

+ * @public + */ + Status: ReplicaModificationsStatus | undefined; +} +/** + * @public + * @enum + */ +export declare const SseKmsEncryptedObjectsStatus: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +/** + * @public + */ +export type SseKmsEncryptedObjectsStatus = (typeof SseKmsEncryptedObjectsStatus)[keyof typeof SseKmsEncryptedObjectsStatus]; +/** + *

A container for filter information for the selection of S3 objects encrypted with Amazon Web Services + * KMS.

+ * @public + */ +export interface SseKmsEncryptedObjects { + /** + *

Specifies whether Amazon S3 replicates objects created with server-side encryption using an + * Amazon Web Services KMS key stored in Amazon Web Services Key Management Service.

+ * @public + */ + Status: SseKmsEncryptedObjectsStatus | undefined; +} +/** + *

A container that describes additional filters for identifying the source objects that + * you want to replicate. You can choose to enable or disable the replication of these + * objects. Currently, Amazon S3 supports only the filter that you can specify for objects created + * with server-side encryption using a customer managed key stored in Amazon Web Services Key Management Service + * (SSE-KMS).

+ * @public + */ +export interface SourceSelectionCriteria { + /** + *

A container for filter information for the selection of Amazon S3 objects encrypted with + * Amazon Web Services KMS. If you include SourceSelectionCriteria in the replication + * configuration, this element is required.

+ * @public + */ + SseKmsEncryptedObjects?: SseKmsEncryptedObjects | undefined; + /** + *

A filter that you can specify for selections for modifications on replicas. Amazon S3 doesn't + * replicate replica modifications by default. In the latest version of replication + * configuration (when Filter is specified), you can specify this element and set + * the status to Enabled to replicate modifications on replicas.

+ * + *

If you don't specify the Filter element, Amazon S3 assumes that the replication configuration is the earlier version, V1. In the earlier version, this element is not allowed.

+ *
+ * @public + */ + ReplicaModifications?: ReplicaModifications | undefined; +} +/** + * @public + * @enum + */ +export declare const ReplicationRuleStatus: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +/** + * @public + */ +export type ReplicationRuleStatus = (typeof ReplicationRuleStatus)[keyof typeof ReplicationRuleStatus]; +/** + *

Specifies which Amazon S3 objects to replicate and where to store the replicas.

+ * @public + */ +export interface ReplicationRule { + /** + *

A unique identifier for the rule. The maximum value is 255 characters.

+ * @public + */ + ID?: string | undefined; + /** + *

The priority indicates which rule has precedence whenever two or more replication rules + * conflict. Amazon S3 will attempt to replicate objects according to all replication rules. + * However, if there are two or more rules with the same destination bucket, then objects will + * be replicated according to the rule with the highest priority. The higher the number, the + * higher the priority.

+ *

For more information, see Replication in the + * Amazon S3 User Guide.

+ * @public + */ + Priority?: number | undefined; + /** + *

An object key name prefix that identifies the object or objects to which the rule + * applies. The maximum prefix length is 1,024 characters. To include all objects in a bucket, + * specify an empty string.

+ * + *

Replacement must be made for object keys containing special characters (such as carriage returns) when using + * XML requests. For more information, see + * XML related object key constraints.

+ *
+ * + * @deprecated + * @public + */ + Prefix?: string | undefined; + /** + *

A filter that identifies the subset of objects to which the replication rule applies. A + * Filter must specify exactly one Prefix, Tag, or + * an And child element.

+ * @public + */ + Filter?: ReplicationRuleFilter | undefined; + /** + *

Specifies whether the rule is enabled.

+ * @public + */ + Status: ReplicationRuleStatus | undefined; + /** + *

A container that describes additional filters for identifying the source objects that + * you want to replicate. You can choose to enable or disable the replication of these + * objects. Currently, Amazon S3 supports only the filter that you can specify for objects created + * with server-side encryption using a customer managed key stored in Amazon Web Services Key Management Service + * (SSE-KMS).

+ * @public + */ + SourceSelectionCriteria?: SourceSelectionCriteria | undefined; + /** + *

Optional configuration to replicate existing source bucket objects.

+ * + *

This parameter is no longer supported. To replicate existing objects, see Replicating existing objects with S3 Batch Replication in the + * Amazon S3 User Guide.

+ *
+ * @public + */ + ExistingObjectReplication?: ExistingObjectReplication | undefined; + /** + *

A container for information about the replication destination and its configurations + * including enabling the S3 Replication Time Control (S3 RTC).

+ * @public + */ + Destination: Destination | undefined; + /** + *

Specifies whether Amazon S3 replicates delete markers. If you specify a Filter + * in your replication configuration, you must also include a + * DeleteMarkerReplication element. If your Filter includes a + * Tag element, the DeleteMarkerReplication + * Status must be set to Disabled, because Amazon S3 does not support replicating + * delete markers for tag-based rules. For an example configuration, see Basic Rule Configuration.

+ *

For more information about delete marker replication, see Basic Rule + * Configuration.

+ * + *

If you are using an earlier version of the replication configuration, Amazon S3 handles + * replication of delete markers differently. For more information, see Backward Compatibility.

+ *
+ * @public + */ + DeleteMarkerReplication?: DeleteMarkerReplication | undefined; +} +/** + *

A container for replication rules. You can add up to 1,000 rules. The maximum size of a + * replication configuration is 2 MB.

+ * @public + */ +export interface ReplicationConfiguration { + /** + *

The Amazon Resource Name (ARN) of the Identity and Access Management (IAM) role that Amazon S3 assumes when + * replicating objects. For more information, see How to Set Up Replication + * in the Amazon S3 User Guide.

+ * @public + */ + Role: string | undefined; + /** + *

A container for one or more replication rules. A replication configuration must have at + * least one rule and can contain a maximum of 1,000 rules.
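+ *
+ * @example
+ * A sketch of a configuration literal that satisfies this shape; the role ARN
+ * and destination bucket ARN are placeholders. Because the rule uses Filter,
+ * it also sets DeleteMarkerReplication, as required above.
+ * ```ts
+ * import type { ReplicationConfiguration } from "@aws-sdk/client-s3";
+ *
+ * const config: ReplicationConfiguration = {
+ *   Role: "arn:aws:iam::111122223333:role/s3-replication", // placeholder
+ *   Rules: [{
+ *     ID: "replicate-docs",
+ *     Priority: 1,
+ *     Status: "Enabled",
+ *     Filter: { Prefix: "docs/" },
+ *     DeleteMarkerReplication: { Status: "Disabled" },
+ *     Destination: { Bucket: "arn:aws:s3:::amzn-s3-demo-destination" }, // placeholder
+ *   }],
+ * };
+ * ```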

+ * @public + */ + Rules: ReplicationRule[] | undefined; +} +/** + * @public + */ +export interface GetBucketReplicationOutput { + /** + *

A container for replication rules. You can add up to 1,000 rules. The maximum size of a + * replication configuration is 2 MB.

+ * @public + */ + ReplicationConfiguration?: ReplicationConfiguration | undefined; +} +/** + * @public + */ +export interface GetBucketReplicationRequest { + /** + *

The bucket name for which to get the replication information.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues
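+ *
+ * @example
+ * A minimal sketch of reading the replication configuration back; the bucket
+ * name is a placeholder.
+ * ```ts
+ * import { S3Client, GetBucketReplicationCommand } from "@aws-sdk/client-s3";
+ *
+ * const s3 = new S3Client({});
+ * const { ReplicationConfiguration } = await s3.send(
+ *   new GetBucketReplicationCommand({ Bucket: "amzn-s3-demo-bucket" }) // placeholder
+ * );
+ * console.log(ReplicationConfiguration?.Role, ReplicationConfiguration?.Rules?.length);
+ * ```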

+ * @public + */ + Bucket: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + * @enum + */ +export declare const Payer: { + readonly BucketOwner: "BucketOwner"; + readonly Requester: "Requester"; +}; +/** + * @public + */ +export type Payer = (typeof Payer)[keyof typeof Payer]; +/** + * @public + */ +export interface GetBucketRequestPaymentOutput { + /** + *

Specifies who pays for the download and request fees.

+ * @public + */ + Payer?: Payer | undefined; +} +/** + * @public + */ +export interface GetBucketRequestPaymentRequest { + /** + *

The name of the bucket for which to get the payment request configuration.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues
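+ *
+ * @example
+ * A minimal sketch; the bucket name is a placeholder. Payer is either
+ * "BucketOwner" or "Requester".
+ * ```ts
+ * import { S3Client, GetBucketRequestPaymentCommand } from "@aws-sdk/client-s3";
+ *
+ * const s3 = new S3Client({});
+ * const { Payer } = await s3.send(
+ *   new GetBucketRequestPaymentCommand({ Bucket: "amzn-s3-demo-bucket" }) // placeholder
+ * );
+ * console.log(Payer);
+ * ```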

+ * @public + */ + Bucket: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + */ +export interface GetBucketTaggingOutput { + /** + *

Contains the tag set.

+ * @public + */ + TagSet: Tag[] | undefined; +} +/** + * @public + */ +export interface GetBucketTaggingRequest { + /** + *

The name of the bucket for which to get the tagging information.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues
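+ *
+ * @example
+ * A minimal sketch; the bucket name is a placeholder. Note that a bucket with
+ * no tags causes this call to reject with NoSuchTagSet rather than return an
+ * empty TagSet.
+ * ```ts
+ * import { S3Client, GetBucketTaggingCommand } from "@aws-sdk/client-s3";
+ *
+ * const s3 = new S3Client({});
+ * const { TagSet } = await s3.send(
+ *   new GetBucketTaggingCommand({ Bucket: "amzn-s3-demo-bucket" }) // placeholder
+ * );
+ * for (const tag of TagSet ?? []) console.log(`${tag.Key}=${tag.Value}`);
+ * ```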

+ * @public + */ + Bucket: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + * @enum + */ +export declare const MFADeleteStatus: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +/** + * @public + */ +export type MFADeleteStatus = (typeof MFADeleteStatus)[keyof typeof MFADeleteStatus]; +/** + * @public + * @enum + */ +export declare const BucketVersioningStatus: { + readonly Enabled: "Enabled"; + readonly Suspended: "Suspended"; +}; +/** + * @public + */ +export type BucketVersioningStatus = (typeof BucketVersioningStatus)[keyof typeof BucketVersioningStatus]; +/** + * @public + */ +export interface GetBucketVersioningOutput { + /** + *

The versioning state of the bucket.

+ * @public + */ + Status?: BucketVersioningStatus | undefined; + /** + *

Specifies whether MFA delete is enabled in the bucket versioning configuration. This + * element is only returned if the bucket has been configured with MFA delete. If the bucket + * has never been so configured, this element is not returned.

+ * @public + */ + MFADelete?: MFADeleteStatus | undefined; +} +/** + * @public + */ +export interface GetBucketVersioningRequest { + /** + *

The name of the bucket for which to get the versioning information.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues
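+ *
+ * @example
+ * A minimal sketch; the bucket name is a placeholder. Status is undefined for
+ * buckets that have never had versioning configured.
+ * ```ts
+ * import { S3Client, GetBucketVersioningCommand } from "@aws-sdk/client-s3";
+ *
+ * const s3 = new S3Client({});
+ * const { Status, MFADelete } = await s3.send(
+ *   new GetBucketVersioningCommand({ Bucket: "amzn-s3-demo-bucket" }) // placeholder
+ * );
+ * console.log(Status ?? "never configured", MFADelete ?? "not configured");
+ * ```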

+ * @public + */ + Bucket: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + *

The error information.

+ * @public + */ +export interface ErrorDocument { + /** + *

The object key name to use when a 4XX class error occurs.

+ * + *

Replacement must be made for object keys containing special characters (such as carriage returns) when using + * XML requests. For more information, see + * XML related object key constraints.

+ *
+ * @public + */ + Key: string | undefined; +} +/** + *

Container for the Suffix element.

+ * @public + */ +export interface IndexDocument { + /** + *

A suffix that is appended to a request that is for a directory on the website endpoint. + * (For example, if the suffix is index.html and you make a request to + * samplebucket/images/, the data that is returned will be for the object with + * the key name images/index.html.) The suffix must not be empty and must not + * include a slash character.

+ * + *

Replacement must be made for object keys containing special characters (such as carriage returns) when using + * XML requests. For more information, see + * XML related object key constraints.

+ *
+ * @public + */ + Suffix: string | undefined; +} +/** + * @public + * @enum + */ +export declare const Protocol: { + readonly http: "http"; + readonly https: "https"; +}; +/** + * @public + */ +export type Protocol = (typeof Protocol)[keyof typeof Protocol]; +/** + *

Specifies the redirect behavior of all requests to a website endpoint of an Amazon S3 + * bucket.

+ * @public + */ +export interface RedirectAllRequestsTo { + /** + *

Name of the host where requests are redirected.

+ * @public + */ + HostName: string | undefined; + /** + *

Protocol to use when redirecting requests. The default is the protocol that is used in + * the original request.

+ * @public + */ + Protocol?: Protocol | undefined; +} +/** + *

A container for describing a condition that must be met for the specified redirect to + * apply. For example, 1. If request is for pages in the /docs folder, redirect + * to the /documents folder. 2. If request results in HTTP error 4xx, redirect + * request to another host where you might process the error.

+ * @public + */ +export interface Condition { + /** + *

The HTTP error code when the redirect is applied. In the event of an error, if the error + * code equals this value, then the specified redirect is applied. Required when parent + * element Condition is specified and sibling KeyPrefixEquals is not + * specified. If both are specified, then both must be true for the redirect to be + * applied.

+ * @public + */ + HttpErrorCodeReturnedEquals?: string | undefined; + /** + *

The object key name prefix when the redirect is applied. For example, to redirect + * requests for ExamplePage.html, the key prefix will be + * ExamplePage.html. To redirect request for all pages with the prefix + * docs/, the key prefix will be /docs, which identifies all + * objects in the docs/ folder. Required when the parent element + * Condition is specified and sibling HttpErrorCodeReturnedEquals + * is not specified. If both conditions are specified, both must be true for the redirect to + * be applied.

+ * + *

Replacement must be made for object keys containing special characters (such as carriage returns) when using + * XML requests. For more information, see + * XML related object key constraints.

+ *
+ * @public + */ + KeyPrefixEquals?: string | undefined; +} +/** + *

Specifies how requests are redirected. In the event of an error, you can specify a + * different error code to return.

+ * @public + */ +export interface Redirect { + /** + *

The host name to use in the redirect request.

+ * @public + */ + HostName?: string | undefined; + /** + *

The HTTP redirect code to use on the response. Not required if one of the siblings is + * present.

+ * @public + */ + HttpRedirectCode?: string | undefined; + /** + *

Protocol to use when redirecting requests. The default is the protocol that is used in + * the original request.

+ * @public + */ + Protocol?: Protocol | undefined; + /** + *

The object key prefix to use in the redirect request. For example, to redirect requests + * for all pages with prefix docs/ (objects in the docs/ folder) to + * documents/, you can set a condition block with KeyPrefixEquals + * set to docs/ and in the Redirect set ReplaceKeyPrefixWith to + * /documents. Not required if one of the siblings is present. Can be present + * only if ReplaceKeyWith is not provided.

+ * + *

Replacement must be made for object keys containing special characters (such as carriage returns) when using + * XML requests. For more information, see + * XML related object key constraints.

+ *
+ * @public + */ + ReplaceKeyPrefixWith?: string | undefined; + /** + *

The specific object key to use in the redirect request. For example, redirect request to + * error.html. Not required if one of the siblings is present. Can be present + * only if ReplaceKeyPrefixWith is not provided.

+ * + *

Replacement must be made for object keys containing special characters (such as carriage returns) when using + * XML requests. For more information, see + * XML related object key constraints.

+ *
+ * @public + */ + ReplaceKeyWith?: string | undefined; +} +/** + *

Specifies the redirect behavior and when a redirect is applied. For more information + * about routing rules, see Configuring advanced conditional redirects in the + * Amazon S3 User Guide.

+ * @public + */ +export interface RoutingRule { + /** + *

A container for describing a condition that must be met for the specified redirect to + * apply. For example, 1. If request is for pages in the /docs folder, redirect + * to the /documents folder. 2. If request results in HTTP error 4xx, redirect + * request to another host where you might process the error.

+ * @public + */ + Condition?: Condition | undefined; + /** + *

Container for redirect information. You can redirect requests to another host, to + * another page, or with another protocol. In the event of an error, you can specify a + * different error code to return.
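+ *
+ * @example
+ * A sketch of a rule matching the docs/ example above: requests under docs/
+ * are redirected to the same keys under documents/.
+ * ```ts
+ * import type { RoutingRule } from "@aws-sdk/client-s3";
+ *
+ * const rule: RoutingRule = {
+ *   Condition: { KeyPrefixEquals: "docs/" },
+ *   Redirect: { ReplaceKeyPrefixWith: "documents/" },
+ * };
+ * ```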

+ * @public + */ + Redirect: Redirect | undefined; +} +/** + * @public + */ +export interface GetBucketWebsiteOutput { + /** + *

Specifies the redirect behavior of all requests to a website endpoint of an Amazon S3 + * bucket.

+ * @public + */ + RedirectAllRequestsTo?: RedirectAllRequestsTo | undefined; + /** + *

The name of the index document for the website (for example + * index.html).

+ * @public + */ + IndexDocument?: IndexDocument | undefined; + /** + *

The object key name of the website error document to use for 4XX class errors.

+ * @public + */ + ErrorDocument?: ErrorDocument | undefined; + /** + *

Rules that define when a redirect is applied and the redirect behavior.

+ * @public + */ + RoutingRules?: RoutingRule[] | undefined; +} +/** + * @public + */ +export interface GetBucketWebsiteRequest { + /** + *

The bucket name for which to get the website configuration.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues
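+ *
+ * @example
+ * A minimal sketch of reading the website configuration; the bucket name is a
+ * placeholder.
+ * ```ts
+ * import { S3Client, GetBucketWebsiteCommand } from "@aws-sdk/client-s3";
+ *
+ * const s3 = new S3Client({});
+ * const site = await s3.send(
+ *   new GetBucketWebsiteCommand({ Bucket: "amzn-s3-demo-bucket" }) // placeholder
+ * );
+ * console.log(site.IndexDocument?.Suffix, site.RoutingRules?.length ?? 0);
+ * ```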

+ * @public + */ + Bucket: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + * @enum + */ +export declare const ReplicationStatus: { + readonly COMPLETE: "COMPLETE"; + readonly COMPLETED: "COMPLETED"; + readonly FAILED: "FAILED"; + readonly PENDING: "PENDING"; + readonly REPLICA: "REPLICA"; +}; +/** + * @public + */ +export type ReplicationStatus = (typeof ReplicationStatus)[keyof typeof ReplicationStatus]; +/** + * @public + */ +export interface GetObjectOutput { + /** + *

Object data.

+ * @public + */ + Body?: StreamingBlobTypes | undefined; + /** + *

Indicates whether the object retrieved was (true) or was not (false) a Delete Marker. If + * false, this response header does not appear in the response.

+ *
  - If the current version of the object is a delete marker, Amazon S3 behaves as if the object was deleted and includes x-amz-delete-marker: true in the response.
  - If the specified version in the request is a delete marker, the response returns a 405 Method Not Allowed error and the Last-Modified: timestamp response header.
+ *
+ * @public + */ + DeleteMarker?: boolean | undefined; + /** + *

Indicates that a range of bytes was specified in the request.

+ * @public + */ + AcceptRanges?: string | undefined; + /** + *

If the object expiration is configured (see + * PutBucketLifecycleConfiguration + * ), the response includes this + * header. It includes the expiry-date and rule-id key-value pairs + * providing object expiration information. The value of the rule-id is + * URL-encoded.

+ * + *

Object expiration information is not returned in directory buckets and this header returns the value "NotImplemented" in all responses for directory buckets.

+ *
+ * @public + */ + Expiration?: string | undefined; + /** + *

Provides information about object restoration action and expiration time of the restored + * object copy.

+ * + *

This functionality is not supported for directory buckets. + * Directory buckets only support EXPRESS_ONEZONE (the S3 Express One Zone storage class) in Availability Zones and ONEZONE_IA (the S3 One Zone-Infrequent Access storage class) in Dedicated Local Zones.

+ *
+ * @public + */ + Restore?: string | undefined; + /** + *

Date and time when the object was last modified.

+ *

+ * General purpose buckets - When you specify a + * versionId of the object in your request, if the specified version in the + * request is a delete marker, the response returns a 405 Method Not Allowed + * error and the Last-Modified: timestamp response header.

+ * @public + */ + LastModified?: Date | undefined; + /** + *

Size of the body in bytes.

+ * @public + */ + ContentLength?: number | undefined; + /** + *

An entity tag (ETag) is an opaque identifier assigned by a web server to a specific + * version of a resource found at a URL.

+ * @public + */ + ETag?: string | undefined; + /** + *

The Base64 encoded, 32-bit CRC32 checksum of the object. This checksum is only present if it was uploaded with the object. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32?: string | undefined; + /** + *

The Base64 encoded, 32-bit CRC32C checksum of the object. This will only be present if it was uploaded with the object. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32C?: string | undefined; + /** + *

The Base64 encoded, 64-bit CRC64NVME checksum of the object. For more + * information, see Checking object integrity + * in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC64NVME?: string | undefined; + /** + *

The Base64 encoded, 160-bit SHA1 digest of the object. This will only be present if it was uploaded with the object. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA1?: string | undefined; + /** + *

The Base64 encoded, 256-bit SHA256 digest of the object. This will only be present if it was uploaded with the object. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA256?: string | undefined; + /** + *

The checksum type, which determines how part-level checksums are combined to create an + * object-level checksum for multipart objects. You can use this header response to verify + * that the checksum type that is received is the same checksum type that was specified in the + * CreateMultipartUpload request. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumType?: ChecksumType | undefined; + /** + *

This is set to the number of metadata entries not returned in the headers that are + * prefixed with x-amz-meta-. This can happen if you create metadata using an API + * like SOAP that supports more flexible metadata than the REST API. For example, using SOAP, + * you can create metadata whose values are not legal HTTP headers.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + MissingMeta?: number | undefined; + /** + *

Version ID of the object.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + VersionId?: string | undefined; + /** + *

Specifies caching behavior along the request/reply chain.

+ * @public + */ + CacheControl?: string | undefined; + /** + *

Specifies presentational information for the object.

+ * @public + */ + ContentDisposition?: string | undefined; + /** + *

Indicates what content encodings have been applied to the object and thus what decoding + * mechanisms must be applied to obtain the media-type referenced by the Content-Type header + * field.

+ * @public + */ + ContentEncoding?: string | undefined; + /** + *

The language the content is in.

+ * @public + */ + ContentLanguage?: string | undefined; + /** + *

The portion of the object returned in the response.

+ * @public + */ + ContentRange?: string | undefined; + /** + *

A standard MIME type describing the format of the object data.

+ * @public + */ + ContentType?: string | undefined; + /** + * Deprecated in favor of ExpiresString. + * + * @deprecated + * @public + */ + Expires?: Date | undefined; + /** + *

The date and time at which the object is no longer cacheable.

+ * @public + */ + ExpiresString?: string | undefined; + /** + *

If the bucket is configured as a website, redirects requests for this object to another + * object in the same bucket or to an external URL. Amazon S3 stores the value of this header in + * the object metadata.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + WebsiteRedirectLocation?: string | undefined; + /** + *

The server-side encryption algorithm used when you store this object in Amazon S3.

+ * @public + */ + ServerSideEncryption?: ServerSideEncryption | undefined; + /** + *

A map of metadata to store with the object in S3.

+ * @public + */ + Metadata?: Record | undefined; + /** + *

If server-side encryption with a customer-provided encryption key was requested, the + * response will include this header to confirm the encryption algorithm that's used.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerAlgorithm?: string | undefined; + /** + *

If server-side encryption with a customer-provided encryption key was requested, the + * response will include this header to provide the round-trip message integrity verification + * of the customer-provided encryption key.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerKeyMD5?: string | undefined; + /** + *

If present, indicates the ID of the KMS key that was used for object encryption.

+ * @public + */ + SSEKMSKeyId?: string | undefined; + /** + *

Indicates whether the object uses an S3 Bucket Key for server-side encryption with + * Key Management Service (KMS) keys (SSE-KMS).

+ * @public + */ + BucketKeyEnabled?: boolean | undefined; + /** + *

Provides storage class information of the object. Amazon S3 returns this header for all + * objects except for S3 Standard storage class objects.

+ * + *

+ * Directory buckets - + * Directory buckets only support EXPRESS_ONEZONE (the S3 Express One Zone storage class) in Availability Zones and ONEZONE_IA (the S3 One Zone-Infrequent Access storage class) in Dedicated Local Zones.

+ *
+ * @public + */ + StorageClass?: StorageClass | undefined; + /** + *

If present, indicates that the requester was successfully charged for the + * request.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestCharged?: RequestCharged | undefined; + /** + *

Amazon S3 can return this if your request involves a bucket that is either a source or + * destination in a replication rule.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + ReplicationStatus?: ReplicationStatus | undefined; + /** + *

The count of parts this object has. This value is only returned if you specify + * partNumber in your request and the object was uploaded as a multipart + * upload.

+ * @public + */ + PartsCount?: number | undefined; + /** + *

The number of tags, if any, on the object, when you have the relevant permission to read + * object tags.

+ *

You can use GetObjectTagging to retrieve + * the tag set associated with an object.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + TagCount?: number | undefined; + /** + *

The Object Lock mode that's currently in place for this object.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + ObjectLockMode?: ObjectLockMode | undefined; + /** + *

The date and time when this object's Object Lock will expire.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + ObjectLockRetainUntilDate?: Date | undefined; + /** + *

Indicates whether this object has an active legal hold. This field is only returned if + * you have permission to view an object's legal hold status.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + ObjectLockLegalHoldStatus?: ObjectLockLegalHoldStatus | undefined; +} +/** + * @public + * @enum + */ +export declare const ChecksumMode: { + readonly ENABLED: "ENABLED"; +}; +/** + * @public + */ +export type ChecksumMode = (typeof ChecksumMode)[keyof typeof ChecksumMode]; +/** + * @public + */ +export interface GetObjectRequest { + /** + *

The bucket name containing the object.

+ *

+ * Directory buckets - + * When you use this operation with a directory bucket, you must use virtual-hosted-style requests in the format + * Bucket-name.s3express-zone-id.region-code.amazonaws.com. Path-style requests are not supported. Directory bucket names must be unique in the chosen Zone (Availability Zone or Local Zone). Bucket names must follow the format + * bucket-base-name--zone-id--x-s3 (for example, + * amzn-s3-demo-bucket--usw2-az1--x-s3). For information about bucket naming + * restrictions, see Directory bucket naming + * rules in the Amazon S3 User Guide.

+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ *

+ * Object Lambda access points - When you use this action with an Object Lambda access point, you must direct requests to the Object Lambda access point hostname. The Object Lambda access point hostname takes the form AccessPointName-AccountId.s3-object-lambda.Region.amazonaws.com.

+ * + *

Object Lambda access points are not supported by directory buckets.

+ *
+ *

+ * S3 on Outposts - When you use this action with S3 on Outposts, you must direct requests to the S3 on Outposts hostname. The S3 on Outposts hostname takes the + * form + * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use this action with S3 on Outposts, the destination bucket must be the Outposts access point ARN or the access point alias. For more information about S3 on Outposts, see What is S3 on Outposts? in the Amazon S3 User Guide.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues
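+ *
+ * @example
+ * A minimal sketch of a ranged download; the bucket and key are placeholders.
+ * Body is a streaming type, so the v3 helper transformToString() collects it.
+ * ```ts
+ * import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3";
+ *
+ * const s3 = new S3Client({});
+ * const { Body, ContentRange, ETag } = await s3.send(
+ *   new GetObjectCommand({
+ *     Bucket: "amzn-s3-demo-bucket", // placeholder
+ *     Key: "docs/readme.txt",        // placeholder
+ *     Range: "bytes=0-1023",         // a single range; multiple ranges are not supported
+ *   })
+ * );
+ * console.log(ETag, ContentRange, await Body?.transformToString());
+ * ```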

+ * @public + */ + Bucket: string | undefined; + /** + *

Return the object only if its entity tag (ETag) is the same as the one specified in this + * header; otherwise, return a 412 Precondition Failed error.

+ *

If both of the If-Match and If-Unmodified-Since headers are + * present in the request as follows: If-Match condition evaluates to + * true, and; If-Unmodified-Since condition evaluates to + * false; then, S3 returns 200 OK and the data requested.

+ *

For more information about conditional requests, see RFC 7232.

+ * @public + */ + IfMatch?: string | undefined; + /** + *

Return the object only if it has been modified since the specified time; otherwise, + * return a 304 Not Modified error.

+ *

If both of the If-None-Match and If-Modified-Since headers are + * present in the request as follows: If-None-Match condition evaluates to + * false, and; If-Modified-Since condition evaluates to + * true; then, S3 returns 304 Not Modified status code.

+ *

For more information about conditional requests, see RFC 7232.

+ * @public + */ + IfModifiedSince?: Date | undefined; + /** + *

Return the object only if its entity tag (ETag) is different from the one specified in + * this header; otherwise, return a 304 Not Modified error.

+ *

If both of the If-None-Match and If-Modified-Since headers are + * present in the request as follows: If-None-Match condition evaluates to + * false, and; If-Modified-Since condition evaluates to + * true; then, S3 returns 304 Not Modified HTTP status + * code.

+ *

For more information about conditional requests, see RFC 7232.

+ * @public + */ + IfNoneMatch?: string | undefined; + /** + *

Return the object only if it has not been modified since the specified time; otherwise, + * return a 412 Precondition Failed error.

+ *

If both of the If-Match and If-Unmodified-Since headers are + * present in the request as follows: If-Match condition evaluates to + * true, and; If-Unmodified-Since condition evaluates to + * false; then, S3 returns 200 OK and the data requested.

+ *

For more information about conditional requests, see RFC 7232.

+ * @public + */ + IfUnmodifiedSince?: Date | undefined; + /** + *

Key of the object to get.

+ * @public + */ + Key: string | undefined; + /** + *

Downloads the specified byte range of an object. For more information about the HTTP + * Range header, see https://www.rfc-editor.org/rfc/rfc9110.html#name-range.

+ * + *

Amazon S3 doesn't support retrieving multiple ranges of data per GET + * request.

+ *
+ * @public + */ + Range?: string | undefined; + /** + *

Sets the Cache-Control header of the response.

+ * @public + */ + ResponseCacheControl?: string | undefined; + /** + *

Sets the Content-Disposition header of the response.

+ * @public + */ + ResponseContentDisposition?: string | undefined; + /** + *

Sets the Content-Encoding header of the response.

+ * @public + */ + ResponseContentEncoding?: string | undefined; + /** + *

Sets the Content-Language header of the response.

+ * @public + */ + ResponseContentLanguage?: string | undefined; + /** + *

Sets the Content-Type header of the response.

+ * @public + */ + ResponseContentType?: string | undefined; + /** + *

Sets the Expires header of the response.

+ * @public + */ + ResponseExpires?: Date | undefined; + /** + *

Version ID used to reference a specific version of the object.

+ *

By default, the GetObject operation returns the current version of an + * object. To return a different version, use the versionId subresource.

+ *
  - If you include a versionId in your request header, you must have the s3:GetObjectVersion permission to access a specific version of an object. The s3:GetObject permission is not required in this scenario.
  - If you request the current version of an object without a specific versionId in the request header, only the s3:GetObject permission is required. The s3:GetObjectVersion permission is not required in this scenario.
  - Directory buckets - S3 Versioning isn't enabled and supported for directory buckets. For this API operation, only the null value of the version ID is supported by directory buckets. You can only specify null to the versionId query parameter in the request.
+ *

For more information about versioning, see PutBucketVersioning.

+ * @public + */ + VersionId?: string | undefined; + /** + *

Specifies the algorithm to use when decrypting the object (for example, + * AES256).

+ *

If you encrypt an object by using server-side encryption with customer-provided + * encryption keys (SSE-C) when you store the object in Amazon S3, then when you GET the object, + * you must use the following headers:

+ *
  - x-amz-server-side-encryption-customer-algorithm
  - x-amz-server-side-encryption-customer-key
  - x-amz-server-side-encryption-customer-key-MD5
+ *

For more information about SSE-C, see Server-Side Encryption + * (Using Customer-Provided Encryption Keys) in the + * Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerAlgorithm?: string | undefined; + /** + *

Specifies the customer-provided encryption key that you originally provided for Amazon S3 to + * encrypt the data before storing it. This value is used to decrypt the object when + * recovering it and must match the one used when storing the data. The key must be + * appropriate for use with the algorithm specified in the + * x-amz-server-side-encryption-customer-algorithm header.

+ *

If you encrypt an object by using server-side encryption with customer-provided + * encryption keys (SSE-C) when you store the object in Amazon S3, then when you GET the object, + * you must use the following headers:

+ *
  - x-amz-server-side-encryption-customer-algorithm
  - x-amz-server-side-encryption-customer-key
  - x-amz-server-side-encryption-customer-key-MD5
+ *

For more information about SSE-C, see Server-Side Encryption + * (Using Customer-Provided Encryption Keys) in the + * Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerKey?: string | undefined; + /** + *

Specifies the 128-bit MD5 digest of the customer-provided encryption key according to + * RFC 1321. Amazon S3 uses this header for a message integrity check to ensure that the encryption + * key was transmitted without error.

+ *

If you encrypt an object by using server-side encryption with customer-provided + * encryption keys (SSE-C) when you store the object in Amazon S3, then when you GET the object, + * you must use the following headers:

+ *
  - x-amz-server-side-encryption-customer-algorithm
  - x-amz-server-side-encryption-customer-key
  - x-amz-server-side-encryption-customer-key-MD5
+ *

For more information about SSE-C, see Server-Side Encryption + * (Using Customer-Provided Encryption Keys) in the + * Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerKeyMD5?: string | undefined; + /** + *

Confirms that the requester knows that they will be charged for the request. Bucket + * owners need not specify this parameter in their requests. If either the source or + * destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding + * charges to copy the object. For information about downloading objects from Requester Pays + * buckets, see Downloading Objects in + * Requester Pays Buckets in the Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestPayer?: RequestPayer | undefined; + /** + *

Part number of the object being read. This is a positive integer between 1 and 10,000. + * Effectively performs a 'ranged' GET request for the part specified. Useful for downloading + * just a part of an object.

+ * @public + */ + PartNumber?: number | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; + /** + *

To retrieve the checksum, this mode must be enabled.

+ * @public + */ + ChecksumMode?: ChecksumMode | undefined; +} +/** + *

Object is archived and inaccessible until restored.

+ *

If the object you are retrieving is stored in the S3 Glacier Flexible Retrieval storage + * class, the S3 Glacier Deep Archive storage class, the S3 Intelligent-Tiering Archive Access + * tier, or the S3 Intelligent-Tiering Deep Archive Access tier, before you can retrieve the object you + * must first restore a copy using RestoreObject. Otherwise, this + * operation returns an InvalidObjectState error. For information about restoring + * archived objects, see Restoring Archived Objects in + * the Amazon S3 User Guide.

+ * @public + */ +export declare class InvalidObjectState extends __BaseException { + readonly name: "InvalidObjectState"; + readonly $fault: "client"; + StorageClass?: StorageClass | undefined; + AccessTier?: IntelligentTieringAccessTier | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The specified key does not exist.

+ * @public + */ +export declare class NoSuchKey extends __BaseException { + readonly name: "NoSuchKey"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface GetObjectAclOutput { + /** + *

Container for the bucket owner's display name and ID.

+ * @public + */ + Owner?: Owner | undefined; + /** + *

A list of grants.

+ * @public + */ + Grants?: Grant[] | undefined; + /** + *

If present, indicates that the requester was successfully charged for the + * request.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestCharged?: RequestCharged | undefined; +} +/** + * @public + */ +export interface GetObjectAclRequest { + /** + *

The bucket name that contains the object for which to get the ACL information.

+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues
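+ *
+ * @example
+ * A minimal sketch of listing an object's grants; the bucket and key are
+ * placeholders.
+ * ```ts
+ * import { S3Client, GetObjectAclCommand } from "@aws-sdk/client-s3";
+ *
+ * const s3 = new S3Client({});
+ * const { Owner, Grants } = await s3.send(
+ *   new GetObjectAclCommand({ Bucket: "amzn-s3-demo-bucket", Key: "docs/readme.txt" }) // placeholders
+ * );
+ * console.log(Owner?.ID);
+ * for (const grant of Grants ?? []) console.log(grant.Grantee?.Type, grant.Permission);
+ * ```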

+ * @public + */ + Bucket: string | undefined; + /** + *

The key of the object for which to get the ACL information.

+ * @public + */ + Key: string | undefined; + /** + *

Version ID used to reference a specific version of the object.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + VersionId?: string | undefined; + /** + *

Confirms that the requester knows that they will be charged for the request. Bucket + * owners need not specify this parameter in their requests. If either the source or + * destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding + * charges to copy the object. For information about downloading objects from Requester Pays + * buckets, see Downloading Objects in + * Requester Pays Buckets in the Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestPayer?: RequestPayer | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + *
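A minimal usage sketch for the ACL request/output shapes above (bucket and key names are placeholders):

```ts
import { S3Client, GetObjectAclCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({});

const acl = await s3.send(
  new GetObjectAclCommand({
    Bucket: "amzn-s3-demo-bucket",
    Key: "reports/q1.pdf",
  })
);
console.log(acl.Owner?.DisplayName);
for (const grant of acl.Grants ?? []) {
  console.log(grant.Grantee?.Type, grant.Permission);
}
```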

Contains all the possible checksum or digest values for an object.

+ * @public + */ +export interface Checksum { + /** + *

The Base64 encoded, 32-bit CRC32 checksum of the object. This checksum is only present if the checksum was uploaded with the object. When you use an API operation on an object that was uploaded using multipart uploads, this value may not be a direct checksum value of the full object. Instead, it's a calculation based on the checksum values of each individual part. For more information about how checksums are calculated with multipart uploads, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32?: string | undefined; + /** + *

The Base64 encoded, 32-bit CRC32C checksum of the object. This checksum is only present if the checksum was uploaded + * with the object. When you use an API operation on an object that was uploaded using multipart uploads, this value may not be a direct checksum value of the full object. Instead, it's a calculation based on the checksum values of each individual part. For more information about how checksums are calculated + * with multipart uploads, see + * Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32C?: string | undefined; + /** + *

The Base64 encoded, 64-bit CRC64NVME checksum of the object. This checksum is present + * if the object was uploaded with the CRC64NVME checksum algorithm, or if the object was uploaded without a + * checksum (and Amazon S3 added the default checksum, CRC64NVME, to the uploaded object). For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC64NVME?: string | undefined; + /** + *

The Base64 encoded, 160-bit SHA1 digest of the object. This will only be present if it was uploaded with the object. When you use the API operation on an object that was uploaded using multipart uploads, this value may not be a direct checksum value of the full object. Instead, it's a calculation based on the checksum values of each individual part. For more information about how checksums are calculated with multipart uploads, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA1?: string | undefined; + /** + *

The Base64 encoded, 256-bit SHA256 digest of the object. This will only be present if it was uploaded with the object. When you use an API operation on an object that was uploaded using multipart uploads, this value may not be a direct checksum value of the full object. Instead, it's a calculation based on the checksum values of each individual part. For more information about how checksums are calculated with multipart uploads, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA256?: string | undefined; + /** + *

The checksum type that is used to calculate the object’s + * checksum value. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumType?: ChecksumType | undefined; +} +/** + *
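For an object uploaded in a single part, the reported digest can be re-derived locally. A sketch using node:crypto (the helper name is ours, not part of the SDK):

```ts
import { createHash } from "node:crypto";

// Compare a locally computed digest against the Base64 SHA256 reported in a
// Checksum structure. Single-part objects only: for multipart uploads the
// reported value may be a checksum-of-checksums, not a digest of the full body.
function sha256Matches(body: Uint8Array, reported?: string): boolean {
  if (!reported) return false; // checksum was not stored with the object
  const local = createHash("sha256").update(body).digest("base64");
  return local === reported;
}
```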

A container for elements related to an individual part.

+ * @public + */ +export interface ObjectPart { + /** + *

The part number identifying the part. This value is a positive integer between 1 and + * 10,000.

+ * @public + */ + PartNumber?: number | undefined; + /** + *

The size of the uploaded part in bytes.

+ * @public + */ + Size?: number | undefined; + /** + *

The Base64 encoded, 32-bit CRC32 checksum of the part. This checksum is present + * if the multipart upload request was created with the CRC32 checksum algorithm. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32?: string | undefined; + /** + *

The Base64 encoded, 32-bit CRC32C checksum of the part. This checksum is present + * if the multipart upload request was created with the CRC32C checksum algorithm. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32C?: string | undefined; + /** + *

The Base64 encoded, 64-bit CRC64NVME checksum of the part. This checksum is present + * if the multipart upload request was created with the CRC64NVME checksum algorithm, or if the object was uploaded without a + * checksum (and Amazon S3 added the default checksum, CRC64NVME, to the uploaded object). For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC64NVME?: string | undefined; + /** + *

The Base64 encoded, 160-bit SHA1 checksum of the part. This checksum is present + * if the multipart upload request was created with the SHA1 checksum algorithm. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA1?: string | undefined; + /** + *

The Base64 encoded, 256-bit SHA256 checksum of the part. This checksum is present + * if the multipart upload request was created with the SHA256 checksum algorithm. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA256?: string | undefined; +} +/** + *
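For multipart objects with a composite checksum type, the object-level value is, per the linked integrity guide, a checksum of the concatenated part-level checksums. A hedged sketch re-deriving it from ObjectPart values (assumes every part carries ChecksumSHA256):

```ts
import { Buffer } from "node:buffer";
import { createHash } from "node:crypto";
import type { ObjectPart } from "@aws-sdk/client-s3";

// Recompute a COMPOSITE object-level SHA256 from part-level values:
// decode each part's Base64 checksum, concatenate the raw bytes, hash again.
function compositeSha256(parts: ObjectPart[]): string {
  const concatenated = Buffer.concat(
    parts.map((p) => Buffer.from(p.ChecksumSHA256 ?? "", "base64"))
  );
  return createHash("sha256").update(concatenated).digest("base64");
}
```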

A collection of parts associated with a multipart upload.

+ * @public + */ +export interface GetObjectAttributesParts { + /** + *

The total number of parts.

+ * @public + */ + TotalPartsCount?: number | undefined; + /** + *

The marker for the current part.

+ * @public + */ + PartNumberMarker?: string | undefined; + /** + *

When a list is truncated, this element specifies the last part in the list, as well as + * the value to use for the PartNumberMarker request parameter in a subsequent + * request.

+ * @public + */ + NextPartNumberMarker?: string | undefined; + /** + *

The maximum number of parts allowed in the response.

+ * @public + */ + MaxParts?: number | undefined; + /** + *

Indicates whether the returned list of parts is truncated. A value of true + * indicates that the list was truncated. A list can be truncated if the number of parts + * exceeds the limit returned in the MaxParts element.

+ * @public + */ + IsTruncated?: boolean | undefined; + /** + *

A container for elements related to a particular part. A response can contain zero or + * more Parts elements.

+ *
• General purpose buckets - For GetObjectAttributes, if an additional checksum (including x-amz-checksum-crc32, x-amz-checksum-crc32c, x-amz-checksum-sha1, or x-amz-checksum-sha256) isn't applied to the object specified in the request, the response doesn't return Part.
• Directory buckets - For GetObjectAttributes, no matter whether an additional checksum is applied to the object specified in the request, the response returns Part.
+ *
+ * @public + */ + Parts?: ObjectPart[] | undefined; +} +/** + * @public + */ +export interface GetObjectAttributesOutput { + /** + *

Specifies whether the object retrieved was (true) or was not + * (false) a delete marker. If false, this response header does + * not appear in the response. To learn more about delete markers, see Working with delete markers.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + DeleteMarker?: boolean | undefined; + /** + *

Date and time when the object was last modified.

+ * @public + */ + LastModified?: Date | undefined; + /** + *

The version ID of the object.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + VersionId?: string | undefined; + /** + *

If present, indicates that the requester was successfully charged for the + * request.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestCharged?: RequestCharged | undefined; + /** + *

An ETag is an opaque identifier assigned by a web server to a specific version of a + * resource found at a URL.

+ * @public + */ + ETag?: string | undefined; + /** + *

The checksum or digest of the object.

+ * @public + */ + Checksum?: Checksum | undefined; + /** + *

A collection of parts associated with a multipart upload.

+ * @public + */ + ObjectParts?: GetObjectAttributesParts | undefined; + /** + *

Provides the storage class information of the object. Amazon S3 returns this header for all + * objects except for S3 Standard storage class objects.

+ *

For more information, see Storage Classes.

+ * + *

+ * Directory buckets - + * Directory buckets only support EXPRESS_ONEZONE (the S3 Express One Zone storage class) in Availability Zones and ONEZONE_IA (the S3 One Zone-Infrequent Access storage class) in Dedicated Local Zones.

+ *
+ * @public + */ + StorageClass?: StorageClass | undefined; + /** + *

The size of the object in bytes.

+ * @public + */ + ObjectSize?: number | undefined; +} +/** + * @public + * @enum + */ +export declare const ObjectAttributes: { + readonly CHECKSUM: "Checksum"; + readonly ETAG: "ETag"; + readonly OBJECT_PARTS: "ObjectParts"; + readonly OBJECT_SIZE: "ObjectSize"; + readonly STORAGE_CLASS: "StorageClass"; +}; +/** + * @public + */ +export type ObjectAttributes = (typeof ObjectAttributes)[keyof typeof ObjectAttributes]; +/** + * @public + */ +export interface GetObjectAttributesRequest { + /** + *

The name of the bucket that contains the object.

+ *

+ * Directory buckets - + * When you use this operation with a directory bucket, you must use virtual-hosted-style requests in the format + * Bucket-name.s3express-zone-id.region-code.amazonaws.com. Path-style requests are not supported. Directory bucket names must be unique in the chosen Zone (Availability Zone or Local Zone). Bucket names must follow the format + * bucket-base-name--zone-id--x-s3 (for example, + * amzn-s3-demo-bucket--usw2-az1--x-s3). For information about bucket naming + * restrictions, see Directory bucket naming + * rules in the Amazon S3 User Guide.

+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ * + *

Object Lambda access points are not supported by directory buckets.

+ *
+ *

+ * S3 on Outposts - When you use this action with S3 on Outposts, you must direct requests to the S3 on Outposts hostname. The S3 on Outposts hostname takes the + * form + * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use this action with S3 on Outposts, the destination bucket must be the Outposts access point ARN or the access point alias. For more information about S3 on Outposts, see What is S3 on Outposts? in the Amazon S3 User Guide.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The object key.

+ * @public + */ + Key: string | undefined; + /** + *

The version ID used to reference a specific version of the object.

+ * + *

S3 Versioning isn't enabled and supported for directory buckets. For this API operation, only the null value of the version ID is supported by directory buckets. You can only specify null to the + * versionId query parameter in the request.

+ *
+ * @public + */ + VersionId?: string | undefined; + /** + *

Sets the maximum number of parts to return.

+ * @public + */ + MaxParts?: number | undefined; + /** + *

Specifies the part after which listing should begin. Only parts with higher part numbers + * will be listed.

+ * @public + */ + PartNumberMarker?: string | undefined; + /** + *

Specifies the algorithm to use when encrypting the object (for example, AES256).

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerAlgorithm?: string | undefined; + /** + *

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This + * value is used to store the object and then it is discarded; Amazon S3 does not store the + * encryption key. The key must be appropriate for use with the algorithm specified in the + * x-amz-server-side-encryption-customer-algorithm header.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerKey?: string | undefined; + /** + *

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses + * this header for a message integrity check to ensure that the encryption key was transmitted + * without error.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerKeyMD5?: string | undefined; + /** + *

Confirms that the requester knows that they will be charged for the request. Bucket + * owners need not specify this parameter in their requests. If either the source or + * destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding + * charges to copy the object. For information about downloading objects from Requester Pays + * buckets, see Downloading Objects in + * Requester Pays Buckets in the Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestPayer?: RequestPayer | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; + /** + *

Specifies the fields at the root level that you want returned in the response. Fields + * that you do not specify are not returned.

+ * @public + */ + ObjectAttributes: ObjectAttributes[] | undefined; +} +/** + *
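A sketch that requests these attributes and pages through parts via PartNumberMarker/IsTruncated (bucket and key are placeholders):

```ts
import { S3Client, GetObjectAttributesCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({});

let marker: string | undefined;
do {
  const out = await s3.send(
    new GetObjectAttributesCommand({
      Bucket: "amzn-s3-demo-bucket",
      Key: "videos/large.mp4",
      ObjectAttributes: ["ETag", "Checksum", "ObjectParts", "StorageClass", "ObjectSize"],
      MaxParts: 100,
      PartNumberMarker: marker,
    })
  );
  for (const part of out.ObjectParts?.Parts ?? []) {
    console.log(part.PartNumber, part.Size, part.ChecksumSHA256);
  }
  // Continue only while the part listing is truncated.
  marker = out.ObjectParts?.IsTruncated ? out.ObjectParts?.NextPartNumberMarker : undefined;
} while (marker);
```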

A legal hold configuration for an object.

+ * @public + */ +export interface ObjectLockLegalHold { + /** + *

Indicates whether the specified object has a legal hold in place.

+ * @public + */ + Status?: ObjectLockLegalHoldStatus | undefined; +} +/** + * @public + */ +export interface GetObjectLegalHoldOutput { + /** + *

The current legal hold status for the specified object.

+ * @public + */ + LegalHold?: ObjectLockLegalHold | undefined; +} +/** + * @public + */ +export interface GetObjectLegalHoldRequest { + /** + *

The bucket name containing the object whose legal hold status you want to retrieve.

+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The key name for the object whose legal hold status you want to retrieve.

+ * @public + */ + Key: string | undefined; + /** + *

The version ID of the object whose legal hold status you want to retrieve.

+ * @public + */ + VersionId?: string | undefined; + /** + *

Confirms that the requester knows that they will be charged for the request. Bucket + * owners need not specify this parameter in their requests. If either the source or + * destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding + * charges to copy the object. For information about downloading objects from Requester Pays + * buckets, see Downloading Objects in + * Requester Pays Buckets in the Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestPayer?: RequestPayer | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + * @enum + */ +export declare const ObjectLockEnabled: { + readonly Enabled: "Enabled"; +}; +/** + * @public + */ +export type ObjectLockEnabled = (typeof ObjectLockEnabled)[keyof typeof ObjectLockEnabled]; +/** + * @public + * @enum + */ +export declare const ObjectLockRetentionMode: { + readonly COMPLIANCE: "COMPLIANCE"; + readonly GOVERNANCE: "GOVERNANCE"; +}; +/** + * @public + */ +export type ObjectLockRetentionMode = (typeof ObjectLockRetentionMode)[keyof typeof ObjectLockRetentionMode]; +/** + *
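A minimal sketch for the legal-hold shapes above (placeholder names; the caller needs the s3:GetObjectLegalHold permission):

```ts
import { S3Client, GetObjectLegalHoldCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({});

const { LegalHold } = await s3.send(
  new GetObjectLegalHoldCommand({
    Bucket: "amzn-s3-demo-bucket",
    Key: "contracts/msa.pdf",
  })
);
console.log(LegalHold?.Status); // "ON" | "OFF"
```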

The container element for optionally specifying the default Object Lock retention + * settings for new objects placed in the specified bucket.

+ *
• The DefaultRetention settings require both a mode and a period.
• The DefaultRetention period can be either Days or Years but you must select one. You cannot specify Days and Years at the same time.
+ *
+ * @public + */ +export interface DefaultRetention { + /** + *

The default Object Lock retention mode you want to apply to new objects placed in the + * specified bucket. Must be used with either Days or Years.

+ * @public + */ + Mode?: ObjectLockRetentionMode | undefined; + /** + *

The number of days that you want to specify for the default retention period. Must be + * used with Mode.

+ * @public + */ + Days?: number | undefined; + /** + *

The number of years that you want to specify for the default retention period. Must be + * used with Mode.

+ * @public + */ + Years?: number | undefined; +} +/** + *
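To illustrate the Days-XOR-Years rule, a sketch that writes a DefaultRetention through the companion PutObjectLockConfiguration operation (placeholder bucket; the 30-day GOVERNANCE retention is illustrative):

```ts
import { S3Client, PutObjectLockConfigurationCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({});

await s3.send(
  new PutObjectLockConfigurationCommand({
    Bucket: "amzn-s3-demo-bucket",
    ObjectLockConfiguration: {
      ObjectLockEnabled: "Enabled",
      Rule: {
        // Mode plus exactly one of Days or Years -- never both.
        DefaultRetention: { Mode: "GOVERNANCE", Days: 30 },
      },
    },
  })
);
```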

The container element for an Object Lock rule.

+ * @public + */ +export interface ObjectLockRule { + /** + *

The default Object Lock retention mode and period that you want to apply to new objects + * placed in the specified bucket. Bucket settings require both a mode and a period. The + * period can be either Days or Years but you must select one. You + * cannot specify Days and Years at the same time.

+ * @public + */ + DefaultRetention?: DefaultRetention | undefined; +} +/** + *

The container element for Object Lock configuration parameters.

+ * @public + */ +export interface ObjectLockConfiguration { + /** + *

Indicates whether this bucket has an Object Lock configuration enabled. Enable + * ObjectLockEnabled when you apply ObjectLockConfiguration to a + * bucket.

+ * @public + */ + ObjectLockEnabled?: ObjectLockEnabled | undefined; + /** + *

Specifies the Object Lock rule for the specified object. Enable this rule when you apply ObjectLockConfiguration to a bucket. Bucket settings require both a mode and a period. The period can be either Days or Years but you must select one. You cannot specify Days and Years at the same time.

+ * @public + */ + Rule?: ObjectLockRule | undefined; +} +/** + * @public + */ +export interface GetObjectLockConfigurationOutput { + /** + *

The specified bucket's Object Lock configuration.

+ * @public + */ + ObjectLockConfiguration?: ObjectLockConfiguration | undefined; +} +/** + * @public + */ +export interface GetObjectLockConfigurationRequest { + /** + *

The bucket whose Object Lock configuration you want to retrieve.

+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + *
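And reading the configuration back with the request shape above (placeholder bucket):

```ts
import { S3Client, GetObjectLockConfigurationCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({});

const { ObjectLockConfiguration } = await s3.send(
  new GetObjectLockConfigurationCommand({ Bucket: "amzn-s3-demo-bucket" })
);
console.log(ObjectLockConfiguration?.ObjectLockEnabled); // "Enabled" | undefined
console.log(ObjectLockConfiguration?.Rule?.DefaultRetention);
```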

A Retention configuration for an object.

+ * @public + */ +export interface ObjectLockRetention { + /** + *

Indicates the Retention mode for the specified object.

+ * @public + */ + Mode?: ObjectLockRetentionMode | undefined; + /** + *

The date on which this Object Lock Retention will expire.

+ * @public + */ + RetainUntilDate?: Date | undefined; +} +/** + * @public + */ +export interface GetObjectRetentionOutput { + /** + *

The container element for an object's retention settings.

+ * @public + */ + Retention?: ObjectLockRetention | undefined; +} +/** + * @public + */ +export interface GetObjectRetentionRequest { + /** + *

The bucket name containing the object whose retention settings you want to retrieve.

+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The key name for the object whose retention settings you want to retrieve.

+ * @public + */ + Key: string | undefined; + /** + *

The version ID for the object whose retention settings you want to retrieve.

+ * @public + */ + VersionId?: string | undefined; + /** + *

Confirms that the requester knows that they will be charged for the request. Bucket + * owners need not specify this parameter in their requests. If either the source or + * destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding + * charges to copy the object. For information about downloading objects from Requester Pays + * buckets, see Downloading Objects in + * Requester Pays Buckets in the Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestPayer?: RequestPayer | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + */ +export interface GetObjectTaggingOutput { + /** + *

The versionId of the object for which you got the tagging information.

+ * @public + */ + VersionId?: string | undefined; + /** + *

Contains the tag set.

+ * @public + */ + TagSet: Tag[] | undefined; +} +/** + * @public + */ +export interface GetObjectTaggingRequest { + /** + *

The bucket name containing the object for which to get the tagging information.

+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ *

+ * S3 on Outposts - When you use this action with S3 on Outposts, you must direct requests to the S3 on Outposts hostname. The S3 on Outposts hostname takes the + * form + * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use this action with S3 on Outposts, the destination bucket must be the Outposts access point ARN or the access point alias. For more information about S3 on Outposts, see What is S3 on Outposts? in the Amazon S3 User Guide.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

Object key for which to get the tagging information.

+ * @public + */ + Key: string | undefined; + /** + *

The versionId of the object for which to get the tagging information.

+ * @public + */ + VersionId?: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; + /** + *

Confirms that the requester knows that they will be charged for the request. Bucket + * owners need not specify this parameter in their requests. If either the source or + * destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding + * charges to copy the object. For information about downloading objects from Requester Pays + * buckets, see Downloading Objects in + * Requester Pays Buckets in the Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestPayer?: RequestPayer | undefined; +} +/** + * @public + */ +export interface GetObjectTorrentOutput { + /** + *

A Bencoded dictionary as defined by the BitTorrent specification.

+ * @public + */ + Body?: StreamingBlobTypes | undefined; + /** + *

If present, indicates that the requester was successfully charged for the + * request.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestCharged?: RequestCharged | undefined; +} +/** + * @public + */ +export interface GetObjectTorrentRequest { + /** + *

The name of the bucket containing the object for which to get the torrent files.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The object key for which to get the information.

+ * @public + */ + Key: string | undefined; + /** + *

Confirms that the requester knows that they will be charged for the request. Bucket + * owners need not specify this parameter in their requests. If either the source or + * destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding + * charges to copy the object. For information about downloading objects from Requester Pays + * buckets, see Downloading Objects in + * Requester Pays Buckets in the Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestPayer?: RequestPayer | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + *
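A usage sketch for the GetObjectTagging shapes defined earlier in this hunk (placeholder names):

```ts
import { S3Client, GetObjectTaggingCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({});

const { TagSet, VersionId } = await s3.send(
  new GetObjectTaggingCommand({
    Bucket: "amzn-s3-demo-bucket",
    Key: "images/logo.png",
  })
);
for (const tag of TagSet ?? []) {
  console.log(`${tag.Key}=${tag.Value}`, VersionId);
}
```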

The PublicAccessBlock configuration that you want to apply to this Amazon S3 bucket. You can + * enable the configuration options in any combination. For more information about when Amazon S3 + * considers a bucket or object public, see The Meaning of "Public" in the Amazon S3 User Guide.

+ * @public + */ +export interface PublicAccessBlockConfiguration { + /** + *

Specifies whether Amazon S3 should block public access control lists (ACLs) for this bucket + * and objects in this bucket. Setting this element to TRUE causes the following + * behavior:

+ *
• PUT Bucket ACL and PUT Object ACL calls fail if the specified ACL is public.
• PUT Object calls fail if the request includes a public ACL.
• PUT Bucket calls fail if the request includes a public ACL.
+ *

Enabling this setting doesn't affect existing policies or ACLs.

+ * @public + */ + BlockPublicAcls?: boolean | undefined; + /** + *

Specifies whether Amazon S3 should ignore public ACLs for this bucket and objects in this + * bucket. Setting this element to TRUE causes Amazon S3 to ignore all public ACLs on + * this bucket and objects in this bucket.

+ *

Enabling this setting doesn't affect the persistence of any existing ACLs and doesn't + * prevent new public ACLs from being set.

+ * @public + */ + IgnorePublicAcls?: boolean | undefined; + /** + *

Specifies whether Amazon S3 should block public bucket policies for this bucket. Setting this + * element to TRUE causes Amazon S3 to reject calls to PUT Bucket policy if the + * specified bucket policy allows public access.

+ *

Enabling this setting doesn't affect existing bucket policies.

+ * @public + */ + BlockPublicPolicy?: boolean | undefined; + /** + *

Specifies whether Amazon S3 should restrict public bucket policies for this bucket. Setting + * this element to TRUE restricts access to this bucket to only Amazon Web Services service principals and authorized users within this account if the bucket has + * a public policy.

+ *

Enabling this setting doesn't affect previously stored bucket policies, except that + * public and cross-account access within any public bucket policy, including non-public + * delegation to specific accounts, is blocked.

+ * @public + */ + RestrictPublicBuckets?: boolean | undefined; +} +/** + * @public + */ +export interface GetPublicAccessBlockOutput { + /** + *

The PublicAccessBlock configuration currently in effect for this Amazon S3 + * bucket.

+ * @public + */ + PublicAccessBlockConfiguration?: PublicAccessBlockConfiguration | undefined; +} +/** + * @public + */ +export interface GetPublicAccessBlockRequest { + /** + *

The name of the Amazon S3 bucket whose PublicAccessBlock configuration you want + * to retrieve.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + */ +export interface HeadBucketOutput { + /** + *

The type of location where the bucket is created.

+ * + *

This functionality is only supported by directory buckets.

+ *
+ * @public + */ + BucketLocationType?: LocationType | undefined; + /** + *

The name of the location where the bucket will be created.

+ *

For directory buckets, the Zone ID of the Availability Zone or the Local Zone where the bucket is created. An example Zone ID value for an Availability Zone is usw2-az1.

+ * + *

This functionality is only supported by directory buckets.

+ *
+ * @public + */ + BucketLocationName?: string | undefined; + /** + *

The Region where the bucket is located.

+ * @public + */ + BucketRegion?: string | undefined; + /** + *

Indicates whether the bucket name used in the request is an access point alias.

+ * + *

For directory buckets, the value of this field is false.

+ *
+ * @public + */ + AccessPointAlias?: boolean | undefined; +} +/** + * @public + */ +export interface HeadBucketRequest { + /** + *

The bucket name.

+ *

+ * Directory buckets - + * When you use this operation with a directory bucket, you must use virtual-hosted-style requests in the format + * Bucket-name.s3express-zone-id.region-code.amazonaws.com. Path-style requests are not supported. Directory bucket names must be unique in the chosen Zone (Availability Zone or Local Zone). Bucket names must follow the format + * bucket-base-name--zone-id--x-s3 (for example, + * amzn-s3-demo-bucket--usw2-az1--x-s3). For information about bucket naming + * restrictions, see Directory bucket naming + * rules in the Amazon S3 User Guide.

+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ *

+ * Object Lambda access points - When you use this API operation with an Object Lambda access point, provide the alias of the Object Lambda access point in place of the bucket name. + * If the Object Lambda access point alias in a request is not valid, the error code InvalidAccessPointAliasError is returned. + * For more information about InvalidAccessPointAliasError, see List of + * Error Codes.

+ * + *

Object Lambda access points are not supported by directory buckets.

+ *
+ *

+ * S3 on Outposts - When you use this action with S3 on Outposts, you must direct requests to the S3 on Outposts hostname. The S3 on Outposts hostname takes the + * form + * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use this action with S3 on Outposts, the destination bucket must be the Outposts access point ARN or the access point alias. For more information about S3 on Outposts, see What is S3 on Outposts? in the Amazon S3 User Guide.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + *
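A sketch of the common "does this bucket exist?" probe built on these shapes and the NotFound fault declared just below (client setup is illustrative):

```ts
import { S3Client, HeadBucketCommand, NotFound } from "@aws-sdk/client-s3";

const s3 = new S3Client({});

async function bucketExists(Bucket: string): Promise<boolean> {
  try {
    const out = await s3.send(new HeadBucketCommand({ Bucket }));
    console.log(out.BucketRegion, out.AccessPointAlias);
    return true;
  } catch (err) {
    if (err instanceof NotFound) return false;
    throw err; // e.g., 403 when the bucket exists but access is denied
  }
}
```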

The specified content does not exist.

+ * @public + */ +export declare class NotFound extends __BaseException { + readonly name: "NotFound"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType<NotFound, __BaseException>); +} +/** + * @public + * @enum + */ +export declare const ArchiveStatus: { + readonly ARCHIVE_ACCESS: "ARCHIVE_ACCESS"; + readonly DEEP_ARCHIVE_ACCESS: "DEEP_ARCHIVE_ACCESS"; +}; +/** + * @public + */ +export type ArchiveStatus = (typeof ArchiveStatus)[keyof typeof ArchiveStatus]; +/** + * @public + */ +export interface HeadObjectOutput { + /** + *

Specifies whether the object retrieved was (true) or was not (false) a Delete Marker. If + * false, this response header does not appear in the response.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + DeleteMarker?: boolean | undefined; + /** + *

Indicates that a range of bytes was specified.

+ * @public + */ + AcceptRanges?: string | undefined; + /** + *

If the object expiration is configured (see + * PutBucketLifecycleConfiguration + * ), the response includes this + * header. It includes the expiry-date and rule-id key-value pairs + * providing object expiration information. The value of the rule-id is + * URL-encoded.

+ * + *

Object expiration information is not returned in directory buckets and this header returns the value "NotImplemented" in all responses for directory buckets.

+ *
+ * @public + */ + Expiration?: string | undefined; + /** + *

If the object is an archived object (an object whose storage class is GLACIER), the response includes this header if either the archive restoration is in progress (see RestoreObject) or an archive copy is already restored.

+ *

If an archive copy is already restored, the header value indicates when Amazon S3 is + * scheduled to delete the object copy. For example:

+ *

+ * x-amz-restore: ongoing-request="false", expiry-date="Fri, 21 Dec 2012 00:00:00 + * GMT" + *

+ *

If the object restoration is in progress, the header returns the value + * ongoing-request="true".

+ *

For more information about archiving objects, see Transitioning Objects: General Considerations.

+ * + *

This functionality is not supported for directory buckets. + * Directory buckets only support EXPRESS_ONEZONE (the S3 Express One Zone storage class) in Availability Zones and ONEZONE_IA (the S3 One Zone-Infrequent Access storage class) in Dedicated Local Zones.

+ *
+ * @public + */ + Restore?: string | undefined; + /** + *

The archive state of the head object.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + ArchiveStatus?: ArchiveStatus | undefined; + /** + *

Date and time when the object was last modified.

+ * @public + */ + LastModified?: Date | undefined; + /** + *

Size of the body in bytes.

+ * @public + */ + ContentLength?: number | undefined; + /** + *

The Base64 encoded, 32-bit CRC32 checksum of the object. This checksum is only present if the checksum was uploaded with the object. When you use an API operation on an object that was uploaded using multipart uploads, this value may not be a direct checksum value of the full object. Instead, it's a calculation based on the checksum values of each individual part. For more information about how checksums are calculated with multipart uploads, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32?: string | undefined; + /** + *

The Base64 encoded, 32-bit CRC32C checksum of the object. This checksum is only present if the checksum was uploaded + * with the object. When you use an API operation on an object that was uploaded using multipart uploads, this value may not be a direct checksum value of the full object. Instead, it's a calculation based on the checksum values of each individual part. For more information about how checksums are calculated + * with multipart uploads, see + * Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32C?: string | undefined; + /** + *

The Base64 encoded, 64-bit CRC64NVME checksum of the object. For more + * information, see Checking object integrity + * in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC64NVME?: string | undefined; + /** + *

The Base64 encoded, 160-bit SHA1 digest of the object. This will only be present if it was uploaded with the object. When you use the API operation on an object that was uploaded using multipart uploads, this value may not be a direct checksum value of the full object. Instead, it's a calculation based on the checksum values of each individual part. For more information about how checksums are calculated with multipart uploads, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA1?: string | undefined; + /** + *

The Base64 encoded, 256-bit SHA256 digest of the object. This will only be present if it was uploaded with the object. When you use an API operation on an object that was uploaded using multipart uploads, this value may not be a direct checksum value of the full object. Instead, it's a calculation based on the checksum values of each individual part. For more information about how checksums are calculated with multipart uploads, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA256?: string | undefined; + /** + *

The checksum type, which determines how part-level checksums are combined to create an + * object-level checksum for multipart objects. You can use this header response to verify + * that the checksum type that is received is the same checksum type that was specified in + * CreateMultipartUpload request. For more + * information, see Checking object integrity + * in the Amazon S3 User Guide.

+ * @public + */ + ChecksumType?: ChecksumType | undefined; + /** + *

An entity tag (ETag) is an opaque identifier assigned by a web server to a specific + * version of a resource found at a URL.

+ * @public + */ + ETag?: string | undefined; + /** + *

This is set to the number of metadata entries not returned in x-amz-meta + * headers. This can happen if you create metadata using an API like SOAP that supports more + * flexible metadata than the REST API. For example, using SOAP, you can create metadata whose + * values are not legal HTTP headers.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + MissingMeta?: number | undefined; + /** + *

Version ID of the object.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + VersionId?: string | undefined; + /** + *

Specifies caching behavior along the request/reply chain.

+ * @public + */ + CacheControl?: string | undefined; + /** + *

Specifies presentational information for the object.

+ * @public + */ + ContentDisposition?: string | undefined; + /** + *

Indicates what content encodings have been applied to the object and thus what decoding + * mechanisms must be applied to obtain the media-type referenced by the Content-Type header + * field.

+ * @public + */ + ContentEncoding?: string | undefined; + /** + *

The language the content is in.

+ * @public + */ + ContentLanguage?: string | undefined; + /** + *

A standard MIME type describing the format of the object data.

+ * @public + */ + ContentType?: string | undefined; + /** + *

The portion of the object returned in the response for a GET request.

+ * @public + */ + ContentRange?: string | undefined; + /** + * Deprecated in favor of ExpiresString. + * + * @deprecated + * @public + */ + Expires?: Date | undefined; + /** + *

The date and time at which the object is no longer cacheable.

+ * @public + */ + ExpiresString?: string | undefined; + /** + *

If the bucket is configured as a website, redirects requests for this object to another + * object in the same bucket or to an external URL. Amazon S3 stores the value of this header in + * the object metadata.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + WebsiteRedirectLocation?: string | undefined; + /** + *

The server-side encryption algorithm used when you store this object in Amazon S3 (for + * example, AES256, aws:kms, aws:kms:dsse).

+ * @public + */ + ServerSideEncryption?: ServerSideEncryption | undefined; + /** + *

A map of metadata to store with the object in S3.

+ * @public + */ + Metadata?: Record<string, string> | undefined; + /** + *

If server-side encryption with a customer-provided encryption key was requested, the + * response will include this header to confirm the encryption algorithm that's used.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerAlgorithm?: string | undefined; + /** + *

If server-side encryption with a customer-provided encryption key was requested, the + * response will include this header to provide the round-trip message integrity verification + * of the customer-provided encryption key.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerKeyMD5?: string | undefined; + /** + *

If present, indicates the ID of the KMS key that was used for object encryption.

+ * @public + */ + SSEKMSKeyId?: string | undefined; + /** + *

Indicates whether the object uses an S3 Bucket Key for server-side encryption with + * Key Management Service (KMS) keys (SSE-KMS).

+ * @public + */ + BucketKeyEnabled?: boolean | undefined; + /** + *

Provides storage class information of the object. Amazon S3 returns this header for all + * objects except for S3 Standard storage class objects.

+ *

For more information, see Storage Classes.

+ * + *

+ * Directory buckets - + * Directory buckets only support EXPRESS_ONEZONE (the S3 Express One Zone storage class) in Availability Zones and ONEZONE_IA (the S3 One Zone-Infrequent Access storage class) in Dedicated Local Zones.

+ *
+ * @public + */ + StorageClass?: StorageClass | undefined; + /** + *

If present, indicates that the requester was successfully charged for the + * request.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestCharged?: RequestCharged | undefined; + /** + *

Amazon S3 can return this header if your request involves a bucket that is either a source or + * a destination in a replication rule.

+ *

In replication, you have a source bucket on which you configure replication and a destination bucket or buckets where Amazon S3 stores object replicas. When you request an object (GetObject) or object metadata (HeadObject) from these buckets, Amazon S3 will return the x-amz-replication-status header in the response as follows:

+ *
• If requesting an object from the source bucket, Amazon S3 will return the x-amz-replication-status header if the object in your request is eligible for replication. For example, suppose that in your replication configuration, you specify object prefix TaxDocs, requesting Amazon S3 to replicate objects with key prefix TaxDocs. Any objects you upload with this key name prefix, for example TaxDocs/document1.pdf, are eligible for replication. For any object request with this key name prefix, Amazon S3 will return the x-amz-replication-status header with value PENDING, COMPLETED, or FAILED indicating object replication status.
• If requesting an object from a destination bucket, Amazon S3 will return the x-amz-replication-status header with value REPLICA if the object in your request is a replica that Amazon S3 created and there is no replica modification replication in progress.
• When replicating objects to multiple destination buckets, the x-amz-replication-status header acts differently. The header of the source object will only return a value of COMPLETED when replication is successful to all destinations. The header will remain at value PENDING until replication has completed for all destinations. If one or more destinations fail replication, the header will return FAILED.
+ *

For more information, see Replication.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + ReplicationStatus?: ReplicationStatus | undefined; + /** + *

The count of parts this object has. This value is only returned if you specify + * partNumber in your request and the object was uploaded as a multipart + * upload.

+ * @public + */ + PartsCount?: number | undefined; + /** + *

The Object Lock mode, if any, that's in effect for this object. This header is only + * returned if the requester has the s3:GetObjectRetention permission. For more + * information about S3 Object Lock, see Object Lock.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + ObjectLockMode?: ObjectLockMode | undefined; + /** + *

The date and time when the Object Lock retention period expires. This header is only + * returned if the requester has the s3:GetObjectRetention permission.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + ObjectLockRetainUntilDate?: Date | undefined; + /** + *

Specifies whether a legal hold is in effect for this object. This header is only + * returned if the requester has the s3:GetObjectLegalHold permission. This + * header is not returned if the specified version of this object has never had a legal hold + * applied. For more information about S3 Object Lock, see Object Lock.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + ObjectLockLegalHoldStatus?: ObjectLockLegalHoldStatus | undefined; +} +/** + * @public + */ +export interface HeadObjectRequest { + /** + *

The name of the bucket that contains the object.

+ *

+ * Directory buckets - + * When you use this operation with a directory bucket, you must use virtual-hosted-style requests in the format + * Bucket-name.s3express-zone-id.region-code.amazonaws.com. Path-style requests are not supported. Directory bucket names must be unique in the chosen Zone (Availability Zone or Local Zone). Bucket names must follow the format + * bucket-base-name--zone-id--x-s3 (for example, + * amzn-s3-demo-bucket--usw2-az1--x-s3). For information about bucket naming + * restrictions, see Directory bucket naming + * rules in the Amazon S3 User Guide.

+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ * + *

Object Lambda access points are not supported by directory buckets.

+ *
+ *

+ * S3 on Outposts - When you use this action with S3 on Outposts, you must direct requests to the S3 on Outposts hostname. The S3 on Outposts hostname takes the + * form + * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use this action with S3 on Outposts, the destination bucket must be the Outposts access point ARN or the access point alias. For more information about S3 on Outposts, see What is S3 on Outposts? in the Amazon S3 User Guide.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

Return the object only if its entity tag (ETag) is the same as the one specified; + * otherwise, return a 412 (precondition failed) error.

+ *

If both of the If-Match and If-Unmodified-Since headers are + * present in the request as follows:

+ *
• If-Match condition evaluates to true, and;
• If-Unmodified-Since condition evaluates to false;
+ *

Then Amazon S3 returns 200 OK and the data requested.

+ *

For more information about conditional requests, see RFC 7232.

+ * @public + */ + IfMatch?: string | undefined; + /** + *

Return the object only if it has been modified since the specified time; otherwise, + * return a 304 (not modified) error.

+ *

If both of the If-None-Match and If-Modified-Since headers are + * present in the request as follows:

+ *
• If-None-Match condition evaluates to false, and;
• If-Modified-Since condition evaluates to true;
+ *

Then Amazon S3 returns the 304 Not Modified response code.

+ *

For more information about conditional requests, see RFC 7232.

+ * @public + */ + IfModifiedSince?: Date | undefined; + /** + *

Return the object only if its entity tag (ETag) is different from the one specified; + * otherwise, return a 304 (not modified) error.

+ *

If both of the If-None-Match and If-Modified-Since headers are + * present in the request as follows:

+ *
• If-None-Match condition evaluates to false, and;
• If-Modified-Since condition evaluates to true;
+ *

Then Amazon S3 returns the 304 Not Modified response code.

+ *

For more information about conditional requests, see RFC 7232.

+ * @public + */ + IfNoneMatch?: string | undefined; + /** + *

Return the object only if it has not been modified since the specified time; otherwise, + * return a 412 (precondition failed) error.

+ *

If both of the If-Match and If-Unmodified-Since headers are + * present in the request as follows:

+ *
• If-Match condition evaluates to true, and;
• If-Unmodified-Since condition evaluates to false;
+ *

Then Amazon S3 returns 200 OK and the data requested.

+ *

For more information about conditional requests, see RFC 7232.

+ * @public + */ + IfUnmodifiedSince?: Date | undefined; + /** + *

+  /**
+   * The object key.
+   * @public
+   */
+  Key: string | undefined;
+  /**
+   * HeadObject returns only the metadata for an object. If the Range is satisfiable, only
+   * the ContentLength is affected in the response. If the Range is not satisfiable, S3
+   * returns a 416 - Requested Range Not Satisfiable error.
+   * @public
+   */
+  Range?: string | undefined;
+  /**
+   * Sets the Cache-Control header of the response.
+   * @public
+   */
+  ResponseCacheControl?: string | undefined;
+  /**
+   * Sets the Content-Disposition header of the response.
+   * @public
+   */
+  ResponseContentDisposition?: string | undefined;
+  /**
+   * Sets the Content-Encoding header of the response.
+   * @public
+   */
+  ResponseContentEncoding?: string | undefined;
+  /**
+   * Sets the Content-Language header of the response.
+   * @public
+   */
+  ResponseContentLanguage?: string | undefined;
+  /**
+   * Sets the Content-Type header of the response.
+   * @public
+   */
+  ResponseContentType?: string | undefined;
+  /**
+   * Sets the Expires header of the response.
+   * @public
+   */
+  ResponseExpires?: Date | undefined;
+  /**
+   * Version ID used to reference a specific version of the object.
+   *
+   * For directory buckets in this API operation, only the null value of the version ID is
+   * supported.
+   * @public
+   */
+  VersionId?: string | undefined;
+  /**
+   * Specifies the algorithm to use when encrypting the object (for example, AES256).
+   *
+   * This functionality is not supported for directory buckets.
+   * @public
+   */
+  SSECustomerAlgorithm?: string | undefined;
+  /**
+   * Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data.
+   * This value is used to store the object and then it is discarded; Amazon S3 does not
+   * store the encryption key. The key must be appropriate for use with the algorithm
+   * specified in the x-amz-server-side-encryption-customer-algorithm header.
+   *
+   * This functionality is not supported for directory buckets.
+   * @public
+   */
+  SSECustomerKey?: string | undefined;
+  /**
+   * Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3
+   * uses this header for a message integrity check to ensure that the encryption key was
+   * transmitted without error.
+   *
+   * This functionality is not supported for directory buckets.
+   * @public
+   */
+  SSECustomerKeyMD5?: string | undefined;
+  /**
+   * Confirms that the requester knows that they will be charged for the request. Bucket
+   * owners need not specify this parameter in their requests. If either the source or
+   * destination S3 bucket has Requester Pays enabled, the requester pays for the
+   * corresponding charges. For information about downloading objects from Requester Pays
+   * buckets, see Downloading Objects in Requester Pays Buckets in the Amazon S3 User Guide.
+   *
+   * This functionality is not supported for directory buckets.
+   * @public
+   */
+  RequestPayer?: RequestPayer | undefined;
+  /**
+   * Part number of the object being read. This is a positive integer between 1 and 10,000.
+   * Effectively performs a 'ranged' HEAD request for the part specified. Useful for
+   * querying the size of the part and the number of parts in this object.
+   * @public
+   */
+  PartNumber?: number | undefined;
+  /**
+   * The account ID of the expected bucket owner. If the account ID that you provide does
+   * not match the actual owner of the bucket, the request fails with the HTTP status code
+   * 403 Forbidden (access denied).
+   * @public
+   */
+  ExpectedBucketOwner?: string | undefined;
+  /**
+   * To retrieve the checksum, this parameter must be enabled.
+   *
+   * General purpose buckets - If you enable checksum mode and the object is uploaded with a
+   * checksum and encrypted with a Key Management Service (KMS) key, you must have permission
+   * to use the kms:Decrypt action to retrieve the checksum.
+   *
+   * Directory buckets - If you enable ChecksumMode and the object is encrypted with Amazon
+   * Web Services Key Management Service (Amazon Web Services KMS), you must also have the
+   * kms:GenerateDataKey and kms:Decrypt permissions in IAM identity-based policies and KMS
+   * key policies for the KMS key to retrieve the checksum of the object.
+   * @public
+   */
+  ChecksumMode?: ChecksumMode | undefined;
+}
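A minimal usage sketch of the conditional headers above (illustrative only, not part of this diff): a HeadObject call that returns fresh metadata unless the caller's ETag still matches. It assumes a configured S3Client; the bucket name, the key, and the way the 304 surfaces as a thrown error are assumptions of the example, not guarantees.

import { S3Client, HeadObjectCommand, HeadObjectOutput } from "@aws-sdk/client-s3";

const s3 = new S3Client({}); // region and credentials resolved from the environment

// Returns fresh metadata, or undefined when the stored ETag still equals `etag`
// (Amazon S3 answers 304 Not Modified, which the SDK surfaces as an error).
async function headIfChanged(etag: string): Promise<HeadObjectOutput | undefined> {
  try {
    return await s3.send(new HeadObjectCommand({
      Bucket: "amzn-s3-demo-bucket", // hypothetical bucket
      Key: "reports/2024.csv",       // hypothetical key
      IfNoneMatch: etag,
    }));
  } catch (err) {
    if ((err as any)?.$metadata?.httpStatusCode === 304) return undefined; // unchanged
    throw err;
  }
}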

+/**
+ * @public
+ */
+export interface ListBucketAnalyticsConfigurationsOutput {
+  /**
+   * Indicates whether the returned list of analytics configurations is complete. A value of
+   * true indicates that the list is not complete and the NextContinuationToken will be
+   * provided for a subsequent request.
+   * @public
+   */
+  IsTruncated?: boolean | undefined;
+  /**
+   * The marker that is used as a starting point for this analytics configuration list
+   * response. This value is present if it was sent in the request.
+   * @public
+   */
+  ContinuationToken?: string | undefined;
+  /**
+   * NextContinuationToken is sent when IsTruncated is true, which indicates that there are
+   * more analytics configurations to list. The next request must include this
+   * NextContinuationToken. The token is obfuscated and is not a usable value.
+   * @public
+   */
+  NextContinuationToken?: string | undefined;
+  /**
+   * The list of analytics configurations for a bucket.
+   * @public
+   */
+  AnalyticsConfigurationList?: AnalyticsConfiguration[] | undefined;
+}
+/**
+ * @public
+ */
+export interface ListBucketAnalyticsConfigurationsRequest {
+  /**
+   * The name of the bucket from which analytics configurations are retrieved.
+   *
+   * Note: To supply the Multi-Region Access Point (MRAP) to Bucket, you need to install the
+   * "@aws-sdk/signature-v4-crt" package in your project dependencies. For more information,
+   * see https://github.com/aws/aws-sdk-js-v3#known-issues.
+   * @public
+   */
+  Bucket: string | undefined;
+  /**
+   * The ContinuationToken that represents a placeholder from where this request should
+   * begin.
+   * @public
+   */
+  ContinuationToken?: string | undefined;
+  /**
+   * The account ID of the expected bucket owner. If the account ID that you provide does
+   * not match the actual owner of the bucket, the request fails with the HTTP status code
+   * 403 Forbidden (access denied).
+   * @public
+   */
+  ExpectedBucketOwner?: string | undefined;
+}
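Since NextContinuationToken is opaque and only meaningful while IsTruncated is true, the natural client-side pattern is a drain loop. A sketch under those assumptions (the helper name is ours, not part of the SDK):

import { S3Client, ListBucketAnalyticsConfigurationsCommand, AnalyticsConfiguration } from "@aws-sdk/client-s3";

// Collects every analytics configuration on a bucket by chaining continuation tokens.
async function listAllAnalyticsConfigs(s3: S3Client, bucket: string): Promise<AnalyticsConfiguration[]> {
  const configs: AnalyticsConfiguration[] = [];
  let token: string | undefined; // undefined on the first request
  do {
    const page = await s3.send(new ListBucketAnalyticsConfigurationsCommand({
      Bucket: bucket,
      ContinuationToken: token,
    }));
    configs.push(...(page.AnalyticsConfigurationList ?? []));
    token = page.IsTruncated ? page.NextContinuationToken : undefined;
  } while (token);
  return configs;
}

The same loop shape works for the intelligent-tiering, inventory, and metrics listing pairs below, which share the ContinuationToken/NextContinuationToken contract.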

+/**
+ * @public
+ */
+export interface ListBucketIntelligentTieringConfigurationsOutput {
+  /**
+   * Indicates whether the returned list of S3 Intelligent-Tiering configurations is
+   * complete. A value of true indicates that the list is not complete and the
+   * NextContinuationToken will be provided for a subsequent request.
+   * @public
+   */
+  IsTruncated?: boolean | undefined;
+  /**
+   * The ContinuationToken that represents a placeholder from where this request should
+   * begin.
+   * @public
+   */
+  ContinuationToken?: string | undefined;
+  /**
+   * The marker used to continue this configuration listing. Use the NextContinuationToken
+   * from this response to continue the listing in a subsequent request. The continuation
+   * token is an opaque value that Amazon S3 understands.
+   * @public
+   */
+  NextContinuationToken?: string | undefined;
+  /**
+   * The list of S3 Intelligent-Tiering configurations for a bucket.
+   * @public
+   */
+  IntelligentTieringConfigurationList?: IntelligentTieringConfiguration[] | undefined;
+}
+/**
+ * @public
+ */
+export interface ListBucketIntelligentTieringConfigurationsRequest {
+  /**
+   * The name of the Amazon S3 bucket whose configuration you want to modify or retrieve.
+   *
+   * Note: To supply the Multi-Region Access Point (MRAP) to Bucket, you need to install the
+   * "@aws-sdk/signature-v4-crt" package in your project dependencies. For more information,
+   * see https://github.com/aws/aws-sdk-js-v3#known-issues.
+   * @public
+   */
+  Bucket: string | undefined;
+  /**
+   * The ContinuationToken that represents a placeholder from where this request should
+   * begin.
+   * @public
+   */
+  ContinuationToken?: string | undefined;
+}

+/**
+ * @public
+ */
+export interface ListBucketInventoryConfigurationsOutput {
+  /**
+   * If sent in the request, the marker that is used as a starting point for this inventory
+   * configuration list response.
+   * @public
+   */
+  ContinuationToken?: string | undefined;
+  /**
+   * The list of inventory configurations for a bucket.
+   * @public
+   */
+  InventoryConfigurationList?: InventoryConfiguration[] | undefined;
+  /**
+   * Tells whether the returned list of inventory configurations is complete. A value of
+   * true indicates that the list is not complete and the NextContinuationToken is provided
+   * for a subsequent request.
+   * @public
+   */
+  IsTruncated?: boolean | undefined;
+  /**
+   * The marker used to continue this inventory configuration listing. Use the
+   * NextContinuationToken from this response to continue the listing in a subsequent
+   * request. The continuation token is an opaque value that Amazon S3 understands.
+   * @public
+   */
+  NextContinuationToken?: string | undefined;
+}
+/**
+ * @public
+ */
+export interface ListBucketInventoryConfigurationsRequest {
+  /**
+   * The name of the bucket containing the inventory configurations to retrieve.
+   *
+   * Note: To supply the Multi-Region Access Point (MRAP) to Bucket, you need to install the
+   * "@aws-sdk/signature-v4-crt" package in your project dependencies. For more information,
+   * see https://github.com/aws/aws-sdk-js-v3#known-issues.
+   * @public
+   */
+  Bucket: string | undefined;
+  /**
+   * The marker used to continue an inventory configuration listing that has been truncated.
+   * Use the NextContinuationToken from a previously truncated list response to continue
+   * the listing. The continuation token is an opaque value that Amazon S3 understands.
+   * @public
+   */
+  ContinuationToken?: string | undefined;
+  /**
+   * The account ID of the expected bucket owner. If the account ID that you provide does
+   * not match the actual owner of the bucket, the request fails with the HTTP status code
+   * 403 Forbidden (access denied).
+   * @public
+   */
+  ExpectedBucketOwner?: string | undefined;
+}

+/**
+ * @public
+ */
+export interface ListBucketMetricsConfigurationsOutput {
+  /**
+   * Indicates whether the returned list of metrics configurations is complete. A value of
+   * true indicates that the list is not complete and the NextContinuationToken will be
+   * provided for a subsequent request.
+   * @public
+   */
+  IsTruncated?: boolean | undefined;
+  /**
+   * The marker that is used as a starting point for this metrics configuration list
+   * response. This value is present if it was sent in the request.
+   * @public
+   */
+  ContinuationToken?: string | undefined;
+  /**
+   * The marker used to continue a metrics configuration listing that has been truncated.
+   * Use the NextContinuationToken from a previously truncated list response to continue
+   * the listing. The continuation token is an opaque value that Amazon S3 understands.
+   * @public
+   */
+  NextContinuationToken?: string | undefined;
+  /**
+   * The list of metrics configurations for a bucket.
+   * @public
+   */
+  MetricsConfigurationList?: MetricsConfiguration[] | undefined;
+}
+/**
+ * @public
+ */
+export interface ListBucketMetricsConfigurationsRequest {
+  /**
+   * The name of the bucket containing the metrics configurations to retrieve.
+   *
+   * Note: To supply the Multi-Region Access Point (MRAP) to Bucket, you need to install the
+   * "@aws-sdk/signature-v4-crt" package in your project dependencies. For more information,
+   * see https://github.com/aws/aws-sdk-js-v3#known-issues.
+   * @public
+   */
+  Bucket: string | undefined;
+  /**
+   * The marker that is used to continue a metrics configuration listing that has been
+   * truncated. Use the NextContinuationToken from a previously truncated list response to
+   * continue the listing. The continuation token is an opaque value that Amazon S3
+   * understands.
+   * @public
+   */
+  ContinuationToken?: string | undefined;
+  /**
+   * The account ID of the expected bucket owner. If the account ID that you provide does
+   * not match the actual owner of the bucket, the request fails with the HTTP status code
+   * 403 Forbidden (access denied).
+   * @public
+   */
+  ExpectedBucketOwner?: string | undefined;
+}

+/**
+ * In terms of implementation, a Bucket is a resource.
+ * @public
+ */
+export interface Bucket {
+  /**
+   * The name of the bucket.
+   * @public
+   */
+  Name?: string | undefined;
+  /**
+   * Date the bucket was created. This date can change when making changes to your bucket,
+   * such as editing its bucket policy.
+   * @public
+   */
+  CreationDate?: Date | undefined;
+  /**
+   * BucketRegion indicates the Amazon Web Services region where the bucket is located. If
+   * the request contains at least one valid parameter, it is included in the response.
+   * @public
+   */
+  BucketRegion?: string | undefined;
+}

+/**
+ * @public
+ */
+export interface ListBucketsOutput {
+  /**
+   * The list of buckets owned by the requester.
+   * @public
+   */
+  Buckets?: Bucket[] | undefined;
+  /**
+   * The owner of the buckets listed.
+   * @public
+   */
+  Owner?: Owner | undefined;
+  /**
+   * ContinuationToken is included in the response when there are more buckets that can be
+   * listed with pagination. The next ListBuckets request to Amazon S3 can be continued with
+   * this ContinuationToken. ContinuationToken is obfuscated and is not a real bucket.
+   * @public
+   */
+  ContinuationToken?: string | undefined;
+  /**
+   * If Prefix was sent with the request, it is included in the response.
+   *
+   * All bucket names in the response begin with the specified bucket name prefix.
+   * @public
+   */
+  Prefix?: string | undefined;
+}
+/**
+ * @public
+ */
+export interface ListBucketsRequest {
+  /**
+   * Maximum number of buckets to be returned in the response. When the number is more than
+   * the count of buckets that are owned by an Amazon Web Services account, all of the
+   * buckets are returned.
+   * @public
+   */
+  MaxBuckets?: number | undefined;
+  /**
+   * ContinuationToken indicates to Amazon S3 that the list is being continued on this
+   * bucket with a token. ContinuationToken is obfuscated and is not a real key. You can
+   * use this ContinuationToken for pagination of the list results.
+   *
+   * Length Constraints: Minimum length of 0. Maximum length of 1024.
+   *
+   * Required: No.
+   *
+   * Note: If you specify the bucket-region, prefix, or continuation-token query parameters
+   * without using max-buckets to set the maximum number of buckets returned in the
+   * response, Amazon S3 applies a default page size of 10,000 and provides a continuation
+   * token if there are more buckets.
+   * @public
+   */
+  ContinuationToken?: string | undefined;
+  /**
+   * Limits the response to bucket names that begin with the specified bucket name prefix.
+   * @public
+   */
+  Prefix?: string | undefined;
+  /**
+   * Limits the response to buckets that are located in the specified Amazon Web Services
+   * Region. The Amazon Web Services Region must be expressed according to the Amazon Web
+   * Services Region code, such as us-west-2 for the US West (Oregon) Region. For a list of
+   * the valid values for all of the Amazon Web Services Regions, see Regions and Endpoints.
+   *
+   * Note: Requests made to a Regional endpoint that is different from the bucket-region
+   * parameter are not supported. For example, if you want to limit the response to your
+   * buckets in Region us-west-2, the request must be made to an endpoint in Region
+   * us-west-2.
+   * @public
+   */
+  BucketRegion?: string | undefined;
+}
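The request shape above maps directly onto a paged listing loop: the response's ContinuationToken is only present while more buckets remain. A sketch assuming a configured S3Client (the page size and prefix are arbitrary example values):

import { S3Client, ListBucketsCommand, Bucket } from "@aws-sdk/client-s3";

// Lists every bucket whose name begins with `prefix`, 100 per page.
async function listBucketsByPrefix(s3: S3Client, prefix: string): Promise<Bucket[]> {
  const buckets: Bucket[] = [];
  let token: string | undefined;
  do {
    const page = await s3.send(new ListBucketsCommand({
      Prefix: prefix,
      MaxBuckets: 100, // without it, the service applies a default page size of 10,000
      ContinuationToken: token,
    }));
    buckets.push(...(page.Buckets ?? []));
    token = page.ContinuationToken; // present only while more buckets remain
  } while (token);
  return buckets;
}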

+/**
+ * @public
+ */
+export interface ListDirectoryBucketsOutput {
+  /**
+   * The list of buckets owned by the requester.
+   * @public
+   */
+  Buckets?: Bucket[] | undefined;
+  /**
+   * If ContinuationToken was sent with the request, it is included in the response. You
+   * can use the returned ContinuationToken for pagination of the list response.
+   * @public
+   */
+  ContinuationToken?: string | undefined;
+}
+/**
+ * @public
+ */
+export interface ListDirectoryBucketsRequest {
+  /**
+   * ContinuationToken indicates to Amazon S3 that the list is being continued on buckets
+   * in this account with a token. ContinuationToken is obfuscated and is not a real bucket
+   * name. You can use this ContinuationToken for the pagination of the list results.
+   * @public
+   */
+  ContinuationToken?: string | undefined;
+  /**
+   * Maximum number of buckets to be returned in the response. When the number is more than
+   * the count of buckets that are owned by an Amazon Web Services account, all of the
+   * buckets are returned.
+   * @public
+   */
+  MaxDirectoryBuckets?: number | undefined;
+}
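Directory buckets paginate the same way, with MaxDirectoryBuckets as the page-size knob. A minimal sketch (the helper name is ours, not part of the SDK):

import { S3Client, ListDirectoryBucketsCommand } from "@aws-sdk/client-s3";

// Collects the names of all directory buckets in the account.
async function listDirectoryBucketNames(s3: S3Client): Promise<string[]> {
  const names: string[] = [];
  let token: string | undefined;
  do {
    const page = await s3.send(new ListDirectoryBucketsCommand({
      ContinuationToken: token,
      MaxDirectoryBuckets: 100,
    }));
    for (const b of page.Buckets ?? []) if (b.Name) names.push(b.Name);
    token = page.ContinuationToken;
  } while (token);
  return names;
}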

+/**
+ * Container for all (if there are any) keys between Prefix and the next occurrence of the
+ * string specified by a delimiter. CommonPrefixes lists keys that act like subdirectories
+ * in the directory specified by Prefix. For example, if the prefix is notes/ and the
+ * delimiter is a slash (/) as in notes/summer/july, the common prefix is notes/summer/.
+ * @public
+ */
+export interface CommonPrefix {
+  /**
+   * Container for the specified common prefix.
+   * @public
+   */
+  Prefix?: string | undefined;
+}
+/**
+ * @public
+ * @enum
+ */
+export declare const EncodingType: {
+  readonly url: "url";
+};
+/**
+ * @public
+ */
+export type EncodingType = (typeof EncodingType)[keyof typeof EncodingType];

+/**
+ * Container element that identifies who initiated the multipart upload.
+ * @public
+ */
+export interface Initiator {
+  /**
+   * If the principal is an Amazon Web Services account, it provides the Canonical User ID.
+   * If the principal is an IAM User, it provides a user ARN value.
+   *
+   * Directory buckets - If the principal is an Amazon Web Services account, it provides
+   * the Amazon Web Services account ID. If the principal is an IAM User, it provides a
+   * user ARN value.
+   * @public
+   */
+  ID?: string | undefined;
+  /**
+   * Name of the Principal.
+   *
+   * This functionality is not supported for directory buckets.
+   * @public
+   */
+  DisplayName?: string | undefined;
+}

+/**
+ * Container for the MultipartUpload for the Amazon S3 object.
+ * @public
+ */
+export interface MultipartUpload {
+  /**
+   * Upload ID that identifies the multipart upload.
+   * @public
+   */
+  UploadId?: string | undefined;
+  /**
+   * Key of the object for which the multipart upload was initiated.
+   * @public
+   */
+  Key?: string | undefined;
+  /**
+   * Date and time at which the multipart upload was initiated.
+   * @public
+   */
+  Initiated?: Date | undefined;
+  /**
+   * The class of storage used to store the object.
+   *
+   * Directory buckets - Directory buckets only support EXPRESS_ONEZONE (the S3 Express One
+   * Zone storage class) in Availability Zones and ONEZONE_IA (the S3 One Zone-Infrequent
+   * Access storage class) in Dedicated Local Zones.
+   * @public
+   */
+  StorageClass?: StorageClass | undefined;
+  /**
+   * Specifies the owner of the object that is part of the multipart upload.
+   *
+   * Directory buckets - The bucket owner is returned as the object owner for all the
+   * objects.
+   * @public
+   */
+  Owner?: Owner | undefined;
+  /**
+   * Identifies who initiated the multipart upload.
+   * @public
+   */
+  Initiator?: Initiator | undefined;
+  /**
+   * The algorithm that was used to create a checksum of the object.
+   * @public
+   */
+  ChecksumAlgorithm?: ChecksumAlgorithm | undefined;
+  /**
+   * The checksum type that is used to calculate the object's checksum value. For more
+   * information, see Checking object integrity in the Amazon S3 User Guide.
+   * @public
+   */
+  ChecksumType?: ChecksumType | undefined;
+}

+/**
+ * @public
+ */
+export interface ListMultipartUploadsOutput {
+  /**
+   * The name of the bucket to which the multipart upload was initiated. Does not return
+   * the access point ARN or access point alias if used.
+   * @public
+   */
+  Bucket?: string | undefined;
+  /**
+   * The key at or after which the listing began.
+   * @public
+   */
+  KeyMarker?: string | undefined;
+  /**
+   * Together with key-marker, specifies the multipart upload after which listing should
+   * begin. If key-marker is not specified, the upload-id-marker parameter is ignored.
+   * Otherwise, any multipart uploads for a key equal to the key-marker might be included
+   * in the list only if they have an upload ID lexicographically greater than the
+   * specified upload-id-marker.
+   *
+   * This functionality is not supported for directory buckets.
+   * @public
+   */
+  UploadIdMarker?: string | undefined;
+  /**
+   * When a list is truncated, this element specifies the value that should be used for the
+   * key-marker request parameter in a subsequent request.
+   * @public
+   */
+  NextKeyMarker?: string | undefined;
+  /**
+   * When a prefix is provided in the request, this field contains the specified prefix.
+   * The result contains only keys starting with the specified prefix.
+   *
+   * Directory buckets - For directory buckets, only prefixes that end in a delimiter (/)
+   * are supported.
+   * @public
+   */
+  Prefix?: string | undefined;
+  /**
+   * Contains the delimiter you specified in the request. If you don't specify a delimiter
+   * in your request, this element is absent from the response.
+   *
+   * Directory buckets - For directory buckets, / is the only supported delimiter.
+   * @public
+   */
+  Delimiter?: string | undefined;
+  /**
+   * When a list is truncated, this element specifies the value that should be used for the
+   * upload-id-marker request parameter in a subsequent request.
+   *
+   * This functionality is not supported for directory buckets.
+   * @public
+   */
+  NextUploadIdMarker?: string | undefined;
+  /**
+   * Maximum number of multipart uploads that could have been included in the response.
+   * @public
+   */
+  MaxUploads?: number | undefined;
+  /**
+   * Indicates whether the returned list of multipart uploads is truncated. A value of true
+   * indicates that the list was truncated. The list can be truncated if the number of
+   * multipart uploads exceeds the limit allowed or specified by max uploads.
+   * @public
+   */
+  IsTruncated?: boolean | undefined;
+  /**
+   * Container for elements related to a particular multipart upload. A response can
+   * contain zero or more Upload elements.
+   * @public
+   */
+  Uploads?: MultipartUpload[] | undefined;
+  /**
+   * If you specify a delimiter in the request, then the result returns each distinct key
+   * prefix containing the delimiter in a CommonPrefixes element. The distinct key prefixes
+   * are returned in the Prefix child element.
+   *
+   * Directory buckets - For directory buckets, only prefixes that end in a delimiter (/)
+   * are supported.
+   * @public
+   */
+  CommonPrefixes?: CommonPrefix[] | undefined;
+  /**
+   * Encoding type used by Amazon S3 to encode object keys in the response.
+   *
+   * If you specify the encoding-type request parameter, Amazon S3 includes this element in
+   * the response, and returns encoded key name values in the following response elements:
+   *
+   * Delimiter, KeyMarker, Prefix, NextKeyMarker, Key.
+   * @public
+   */
+  EncodingType?: EncodingType | undefined;
+  /**
+   * If present, indicates that the requester was successfully charged for the request.
+   *
+   * This functionality is not supported for directory buckets.
+   * @public
+   */
+  RequestCharged?: RequestCharged | undefined;
+}

+/**
+ * @public
+ */
+export interface ListMultipartUploadsRequest {
+  /**
+   * The name of the bucket to which the multipart upload was initiated.
+   *
+   * Directory buckets - When you use this operation with a directory bucket, you must use
+   * virtual-hosted-style requests in the format
+   * Bucket-name.s3express-zone-id.region-code.amazonaws.com. Path-style requests are not
+   * supported. Directory bucket names must be unique in the chosen Zone (Availability Zone
+   * or Local Zone). Bucket names must follow the format bucket-base-name--zone-id--x-s3
+   * (for example, amzn-s3-demo-bucket--usw2-az1--x-s3). For information about bucket
+   * naming restrictions, see Directory bucket naming rules in the Amazon S3 User Guide.
+   *
+   * Access points - When you use this action with an access point for general purpose
+   * buckets, you must provide the alias of the access point in place of the bucket name or
+   * specify the access point ARN. When you use this action with an access point for
+   * directory buckets, you must provide the access point name in place of the bucket name.
+   * When using the access point ARN, you must direct requests to the access point
+   * hostname. The access point hostname takes the form
+   * AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action
+   * with an access point through the Amazon Web Services SDKs, you provide the access
+   * point ARN in place of the bucket name. For more information about access point ARNs,
+   * see Using access points in the Amazon S3 User Guide.
+   *
+   * Object Lambda access points are not supported by directory buckets.
+   *
+   * S3 on Outposts - When you use this action with S3 on Outposts, you must direct
+   * requests to the S3 on Outposts hostname. The S3 on Outposts hostname takes the form
+   * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use
+   * this action with S3 on Outposts, the destination bucket must be the Outposts access
+   * point ARN or the access point alias. For more information about S3 on Outposts, see
+   * What is S3 on Outposts? in the Amazon S3 User Guide.
+   *
+   * Note: To supply the Multi-Region Access Point (MRAP) to Bucket, you need to install the
+   * "@aws-sdk/signature-v4-crt" package in your project dependencies. For more information,
+   * see https://github.com/aws/aws-sdk-js-v3#known-issues.
+   * @public
+   */
+  Bucket: string | undefined;
+  /**
+   * Character you use to group keys.
+   *
+   * All keys that contain the same string between the prefix, if specified, and the first
+   * occurrence of the delimiter after the prefix are grouped under a single result
+   * element, CommonPrefixes. If you don't specify the prefix parameter, then the substring
+   * starts at the beginning of the key. The keys that are grouped under the CommonPrefixes
+   * result element are not returned elsewhere in the response.
+   *
+   * Directory buckets - For directory buckets, / is the only supported delimiter.
+   * @public
+   */
+  Delimiter?: string | undefined;
+  /**
+   * Encoding type used by Amazon S3 to encode the object keys in the response. Responses
+   * are encoded only in UTF-8. An object key can contain any Unicode character. However,
+   * the XML 1.0 parser can't parse certain characters, such as characters with an ASCII
+   * value from 0 to 10. For characters that aren't supported in XML 1.0, you can add this
+   * parameter to request that Amazon S3 encode the keys in the response. For more
+   * information about characters to avoid in object key names, see Object key naming
+   * guidelines.
+   *
+   * Note: When using the URL encoding type, non-ASCII characters that are used in an
+   * object's key name will be percent-encoded according to UTF-8 code values. For example,
+   * the object test_file(3).png will appear as test_file%283%29.png.
+   * @public
+   */
+  EncodingType?: EncodingType | undefined;
+  /**
+   * Specifies the multipart upload after which listing should begin.
+   *
+   * - General purpose buckets - For general purpose buckets, key-marker is an object key.
+   *   Together with upload-id-marker, this parameter specifies the multipart upload after
+   *   which listing should begin.
+   *   If upload-id-marker is not specified, only the keys lexicographically greater than
+   *   the specified key-marker will be included in the list.
+   *   If upload-id-marker is specified, any multipart uploads for a key equal to the
+   *   key-marker might also be included, provided those multipart uploads have upload IDs
+   *   lexicographically greater than the specified upload-id-marker.
+   * - Directory buckets - For directory buckets, key-marker is obfuscated and isn't a real
+   *   object key. The upload-id-marker parameter isn't supported by directory buckets. To
+   *   list the additional multipart uploads, you only need to set the value of key-marker
+   *   to the NextKeyMarker value from the previous response.
+   *   In the ListMultipartUploads response, the multipart uploads aren't sorted
+   *   lexicographically based on the object keys.
+   * @public
+   */
+  KeyMarker?: string | undefined;
+  /**
+   * Sets the maximum number of multipart uploads, from 1 to 1,000, to return in the
+   * response body. 1,000 is the maximum number of uploads that can be returned in a
+   * response.
+   * @public
+   */
+  MaxUploads?: number | undefined;
+  /**
+   * Lists in-progress uploads only for those keys that begin with the specified prefix.
+   * You can use prefixes to separate a bucket into different groupings of keys. (You can
+   * think of using prefix to make groups in the same way that you'd use a folder in a file
+   * system.)
+   *
+   * Directory buckets - For directory buckets, only prefixes that end in a delimiter (/)
+   * are supported.
+   * @public
+   */
+  Prefix?: string | undefined;
+  /**
+   * Together with key-marker, specifies the multipart upload after which listing should
+   * begin. If key-marker is not specified, the upload-id-marker parameter is ignored.
+   * Otherwise, any multipart uploads for a key equal to the key-marker might be included
+   * in the list only if they have an upload ID lexicographically greater than the
+   * specified upload-id-marker.
+   *
+   * This functionality is not supported for directory buckets.
+   * @public
+   */
+  UploadIdMarker?: string | undefined;
+  /**
+   * The account ID of the expected bucket owner. If the account ID that you provide does
+   * not match the actual owner of the bucket, the request fails with the HTTP status code
+   * 403 Forbidden (access denied).
+   * @public
+   */
+  ExpectedBucketOwner?: string | undefined;
+  /**
+   * Confirms that the requester knows that they will be charged for the request. Bucket
+   * owners need not specify this parameter in their requests. If either the source or
+   * destination S3 bucket has Requester Pays enabled, the requester pays for the
+   * corresponding charges. For information about downloading objects from Requester Pays
+   * buckets, see Downloading Objects in Requester Pays Buckets in the Amazon S3 User
+   * Guide.
+   *
+   * This functionality is not supported for directory buckets.
+   * @public
+   */
+  RequestPayer?: RequestPayer | undefined;
+}
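Because this listing is keyed by the (KeyMarker, UploadIdMarker) pair rather than a single token, a drain loop carries both markers forward. A sketch assuming a general purpose bucket (for directory buckets, only NextKeyMarker applies, as noted above; the helper name is ours):

import { S3Client, ListMultipartUploadsCommand, MultipartUpload } from "@aws-sdk/client-s3";

// Collects all in-progress multipart uploads in a bucket.
async function listAllUploads(s3: S3Client, bucket: string): Promise<MultipartUpload[]> {
  const uploads: MultipartUpload[] = [];
  let keyMarker: string | undefined;
  let uploadIdMarker: string | undefined;
  do {
    const page = await s3.send(new ListMultipartUploadsCommand({
      Bucket: bucket,
      KeyMarker: keyMarker,
      UploadIdMarker: uploadIdMarker,
    }));
    uploads.push(...(page.Uploads ?? []));
    keyMarker = page.IsTruncated ? page.NextKeyMarker : undefined;
    uploadIdMarker = page.IsTruncated ? page.NextUploadIdMarker : undefined;
  } while (keyMarker !== undefined || uploadIdMarker !== undefined);
  return uploads;
}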

+/**
+ * Specifies the restoration status of an object. Objects in certain storage classes must
+ * be restored before they can be retrieved. For more information about these storage
+ * classes and how to work with archived objects, see Working with archived objects in the
+ * Amazon S3 User Guide.
+ *
+ * This functionality is not supported for directory buckets. Directory buckets only
+ * support EXPRESS_ONEZONE (the S3 Express One Zone storage class) in Availability Zones
+ * and ONEZONE_IA (the S3 One Zone-Infrequent Access storage class) in Dedicated Local
+ * Zones.
+ * @public
+ */
+export interface RestoreStatus {
+  /**
+   * Specifies whether the object is currently being restored. If the object restoration is
+   * in progress, the header returns the value TRUE. For example:
+   *
+   * x-amz-optional-object-attributes: IsRestoreInProgress="true"
+   *
+   * If the object restoration has completed, the header returns the value FALSE. For
+   * example:
+   *
+   * x-amz-optional-object-attributes: IsRestoreInProgress="false",
+   * RestoreExpiryDate="2012-12-21T00:00:00.000Z"
+   *
+   * If the object hasn't been restored, there is no header response.
+   * @public
+   */
+  IsRestoreInProgress?: boolean | undefined;
+  /**
+   * Indicates when the restored copy will expire. This value is populated only if the
+   * object has already been restored. For example:
+   *
+   * x-amz-optional-object-attributes: IsRestoreInProgress="false",
+   * RestoreExpiryDate="2012-12-21T00:00:00.000Z"
+   * @public
+   */
+  RestoreExpiryDate?: Date | undefined;
+}
+/**
+ * @public
+ * @enum
+ */
+export declare const ObjectStorageClass: {
+  readonly DEEP_ARCHIVE: "DEEP_ARCHIVE";
+  readonly EXPRESS_ONEZONE: "EXPRESS_ONEZONE";
+  readonly GLACIER: "GLACIER";
+  readonly GLACIER_IR: "GLACIER_IR";
+  readonly INTELLIGENT_TIERING: "INTELLIGENT_TIERING";
+  readonly ONEZONE_IA: "ONEZONE_IA";
+  readonly OUTPOSTS: "OUTPOSTS";
+  readonly REDUCED_REDUNDANCY: "REDUCED_REDUNDANCY";
+  readonly SNOW: "SNOW";
+  readonly STANDARD: "STANDARD";
+  readonly STANDARD_IA: "STANDARD_IA";
+};
+/**
+ * @public
+ */
+export type ObjectStorageClass = (typeof ObjectStorageClass)[keyof typeof ObjectStorageClass];

+/**
+ * An object consists of data and its descriptive metadata.
+ * @public
+ */
+export interface _Object {
+  /**
+   * The name that you assign to an object. You use the object key to retrieve the object.
+   * @public
+   */
+  Key?: string | undefined;
+  /**
+   * Creation date of the object.
+   * @public
+   */
+  LastModified?: Date | undefined;
+  /**
+   * The entity tag is a hash of the object. The ETag reflects changes only to the contents
+   * of an object, not its metadata. The ETag may or may not be an MD5 digest of the object
+   * data. Whether or not it is depends on how the object was created and how it is
+   * encrypted, as described below:
+   * - Objects created by the PUT Object, POST Object, or Copy operation, or through the
+   *   Amazon Web Services Management Console, and are encrypted by SSE-S3 or plaintext,
+   *   have ETags that are an MD5 digest of their object data.
+   * - Objects created by the PUT Object, POST Object, or Copy operation, or through the
+   *   Amazon Web Services Management Console, and are encrypted by SSE-C or SSE-KMS, have
+   *   ETags that are not an MD5 digest of their object data.
+   * - If an object is created by either the Multipart Upload or Part Copy operation, the
+   *   ETag is not an MD5 digest, regardless of the method of encryption. If an object is
+   *   larger than 16 MB, the Amazon Web Services Management Console will upload or copy
+   *   that object as a Multipart Upload, and therefore the ETag will not be an MD5 digest.
+   *
+   * Directory buckets - MD5 is not supported by directory buckets.
+   * @public
+   */
+  ETag?: string | undefined;
+  /**
+   * The algorithm that was used to create a checksum of the object.
+   * @public
+   */
+  ChecksumAlgorithm?: ChecksumAlgorithm[] | undefined;
+  /**
+   * The checksum type that is used to calculate the object's checksum value. For more
+   * information, see Checking object integrity in the Amazon S3 User Guide.
+   * @public
+   */
+  ChecksumType?: ChecksumType | undefined;
+  /**
+   * Size in bytes of the object.
+   * @public
+   */
+  Size?: number | undefined;
+  /**
+   * The class of storage used to store the object.
+   *
+   * Directory buckets - Directory buckets only support EXPRESS_ONEZONE (the S3 Express One
+   * Zone storage class) in Availability Zones and ONEZONE_IA (the S3 One Zone-Infrequent
+   * Access storage class) in Dedicated Local Zones.
+   * @public
+   */
+  StorageClass?: ObjectStorageClass | undefined;
+  /**
+   * The owner of the object.
+   *
+   * Directory buckets - The bucket owner is returned as the object owner.
+   * @public
+   */
+  Owner?: Owner | undefined;
+  /**
+   * Specifies the restoration status of an object. Objects in certain storage classes must
+   * be restored before they can be retrieved. For more information about these storage
+   * classes and how to work with archived objects, see Working with archived objects in
+   * the Amazon S3 User Guide.
+   *
+   * This functionality is not supported for directory buckets. Directory buckets only
+   * support EXPRESS_ONEZONE (the S3 Express One Zone storage class) in Availability Zones
+   * and ONEZONE_IA (the S3 One Zone-Infrequent Access storage class) in Dedicated Local
+   * Zones.
+   * @public
+   */
+  RestoreStatus?: RestoreStatus | undefined;
+}

+/**
+ * @public
+ */
+export interface ListObjectsOutput {
+  /**
+   * A flag that indicates whether Amazon S3 returned all of the results that satisfied the
+   * search criteria.
+   * @public
+   */
+  IsTruncated?: boolean | undefined;
+  /**
+   * Indicates where in the bucket listing begins. Marker is included in the response if it
+   * was sent with the request.
+   * @public
+   */
+  Marker?: string | undefined;
+  /**
+   * When the response is truncated (the IsTruncated element value in the response is
+   * true), you can use the key name in this field as the marker parameter in the
+   * subsequent request to get the next set of objects. Amazon S3 lists objects in
+   * alphabetical order.
+   *
+   * Note: This element is returned only if you have the delimiter request parameter
+   * specified. If the response does not include the NextMarker element and it is
+   * truncated, you can use the value of the last Key element in the response as the marker
+   * parameter in the subsequent request to get the next set of object keys.
+   * @public
+   */
+  NextMarker?: string | undefined;
+  /**
+   * Metadata about each object returned.
+   * @public
+   */
+  Contents?: _Object[] | undefined;
+  /**
+   * The bucket name.
+   * @public
+   */
+  Name?: string | undefined;
+  /**
+   * Keys that begin with the indicated prefix.
+   * @public
+   */
+  Prefix?: string | undefined;
+  /**
+   * Causes keys that contain the same string between the prefix and the first occurrence
+   * of the delimiter to be rolled up into a single result element in the CommonPrefixes
+   * collection. These rolled-up keys are not returned elsewhere in the response. Each
+   * rolled-up result counts as only one return against the MaxKeys value.
+   * @public
+   */
+  Delimiter?: string | undefined;
+  /**
+   * The maximum number of keys returned in the response body.
+   * @public
+   */
+  MaxKeys?: number | undefined;
+  /**
+   * All of the keys (up to 1,000) rolled up in a common prefix count as a single return
+   * when calculating the number of returns.
+   *
+   * A response can contain CommonPrefixes only if you specify a delimiter.
+   *
+   * CommonPrefixes contains all (if there are any) keys between Prefix and the next
+   * occurrence of the string specified by the delimiter.
+   *
+   * CommonPrefixes lists keys that act like subdirectories in the directory specified by
+   * Prefix.
+   *
+   * For example, if the prefix is notes/ and the delimiter is a slash (/), as in
+   * notes/summer/july, the common prefix is notes/summer/. All of the keys that roll up
+   * into a common prefix count as a single return when calculating the number of returns.
+   * @public
+   */
+  CommonPrefixes?: CommonPrefix[] | undefined;
+  /**
+   * Encoding type used by Amazon S3 to encode the object keys in the response. Responses
+   * are encoded only in UTF-8. An object key can contain any Unicode character. However,
+   * the XML 1.0 parser can't parse certain characters, such as characters with an ASCII
+   * value from 0 to 10. For characters that aren't supported in XML 1.0, you can add this
+   * parameter to request that Amazon S3 encode the keys in the response. For more
+   * information about characters to avoid in object key names, see Object key naming
+   * guidelines.
+   *
+   * Note: When using the URL encoding type, non-ASCII characters that are used in an
+   * object's key name will be percent-encoded according to UTF-8 code values. For example,
+   * the object test_file(3).png will appear as test_file%283%29.png.
+   * @public
+   */
+  EncodingType?: EncodingType | undefined;
+  /**
+   * If present, indicates that the requester was successfully charged for the request.
+   *
+   * This functionality is not supported for directory buckets.
+   * @public
+   */
+  RequestCharged?: RequestCharged | undefined;
+}
+/**
+ * @public
+ * @enum
+ */
+export declare const OptionalObjectAttributes: {
+  readonly RESTORE_STATUS: "RestoreStatus";
+};
+/**
+ * @public
+ */
+export type OptionalObjectAttributes = (typeof OptionalObjectAttributes)[keyof typeof OptionalObjectAttributes];

+/**
+ * @public
+ */
+export interface ListObjectsRequest {
+  /**
+   * The name of the bucket containing the objects.
+   *
+   * Directory buckets - When you use this operation with a directory bucket, you must use
+   * virtual-hosted-style requests in the format
+   * Bucket-name.s3express-zone-id.region-code.amazonaws.com. Path-style requests are not
+   * supported. Directory bucket names must be unique in the chosen Zone (Availability Zone
+   * or Local Zone). Bucket names must follow the format bucket-base-name--zone-id--x-s3
+   * (for example, amzn-s3-demo-bucket--usw2-az1--x-s3). For information about bucket
+   * naming restrictions, see Directory bucket naming rules in the Amazon S3 User Guide.
+   *
+   * Access points - When you use this action with an access point for general purpose
+   * buckets, you must provide the alias of the access point in place of the bucket name or
+   * specify the access point ARN. When you use this action with an access point for
+   * directory buckets, you must provide the access point name in place of the bucket name.
+   * When using the access point ARN, you must direct requests to the access point
+   * hostname. The access point hostname takes the form
+   * AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action
+   * with an access point through the Amazon Web Services SDKs, you provide the access
+   * point ARN in place of the bucket name. For more information about access point ARNs,
+   * see Using access points in the Amazon S3 User Guide.
+   *
+   * Object Lambda access points are not supported by directory buckets.
+   *
+   * S3 on Outposts - When you use this action with S3 on Outposts, you must direct
+   * requests to the S3 on Outposts hostname. The S3 on Outposts hostname takes the form
+   * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use
+   * this action with S3 on Outposts, the destination bucket must be the Outposts access
+   * point ARN or the access point alias. For more information about S3 on Outposts, see
+   * What is S3 on Outposts? in the Amazon S3 User Guide.
+   *
+   * Note: To supply the Multi-Region Access Point (MRAP) to Bucket, you need to install the
+   * "@aws-sdk/signature-v4-crt" package in your project dependencies. For more information,
+   * see https://github.com/aws/aws-sdk-js-v3#known-issues.
+   * @public
+   */
+  Bucket: string | undefined;
+  /**
+   * A delimiter is a character that you use to group keys.
+   * @public
+   */
+  Delimiter?: string | undefined;
+  /**
+   * Encoding type used by Amazon S3 to encode the object keys in the response. Responses
+   * are encoded only in UTF-8. An object key can contain any Unicode character. However,
+   * the XML 1.0 parser can't parse certain characters, such as characters with an ASCII
+   * value from 0 to 10. For characters that aren't supported in XML 1.0, you can add this
+   * parameter to request that Amazon S3 encode the keys in the response. For more
+   * information about characters to avoid in object key names, see Object key naming
+   * guidelines.
+   *
+   * Note: When using the URL encoding type, non-ASCII characters that are used in an
+   * object's key name will be percent-encoded according to UTF-8 code values. For example,
+   * the object test_file(3).png will appear as test_file%283%29.png.
+   * @public
+   */
+  EncodingType?: EncodingType | undefined;
+  /**
+   * Marker is where you want Amazon S3 to start listing from. Amazon S3 starts listing
+   * after this specified key. Marker can be any key in the bucket.
+   * @public
+   */
+  Marker?: string | undefined;
+  /**
+   * Sets the maximum number of keys returned in the response. By default, the action
+   * returns up to 1,000 key names. The response might contain fewer keys but will never
+   * contain more.
+   * @public
+   */
+  MaxKeys?: number | undefined;
+  /**
+   * Limits the response to keys that begin with the specified prefix.
+   * @public
+   */
+  Prefix?: string | undefined;
+  /**
+   * Confirms that the requester knows that they will be charged for the list objects
+   * request. Bucket owners need not specify this parameter in their requests.
+   * @public
+   */
+  RequestPayer?: RequestPayer | undefined;
+  /**
+   * The account ID of the expected bucket owner. If the account ID that you provide does
+   * not match the actual owner of the bucket, the request fails with the HTTP status code
+   * 403 Forbidden (access denied).
+   * @public
+   */
+  ExpectedBucketOwner?: string | undefined;
+  /**
+   * Specifies the optional fields that you want returned in the response. Fields that you
+   * do not specify are not returned.
+   * @public
+   */
+  OptionalObjectAttributes?: OptionalObjectAttributes[] | undefined;
+}
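The v1 listing paginates with Marker/NextMarker, and NextMarker is only returned when a delimiter is set, so a robust loop falls back to the last key of the page, exactly as the NextMarker documentation above describes. A sketch (the helper name and the RestoreStatus request are illustrative):

import { S3Client, ListObjectsCommand, _Object } from "@aws-sdk/client-s3";

// Lists every object under `prefix` with the v1 API, requesting RestoreStatus
// as an optional attribute on each entry.
async function listAllV1(s3: S3Client, bucket: string, prefix?: string): Promise<_Object[]> {
  const objects: _Object[] = [];
  let marker: string | undefined;
  do {
    const page = await s3.send(new ListObjectsCommand({
      Bucket: bucket,
      Prefix: prefix,
      Marker: marker,
      OptionalObjectAttributes: ["RestoreStatus"],
    }));
    objects.push(...(page.Contents ?? []));
    // NextMarker only appears when a delimiter was specified; otherwise the last
    // Key of the page serves as the next marker.
    marker = page.IsTruncated
      ? page.NextMarker ?? page.Contents?.[page.Contents.length - 1]?.Key
      : undefined;
  } while (marker);
  return objects;
}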

+/**
+ * @public
+ */
+export interface ListObjectsV2Output {
+  /**
+   * Set to false if all of the results were returned. Set to true if more keys are
+   * available to return. If the number of results exceeds that specified by MaxKeys, all
+   * of the results might not be returned.
+   * @public
+   */
+  IsTruncated?: boolean | undefined;
+  /**
+   * Metadata about each object returned.
+   * @public
+   */
+  Contents?: _Object[] | undefined;
+  /**
+   * The bucket name.
+   * @public
+   */
+  Name?: string | undefined;
+  /**
+   * Keys that begin with the indicated prefix.
+   *
+   * Directory buckets - For directory buckets, only prefixes that end in a delimiter (/)
+   * are supported.
+   * @public
+   */
+  Prefix?: string | undefined;
+  /**
+   * Causes keys that contain the same string between the prefix and the first occurrence
+   * of the delimiter to be rolled up into a single result element in the CommonPrefixes
+   * collection. These rolled-up keys are not returned elsewhere in the response. Each
+   * rolled-up result counts as only one return against the MaxKeys value.
+   *
+   * Directory buckets - For directory buckets, / is the only supported delimiter.
+   * @public
+   */
+  Delimiter?: string | undefined;
+  /**
+   * Sets the maximum number of keys returned in the response. By default, the action
+   * returns up to 1,000 key names. The response might contain fewer keys but will never
+   * contain more.
+   * @public
+   */
+  MaxKeys?: number | undefined;
+  /**
+   * All of the keys (up to 1,000) that share the same prefix are grouped together. When
+   * counting the total numbers of returns by this API operation, this group of keys is
+   * considered as one item.
+   *
+   * A response can contain CommonPrefixes only if you specify a delimiter.
+   *
+   * CommonPrefixes contains all (if there are any) keys between Prefix and the next
+   * occurrence of the string specified by a delimiter.
+   *
+   * CommonPrefixes lists keys that act like subdirectories in the directory specified by
+   * Prefix.
+   *
+   * For example, if the prefix is notes/ and the delimiter is a slash (/) as in
+   * notes/summer/july, the common prefix is notes/summer/. All of the keys that roll up
+   * into a common prefix count as a single return when calculating the number of returns.
+   *
+   * - Directory buckets - For directory buckets, only prefixes that end in a delimiter (/)
+   *   are supported.
+   * - Directory buckets - When you query ListObjectsV2 with a delimiter during in-progress
+   *   multipart uploads, the CommonPrefixes response parameter contains the prefixes that
+   *   are associated with the in-progress multipart uploads. For more information about
+   *   multipart uploads, see Multipart Upload Overview in the Amazon S3 User Guide.
+   * @public
+   */
+  CommonPrefixes?: CommonPrefix[] | undefined;
+  /**
+   * Encoding type used by Amazon S3 to encode object key names in the XML response.
+   *
+   * If you specify the encoding-type request parameter, Amazon S3 includes this element in
+   * the response, and returns encoded key name values in the following response elements:
+   *
+   * Delimiter, Prefix, Key, and StartAfter.
+   * @public
+   */
+  EncodingType?: EncodingType | undefined;
+  /**
+   * KeyCount is the number of keys returned with this request. KeyCount will always be
+   * less than or equal to the MaxKeys field. For example, if you ask for 50 keys, your
+   * result will include 50 keys or fewer.
+   * @public
+   */
+  KeyCount?: number | undefined;
+  /**
+   * If ContinuationToken was sent with the request, it is included in the response. You
+   * can use the returned ContinuationToken for pagination of the list results.
+   * @public
+   */
+  ContinuationToken?: string | undefined;
+  /**
+   * NextContinuationToken is sent when IsTruncated is true, which means there are more
+   * keys in the bucket that can be listed. The next list requests to Amazon S3 can be
+   * continued with this NextContinuationToken. NextContinuationToken is obfuscated and is
+   * not a real key.
+   * @public
+   */
+  NextContinuationToken?: string | undefined;
+  /**
+   * If StartAfter was sent with the request, it is included in the response.
+   *
+   * This functionality is not supported for directory buckets.
+   * @public
+   */
+  StartAfter?: string | undefined;
+  /**
+   * If present, indicates that the requester was successfully charged for the request.
+   *
+   * This functionality is not supported for directory buckets.
+   * @public
+   */
+  RequestCharged?: RequestCharged | undefined;
+}

+/**
+ * @public
+ */
+export interface ListObjectsV2Request {
+  /**
+   * Directory buckets - When you use this operation with a directory bucket, you must use
+   * virtual-hosted-style requests in the format
+   * Bucket-name.s3express-zone-id.region-code.amazonaws.com. Path-style requests are not
+   * supported. Directory bucket names must be unique in the chosen Zone (Availability Zone
+   * or Local Zone). Bucket names must follow the format bucket-base-name--zone-id--x-s3
+   * (for example, amzn-s3-demo-bucket--usw2-az1--x-s3). For information about bucket
+   * naming restrictions, see Directory bucket naming rules in the Amazon S3 User Guide.
+   *
+   * Access points - When you use this action with an access point for general purpose
+   * buckets, you must provide the alias of the access point in place of the bucket name or
+   * specify the access point ARN. When you use this action with an access point for
+   * directory buckets, you must provide the access point name in place of the bucket name.
+   * When using the access point ARN, you must direct requests to the access point
+   * hostname. The access point hostname takes the form
+   * AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action
+   * with an access point through the Amazon Web Services SDKs, you provide the access
+   * point ARN in place of the bucket name. For more information about access point ARNs,
+   * see Using access points in the Amazon S3 User Guide.
+   *
+   * Object Lambda access points are not supported by directory buckets.
+   *
+   * S3 on Outposts - When you use this action with S3 on Outposts, you must direct
+   * requests to the S3 on Outposts hostname. The S3 on Outposts hostname takes the form
+   * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use
+   * this action with S3 on Outposts, the destination bucket must be the Outposts access
+   * point ARN or the access point alias. For more information about S3 on Outposts, see
+   * What is S3 on Outposts? in the Amazon S3 User Guide.
+   *
+   * Note: To supply the Multi-Region Access Point (MRAP) to Bucket, you need to install the
+   * "@aws-sdk/signature-v4-crt" package in your project dependencies. For more information,
+   * see https://github.com/aws/aws-sdk-js-v3#known-issues.
+   * @public
+   */
+  Bucket: string | undefined;
+  /**
+   * A delimiter is a character that you use to group keys.
+   *
+   * - Directory buckets - For directory buckets, / is the only supported delimiter.
+   * - Directory buckets - When you query ListObjectsV2 with a delimiter during in-progress
+   *   multipart uploads, the CommonPrefixes response parameter contains the prefixes that
+   *   are associated with the in-progress multipart uploads. For more information about
+   *   multipart uploads, see Multipart Upload Overview in the Amazon S3 User Guide.
+   * @public
+   */
+  Delimiter?: string | undefined;
+  /**
+   * Encoding type used by Amazon S3 to encode the object keys in the response. Responses
+   * are encoded only in UTF-8. An object key can contain any Unicode character. However,
+   * the XML 1.0 parser can't parse certain characters, such as characters with an ASCII
+   * value from 0 to 10. For characters that aren't supported in XML 1.0, you can add this
+   * parameter to request that Amazon S3 encode the keys in the response. For more
+   * information about characters to avoid in object key names, see Object key naming
+   * guidelines.
+   *
+   * Note: When using the URL encoding type, non-ASCII characters that are used in an
+   * object's key name will be percent-encoded according to UTF-8 code values. For example,
+   * the object test_file(3).png will appear as test_file%283%29.png.
+   * @public
+   */
+  EncodingType?: EncodingType | undefined;
+  /**
+   * Sets the maximum number of keys returned in the response. By default, the action
+   * returns up to 1,000 key names. The response might contain fewer keys but will never
+   * contain more.
+   * @public
+   */
+  MaxKeys?: number | undefined;
+  /**
+   * Limits the response to keys that begin with the specified prefix.
+   *
+   * Directory buckets - For directory buckets, only prefixes that end in a delimiter (/)
+   * are supported.
+   * @public
+   */
+  Prefix?: string | undefined;
+  /**
+   * ContinuationToken indicates to Amazon S3 that the list is being continued on this
+   * bucket with a token. ContinuationToken is obfuscated and is not a real key. You can
+   * use this ContinuationToken for pagination of the list results.
+   * @public
+   */
+  ContinuationToken?: string | undefined;
+  /**
+   * The owner field is not present in ListObjectsV2 by default. If you want to return the
+   * owner field with each key in the result, then set the FetchOwner field to true.
+   *
+   * Directory buckets - For directory buckets, the bucket owner is returned as the object
+   * owner for all objects.
+   * @public
+   */
+  FetchOwner?: boolean | undefined;
+  /**
+   * StartAfter is where you want Amazon S3 to start listing from. Amazon S3 starts listing
+   * after this specified key. StartAfter can be any key in the bucket.
+   *
+   * This functionality is not supported for directory buckets.
+   * @public
+   */
+  StartAfter?: string | undefined;
+  /**
+   * Confirms that the requester knows that they will be charged for the list objects
+   * request in V2 style. Bucket owners need not specify this parameter in their requests.
+   *
+   * This functionality is not supported for directory buckets.
+   * @public
+   */
+  RequestPayer?: RequestPayer | undefined;
+  /**
+   * The account ID of the expected bucket owner. If the account ID that you provide does
+   * not match the actual owner of the bucket, the request fails with the HTTP status code
+   * 403 Forbidden (access denied).
+   * @public
+   */
+  ExpectedBucketOwner?: string | undefined;
+  /**
+   * Specifies the optional fields that you want returned in the response. Fields that you
+   * do not specify are not returned.
+   *
+   * This functionality is not supported for directory buckets.
+   * @public
+   */
+  OptionalObjectAttributes?: OptionalObjectAttributes[] | undefined;
+}
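Rather than hand-rolling the ContinuationToken loop, the SDK ships a paginator for this operation. A sketch that collects the top-level "folder" prefixes of a bucket via Delimiter and CommonPrefixes (the helper name is ours):

import { S3Client, paginateListObjectsV2 } from "@aws-sdk/client-s3";

// Iterates pages; CommonPrefixes holds the delimiter roll-ups described above.
async function listTopLevelPrefixes(s3: S3Client, bucket: string): Promise<string[]> {
  const prefixes = new Set<string>();
  for await (const page of paginateListObjectsV2(
    { client: s3 },
    { Bucket: bucket, Delimiter: "/" },
  )) {
    for (const cp of page.CommonPrefixes ?? []) {
      if (cp.Prefix) prefixes.add(cp.Prefix); // e.g. "notes/summer/"
    }
  }
  return [...prefixes];
}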

+/**
+ * Information about the delete marker.
+ * @public
+ */
+export interface DeleteMarkerEntry {
+  /**
+   * The account that created the delete marker.
+   * @public
+   */
+  Owner?: Owner | undefined;
+  /**
+   * The object key.
+   * @public
+   */
+  Key?: string | undefined;
+  /**
+   * Version ID of an object.
+   * @public
+   */
+  VersionId?: string | undefined;
+  /**
+   * Specifies whether the object is (true) or is not (false) the latest version of an
+   * object.
+   * @public
+   */
+  IsLatest?: boolean | undefined;
+  /**
+   * Date and time when the object was last modified.
+   * @public
+   */
+  LastModified?: Date | undefined;
+}
+/**
+ * @public
+ * @enum
+ */
+export declare const ObjectVersionStorageClass: {
+  readonly STANDARD: "STANDARD";
+};
+/**
+ * @public
+ */
+export type ObjectVersionStorageClass = (typeof ObjectVersionStorageClass)[keyof typeof ObjectVersionStorageClass];

+/**
+ * The version of an object.
+ * @public
+ */
+export interface ObjectVersion {
+  /**
+   * The entity tag is an MD5 hash of that version of the object.
+   * @public
+   */
+  ETag?: string | undefined;
+  /**
+   * The algorithm that was used to create a checksum of the object.
+   * @public
+   */
+  ChecksumAlgorithm?: ChecksumAlgorithm[] | undefined;
+  /**
+   * The checksum type that is used to calculate the object's checksum value. For more
+   * information, see Checking object integrity in the Amazon S3 User Guide.
+   * @public
+   */
+  ChecksumType?: ChecksumType | undefined;
+  /**
+   * Size in bytes of the object.
+   * @public
+   */
+  Size?: number | undefined;
+  /**
+   * The class of storage used to store the object.
+   * @public
+   */
+  StorageClass?: ObjectVersionStorageClass | undefined;
+  /**
+   * The object key.
+   * @public
+   */
+  Key?: string | undefined;
+  /**
+   * Version ID of an object.
+   * @public
+   */
+  VersionId?: string | undefined;
+  /**
+   * Specifies whether the object is (true) or is not (false) the latest version of an
+   * object.
+   * @public
+   */
+  IsLatest?: boolean | undefined;
+  /**
+   * Date and time when the object was last modified.
+   * @public
+   */
+  LastModified?: Date | undefined;
+  /**
+   * Specifies the owner of the object.
+   * @public
+   */
+  Owner?: Owner | undefined;
+  /**
+   * Specifies the restoration status of an object. Objects in certain storage classes must
+   * be restored before they can be retrieved. For more information about these storage
+   * classes and how to work with archived objects, see Working with archived objects in
+   * the Amazon S3 User Guide.
+   * @public
+   */
+  RestoreStatus?: RestoreStatus | undefined;
+}

A flag that indicates whether Amazon S3 returned all of the results that satisfied the search + * criteria. If your results were truncated, you can make a follow-up paginated request by + * using the NextKeyMarker and NextVersionIdMarker response + * parameters as a starting place in another request to return the rest of the results.

+ * @public + */ + IsTruncated?: boolean | undefined; + /** + *

Marks the last key returned in a truncated response.

+ * @public + */ + KeyMarker?: string | undefined; + /** + *

Marks the last version of the key returned in a truncated response.

+ * @public + */ + VersionIdMarker?: string | undefined; + /** + *

When the number of responses exceeds the value of MaxKeys, + * NextKeyMarker specifies the first key not returned that satisfies the + * search criteria. Use this value for the key-marker request parameter in a subsequent + * request.

+ * @public + */ + NextKeyMarker?: string | undefined; + /** + *

When the number of responses exceeds the value of MaxKeys, + * NextVersionIdMarker specifies the first object version not returned that + * satisfies the search criteria. Use this value for the version-id-marker + * request parameter in a subsequent request.

+ * @public + */ + NextVersionIdMarker?: string | undefined; + /** + *

Container for version information.

+ * @public + */ + Versions?: ObjectVersion[] | undefined; + /** + *

Container for an object that is a delete marker. To learn more about delete markers, see Working with delete markers.

+ * @public + */ + DeleteMarkers?: DeleteMarkerEntry[] | undefined; + /** + *

The bucket name.

+ * @public + */ + Name?: string | undefined; + /** + *

Selects objects that start with the value supplied by this parameter.

+ * @public + */ + Prefix?: string | undefined; + /** + *

The delimiter grouping the included keys. A delimiter is a character that you specify to + * group keys. All keys that contain the same string between the prefix and the first + * occurrence of the delimiter are grouped under a single result element in + * CommonPrefixes. These groups are counted as one result against the + * max-keys limitation. These keys are not returned elsewhere in the + * response.

+ * @public + */ + Delimiter?: string | undefined; + /** + *

Specifies the maximum number of objects to return.

+ * @public + */ + MaxKeys?: number | undefined; + /** + *

All of the keys rolled up into a common prefix count as a single return when calculating + * the number of returns.

+ * @public + */ + CommonPrefixes?: CommonPrefix[] | undefined; + /** + *

Encoding type used by Amazon S3 to encode object key names in the XML response.

+ *

If you specify the encoding-type request parameter, Amazon S3 includes this + * element in the response, and returns encoded key name values in the following response + * elements:

+ *

+ * KeyMarker, NextKeyMarker, Prefix, Key, and Delimiter.

+ * @public + */ + EncodingType?: EncodingType | undefined; + /** + *

If present, indicates that the requester was successfully charged for the + * request.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestCharged?: RequestCharged | undefined; +} +/** + * @public + */ +export interface ListObjectVersionsRequest { + /** + *

The bucket name that contains the objects.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

A delimiter is a character that you specify to group keys. All keys that contain the + * same string between the prefix and the first occurrence of the delimiter are + * grouped under a single result element in CommonPrefixes. These groups are + * counted as one result against the max-keys limitation. These keys are not + * returned elsewhere in the response.

+ * @public + */ + Delimiter?: string | undefined; + /** + *

Encoding type used by Amazon S3 to encode the object keys in the response. + * Responses are encoded only in UTF-8. An object key can contain any Unicode character. + * However, the XML 1.0 parser can't parse certain characters, such as characters with an + * ASCII value from 0 to 10. For characters that aren't supported in XML 1.0, you can add this + * parameter to request that Amazon S3 encode the keys in the response. For more information about + * characters to avoid in object key names, see Object key naming + * guidelines.

+ * + *

When using the URL encoding type, non-ASCII characters that are used in an object's + * key name will be percent-encoded according to UTF-8 code values. For example, the object + * test_file(3).png will appear as + * test_file%283%29.png.

+ *
+ * @public + */ + EncodingType?: EncodingType | undefined; + /** + *

Specifies the key to start with when listing objects in a bucket.

+ * @public + */ + KeyMarker?: string | undefined; + /** + *

Sets the maximum number of keys returned in the response. By default, the action returns + * up to 1,000 key names. The response might contain fewer keys but will never contain more. + * If additional keys satisfy the search criteria, but were not returned because + * max-keys was exceeded, the response contains + * IsTruncated set to true. To return the additional keys, + * see key-marker and version-id-marker.

+ * @public + */ + MaxKeys?: number | undefined; + /** + *

Use this parameter to select only those keys that begin with the specified prefix. You + * can use prefixes to separate a bucket into different groupings of keys. (You can think of + * using prefix to make groups in the same way that you'd use a folder in a file + * system.) You can use prefix with delimiter to roll up numerous + * objects into a single result under CommonPrefixes.

+ * @public + */ + Prefix?: string | undefined; + /** + *

Specifies the object version you want to start listing from.

+ * @public + */ + VersionIdMarker?: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; + /** + *

Confirms that the requester knows that they will be charged for the request. Bucket + * owners need not specify this parameter in their requests. If either the source or + * destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding + * charges to copy the object. For information about downloading objects from Requester Pays + * buckets, see Downloading Objects in + * Requester Pays Buckets in the Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestPayer?: RequestPayer | undefined; + /** + *

Specifies the optional fields that you want returned in the response. Fields that you do + * not specify are not returned.

+ * @public + */ + OptionalObjectAttributes?: OptionalObjectAttributes[] | undefined; +} +/** + *
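A sketch of walking versions and delete markers with the marker pair described above (bucket and prefix are placeholders):

import { S3Client, ListObjectVersionsCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({}); // region/credentials resolved from the environment

// Walk every version and delete marker, feeding NextKeyMarker and
// NextVersionIdMarker back in as the cursor for the next page.
async function listAllVersions(bucket: string, prefix?: string): Promise<void> {
  let keyMarker: string | undefined;
  let versionIdMarker: string | undefined;
  do {
    const page = await s3.send(new ListObjectVersionsCommand({
      Bucket: bucket,
      Prefix: prefix,
      KeyMarker: keyMarker,
      VersionIdMarker: versionIdMarker,
    }));
    for (const v of page.Versions ?? []) {
      console.log(`${v.Key}@${v.VersionId} latest=${v.IsLatest}`);
    }
    for (const d of page.DeleteMarkers ?? []) {
      console.log(`delete marker ${d.Key}@${d.VersionId}`);
    }
    keyMarker = page.NextKeyMarker;
    versionIdMarker = page.NextVersionIdMarker;
  } while (keyMarker !== undefined || versionIdMarker !== undefined);
}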

Container for elements related to a part.

+ * @public + */ +export interface Part { + /** + *

Part number identifying the part. This is a positive integer between 1 and + * 10,000.

+ * @public + */ + PartNumber?: number | undefined; + /** + *

Date and time at which the part was uploaded.

+ * @public + */ + LastModified?: Date | undefined; + /** + *

Entity tag returned when the part was uploaded.

+ * @public + */ + ETag?: string | undefined; + /** + *

Size in bytes of the uploaded part data.

+ * @public + */ + Size?: number | undefined; + /** + *

The Base64 encoded, 32-bit CRC32 checksum of the part. This checksum is present + * if the object was uploaded with the CRC32 checksum algorithm. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32?: string | undefined; + /** + *

The Base64 encoded, 32-bit CRC32C checksum of the part. This checksum is present + * if the object was uploaded with the CRC32C checksum algorithm. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32C?: string | undefined; + /** + *

The Base64 encoded, 64-bit CRC64NVME checksum of the part. This checksum is present + * if the multipart upload request was created with the CRC64NVME checksum algorithm, or if the object was uploaded without a + * checksum (and Amazon S3 added the default checksum, CRC64NVME, to the uploaded object). For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC64NVME?: string | undefined; + /** + *

The Base64 encoded, 160-bit SHA1 checksum of the part. This checksum is present + * if the object was uploaded with the SHA1 checksum algorithm. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA1?: string | undefined; + /** + *

The Base64 encoded, 256-bit SHA256 checksum of the part. This checksum is present + * if the object was uploaded with the SHA256 checksum algorithm. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA256?: string | undefined; +} +/** + * @public + */ +export interface ListPartsOutput { + /** + *

If the bucket has a lifecycle rule configured with an action to abort incomplete + * multipart uploads and the prefix in the lifecycle rule matches the object name in the + * request, then the response includes this header indicating when the initiated multipart + * upload will become eligible for abort operation. For more information, see Aborting Incomplete Multipart Uploads Using a Bucket Lifecycle + * Configuration.

+ *

The response will also include the x-amz-abort-rule-id header that will + * provide the ID of the lifecycle configuration rule that defines this action.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + AbortDate?: Date | undefined; + /** + *

This header is returned along with the x-amz-abort-date header. It + * identifies applicable lifecycle configuration rule that defines the action to abort + * incomplete multipart uploads.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + AbortRuleId?: string | undefined; + /** + *

The name of the bucket to which the multipart upload was initiated. Does not return the + * access point ARN or access point alias if used.

+ * @public + */ + Bucket?: string | undefined; + /** + *

Object key for which the multipart upload was initiated.

+ * @public + */ + Key?: string | undefined; + /** + *

Upload ID identifying the multipart upload whose parts are being listed.

+ * @public + */ + UploadId?: string | undefined; + /** + *

Specifies the part after which listing should begin. Only parts with higher part numbers + * will be listed.

+ * @public + */ + PartNumberMarker?: string | undefined; + /** + *

When a list is truncated, this element specifies the last part in the list, as well as + * the value to use for the part-number-marker request parameter in a subsequent + * request.

+ * @public + */ + NextPartNumberMarker?: string | undefined; + /** + *

Maximum number of parts that were allowed in the response.

+ * @public + */ + MaxParts?: number | undefined; + /** + *

Indicates whether the returned list of parts is truncated. A true value indicates that + * the list was truncated. A list can be truncated if the number of parts exceeds the limit + * returned in the MaxParts element.

+ * @public + */ + IsTruncated?: boolean | undefined; + /** + *

Container for elements related to a particular part. A response can contain zero or more + * Part elements.

+ * @public + */ + Parts?: Part[] | undefined; + /** + *

Container element that identifies who initiated the multipart upload. If the initiator + * is an Amazon Web Services account, this element provides the same information as the Owner + * element. If the initiator is an IAM User, this element provides the user ARN and display + * name.

+ * @public + */ + Initiator?: Initiator | undefined; + /** + *

Container element that identifies the object owner, after the object is created. If + * multipart upload is initiated by an IAM user, this element provides the parent account ID + * and display name.

+ * + *

+ * Directory buckets - The bucket owner is + * returned as the object owner for all the parts.

+ *
+ * @public + */ + Owner?: Owner | undefined; + /** + *

The class of storage used to store the uploaded object.

+ * + *

+ * Directory buckets - + * Directory buckets only support EXPRESS_ONEZONE (the S3 Express One Zone storage class) in Availability Zones and ONEZONE_IA (the S3 One Zone-Infrequent Access storage class) in Dedicated Local Zones.

+ *
+ * @public + */ + StorageClass?: StorageClass | undefined; + /** + *

If present, indicates that the requester was successfully charged for the + * request.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestCharged?: RequestCharged | undefined; + /** + *

The algorithm that was used to create a checksum of the object.

+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

The checksum type, which determines how part-level checksums are combined to create an + * object-level checksum for multipart objects. You can use this header response to verify + * that the checksum type that is received is the same checksum type that was specified in + * CreateMultipartUpload request. For more + * information, see Checking object integrity + * in the Amazon S3 User Guide.

+ * @public + */ + ChecksumType?: ChecksumType | undefined; +} +/** + * @public + */ +export interface ListPartsRequest { + /** + *

The name of the bucket to which the parts are being uploaded.

+ *

+ * Directory buckets - + * When you use this operation with a directory bucket, you must use virtual-hosted-style requests in the format + * Bucket-name.s3express-zone-id.region-code.amazonaws.com. Path-style requests are not supported. Directory bucket names must be unique in the chosen Zone (Availability Zone or Local Zone). Bucket names must follow the format + * bucket-base-name--zone-id--x-s3 (for example, + * amzn-s3-demo-bucket--usw2-az1--x-s3). For information about bucket naming + * restrictions, see Directory bucket naming + * rules in the Amazon S3 User Guide.

+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ * + *

Object Lambda access points are not supported by directory buckets.

+ *
+ *

+ * S3 on Outposts - When you use this action with S3 on Outposts, you must direct requests to the S3 on Outposts hostname. The S3 on Outposts hostname takes the + * form + * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use this action with S3 on Outposts, the destination bucket must be the Outposts access point ARN or the access point alias. For more information about S3 on Outposts, see What is S3 on Outposts? in the Amazon S3 User Guide.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

Object key for which the multipart upload was initiated.

+ * @public + */ + Key: string | undefined; + /** + *

Sets the maximum number of parts to return.

+ * @public + */ + MaxParts?: number | undefined; + /** + *

Specifies the part after which listing should begin. Only parts with higher part numbers + * will be listed.

+ * @public + */ + PartNumberMarker?: string | undefined; + /** + *

Upload ID identifying the multipart upload whose parts are being listed.

+ * @public + */ + UploadId: string | undefined; + /** + *

Confirms that the requester knows that they will be charged for the request. Bucket + * owners need not specify this parameter in their requests. If either the source or + * destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding + * charges to copy the object. For information about downloading objects from Requester Pays + * buckets, see Downloading Objects in + * Requester Pays Buckets in the Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestPayer?: RequestPayer | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; + /** + *

The server-side encryption (SSE) algorithm used to encrypt the object. This parameter is needed only when the object was created + * using a checksum algorithm. For more information, + * see Protecting data using SSE-C keys in the + * Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerAlgorithm?: string | undefined; + /** + *

The server-side encryption (SSE) customer managed key. This parameter is needed only when the object was created using a checksum algorithm. + * For more information, see + * Protecting data using SSE-C keys in the + * Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerKey?: string | undefined; + /** + *

The MD5 server-side encryption (SSE) customer managed key. This parameter is needed only when the object was created using a checksum + * algorithm. For more information, + * see Protecting data using SSE-C keys in the + * Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerKeyMD5?: string | undefined; +} +/** + * @public + */ +export interface PutBucketAccelerateConfigurationRequest { + /** + *
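A sketch of enumerating the parts of an in-progress multipart upload with these fields, e.g. before resuming it (all identifiers are placeholders):

import { S3Client, ListPartsCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({}); // region/credentials resolved from the environment

// Page through uploaded parts, using NextPartNumberMarker as the cursor.
async function listUploadedParts(bucket: string, key: string, uploadId: string): Promise<void> {
  let marker: string | undefined;
  do {
    const page = await s3.send(new ListPartsCommand({
      Bucket: bucket,
      Key: key,
      UploadId: uploadId,
      PartNumberMarker: marker,
    }));
    for (const part of page.Parts ?? []) {
      console.log(`part ${part.PartNumber}: ${part.Size} bytes, ETag ${part.ETag}`);
    }
    marker = page.NextPartNumberMarker; // undefined once IsTruncated is false
  } while (marker !== undefined);
}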

The name of the bucket for which the accelerate configuration is set.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

Container for setting the transfer acceleration state.

+ * @public + */ + AccelerateConfiguration: AccelerateConfiguration | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; + /** + *

Indicates the algorithm used to create the checksum for the request when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request. For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If you provide an individual checksum, Amazon S3 ignores any provided + * ChecksumAlgorithm parameter.

+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; +} +/** + * @public + */ +export interface PutBucketAclRequest { + /** + *
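A one-call sketch of applying an accelerate configuration (bucket name is a placeholder; top-level await assumes an ES-module context):

import { S3Client, PutBucketAccelerateConfigurationCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({}); // region/credentials resolved from the environment

// Turn transfer acceleration on; Status is "Enabled" or "Suspended".
await s3.send(new PutBucketAccelerateConfigurationCommand({
  Bucket: "amzn-s3-demo-bucket", // placeholder bucket name
  AccelerateConfiguration: { Status: "Enabled" },
}));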

The canned ACL to apply to the bucket.

+ * @public + */ + ACL?: BucketCannedACL | undefined; + /** + *

Contains the elements that set the ACL permissions for an object per grantee.

+ * @public + */ + AccessControlPolicy?: AccessControlPolicy | undefined; + /** + *

The bucket to which to apply the ACL.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The Base64 encoded 128-bit MD5 digest of the data. This header must be used as a message + * integrity check to verify that the request body was not corrupted in transit. For more + * information, go to RFC + * 1864. + *

+ *

For requests made using the Amazon Web Services Command Line Interface (CLI) or Amazon Web Services SDKs, this field is calculated automatically.

+ * @public + */ + ContentMD5?: string | undefined; + /** + *

Indicates the algorithm used to create the checksum for the request when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request. For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If you provide an individual checksum, Amazon S3 ignores any provided + * ChecksumAlgorithm parameter.

+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

Allows grantee the read, write, read ACP, and write ACP permissions on the + * bucket.

+ * @public + */ + GrantFullControl?: string | undefined; + /** + *

Allows grantee to list the objects in the bucket.

+ * @public + */ + GrantRead?: string | undefined; + /** + *

Allows grantee to read the bucket ACL.

+ * @public + */ + GrantReadACP?: string | undefined; + /** + *

Allows grantee to create new objects in the bucket.

+ *

For the bucket and object owners of existing objects, also allows deletions and + * overwrites of those objects.

+ * @public + */ + GrantWrite?: string | undefined; + /** + *

Allows grantee to write the ACL for the applicable bucket.

+ * @public + */ + GrantWriteACP?: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + */ +export interface PutBucketAnalyticsConfigurationRequest { + /** + *
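A sketch of applying a canned ACL; the Grant* fields above would instead take explicit grantee strings (bucket name is a placeholder):

import { S3Client, PutBucketAclCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({}); // region/credentials resolved from the environment

// Apply a canned ACL. Explicit grants would look like
// GrantRead: 'uri="http://acs.amazonaws.com/groups/s3/LogDelivery"'.
await s3.send(new PutBucketAclCommand({
  Bucket: "amzn-s3-demo-bucket", // placeholder bucket name
  ACL: "private",
}));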

The name of the bucket to which an analytics configuration is stored.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The ID that identifies the analytics configuration.

+ * @public + */ + Id: string | undefined; + /** + *

The configuration and any analyses for the analytics filter.

+ * @public + */ + AnalyticsConfiguration: AnalyticsConfiguration | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @internal + */ +export declare const CompleteMultipartUploadOutputFilterSensitiveLog: (obj: CompleteMultipartUploadOutput) => any; +/** + * @internal + */ +export declare const CompleteMultipartUploadRequestFilterSensitiveLog: (obj: CompleteMultipartUploadRequest) => any; +/** + * @internal + */ +export declare const CopyObjectOutputFilterSensitiveLog: (obj: CopyObjectOutput) => any; +/** + * @internal + */ +export declare const CopyObjectRequestFilterSensitiveLog: (obj: CopyObjectRequest) => any; +/** + * @internal + */ +export declare const CreateMultipartUploadOutputFilterSensitiveLog: (obj: CreateMultipartUploadOutput) => any; +/** + * @internal + */ +export declare const CreateMultipartUploadRequestFilterSensitiveLog: (obj: CreateMultipartUploadRequest) => any; +/** + * @internal + */ +export declare const SessionCredentialsFilterSensitiveLog: (obj: SessionCredentials) => any; +/** + * @internal + */ +export declare const CreateSessionOutputFilterSensitiveLog: (obj: CreateSessionOutput) => any; +/** + * @internal + */ +export declare const CreateSessionRequestFilterSensitiveLog: (obj: CreateSessionRequest) => any; +/** + * @internal + */ +export declare const ServerSideEncryptionByDefaultFilterSensitiveLog: (obj: ServerSideEncryptionByDefault) => any; +/** + * @internal + */ +export declare const ServerSideEncryptionRuleFilterSensitiveLog: (obj: ServerSideEncryptionRule) => any; +/** + * @internal + */ +export declare const ServerSideEncryptionConfigurationFilterSensitiveLog: (obj: ServerSideEncryptionConfiguration) => any; +/** + * @internal + */ +export declare const GetBucketEncryptionOutputFilterSensitiveLog: (obj: GetBucketEncryptionOutput) => any; +/** + * @internal + */ +export declare const SSEKMSFilterSensitiveLog: (obj: SSEKMS) => any; +/** + * @internal + */ +export declare const InventoryEncryptionFilterSensitiveLog: (obj: InventoryEncryption) => any; +/** + * @internal + */ +export declare const InventoryS3BucketDestinationFilterSensitiveLog: (obj: InventoryS3BucketDestination) => any; +/** + * @internal + */ +export declare const InventoryDestinationFilterSensitiveLog: (obj: InventoryDestination) => any; +/** + * @internal + */ +export declare const InventoryConfigurationFilterSensitiveLog: (obj: InventoryConfiguration) => any; +/** + * @internal + */ +export declare const GetBucketInventoryConfigurationOutputFilterSensitiveLog: (obj: GetBucketInventoryConfigurationOutput) => any; +/** + * @internal + */ +export declare const GetObjectOutputFilterSensitiveLog: (obj: GetObjectOutput) => any; +/** + * @internal + */ +export declare const GetObjectRequestFilterSensitiveLog: (obj: GetObjectRequest) => any; +/** + * @internal + */ +export declare const GetObjectAttributesRequestFilterSensitiveLog: (obj: GetObjectAttributesRequest) => any; +/** + * @internal + */ +export declare const GetObjectTorrentOutputFilterSensitiveLog: (obj: GetObjectTorrentOutput) => any; +/** + * @internal + */ +export declare const HeadObjectOutputFilterSensitiveLog: (obj: HeadObjectOutput) => any; +/** + * @internal + */ +export declare const HeadObjectRequestFilterSensitiveLog: (obj: HeadObjectRequest) => any; +/** + * @internal + */ +export declare const ListBucketInventoryConfigurationsOutputFilterSensitiveLog: (obj: ListBucketInventoryConfigurationsOutput) => any; +/** + * @internal + */ +export declare const ListPartsRequestFilterSensitiveLog: (obj: ListPartsRequest) => any; diff --git 
a/node_modules/@aws-sdk/client-s3/dist-types/models/models_1.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/models/models_1.d.ts new file mode 100644 index 00000000..918bc0f6 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/models/models_1.d.ts @@ -0,0 +1,4028 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { StreamingBlobTypes } from "@smithy/types"; +import { AccessControlPolicy, BucketVersioningStatus, ChecksumAlgorithm, ChecksumType, CORSRule, ErrorDocument, Grant, IndexDocument, IntelligentTieringConfiguration, InventoryConfiguration, LifecycleRule, LoggingEnabled, MetricsConfiguration, NotificationConfiguration, ObjectCannedACL, ObjectLockConfiguration, ObjectLockLegalHold, ObjectLockLegalHoldStatus, ObjectLockMode, ObjectLockRetention, OwnershipControls, Payer, PublicAccessBlockConfiguration, RedirectAllRequestsTo, ReplicationConfiguration, ReplicationStatus, RequestCharged, RequestPayer, RoutingRule, ServerSideEncryption, ServerSideEncryptionConfiguration, StorageClass, Tag, TransitionDefaultMinimumObjectSize } from "./models_0"; +import { S3ServiceException as __BaseException } from "./S3ServiceException"; +/** + *

Describes the cross-origin access configuration for objects in an Amazon S3 bucket. For more + * information, see Enabling + * Cross-Origin Resource Sharing in the + * Amazon S3 User Guide.

+ * @public + */ +export interface CORSConfiguration { + /** + *

A set of origins and methods (cross-origin access that you want to allow). You can add + * up to 100 rules to the configuration.

+ * @public + */ + CORSRules: CORSRule[] | undefined; +} +/** + * @public + */ +export interface PutBucketCorsRequest { + /** + *

Specifies the bucket impacted by the CORS configuration.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

Describes the cross-origin access configuration for objects in an Amazon S3 bucket. For more + * information, see Enabling + * Cross-Origin Resource Sharing in the + * Amazon S3 User Guide.

+ * @public + */ + CORSConfiguration: CORSConfiguration | undefined; + /** + *

The Base64 encoded 128-bit MD5 digest of the data. This header must be used as a message + * integrity check to verify that the request body was not corrupted in transit. For more + * information, go to RFC + * 1864. + *

+ *

For requests made using the Amazon Web Services Command Line Interface (CLI) or Amazon Web Services SDKs, this field is calculated automatically.

+ * @public + */ + ContentMD5?: string | undefined; + /** + *

Indicates the algorithm used to create the checksum for the request when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request. For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If you provide an individual checksum, Amazon S3 ignores any provided + * ChecksumAlgorithm parameter.

+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + */ +export interface PutBucketEncryptionRequest { + /** + *
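A sketch of putting a single CORS rule matching these types (origin and bucket name are placeholders):

import { S3Client, PutBucketCorsCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({}); // region/credentials resolved from the environment

// Allow one web origin to GET and PUT objects directly from the browser.
await s3.send(new PutBucketCorsCommand({
  Bucket: "amzn-s3-demo-bucket", // placeholder bucket name
  CORSConfiguration: {
    CORSRules: [{
      AllowedOrigins: ["https://app.example.com"], // placeholder origin
      AllowedMethods: ["GET", "PUT"],
      AllowedHeaders: ["*"],
      MaxAgeSeconds: 3000,
    }],
  },
}));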

Specifies default encryption for a bucket using server-side encryption with different + * key options.

+ *

+ * Directory buckets - When you use this operation with a directory bucket, you must use path-style requests in the format https://s3express-control.region-code.amazonaws.com/bucket-name + * . Virtual-hosted-style requests aren't supported. Directory bucket names must be unique in the chosen Zone (Availability Zone or Local Zone). Bucket names must also follow the format + * bucket-base-name--zone-id--x-s3 (for example, + * DOC-EXAMPLE-BUCKET--usw2-az1--x-s3). For information about bucket naming restrictions, see Directory bucket naming rules in the Amazon S3 User Guide + *

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The Base64 encoded 128-bit MD5 digest of the server-side encryption + * configuration.

+ *

For requests made using the Amazon Web Services Command Line Interface (CLI) or Amazon Web Services SDKs, this field is calculated automatically.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + ContentMD5?: string | undefined; + /** + *

Indicates the algorithm used to create the checksum for the request when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request. For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If you provide an individual checksum, Amazon S3 ignores any provided + * ChecksumAlgorithm parameter.

+ * + *

For directory buckets, when you use Amazon Web Services SDKs, CRC32 is the default checksum algorithm that's used for performance.

+ *
+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

Specifies the default server-side-encryption configuration.

+ * @public + */ + ServerSideEncryptionConfiguration: ServerSideEncryptionConfiguration | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * + *

For directory buckets, this header is not supported in this API operation. If you specify this header, the request fails with the HTTP status code + * 501 Not Implemented.

+ *
+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + */ +export interface PutBucketIntelligentTieringConfigurationRequest { + /** + *
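A sketch of setting SSE-KMS default encryption (bucket name and key alias are placeholders):

import { S3Client, PutBucketEncryptionCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({}); // region/credentials resolved from the environment

// Default every new object to SSE-KMS, with a bucket key to reduce KMS calls.
await s3.send(new PutBucketEncryptionCommand({
  Bucket: "amzn-s3-demo-bucket", // placeholder bucket name
  ServerSideEncryptionConfiguration: {
    Rules: [{
      ApplyServerSideEncryptionByDefault: {
        SSEAlgorithm: "aws:kms",
        KMSMasterKeyID: "alias/my-app-key", // placeholder key alias
      },
      BucketKeyEnabled: true,
    }],
  },
}));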

The name of the Amazon S3 bucket whose configuration you want to modify or retrieve.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The ID used to identify the S3 Intelligent-Tiering configuration.

+ * @public + */ + Id: string | undefined; + /** + *

Container for S3 Intelligent-Tiering configuration.

+ * @public + */ + IntelligentTieringConfiguration: IntelligentTieringConfiguration | undefined; +} +/** + * @public + */ +export interface PutBucketInventoryConfigurationRequest { + /** + *

The name of the bucket where the inventory configuration will be stored.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The ID used to identify the inventory configuration.

+ * @public + */ + Id: string | undefined; + /** + *

Specifies the inventory configuration.

+ * @public + */ + InventoryConfiguration: InventoryConfiguration | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + */ +export interface PutBucketLifecycleConfigurationOutput { + /** + *

Indicates which default minimum object size behavior is applied to the lifecycle + * configuration.

+ * + *

This parameter applies to general purpose buckets only. It is not supported for + * directory bucket lifecycle configurations.

+ *
+ *
+ * - all_storage_classes_128K - Objects smaller than 128 KB will not transition to any storage class by default.
+ * - varies_by_storage_class - Objects smaller than 128 KB will transition to Glacier Flexible Retrieval or Glacier Deep Archive storage classes. By default, all other storage classes will prevent transitions smaller than 128 KB.
+ *

To customize the minimum object size for any transition you can add a filter that + * specifies a custom ObjectSizeGreaterThan or ObjectSizeLessThan in + * the body of your transition rule. Custom filters always take precedence over the default + * transition behavior.

+ * @public + */ + TransitionDefaultMinimumObjectSize?: TransitionDefaultMinimumObjectSize | undefined; +} +/** + *

Specifies the lifecycle configuration for objects in an Amazon S3 bucket. For more + * information, see Object Lifecycle Management + * in the Amazon S3 User Guide.

+ * @public + */ +export interface BucketLifecycleConfiguration { + /** + *

A lifecycle rule for individual objects in an Amazon S3 bucket.

+ * @public + */ + Rules: LifecycleRule[] | undefined; +} +/** + * @public + */ +export interface PutBucketLifecycleConfigurationRequest { + /** + *

The name of the bucket for which to set the configuration.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

Indicates the algorithm used to create the checksum for the request when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request. For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If you provide an individual checksum, Amazon S3 ignores any provided + * ChecksumAlgorithm parameter.

+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

Container for lifecycle rules. You can add as many as 1,000 rules.

+ * @public + */ + LifecycleConfiguration?: BucketLifecycleConfiguration | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * + *

This parameter applies to general purpose buckets only. It is not supported for + * directory bucket lifecycle configurations.

+ *
+ * @public + */ + ExpectedBucketOwner?: string | undefined; + /** + *

Indicates which default minimum object size behavior is applied to the lifecycle + * configuration.

+ * + *

This parameter applies to general purpose buckets only. It is not supported for + * directory bucket lifecycle configurations.

+ *
+ *
+ * - all_storage_classes_128K - Objects smaller than 128 KB will not transition to any storage class by default.
+ * - varies_by_storage_class - Objects smaller than 128 KB will transition to Glacier Flexible Retrieval or Glacier Deep Archive storage classes. By default, all other storage classes will prevent transitions smaller than 128 KB.
+ *

To customize the minimum object size for any transition you can add a filter that + * specifies a custom ObjectSizeGreaterThan or ObjectSizeLessThan in + * the body of your transition rule. Custom filters always take precedence over the default + * transition behavior.

+ * @public + */ + TransitionDefaultMinimumObjectSize?: TransitionDefaultMinimumObjectSize | undefined; +} +/** + *
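A sketch of a lifecycle rule that uses the custom ObjectSizeGreaterThan filter mentioned above (bucket name, prefix, and rule ID are placeholders):

import { S3Client, PutBucketLifecycleConfigurationCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({}); // region/credentials resolved from the environment

// One rule: objects over 1 MiB under logs/ move to Glacier after 90 days.
// The explicit size filter takes precedence over the default minimum-size behavior.
await s3.send(new PutBucketLifecycleConfigurationCommand({
  Bucket: "amzn-s3-demo-bucket", // placeholder bucket name
  LifecycleConfiguration: {
    Rules: [{
      ID: "archive-large-logs", // placeholder rule ID
      Status: "Enabled",
      Filter: { And: { Prefix: "logs/", ObjectSizeGreaterThan: 1_048_576 } },
      Transitions: [{ Days: 90, StorageClass: "GLACIER" }],
    }],
  },
}));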

Container for logging status information.

+ * @public + */ +export interface BucketLoggingStatus { + /** + *

Describes where logs are stored and the prefix that Amazon S3 assigns to all log object keys + * for a bucket. For more information, see PUT Bucket logging in the + * Amazon S3 API Reference.

+ * @public + */ + LoggingEnabled?: LoggingEnabled | undefined; +} +/** + * @public + */ +export interface PutBucketLoggingRequest { + /** + *

The name of the bucket for which to set the logging parameters.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

Container for logging status information.

+ * @public + */ + BucketLoggingStatus: BucketLoggingStatus | undefined; + /** + *

The MD5 hash of the PutBucketLogging request body.

+ *

For requests made using the Amazon Web Services Command Line Interface (CLI) or Amazon Web Services SDKs, this field is calculated automatically.

+ * @public + */ + ContentMD5?: string | undefined; + /** + *

Indicates the algorithm used to create the checksum for the request when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request. For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If you provide an individual checksum, Amazon S3 ignores any provided + * ChecksumAlgorithm parameter.

+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + */ +export interface PutBucketMetricsConfigurationRequest { + /** + *
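A sketch of enabling server access logging (both bucket names are placeholders):

import { S3Client, PutBucketLoggingCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({}); // region/credentials resolved from the environment

// Ship server access logs for this bucket into a separate log bucket.
await s3.send(new PutBucketLoggingCommand({
  Bucket: "amzn-s3-demo-bucket",         // placeholder source bucket
  BucketLoggingStatus: {
    LoggingEnabled: {
      TargetBucket: "amzn-s3-demo-logs", // placeholder log bucket
      TargetPrefix: "access-logs/",
    },
  },
}));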

The name of the bucket for which the metrics configuration is set.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The ID used to identify the metrics configuration. The ID has a 64 character limit and + * can only contain letters, numbers, periods, dashes, and underscores.

+ * @public + */ + Id: string | undefined; + /** + *

Specifies the metrics configuration.

+ * @public + */ + MetricsConfiguration: MetricsConfiguration | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + */ +export interface PutBucketNotificationConfigurationRequest { + /** + *

The name of the bucket.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

A container for specifying the notification configuration of the bucket. If this element + * is empty, notifications are turned off for the bucket.

+ * @public + */ + NotificationConfiguration: NotificationConfiguration | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; + /** + *

Skips validation of Amazon SQS, Amazon SNS, and Lambda + * destinations. True or false value.

+ * @public + */ + SkipDestinationValidation?: boolean | undefined; +} +/** + * @public + */ +export interface PutBucketOwnershipControlsRequest { + /** + *
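A sketch of routing object-created events to an SQS queue (bucket name and queue ARN are placeholders):

import { S3Client, PutBucketNotificationConfigurationCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({}); // region/credentials resolved from the environment

// Fan object-created events out to an SQS queue; passing an empty
// NotificationConfiguration would instead turn notifications off.
await s3.send(new PutBucketNotificationConfigurationCommand({
  Bucket: "amzn-s3-demo-bucket", // placeholder bucket name
  NotificationConfiguration: {
    QueueConfigurations: [{
      QueueArn: "arn:aws:sqs:us-east-1:111122223333:my-queue", // placeholder ARN
      Events: ["s3:ObjectCreated:*"],
    }],
  },
}));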

The name of the Amazon S3 bucket whose OwnershipControls you want to set.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The MD5 hash of the OwnershipControls request body.

+ *

For requests made using the Amazon Web Services Command Line Interface (CLI) or Amazon Web Services SDKs, this field is calculated automatically.

+ * @public + */ + ContentMD5?: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; + /** + *

The OwnershipControls (BucketOwnerEnforced, BucketOwnerPreferred, or + * ObjectWriter) that you want to apply to this Amazon S3 bucket.

+ * @public + */ + OwnershipControls: OwnershipControls | undefined; +} +/** + * @public + */ +export interface PutBucketPolicyRequest { + /** + *

The name of the bucket.

+ *

+ * Directory buckets - When you use this operation with a directory bucket, you must use path-style requests in the format https://s3express-control.region-code.amazonaws.com/bucket-name + * . Virtual-hosted-style requests aren't supported. Directory bucket names must be unique in the chosen Zone (Availability Zone or Local Zone). Bucket names must also follow the format + * bucket-base-name--zone-id--x-s3 (for example, + * DOC-EXAMPLE-BUCKET--usw2-az1--x-s3). For information about bucket naming restrictions, see Directory bucket naming rules in the Amazon S3 User Guide + *

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The MD5 hash of the request body.

+ *

For requests made using the Amazon Web Services Command Line Interface (CLI) or Amazon Web Services SDKs, this field is calculated automatically.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + ContentMD5?: string | undefined; + /** + *

Indicates the algorithm used to create the checksum for the request when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum-algorithm + * or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request.

+ *

For the x-amz-checksum-algorithm + * header, replace + * algorithm + * with the supported algorithm from the following list:

+ *
+ * - CRC32
+ * - CRC32C
+ * - CRC64NVME
+ * - SHA1
+ * - SHA256
+ *

For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If the individual checksum value you provide through x-amz-checksum-algorithm + * doesn't match the checksum algorithm you set through x-amz-sdk-checksum-algorithm, Amazon S3 fails the request with a BadDigest error.

+ * + *

For directory buckets, when you use Amazon Web Services SDKs, CRC32 is the default checksum algorithm that's used for performance.

+ *
+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

Set this parameter to true to confirm that you want to remove your permissions to change + * this bucket policy in the future.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + ConfirmRemoveSelfBucketAccess?: boolean | undefined; + /** + *

The bucket policy as a JSON document.

+ *

For directory buckets, the only IAM action supported in the bucket policy is + * s3express:CreateSession.

+ * @public + */ + Policy: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * + *

For directory buckets, this header is not supported in this API operation. If you specify this header, the request fails with the HTTP status code + * 501 Not Implemented.

+ *
+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + */ +export interface PutBucketReplicationRequest { + /** + *
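A sketch of putting a policy as a JSON string, here a deny-insecure-transport statement (bucket name is a placeholder):

import { S3Client, PutBucketPolicyCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({}); // region/credentials resolved from the environment

// Deny any request that arrives over plain HTTP.
const policy = {
  Version: "2012-10-17",
  Statement: [{
    Sid: "DenyInsecureTransport",
    Effect: "Deny",
    Principal: "*",
    Action: "s3:*",
    Resource: [
      "arn:aws:s3:::amzn-s3-demo-bucket",   // placeholder bucket ARN
      "arn:aws:s3:::amzn-s3-demo-bucket/*",
    ],
    Condition: { Bool: { "aws:SecureTransport": "false" } },
  }],
};

await s3.send(new PutBucketPolicyCommand({
  Bucket: "amzn-s3-demo-bucket", // placeholder bucket name
  Policy: JSON.stringify(policy),
}));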

The name of the bucket.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The Base64 encoded 128-bit MD5 digest of the data. You must use this header as a message + * integrity check to verify that the request body was not corrupted in transit. For more + * information, see RFC 1864.

+ *

For requests made using the Amazon Web Services Command Line Interface (CLI) or Amazon Web Services SDKs, this field is calculated automatically.

+ * @public + */ + ContentMD5?: string | undefined; + /** + *

Indicates the algorithm used to create the checksum for the request when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request. For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If you provide an individual checksum, Amazon S3 ignores any provided + * ChecksumAlgorithm parameter.

+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

A container for replication rules. You can add up to 1,000 rules. The maximum size of a + * replication configuration is 2 MB.

+ * @public + */ + ReplicationConfiguration: ReplicationConfiguration | undefined; + /** + *

A token to allow Object Lock to be enabled for an existing bucket.

+ * @public + */ + Token?: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + *

Container for Payer.

+ * @public + */ +export interface RequestPaymentConfiguration { + /** + *

Specifies who pays for the download and request fees.

+ * @public + */ + Payer: Payer | undefined; +} +/** + * @public + */ +export interface PutBucketRequestPaymentRequest { + /** + *

The bucket name.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The Base64 encoded 128-bit MD5 digest of the data. You must use this header as a message + * integrity check to verify that the request body was not corrupted in transit. For more + * information, see RFC 1864.

+ *

For requests made using the Amazon Web Services Command Line Interface (CLI) or Amazon Web Services SDKs, this field is calculated automatically.

+ * @public + */ + ContentMD5?: string | undefined; + /** + *

Indicates the algorithm used to create the checksum for the request when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request. For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If you provide an individual checksum, Amazon S3 ignores any provided + * ChecksumAlgorithm parameter.

+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

Container for Payer.

+ * @public + */ + RequestPaymentConfiguration: RequestPaymentConfiguration | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + *
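A sketch of enabling Requester Pays (bucket name is a placeholder):

import { S3Client, PutBucketRequestPaymentCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({}); // region/credentials resolved from the environment

// Switch the bucket to Requester Pays; set Payer back to "BucketOwner" to undo.
await s3.send(new PutBucketRequestPaymentCommand({
  Bucket: "amzn-s3-demo-bucket", // placeholder bucket name
  RequestPaymentConfiguration: { Payer: "Requester" },
}));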

Container for TagSet elements.

+ * @public + */ +export interface Tagging { + /** + *

A collection for a set of tags.

+ * @public + */ + TagSet: Tag[] | undefined; +} +/** + * @public + */ +export interface PutBucketTaggingRequest { + /** + *

The bucket name.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The Base64 encoded 128-bit MD5 digest of the data. You must use this header as a message + * integrity check to verify that the request body was not corrupted in transit. For more + * information, see RFC 1864.

+ *

For requests made using the Amazon Web Services Command Line Interface (CLI) or Amazon Web Services SDKs, this field is calculated automatically.

+ * @public + */ + ContentMD5?: string | undefined; + /** + *

Indicates the algorithm used to create the checksum for the request when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request. For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If you provide an individual checksum, Amazon S3 ignores any provided + * ChecksumAlgorithm parameter.

+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

Container for the TagSet and Tag elements.

+ * @public + */ + Tagging: Tagging | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + * @enum + */ +export declare const MFADelete: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +/** + * @public + */ +export type MFADelete = (typeof MFADelete)[keyof typeof MFADelete]; +/** + *
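A sketch of replacing a bucket's tag set (bucket name and tags are placeholders):

import { S3Client, PutBucketTaggingCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({}); // region/credentials resolved from the environment

// PUT replaces the whole tag set, so include every tag the bucket should keep.
await s3.send(new PutBucketTaggingCommand({
  Bucket: "amzn-s3-demo-bucket", // placeholder bucket name
  Tagging: {
    TagSet: [
      { Key: "project", Value: "file-storage" }, // placeholder tags
      { Key: "env", Value: "production" },
    ],
  },
}));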

Describes the versioning state of an Amazon S3 bucket. For more information, see PUT + * Bucket versioning in the Amazon S3 API Reference.

+ * @public + */ +export interface VersioningConfiguration { + /** + *

Specifies whether MFA delete is enabled in the bucket versioning configuration. This + * element is only returned if the bucket has been configured with MFA delete. If the bucket + * has never been so configured, this element is not returned.

+ * @public + */ + MFADelete?: MFADelete | undefined; + /** + *

The versioning state of the bucket.

+ * @public + */ + Status?: BucketVersioningStatus | undefined; +} +/** + * @public + */ +export interface PutBucketVersioningRequest { + /** + *

The bucket name.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The Base64 encoded 128-bit MD5 digest of the data. You must use this header as a + * message integrity check to verify that the request body was not corrupted in transit. For + * more information, see RFC + * 1864.

+ *

For requests made using the Amazon Web Services Command Line Interface (CLI) or Amazon Web Services SDKs, this field is calculated automatically.

+ * @public + */ + ContentMD5?: string | undefined; + /** + *

Indicates the algorithm used to create the checksum for the request when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request. For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If you provide an individual checksum, Amazon S3 ignores any provided + * ChecksumAlgorithm parameter.

+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

The concatenation of the authentication device's serial number, a space, and the value + * that is displayed on your authentication device.

+ * @public + */ + MFA?: string | undefined; + /** + *

Container for setting the versioning state.

+ * @public + */ + VersioningConfiguration: VersioningConfiguration | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + *
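// A minimal sketch enabling versioning via the PutBucketVersioningRequest shape
// above; the bucket name is an assumption, and the MFA header is omitted here.
import { S3Client, PutBucketVersioningCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({});
await s3.send(
  new PutBucketVersioningCommand({
    Bucket: "example-bucket",
    VersioningConfiguration: { Status: "Enabled" },
  })
);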

Specifies website configuration parameters for an Amazon S3 bucket.

+ * @public + */ +export interface WebsiteConfiguration { + /** + *

The name of the error document for the website.

+ * @public + */ + ErrorDocument?: ErrorDocument | undefined; + /** + *

The name of the index document for the website.

+ * @public + */ + IndexDocument?: IndexDocument | undefined; + /** + *

The redirect behavior for every request to this bucket's website endpoint.

+ * + *

If you specify this property, you can't specify any other property.

+ *
+ * @public + */ + RedirectAllRequestsTo?: RedirectAllRequestsTo | undefined; + /** + *

Rules that define when a redirect is applied and the redirect behavior.

+ * @public + */ + RoutingRules?: RoutingRule[] | undefined; +} +/** + * @public + */ +export interface PutBucketWebsiteRequest { + /** + *

The bucket name.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The Base64 encoded 128-bit MD5 digest of the data. You must use this header as a message + * integrity check to verify that the request body was not corrupted in transit. For more + * information, see RFC 1864.

+ *

For requests made using the Amazon Web Services Command Line Interface (CLI) or Amazon Web Services SDKs, this field is calculated automatically.

+ * @public + */ + ContentMD5?: string | undefined; + /** + *

Indicates the algorithm used to create the checksum for the request when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request. For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If you provide an individual checksum, Amazon S3 ignores any provided + * ChecksumAlgorithm parameter.

+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

Container for the request.

+ * @public + */ + WebsiteConfiguration: WebsiteConfiguration | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + *
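// A minimal sketch for PutBucketWebsiteRequest, wiring the WebsiteConfiguration
// container defined above; the bucket and document names are assumptions.
import { S3Client, PutBucketWebsiteCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({});
await s3.send(
  new PutBucketWebsiteCommand({
    Bucket: "example-bucket",
    WebsiteConfiguration: {
      IndexDocument: { Suffix: "index.html" },
      ErrorDocument: { Key: "error.html" },
    },
  })
);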

+ * The existing object was created with a different encryption type. + * Subsequent write requests must include the appropriate encryption + * parameters in the request or while creating the session. + *

+ * @public + */ +export declare class EncryptionTypeMismatch extends __BaseException { + readonly name: "EncryptionTypeMismatch"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

You may receive this error in multiple cases. Depending on the reason for the error, you may receive one of the messages below:

+ *   - Cannot specify both a write offset value and user-defined object metadata for existing objects.
+ *   - Checksum Type mismatch occurred, expected checksum Type: sha1, actual checksum Type: crc32c.
+ *   - Request body cannot be empty when 'write offset' is specified.
+ * @public + */ +export declare class InvalidRequest extends __BaseException { + readonly name: "InvalidRequest"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

+ * The write offset value that you specified does not match the current object size. + *

+ * @public + */ +export declare class InvalidWriteOffset extends __BaseException { + readonly name: "InvalidWriteOffset"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface PutObjectOutput { + /** + *
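// A hedged sketch of catching the typed errors declared above while appending
// to an object in a directory bucket; the bucket/key, payload, and offset
// bookkeeping are assumptions for illustration.
import {
  S3Client,
  PutObjectCommand,
  EncryptionTypeMismatch,
  InvalidWriteOffset,
} from "@aws-sdk/client-s3";

const s3 = new S3Client({});
const chunk = new Uint8Array([1, 2, 3]); // hypothetical payload
const currentSize = 1024; // hypothetical size of the existing object

try {
  await s3.send(
    new PutObjectCommand({
      Bucket: "example--usw2-az1--x-s3",
      Key: "log.bin",
      Body: chunk,
      WriteOffsetBytes: currentSize, // must equal the existing object's size
    })
  );
} catch (err) {
  if (err instanceof InvalidWriteOffset) {
    // Re-read the object's size (e.g. via HeadObject) and retry the append.
  } else if (err instanceof EncryptionTypeMismatch) {
    // Re-send with the encryption parameters the object was created with.
  } else {
    throw err;
  }
}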

If the expiration is configured for the object (see PutBucketLifecycleConfiguration) in the Amazon S3 User Guide, + * the response includes this header. It includes the expiry-date and + * rule-id key-value pairs that provide information about object expiration. + * The value of the rule-id is URL-encoded.

+ * + *

Object expiration information is not returned in directory buckets and this header returns the value "NotImplemented" in all responses for directory buckets.

+ *
+ * @public + */ + Expiration?: string | undefined; + /** + *

Entity tag for the uploaded object.

+ *

+ * General purpose buckets - To ensure that data is not + * corrupted traversing the network, for objects where the ETag is the MD5 digest of the + * object, you can calculate the MD5 while putting an object to Amazon S3 and compare the returned + * ETag to the calculated MD5 value.

+ *

+ * Directory buckets - The ETag for the object in + * a directory bucket isn't the MD5 digest of the object.

+ * @public + */ + ETag?: string | undefined; + /** + *

The Base64 encoded, 32-bit CRC32 checksum of the object. This checksum is only present if the checksum was uploaded + * with the object. When you use an API operation on an object that was uploaded using multipart uploads, this value may not be a direct checksum value of the full object. Instead, it's a calculation based on the checksum values of each individual part. For more information about how checksums are calculated + * with multipart uploads, see + * Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32?: string | undefined; + /** + *

The Base64 encoded, 32-bit CRC32C checksum of the object. This checksum is only present if the checksum was uploaded + * with the object. When you use an API operation on an object that was uploaded using multipart uploads, this value may not be a direct checksum value of the full object. Instead, it's a calculation based on the checksum values of each individual part. For more information about how checksums are calculated + * with multipart uploads, see + * Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32C?: string | undefined; + /** + *

The Base64 encoded, 64-bit CRC64NVME checksum of the object. This header + * is present if the object was uploaded with the CRC64NVME checksum algorithm, or if it + * was uploaded without a checksum (and Amazon S3 added the default checksum, + * CRC64NVME, to the uploaded object). For more information about how + * checksums are calculated with multipart uploads, see Checking object integrity + * in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC64NVME?: string | undefined; + /** + *

The Base64 encoded, 160-bit SHA1 digest of the object. This will only be present if the checksum was uploaded + * with the object. When you use an API operation on an object that was uploaded using multipart uploads, this value may not be a direct checksum value of the full object. Instead, it's a calculation based on the checksum values of each individual part. For more information about how checksums are calculated + * with multipart uploads, see + * Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA1?: string | undefined; + /** + *

The Base64 encoded, 256-bit SHA256 digest of the object. This will only be present if the checksum was uploaded + * with the object. When you use an API operation on an object that was uploaded using multipart uploads, this value may not be a direct checksum value of the full object. Instead, it's a calculation based on the checksum values of each individual part. For more information about how checksums are calculated + * with multipart uploads, see + * Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA256?: string | undefined; + /** + *

This header specifies the checksum type of the object, which determines how part-level + * checksums are combined to create an object-level checksum for multipart objects. For + * PutObject uploads, the checksum type is always FULL_OBJECT. You can use this header as a + * data integrity check to verify that the checksum type that is received is the same checksum + * that was specified. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumType?: ChecksumType | undefined; + /** + *

The server-side encryption algorithm used when you store this object in Amazon S3.

+ * @public + */ + ServerSideEncryption?: ServerSideEncryption | undefined; + /** + *

Version ID of the object.

+ *

If you enable versioning for a bucket, Amazon S3 automatically generates a unique version ID + * for the object being stored. Amazon S3 returns this ID in the response. When you enable + * versioning for a bucket, if Amazon S3 receives multiple write requests for the same object + * simultaneously, it stores all of the objects. For more information about versioning, see + * Adding Objects to + * Versioning-Enabled Buckets in the Amazon S3 User Guide. For + * information about returning the versioning state of a bucket, see GetBucketVersioning.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + VersionId?: string | undefined; + /** + *

If server-side encryption with a customer-provided encryption key was requested, the + * response will include this header to confirm the encryption algorithm that's used.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerAlgorithm?: string | undefined; + /** + *

If server-side encryption with a customer-provided encryption key was requested, the + * response will include this header to provide the round-trip message integrity verification + * of the customer-provided encryption key.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerKeyMD5?: string | undefined; + /** + *

If present, indicates the ID of the KMS key that was used for object encryption.

+ * @public + */ + SSEKMSKeyId?: string | undefined; + /** + *

If present, indicates the Amazon Web Services KMS Encryption Context to use for object encryption. The value of + * this header is a Base64 encoded string of a UTF-8 encoded JSON, which contains the encryption context as key-value pairs. + * This value is stored as object metadata and automatically gets + * passed on to Amazon Web Services KMS for future GetObject + * operations on this object.

+ * @public + */ + SSEKMSEncryptionContext?: string | undefined; + /** + *

Indicates whether the uploaded object uses an S3 Bucket Key for server-side encryption + * with Key Management Service (KMS) keys (SSE-KMS).

+ * @public + */ + BucketKeyEnabled?: boolean | undefined; + /** + *

+ * The size of the object in bytes. This value is only present if you append to an object. + *

+ * + *

This functionality is only supported for objects in the Amazon S3 Express One Zone storage class in directory buckets.

+ *
+ * @public + */ + Size?: number | undefined; + /** + *

If present, indicates that the requester was successfully charged for the + * request.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestCharged?: RequestCharged | undefined; +} +/** + * @public + */ +export interface PutObjectRequest { + /** + *
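// A minimal sketch showing where the PutObjectOutput fields above surface in
// practice; the bucket, key, body, and checksum choice are assumptions.
import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({});
const out = await s3.send(
  new PutObjectCommand({
    Bucket: "example-bucket",
    Key: "notes.txt",
    Body: "hello",
    ChecksumAlgorithm: "CRC32",
  })
);
// ETag is always returned; VersionId only on versioned buckets; ChecksumCRC32
// because that algorithm was requested above.
console.log(out.ETag, out.VersionId, out.ChecksumCRC32);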

The canned ACL to apply to the object. For more information, see Canned + * ACL in the Amazon S3 User Guide.

+ *

When adding a new object, you can use headers to grant ACL-based permissions to + * individual Amazon Web Services accounts or to predefined groups defined by Amazon S3. These permissions are + * then added to the ACL on the object. By default, all objects are private. Only the owner + * has full access control. For more information, see Access Control List (ACL) Overview + * and Managing + * ACLs Using the REST API in the Amazon S3 User Guide.

+ *

If the bucket that you're uploading objects to uses the bucket owner enforced setting + * for S3 Object Ownership, ACLs are disabled and no longer affect permissions. Buckets that + * use this setting only accept PUT requests that don't specify an ACL or PUT requests that + * specify bucket owner full control ACLs, such as the bucket-owner-full-control + * canned ACL or an equivalent form of this ACL expressed in the XML format. PUT requests that + * contain other ACLs (for example, custom grants to certain Amazon Web Services accounts) fail and return a + * 400 error with the error code AccessControlListNotSupported. + * For more information, see Controlling ownership of + * objects and disabling ACLs in the Amazon S3 User Guide.

+ *   - This functionality is not supported for directory buckets.
+ *   - This functionality is not supported for Amazon S3 on Outposts.
+ * @public + */ + ACL?: ObjectCannedACL | undefined; + /** + *

Object data.

+ * @public + */ + Body?: StreamingBlobTypes | undefined; + /** + *

The bucket name to which the PUT action was initiated.

+ *

+ * Directory buckets - + * When you use this operation with a directory bucket, you must use virtual-hosted-style requests in the format + * Bucket-name.s3express-zone-id.region-code.amazonaws.com. Path-style requests are not supported. Directory bucket names must be unique in the chosen Zone (Availability Zone or Local Zone). Bucket names must follow the format + * bucket-base-name--zone-id--x-s3 (for example, + * amzn-s3-demo-bucket--usw2-az1--x-s3). For information about bucket naming + * restrictions, see Directory bucket naming + * rules in the Amazon S3 User Guide.

+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ * + *

Object Lambda access points are not supported by directory buckets.

+ *
+ *

+ * S3 on Outposts - When you use this action with S3 on Outposts, you must direct requests to the S3 on Outposts hostname. The S3 on Outposts hostname takes the + * form + * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use this action with S3 on Outposts, the destination bucket must be the Outposts access point ARN or the access point alias. For more information about S3 on Outposts, see What is S3 on Outposts? in the Amazon S3 User Guide.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

Can be used to specify caching behavior along the request/reply chain. For more + * information, see http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9.

+ * @public + */ + CacheControl?: string | undefined; + /** + *

Specifies presentational information for the object. For more information, see https://www.rfc-editor.org/rfc/rfc6266#section-4.

+ * @public + */ + ContentDisposition?: string | undefined; + /** + *

Specifies what content encodings have been applied to the object and thus what decoding + * mechanisms must be applied to obtain the media-type referenced by the Content-Type header + * field. For more information, see https://www.rfc-editor.org/rfc/rfc9110.html#field.content-encoding.

+ * @public + */ + ContentEncoding?: string | undefined; + /** + *

The language the content is in.

+ * @public + */ + ContentLanguage?: string | undefined; + /** + *

Size of the body in bytes. This parameter is useful when the size of the body cannot be + * determined automatically. For more information, see https://www.rfc-editor.org/rfc/rfc9110.html#name-content-length.

+ * @public + */ + ContentLength?: number | undefined; + /** + *

The Base64 encoded 128-bit MD5 digest of the message (without the headers) according to + * RFC 1864. This header can be used as a message integrity check to verify that the data is + * the same data that was originally sent. Although it is optional, we recommend using the + * Content-MD5 mechanism as an end-to-end integrity check. For more information about REST + * request authentication, see REST Authentication.

+ * + *

The Content-MD5 or x-amz-sdk-checksum-algorithm header is + * required for any request to upload an object with a retention period configured using + * Amazon S3 Object Lock. For more information, see Uploading objects to an Object Lock enabled bucket in the + * Amazon S3 User Guide.

+ *
+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + ContentMD5?: string | undefined; + /** + *

A standard MIME type describing the format of the contents. For more information, see + * https://www.rfc-editor.org/rfc/rfc9110.html#name-content-type.

+ * @public + */ + ContentType?: string | undefined; + /** + *

Indicates the algorithm used to create the checksum for the object when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum-algorithm + * or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request.

+ *

For the x-amz-checksum-algorithm + * header, replace + * algorithm + * with the supported algorithm from the following list:

+ *   - CRC32
+ *   - CRC32C
+ *   - CRC64NVME
+ *   - SHA1
+ *   - SHA256

For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If the individual checksum value you provide through x-amz-checksum-algorithm + * doesn't match the checksum algorithm you set through x-amz-sdk-checksum-algorithm, Amazon S3 fails the request with a BadDigest error.

+ * + *

The Content-MD5 or x-amz-sdk-checksum-algorithm header is + * required for any request to upload an object with a retention period configured using + * Amazon S3 Object Lock. For more information, see Uploading objects to an Object Lock enabled bucket in the + * Amazon S3 User Guide.

+ *
+ *

For directory buckets, when you use Amazon Web Services SDKs, CRC32 is the default checksum algorithm that's used for performance.

+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is the same data that was originally sent. + * This header specifies the Base64 encoded, 32-bit CRC32 checksum of the object. For more information, see + * Checking object integrity in the + * Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32?: string | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is the same data that was originally sent. + * This header specifies the Base64 encoded, 32-bit CRC32C checksum of the object. For more information, see + * Checking object integrity in the + * Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32C?: string | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is + * the same data that was originally sent. This header specifies the Base64 encoded, 64-bit + * CRC64NVME checksum of the object. The CRC64NVME checksum is + * always a full object checksum. For more information, see Checking object integrity + * in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC64NVME?: string | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is the same data that was originally sent. + * This header specifies the Base64 encoded, 160-bit SHA1 digest of the object. For more information, see + * Checking object integrity in the + * Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA1?: string | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is the same data that was originally sent. + * This header specifies the Base64 encoded, 256-bit SHA256 digest of the object. For more information, see + * Checking object integrity in the + * Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA256?: string | undefined; + /** + *

The date and time at which the object is no longer cacheable. For more information, see + * https://www.rfc-editor.org/rfc/rfc7234#section-5.3.

+ * @public + */ + Expires?: Date | undefined; + /** + *

Uploads the object only if the ETag (entity tag) value provided during the WRITE + * operation matches the ETag of the object in S3. If the ETag values do not match, the + * operation returns a 412 Precondition Failed error.

+ *

If a conflicting operation occurs during the upload S3 returns a 409 ConditionalRequestConflict response. On a 409 failure you should fetch the object's ETag and retry the upload.

+ *

Expects the ETag value as a string.

+ *

For more information about conditional requests, see RFC 7232, or Conditional requests in the Amazon S3 User Guide.

+ * @public + */ + IfMatch?: string | undefined; + /** + *

Uploads the object only if the object key name does not already exist in the bucket + * specified. Otherwise, Amazon S3 returns a 412 Precondition Failed error.

+ *

If a conflicting operation occurs during the upload S3 returns a 409 + * ConditionalRequestConflict response. On a 409 failure you should retry the + * upload.

+ *

Expects the '*' (asterisk) character.

+ *

For more information about conditional requests, see RFC 7232, or Conditional requests in the Amazon S3 User Guide.

+ * @public + */ + IfNoneMatch?: string | undefined; + /** + *

Gives the grantee READ, READ_ACP, and WRITE_ACP permissions on the object.

+ *   - This functionality is not supported for directory buckets.
+ *   - This functionality is not supported for Amazon S3 on Outposts.
+ * @public + */ + GrantFullControl?: string | undefined; + /** + *

Allows grantee to read the object data and its metadata.

+ *   - This functionality is not supported for directory buckets.
+ *   - This functionality is not supported for Amazon S3 on Outposts.
+ * @public + */ + GrantRead?: string | undefined; + /** + *

Allows grantee to read the object ACL.

+ *   - This functionality is not supported for directory buckets.
+ *   - This functionality is not supported for Amazon S3 on Outposts.
+ * @public + */ + GrantReadACP?: string | undefined; + /** + *

Allows grantee to write the ACL for the applicable object.

+ *   - This functionality is not supported for directory buckets.
+ *   - This functionality is not supported for Amazon S3 on Outposts.
+ * @public + */ + GrantWriteACP?: string | undefined; + /** + *

Object key for which the PUT action was initiated.

+ * @public + */ + Key: string | undefined; + /** + *

+ * Specifies the offset for appending data to existing objects in bytes. + * The offset must be equal to the size of the existing object being appended to. + * If no object exists, setting this header to 0 will create a new object. + *

+ * + *

This functionality is only supported for objects in the Amazon S3 Express One Zone storage class in directory buckets.

+ *
+ * @public + */ + WriteOffsetBytes?: number | undefined; + /** + *

A map of metadata to store with the object in S3.

+ * @public + */ + Metadata?: Record | undefined; + /** + *

The server-side encryption algorithm that was used when you store this object in Amazon S3 + * (for example, AES256, aws:kms, aws:kms:dsse).

+ *   - General purpose buckets - You have four mutually exclusive options to protect data using server-side encryption in Amazon S3, depending on how you choose to manage the encryption keys. Specifically, the encryption key options are Amazon S3 managed keys (SSE-S3), Amazon Web Services KMS keys (SSE-KMS or DSSE-KMS), and customer-provided keys (SSE-C). Amazon S3 encrypts data with server-side encryption by using Amazon S3 managed keys (SSE-S3) by default. You can optionally tell Amazon S3 to encrypt data at rest by using server-side encryption with other key options. For more information, see Using Server-Side Encryption in the Amazon S3 User Guide.
+ *   - Directory buckets - For directory buckets, there are only two supported options for server-side encryption: server-side encryption with Amazon S3 managed keys (SSE-S3) (AES256) and server-side encryption with KMS keys (SSE-KMS) (aws:kms). We recommend that the bucket's default encryption uses the desired encryption configuration and you don't override the bucket default encryption in your CreateSession requests or PUT object requests. Then, new objects are automatically encrypted with the desired encryption settings. For more information, see Protecting data with server-side encryption in the Amazon S3 User Guide. For more information about the encryption overriding behaviors in directory buckets, see Specifying server-side encryption with KMS for new object uploads.
+ *     In the Zonal endpoint API calls (except CopyObject and UploadPartCopy) using the REST API, the encryption request headers must match the encryption settings that are specified in the CreateSession request. You can't override the values of the encryption settings (x-amz-server-side-encryption, x-amz-server-side-encryption-aws-kms-key-id, x-amz-server-side-encryption-context, and x-amz-server-side-encryption-bucket-key-enabled) that are specified in the CreateSession request. You don't need to explicitly specify these encryption settings values in Zonal endpoint API calls, and Amazon S3 will use the encryption settings values from the CreateSession request to protect new objects in the directory bucket.
+ *     When you use the CLI or the Amazon Web Services SDKs, for CreateSession, the session token refreshes automatically to avoid service interruptions when a session expires. The CLI or the Amazon Web Services SDKs use the bucket's default encryption configuration for the CreateSession request. It's not supported to override the encryption settings values in the CreateSession request. So in the Zonal endpoint API calls (except CopyObject and UploadPartCopy), the encryption request headers must match the default encryption configuration of the directory bucket.
+ * @public + */ + ServerSideEncryption?: ServerSideEncryption | undefined; + /** + *

By default, Amazon S3 uses the STANDARD Storage Class to store newly created objects. The + * STANDARD storage class provides high durability and high availability. Depending on + * performance needs, you can specify a different Storage Class. For more information, see + * Storage + * Classes in the Amazon S3 User Guide.

+ *   - Directory buckets only support EXPRESS_ONEZONE (the S3 Express One Zone storage class) in Availability Zones and ONEZONE_IA (the S3 One Zone-Infrequent Access storage class) in Dedicated Local Zones.
+ *   - Amazon S3 on Outposts only uses the OUTPOSTS Storage Class.
+ * @public + */ + StorageClass?: StorageClass | undefined; + /** + *

If the bucket is configured as a website, redirects requests for this object to another + * object in the same bucket or to an external URL. Amazon S3 stores the value of this header in + * the object metadata. For information about object metadata, see Object Key and Metadata in the + * Amazon S3 User Guide.

+ *

In the following example, the request header sets the redirect to an object + * (anotherPage.html) in the same bucket:

+ *

+ * x-amz-website-redirect-location: /anotherPage.html + *

+ *

In the following example, the request header sets the object redirect to another + * website:

+ *

+ * x-amz-website-redirect-location: http://www.example.com/ + *

+ *

For more information about website hosting in Amazon S3, see Hosting Websites on Amazon S3 and + * How to + * Configure Website Page Redirects in the Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + WebsiteRedirectLocation?: string | undefined; + /** + *

Specifies the algorithm to use when encrypting the object (for example, + * AES256).

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerAlgorithm?: string | undefined; + /** + *

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This + * value is used to store the object and then it is discarded; Amazon S3 does not store the + * encryption key. The key must be appropriate for use with the algorithm specified in the + * x-amz-server-side-encryption-customer-algorithm header.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerKey?: string | undefined; + /** + *

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses + * this header for a message integrity check to ensure that the encryption key was transmitted + * without error.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerKeyMD5?: string | undefined; + /** + *

Specifies the KMS key ID (Key ID, Key ARN, or Key Alias) to use for object encryption. If the KMS key doesn't exist in the same + * account that's issuing the command, you must use the full Key ARN not the Key ID.

+ *

+ * General purpose buckets - If you specify x-amz-server-side-encryption with aws:kms or aws:kms:dsse, this header specifies the ID (Key ID, Key ARN, or Key Alias) of the KMS + * key to use. If you specify + * x-amz-server-side-encryption:aws:kms or + * x-amz-server-side-encryption:aws:kms:dsse, but do not provide x-amz-server-side-encryption-aws-kms-key-id, Amazon S3 uses the Amazon Web Services managed key + * (aws/s3) to protect the data.

+ *

+ * Directory buckets - To encrypt data using SSE-KMS, it's recommended to specify the + * x-amz-server-side-encryption header to aws:kms. Then, the x-amz-server-side-encryption-aws-kms-key-id header implicitly uses + * the bucket's default KMS customer managed key ID. If you want to explicitly set the + * x-amz-server-side-encryption-aws-kms-key-id header, it must match the bucket's default customer managed key (using key ID or ARN, not alias). Your SSE-KMS configuration can only support 1 customer managed key per directory bucket's lifetime. + * The Amazon Web Services managed key (aws/s3) isn't supported. + * + * Incorrect key specification results in an HTTP 400 Bad Request error.

+ * @public + */ + SSEKMSKeyId?: string | undefined; + /** + *

Specifies the Amazon Web Services KMS Encryption Context as an additional encryption context to use for object encryption. The value of + * this header is a Base64 encoded string of a UTF-8 encoded JSON, which contains the encryption context as key-value pairs. + * This value is stored as object metadata and automatically gets passed on + * to Amazon Web Services KMS for future GetObject operations on + * this object.

+ *

+ * General purpose buckets - This value must be explicitly added during CopyObject operations if you want an additional encryption context for your object. For more information, see Encryption context in the Amazon S3 User Guide.

+ *

+ * Directory buckets - You can optionally provide an explicit encryption context value. The value must match the default encryption context - the bucket Amazon Resource Name (ARN). An additional encryption context value is not supported.

+ * @public + */ + SSEKMSEncryptionContext?: string | undefined; + /** + *

Specifies whether Amazon S3 should use an S3 Bucket Key for object encryption with + * server-side encryption using Key Management Service (KMS) keys (SSE-KMS).

+ *

+ * General purpose buckets - Setting this header to + * true causes Amazon S3 to use an S3 Bucket Key for object encryption with + * SSE-KMS. Also, specifying this header with a PUT action doesn't affect bucket-level settings for S3 + * Bucket Key.

+ *

+ * Directory buckets - S3 Bucket Keys are always enabled for GET and PUT operations in a directory bucket and can’t be disabled. S3 Bucket Keys aren't supported, when you copy SSE-KMS encrypted objects from general purpose buckets + * to directory buckets, from directory buckets to general purpose buckets, or between directory buckets, through CopyObject, UploadPartCopy, the Copy operation in Batch Operations, or + * the import jobs. In this case, Amazon S3 makes a call to KMS every time a copy request is made for a KMS-encrypted object.

+ * @public + */ + BucketKeyEnabled?: boolean | undefined; + /** + *

Confirms that the requester knows that they will be charged for the request. Bucket + * owners need not specify this parameter in their requests. If either the source or + * destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding + * charges to copy the object. For information about downloading objects from Requester Pays + * buckets, see Downloading Objects in + * Requester Pays Buckets in the Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestPayer?: RequestPayer | undefined; + /** + *

The tag-set for the object. The tag-set must be encoded as URL Query parameters. (For + * example, "Key1=Value1")

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + Tagging?: string | undefined; + /** + *

The Object Lock mode that you want to apply to this object.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + ObjectLockMode?: ObjectLockMode | undefined; + /** + *

The date and time when you want this object's Object Lock to expire. Must be formatted + * as a timestamp parameter.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + ObjectLockRetainUntilDate?: Date | undefined; + /** + *

Specifies whether a legal hold will be applied to this object. For more information + * about S3 Object Lock, see Object Lock in the + * Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + ObjectLockLegalHoldStatus?: ObjectLockLegalHoldStatus | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + *
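// A minimal sketch of a PutObjectRequest exercising a few of the options
// documented above: a conditional create (IfNoneMatch: "*"), SSE-KMS, and
// URL-encoded tags. The bucket, key, and KMS key ARN are hypothetical.
import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({});
await s3.send(
  new PutObjectCommand({
    Bucket: "example-bucket",
    Key: "reports/2024.csv",
    Body: "id,total\n1,42\n",
    ContentType: "text/csv",
    IfNoneMatch: "*", // fail with 412 Precondition Failed if the key already exists
    ServerSideEncryption: "aws:kms",
    SSEKMSKeyId: "arn:aws:kms:us-east-1:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", // hypothetical
    Tagging: "env=prod&team=data", // tag-set encoded as URL query parameters
  })
);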

+ * You have attempted to add more parts than the maximum of 10000 + * that are allowed for this object. You can use the CopyObject operation + * to copy this object to another and then add more data to the newly copied object. + *

+ * @public + */ +export declare class TooManyParts extends __BaseException { + readonly name: "TooManyParts"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface PutObjectAclOutput { + /** + *

If present, indicates that the requester was successfully charged for the + * request.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestCharged?: RequestCharged | undefined; +} +/** + * @public + */ +export interface PutObjectAclRequest { + /** + *

The canned ACL to apply to the object. For more information, see Canned + * ACL.

+ * @public + */ + ACL?: ObjectCannedACL | undefined; + /** + *

Contains the elements that set the ACL permissions for an object per grantee.

+ * @public + */ + AccessControlPolicy?: AccessControlPolicy | undefined; + /** + *

The bucket name that contains the object to which you want to attach the ACL.

+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ *

+ * S3 on Outposts - When you use this action with S3 on Outposts, you must direct requests to the S3 on Outposts hostname. The S3 on Outposts hostname takes the + * form + * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use this action with S3 on Outposts, the destination bucket must be the Outposts access point ARN or the access point alias. For more information about S3 on Outposts, see What is S3 on Outposts? in the Amazon S3 User Guide.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The Base64 encoded 128-bit MD5 digest of the data. This header must be used as a message + * integrity check to verify that the request body was not corrupted in transit. For more + * information, go to RFC + * 1864. + *

+ *

For requests made using the Amazon Web Services Command Line Interface (CLI) or Amazon Web Services SDKs, this field is calculated automatically.

+ * @public + */ + ContentMD5?: string | undefined; + /** + *

Indicates the algorithm used to create the checksum for the object when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request. For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If you provide an individual checksum, Amazon S3 ignores any provided + * ChecksumAlgorithm parameter.

+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

Allows grantee the read, write, read ACP, and write ACP permissions on the + * bucket.

+ *

This functionality is not supported for Amazon S3 on Outposts.

+ * @public + */ + GrantFullControl?: string | undefined; + /** + *

Allows grantee to list the objects in the bucket.

+ *

This functionality is not supported for Amazon S3 on Outposts.

+ * @public + */ + GrantRead?: string | undefined; + /** + *

Allows grantee to read the bucket ACL.

+ *

This functionality is not supported for Amazon S3 on Outposts.

+ * @public + */ + GrantReadACP?: string | undefined; + /** + *

Allows grantee to create new objects in the bucket.

+ *

For the bucket and object owners of existing objects, also allows deletions and + * overwrites of those objects.

+ * @public + */ + GrantWrite?: string | undefined; + /** + *

Allows grantee to write the ACL for the applicable bucket.

+ *

This functionality is not supported for Amazon S3 on Outposts.

+ * @public + */ + GrantWriteACP?: string | undefined; + /** + *

Key for which the PUT action was initiated.

+ * @public + */ + Key: string | undefined; + /** + *

Confirms that the requester knows that they will be charged for the request. Bucket + * owners need not specify this parameter in their requests. If either the source or + * destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding + * charges to copy the object. For information about downloading objects from Requester Pays + * buckets, see Downloading Objects in + * Requester Pays Buckets in the Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestPayer?: RequestPayer | undefined; + /** + *

Version ID used to reference a specific version of the object.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + VersionId?: string | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + */ +export interface PutObjectLegalHoldOutput { + /** + *
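// A minimal sketch for PutObjectAclRequest applying a canned ACL; the bucket,
// key, and ACL value are assumptions (and the bucket must not have ACLs
// disabled via the bucket owner enforced setting).
import { S3Client, PutObjectAclCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({});
await s3.send(
  new PutObjectAclCommand({
    Bucket: "example-bucket",
    Key: "public/logo.png",
    ACL: "public-read",
  })
);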

If present, indicates that the requester was successfully charged for the + * request.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestCharged?: RequestCharged | undefined; +} +/** + * @public + */ +export interface PutObjectLegalHoldRequest { + /** + *

The bucket name containing the object that you want to place a legal hold on.

+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The key name for the object that you want to place a legal hold on.

+ * @public + */ + Key: string | undefined; + /** + *

Container element for the legal hold configuration you want to apply to the specified + * object.

+ * @public + */ + LegalHold?: ObjectLockLegalHold | undefined; + /** + *

Confirms that the requester knows that they will be charged for the request. Bucket + * owners need not specify this parameter in their requests. If either the source or + * destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding + * charges to copy the object. For information about downloading objects from Requester Pays + * buckets, see Downloading Objects in + * Requester Pays Buckets in the Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestPayer?: RequestPayer | undefined; + /** + *

The version ID of the object that you want to place a legal hold on.

+ * @public + */ + VersionId?: string | undefined; + /** + *

The MD5 hash for the request body.

+ *

For requests made using the Amazon Web Services Command Line Interface (CLI) or Amazon Web Services SDKs, this field is calculated automatically.

+ * @public + */ + ContentMD5?: string | undefined; + /** + *

Indicates the algorithm used to create the checksum for the object when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request. For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If you provide an individual checksum, Amazon S3 ignores any provided + * ChecksumAlgorithm parameter.

+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + */ +export interface PutObjectLockConfigurationOutput { + /** + *
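// A minimal sketch for PutObjectLegalHoldRequest; the bucket and key are
// assumptions, and the target bucket must have Object Lock enabled.
import { S3Client, PutObjectLegalHoldCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({});
await s3.send(
  new PutObjectLegalHoldCommand({
    Bucket: "example-bucket",
    Key: "contracts/msa.pdf",
    LegalHold: { Status: "ON" },
  })
);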

If present, indicates that the requester was successfully charged for the + * request.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestCharged?: RequestCharged | undefined; +} +/** + * @public + */ +export interface PutObjectLockConfigurationRequest { + /** + *

The bucket whose Object Lock configuration you want to create or replace.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The Object Lock configuration that you want to apply to the specified bucket.

+ * @public + */ + ObjectLockConfiguration?: ObjectLockConfiguration | undefined; + /** + *

Confirms that the requester knows that they will be charged for the request. Bucket + * owners need not specify this parameter in their requests. If either the source or + * destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding + * charges to copy the object. For information about downloading objects from Requester Pays + * buckets, see Downloading Objects in + * Requester Pays Buckets in the Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestPayer?: RequestPayer | undefined; + /** + *

A token to allow Object Lock to be enabled for an existing bucket.

+ * @public + */ + Token?: string | undefined; + /** + *

The MD5 hash for the request body.

+ *

For requests made using the Amazon Web Services Command Line Interface (CLI) or Amazon Web Services SDKs, this field is calculated automatically.

+ * @public + */ + ContentMD5?: string | undefined; + /** + *

Indicates the algorithm used to create the checksum for the object when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request. For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If you provide an individual checksum, Amazon S3 ignores any provided + * ChecksumAlgorithm parameter.

+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + */ +export interface PutObjectRetentionOutput { + /** + *
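// A minimal sketch for PutObjectLockConfigurationRequest setting a default
// 30-day GOVERNANCE retention rule; the bucket name and retention values are
// assumptions for illustration.
import { S3Client, PutObjectLockConfigurationCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({});
await s3.send(
  new PutObjectLockConfigurationCommand({
    Bucket: "example-bucket",
    ObjectLockConfiguration: {
      ObjectLockEnabled: "Enabled",
      Rule: { DefaultRetention: { Mode: "GOVERNANCE", Days: 30 } },
    },
  })
);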

If present, indicates that the requester was successfully charged for the + * request.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestCharged?: RequestCharged | undefined; +} +/** + * @public + */ +export interface PutObjectRetentionRequest { + /** + *

The bucket name that contains the object you want to apply this Object Retention + * configuration to.

+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The key name for the object that you want to apply this Object Retention configuration + * to.

+ * @public + */ + Key: string | undefined; + /** + *

The container element for the Object Retention configuration.

+ * @public + */ + Retention?: ObjectLockRetention | undefined; + /** + *

Confirms that the requester knows that they will be charged for the request. Bucket + * owners need not specify this parameter in their requests. If either the source or + * destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding + * charges to copy the object. For information about downloading objects from Requester Pays + * buckets, see Downloading Objects in + * Requester Pays Buckets in the Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestPayer?: RequestPayer | undefined; + /** + *

The version ID for the object that you want to apply this Object Retention configuration + * to.

+ * @public + */ + VersionId?: string | undefined; + /** + *

Indicates whether this action should bypass Governance-mode restrictions.

+ * @public + */ + BypassGovernanceRetention?: boolean | undefined; + /** + *

The MD5 hash for the request body.

+ *

For requests made using the Amazon Web Services Command Line Interface (CLI) or Amazon Web Services SDKs, this field is calculated automatically.

+ * @public + */ + ContentMD5?: string | undefined; + /** + *

Indicates the algorithm used to create the checksum for the object when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request. For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If you provide an individual checksum, Amazon S3 ignores any provided + * ChecksumAlgorithm parameter.

+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + */ +export interface PutObjectTaggingOutput { + /** + *
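// A minimal sketch for PutObjectRetentionRequest; the bucket, key, and
// retain-until date are assumptions, and the bucket must have Object Lock
// enabled for this call to succeed.
import { S3Client, PutObjectRetentionCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({});
await s3.send(
  new PutObjectRetentionCommand({
    Bucket: "example-bucket",
    Key: "contracts/msa.pdf",
    Retention: { Mode: "GOVERNANCE", RetainUntilDate: new Date("2026-01-01") },
    BypassGovernanceRetention: false,
  })
);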

The versionId of the object the tag-set was added to.

+ * @public + */ + VersionId?: string | undefined; +} +/** + * @public + */ +export interface PutObjectTaggingRequest { + /** + *

The bucket name containing the object.

+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ *

+ * S3 on Outposts - When you use this action with S3 on Outposts, you must direct requests to the S3 on Outposts hostname. The S3 on Outposts hostname takes the + * form + * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use this action with S3 on Outposts, the destination bucket must be the Outposts access point ARN or the access point alias. For more information about S3 on Outposts, see What is S3 on Outposts? in the Amazon S3 User Guide.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

Name of the object key.

+ * @public + */ + Key: string | undefined; + /** + *

The versionId of the object that the tag-set will be added to.

+ * @public + */ + VersionId?: string | undefined; + /** + *

The MD5 hash for the request body.

+ *

For requests made using the Amazon Web Services Command Line Interface (CLI) or Amazon Web Services SDKs, this field is calculated automatically.

+ * @public + */ + ContentMD5?: string | undefined; + /** + *

Indicates the algorithm used to create the checksum for the object when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request. For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If you provide an individual checksum, Amazon S3 ignores any provided + * ChecksumAlgorithm parameter.

+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

Container for the TagSet and Tag elements

+ * @public + */ + Tagging: Tagging | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; + /** + *

Confirms that the requester knows that they will be charged for the request. Bucket + * owners need not specify this parameter in their requests. If either the source or + * destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding + * charges to copy the object. For information about downloading objects from Requester Pays + * buckets, see Downloading Objects in + * Requester Pays Buckets in the Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestPayer?: RequestPayer | undefined; +} +/** + * @public + */ +export interface PutPublicAccessBlockRequest { + /** + *
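// A hedged sketch of PutObjectTaggingRequest in use; the bucket, key, and tag
// values are assumptions. The VersionId in the response corresponds to
// PutObjectTaggingOutput above.
import { S3Client, PutObjectTaggingCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-east-1" });
const { VersionId } = await s3.send(
  new PutObjectTaggingCommand({
    Bucket: "example-bucket",
    Key: "reports/january.pdf",
    Tagging: { TagSet: [{ Key: "project", Value: "alpha" }] },
  })
);
console.log("tag-set written to version", VersionId);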

The name of the Amazon S3 bucket whose PublicAccessBlock configuration you want + * to set.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The MD5 hash of the PutPublicAccessBlock request body.

+ *

For requests made using the Amazon Web Services Command Line Interface (CLI) or Amazon Web Services SDKs, this field is calculated automatically.

+ * @public + */ + ContentMD5?: string | undefined; + /** + *

Indicates the algorithm used to create the checksum for the object when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request. For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If you provide an individual checksum, Amazon S3 ignores any provided + * ChecksumAlgorithm parameter.

+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

The PublicAccessBlock configuration that you want to apply to this Amazon S3 + * bucket. You can enable the configuration options in any combination. For more information + * about when Amazon S3 considers a bucket or object public, see The Meaning of "Public" in the Amazon S3 User Guide.

+ * @public + */ + PublicAccessBlockConfiguration: PublicAccessBlockConfiguration | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + *
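// A sketch of PutPublicAccessBlockRequest that blocks all four public-access
// vectors at once; the bucket name is an assumption.
import { S3Client, PutPublicAccessBlockCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-east-1" });
await s3.send(
  new PutPublicAccessBlockCommand({
    Bucket: "example-bucket",
    PublicAccessBlockConfiguration: {
      BlockPublicAcls: true,
      IgnorePublicAcls: true,
      BlockPublicPolicy: true,
      RestrictPublicBuckets: true,
    },
  })
);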

This action is not allowed against this storage tier.

+ * @public + */ +export declare class ObjectAlreadyInActiveTierError extends __BaseException { + readonly name: "ObjectAlreadyInActiveTierError"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface RestoreObjectOutput { + /** + *

If present, indicates that the requester was successfully charged for the + * request.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestCharged?: RequestCharged | undefined; + /** + *

Indicates the path in the provided S3 output location where Select results will be + * restored to.

+ * @public + */ + RestoreOutputPath?: string | undefined; +} +/** + * @public + * @enum + */ +export declare const Tier: { + readonly Bulk: "Bulk"; + readonly Expedited: "Expedited"; + readonly Standard: "Standard"; +}; +/** + * @public + */ +export type Tier = (typeof Tier)[keyof typeof Tier]; +/** + *

Container for S3 Glacier job parameters.

+ * @public + */ +export interface GlacierJobParameters { + /** + *

Retrieval tier at which the restore will be processed.

+ * @public + */ + Tier: Tier | undefined; +} +/** + *

Contains the type of server-side encryption used.

+ * @public + */ +export interface Encryption { + /** + *

The server-side encryption algorithm used when storing job results in Amazon S3 (for example, + * AES256, aws:kms).

+ * @public + */ + EncryptionType: ServerSideEncryption | undefined; + /** + *

If the encryption type is aws:kms, this optional value specifies the ID of + * the symmetric encryption customer managed key to use for encryption of job results. Amazon S3 only + * supports symmetric encryption KMS keys. For more information, see Asymmetric keys in KMS in the Amazon Web Services Key Management Service + * Developer Guide.

+ * @public + */ + KMSKeyId?: string | undefined; + /** + *

If the encryption type is aws:kms, this optional value can be used to + * specify the encryption context for the restore results.

+ * @public + */ + KMSContext?: string | undefined; +} +/** + *

A metadata key-value pair to store with an object.

+ * @public + */ +export interface MetadataEntry { + /** + *

Name of the object.

+ * @public + */ + Name?: string | undefined; + /** + *

Value of the object.

+ * @public + */ + Value?: string | undefined; +} +/** + *

Describes an Amazon S3 location that will receive the results of the restore request.

+ * @public + */ +export interface S3Location { + /** + *

The name of the bucket where the restore results will be placed.

+ * @public + */ + BucketName: string | undefined; + /** + *

The prefix that is prepended to the restore results for this request.

+ * @public + */ + Prefix: string | undefined; + /** + *

Contains the type of server-side encryption used.

+ * @public + */ + Encryption?: Encryption | undefined; + /** + *

The canned ACL to apply to the restore results.

+ * @public + */ + CannedACL?: ObjectCannedACL | undefined; + /** + *

A list of grants that control access to the staged results.

+ * @public + */ + AccessControlList?: Grant[] | undefined; + /** + *

The tag-set that is applied to the restore results.

+ * @public + */ + Tagging?: Tagging | undefined; + /** + *

A list of metadata to store with the restore results in S3.

+ * @public + */ + UserMetadata?: MetadataEntry[] | undefined; + /** + *

The class of storage used to store the restore results.

+ * @public + */ + StorageClass?: StorageClass | undefined; +} +/** + *

Describes the location where the restore job's output is stored.

+ * @public + */ +export interface OutputLocation { + /** + *

Describes an S3 location that will receive the results of the restore request.

+ * @public + */ + S3?: S3Location | undefined; +} +/** + * @public + * @enum + */ +export declare const ExpressionType: { + readonly SQL: "SQL"; +}; +/** + * @public + */ +export type ExpressionType = (typeof ExpressionType)[keyof typeof ExpressionType]; +/** + * @public + * @enum + */ +export declare const CompressionType: { + readonly BZIP2: "BZIP2"; + readonly GZIP: "GZIP"; + readonly NONE: "NONE"; +}; +/** + * @public + */ +export type CompressionType = (typeof CompressionType)[keyof typeof CompressionType]; +/** + * @public + * @enum + */ +export declare const FileHeaderInfo: { + readonly IGNORE: "IGNORE"; + readonly NONE: "NONE"; + readonly USE: "USE"; +}; +/** + * @public + */ +export type FileHeaderInfo = (typeof FileHeaderInfo)[keyof typeof FileHeaderInfo]; +/** + *
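// A sketch of an OutputLocation for a SELECT-type restore: results are staged
// to a bucket/prefix of your choosing. Bucket, prefix, KMS alias, and the
// metadata pair are assumptions.
import type { OutputLocation } from "@aws-sdk/client-s3";

const outputLocation: OutputLocation = {
  S3: {
    BucketName: "example-results-bucket",
    Prefix: "restore-results/",
    StorageClass: "STANDARD",
    // Optional SSE-KMS encryption of the staged results:
    Encryption: { EncryptionType: "aws:kms", KMSKeyId: "alias/example-key" },
    UserMetadata: [{ Name: "job", Value: "january-report" }],
  },
};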

Describes how an uncompressed comma-separated values (CSV)-formatted input object is + * formatted.

+ * @public + */ +export interface CSVInput { + /** + *

Describes the first line of input. Valid values are:

  - NONE: First line is not a header.
  - IGNORE: First line is a header, but you can't use the header values to indicate the column in an expression. You can use column position (such as _1, _2, …) to indicate the column (SELECT s._1 FROM OBJECT s).
  - USE: First line is a header, and you can use the header value to identify a column in an expression (SELECT "name" FROM OBJECT).

+ * @public + */ + FileHeaderInfo?: FileHeaderInfo | undefined; + /** + *

A single character used to indicate that a row should be ignored when the character is + * present at the start of that row. You can specify any character to indicate a comment line. + * The default character is #.

+ *

Default: # + *

+ * @public + */ + Comments?: string | undefined; + /** + *

A single character used for escaping the quotation mark character inside an already + * escaped value. For example, the value """ a , b """ is parsed as " a , b + * ".

+ * @public + */ + QuoteEscapeCharacter?: string | undefined; + /** + *

A single character used to separate individual records in the input. Instead of the + * default value, you can specify an arbitrary delimiter.

+ * @public + */ + RecordDelimiter?: string | undefined; + /** + *

A single character used to separate individual fields in a record. You can specify an + * arbitrary delimiter.

+ * @public + */ + FieldDelimiter?: string | undefined; + /** + *

A single character used for escaping when the field delimiter is part of the value. For + * example, if the value is a, b, Amazon S3 wraps this field value in quotation marks, + * as follows: " a , b ".

+ *

Type: String

+ *

Default: " + *

+ *

Ancestors: CSV + *

+ * @public + */ + QuoteCharacter?: string | undefined; + /** + *

Specifies that CSV field values may contain quoted record delimiters and such records + * should be allowed. Default value is FALSE. Setting this value to TRUE may lower + * performance.

+ * @public + */ + AllowQuotedRecordDelimiter?: boolean | undefined; +} +/** + * @public + * @enum + */ +export declare const JSONType: { + readonly DOCUMENT: "DOCUMENT"; + readonly LINES: "LINES"; +}; +/** + * @public + */ +export type JSONType = (typeof JSONType)[keyof typeof JSONType]; +/** + *

Specifies JSON as object's input serialization format.

+ * @public + */ +export interface JSONInput { + /** + *

The type of JSON. Valid values: Document, Lines.

+ * @public + */ + Type?: JSONType | undefined; +} +/** + *

Container for Parquet.

+ * @public + */ +export interface ParquetInput { +} +/** + *

Describes the serialization format of the object.

+ * @public + */ +export interface InputSerialization { + /** + *

Describes the serialization of a CSV-encoded object.

+ * @public + */ + CSV?: CSVInput | undefined; + /** + *

Specifies object's compression format. Valid values: NONE, GZIP, BZIP2. Default Value: + * NONE.

+ * @public + */ + CompressionType?: CompressionType | undefined; + /** + *

Specifies JSON as object's input serialization format.

+ * @public + */ + JSON?: JSONInput | undefined; + /** + *

Specifies Parquet as object's input serialization format.

+ * @public + */ + Parquet?: ParquetInput | undefined; +} +/** + * @public + * @enum + */ +export declare const QuoteFields: { + readonly ALWAYS: "ALWAYS"; + readonly ASNEEDED: "ASNEEDED"; +}; +/** + * @public + */ +export type QuoteFields = (typeof QuoteFields)[keyof typeof QuoteFields]; +/** + *
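// A sketch of an InputSerialization value for a gzip-compressed CSV object
// whose first line is a usable header row; the delimiters shown are the
// documented defaults.
import type { InputSerialization } from "@aws-sdk/client-s3";

const input: InputSerialization = {
  CSV: {
    FileHeaderInfo: "USE", // header names become usable in SELECT expressions
    RecordDelimiter: "\n",
    FieldDelimiter: ",",
    QuoteCharacter: '"',
  },
  CompressionType: "GZIP",
};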

Describes how uncompressed comma-separated values (CSV)-formatted results are + * formatted.

+ * @public + */ +export interface CSVOutput { + /** + *

Indicates whether to use quotation marks around output fields.

  - ALWAYS: Always use quotation marks for output fields.
  - ASNEEDED: Use quotation marks for output fields when needed.

+ * @public + */ + QuoteFields?: QuoteFields | undefined; + /** + *

The single character used for escaping the quote character inside an already escaped + * value.

+ * @public + */ + QuoteEscapeCharacter?: string | undefined; + /** + *

A single character used to separate individual records in the output. Instead of the + * default value, you can specify an arbitrary delimiter.

+ * @public + */ + RecordDelimiter?: string | undefined; + /** + *

The value used to separate individual fields in a record. You can specify an arbitrary + * delimiter.

+ * @public + */ + FieldDelimiter?: string | undefined; + /** + *

A single character used for escaping when the field delimiter is part of the value. For + * example, if the value is a, b, Amazon S3 wraps this field value in quotation marks, + * as follows: " a , b ".

+ * @public + */ + QuoteCharacter?: string | undefined; +} +/** + *

Specifies JSON as request's output serialization format.

+ * @public + */ +export interface JSONOutput { + /** + *

The value used to separate individual records in the output. If no value is specified, + * Amazon S3 uses a newline character ('\n').

+ * @public + */ + RecordDelimiter?: string | undefined; +} +/** + *

Describes how results of the Select job are serialized.

+ * @public + */ +export interface OutputSerialization { + /** + *

Describes the serialization of CSV-encoded Select results.

+ * @public + */ + CSV?: CSVOutput | undefined; + /** + *

Specifies JSON as request's output serialization format.

+ * @public + */ + JSON?: JSONOutput | undefined; +} +/** + * + *

Amazon S3 Select is no longer available to new customers. Existing customers of Amazon S3 Select can continue to use the feature as usual. Learn more + *

+ *
+ *

Describes the parameters for Select job types.

+ *

Learn how to optimize querying your data in Amazon S3 using + * Amazon Athena, S3 Object Lambda, or client-side filtering.

+ * @public + */ +export interface SelectParameters { + /** + *

Describes the serialization format of the object.

+ * @public + */ + InputSerialization: InputSerialization | undefined; + /** + *

The type of the provided expression (for example, SQL).

+ * @public + */ + ExpressionType: ExpressionType | undefined; + /** + * + *

Amazon S3 Select is no longer available to new customers. Existing customers of Amazon S3 Select can continue to use the feature as usual. Learn more + *

+ *
+ *

The expression that is used to query the object.

+ * @public + */ + Expression: string | undefined; + /** + *

Describes how the results of the Select job are serialized.

+ * @public + */ + OutputSerialization: OutputSerialization | undefined; +} +/** + * @public + * @enum + */ +export declare const RestoreRequestType: { + readonly SELECT: "SELECT"; +}; +/** + * @public + */ +export type RestoreRequestType = (typeof RestoreRequestType)[keyof typeof RestoreRequestType]; +/** + *
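// A sketch of a SelectParameters value as embedded in a SELECT-type
// RestoreRequest; note the deprecation above (S3 Select is closed to new
// customers). The SQL expression and serialization choices are assumptions.
import type { SelectParameters } from "@aws-sdk/client-s3";

const selectParameters: SelectParameters = {
  ExpressionType: "SQL",
  Expression: 'SELECT s."name" FROM S3Object s',
  InputSerialization: { CSV: { FileHeaderInfo: "USE" } },
  OutputSerialization: { JSON: { RecordDelimiter: "\n" } },
};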

Container for restore job parameters.

+ * @public + */ +export interface RestoreRequest { + /** + *

Lifetime of the active copy in days. Do not use with restores that specify + * OutputLocation.

+ *

The Days element is required for regular restores, and must not be provided for select + * requests.

+ * @public + */ + Days?: number | undefined; + /** + *

S3 Glacier related parameters pertaining to this job. Do not use with restores that + * specify OutputLocation.

+ * @public + */ + GlacierJobParameters?: GlacierJobParameters | undefined; + /** + * + *

Amazon S3 Select is no longer available to new customers. Existing customers of Amazon S3 Select can continue to use the feature as usual. Learn more + *

+ *
+ *

Type of restore request.

+ * @public + */ + Type?: RestoreRequestType | undefined; + /** + *

Retrieval tier at which the restore will be processed.

+ * @public + */ + Tier?: Tier | undefined; + /** + *

The optional description for the job.

+ * @public + */ + Description?: string | undefined; + /** + * + *

Amazon S3 Select is no longer available to new customers. Existing customers of Amazon S3 Select can continue to use the feature as usual. Learn more + *

+ *
+ *

Describes the parameters for Select job types.

+ * @public + */ + SelectParameters?: SelectParameters | undefined; + /** + *

Describes the location where the restore job's output is stored.

+ * @public + */ + OutputLocation?: OutputLocation | undefined; +} +/** + * @public + */ +export interface RestoreObjectRequest { + /** + *

The bucket name containing the object to restore.

+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ *

+ * S3 on Outposts - When you use this action with S3 on Outposts, you must direct requests to the S3 on Outposts hostname. The S3 on Outposts hostname takes the + * form + * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use this action with S3 on Outposts, the destination bucket must be the Outposts access point ARN or the access point alias. For more information about S3 on Outposts, see What is S3 on Outposts? in the Amazon S3 User Guide.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

Object key for which the action was initiated.

+ * @public + */ + Key: string | undefined; + /** + *

VersionId used to reference a specific version of the object.

+ * @public + */ + VersionId?: string | undefined; + /** + *

Container for restore job parameters.

+ * @public + */ + RestoreRequest?: RestoreRequest | undefined; + /** + *

Confirms that the requester knows that they will be charged for the request. Bucket + * owners need not specify this parameter in their requests. If either the source or + * destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding + * charges to copy the object. For information about downloading objects from Requester Pays + * buckets, see Downloading Objects in + * Requester Pays Buckets in the Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestPayer?: RequestPayer | undefined; + /** + *

Indicates the algorithm used to create the checksum for the object when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request. For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If you provide an individual checksum, Amazon S3 ignores any provided + * ChecksumAlgorithm parameter.

+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + *
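// A sketch of RestoreObjectRequest for a plain archive retrieval. Days is
// required for regular restores (and must be omitted for SELECT requests);
// the bucket and key are assumptions.
import { S3Client, RestoreObjectCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-east-1" });
await s3.send(
  new RestoreObjectCommand({
    Bucket: "example-bucket",
    Key: "archive/2020/logs.tar",
    RestoreRequest: {
      Days: 7, // lifetime of the restored copy
      GlacierJobParameters: { Tier: "Standard" },
    },
  })
);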

+ * @public + */ +export interface ContinuationEvent { +} +/** + *

A message that indicates the request is complete and no more messages will be sent. You + * should not assume that the request is complete until the client receives an + * EndEvent.

+ * @public + */ +export interface EndEvent { +} +/** + *

This data type contains information about progress of an operation.

+ * @public + */ +export interface Progress { + /** + *

The current number of object bytes scanned.

+ * @public + */ + BytesScanned?: number | undefined; + /** + *

The current number of uncompressed object bytes processed.

+ * @public + */ + BytesProcessed?: number | undefined; + /** + *

The current number of bytes of records payload data returned.

+ * @public + */ + BytesReturned?: number | undefined; +} +/** + *

This data type contains information about the progress event of an operation.

+ * @public + */ +export interface ProgressEvent { + /** + *

The Progress event details.

+ * @public + */ + Details?: Progress | undefined; +} +/** + *

The container for the records event.

+ * @public + */ +export interface RecordsEvent { + /** + *

The byte array of partial, one or more result records. S3 Select doesn't guarantee that + * a record will be self-contained in one record frame. To ensure continuous streaming of + * data, S3 Select might split the same record across multiple record frames instead of + * aggregating the results in memory. Some S3 clients (for example, the SDK for Java) handle this behavior by creating a ByteStream out of the response by + * default. Other clients might not handle this behavior by default. In those cases, you must + * aggregate the results on the client side and parse the response.

+ * @public + */ + Payload?: Uint8Array | undefined; +} +/** + *

Container for the stats details.

+ * @public + */ +export interface Stats { + /** + *

The total number of object bytes scanned.

+ * @public + */ + BytesScanned?: number | undefined; + /** + *

The total number of uncompressed object bytes processed.

+ * @public + */ + BytesProcessed?: number | undefined; + /** + *

The total number of bytes of records payload data returned.

+ * @public + */ + BytesReturned?: number | undefined; +} +/** + *

Container for the Stats Event.

+ * @public + */ +export interface StatsEvent { + /** + *

The Stats event details.

+ * @public + */ + Details?: Stats | undefined; +} +/** + *

The container for selecting objects from a content event stream.

+ * @public + */ +export type SelectObjectContentEventStream = SelectObjectContentEventStream.ContMember | SelectObjectContentEventStream.EndMember | SelectObjectContentEventStream.ProgressMember | SelectObjectContentEventStream.RecordsMember | SelectObjectContentEventStream.StatsMember | SelectObjectContentEventStream.$UnknownMember; +/** + * @public + */ +export declare namespace SelectObjectContentEventStream { + /** + *

The Records Event.

+ * @public + */ + interface RecordsMember { + Records: RecordsEvent; + Stats?: never; + Progress?: never; + Cont?: never; + End?: never; + $unknown?: never; + } + /** + *

The Stats Event.

+ * @public + */ + interface StatsMember { + Records?: never; + Stats: StatsEvent; + Progress?: never; + Cont?: never; + End?: never; + $unknown?: never; + } + /** + *

The Progress Event.

+ * @public + */ + interface ProgressMember { + Records?: never; + Stats?: never; + Progress: ProgressEvent; + Cont?: never; + End?: never; + $unknown?: never; + } + /** + *

The Continuation Event.

+ * @public + */ + interface ContMember { + Records?: never; + Stats?: never; + Progress?: never; + Cont: ContinuationEvent; + End?: never; + $unknown?: never; + } + /** + *

The End Event.

+ * @public + */ + interface EndMember { + Records?: never; + Stats?: never; + Progress?: never; + Cont?: never; + End: EndEvent; + $unknown?: never; + } + /** + * @public + */ + interface $UnknownMember { + Records?: never; + Stats?: never; + Progress?: never; + Cont?: never; + End?: never; + $unknown: [string, any]; + } + interface Visitor { + Records: (value: RecordsEvent) => T; + Stats: (value: StatsEvent) => T; + Progress: (value: ProgressEvent) => T; + Cont: (value: ContinuationEvent) => T; + End: (value: EndEvent) => T; + _: (name: string, value: any) => T; + } + const visit: (value: SelectObjectContentEventStream, visitor: Visitor) => T; +} +/** + * @public + */ +export interface SelectObjectContentOutput { + /** + *
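// A sketch of dispatching on this event-stream union with the visit helper
// declared above; exactly one member is set per event. The formatting of the
// returned strings is an assumption.
import { SelectObjectContentEventStream } from "@aws-sdk/client-s3";

function describe(event: SelectObjectContentEventStream): string {
  return SelectObjectContentEventStream.visit(event, {
    Records: (r) => `records: ${r.Payload?.length ?? 0} bytes`,
    Stats: (s) => `stats: ${s.Details?.BytesReturned ?? 0} bytes returned`,
    Progress: (p) => `progress: ${p.Details?.BytesScanned ?? 0} bytes scanned`,
    Cont: () => "continuation",
    End: () => "end",
    _: (name) => `unknown event: ${name}`,
  });
}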

The array of results.

+ * @public + */ + Payload?: AsyncIterable | undefined; +} +/** + *

Container for specifying if periodic QueryProgress messages should be + * sent.

+ * @public + */ +export interface RequestProgress { + /** + *

Specifies whether periodic QueryProgress frames should be sent. Valid values: TRUE, + * FALSE. Default value: FALSE.

+ * @public + */ + Enabled?: boolean | undefined; +} +/** + *

Specifies the byte range of the object to get the records from. A record is processed + * when its first byte is contained by the range. This parameter is optional, but when + * specified, it must not be empty. See RFC 2616, Section 14.35.1 about how to specify the + * start and end of the range.

+ * @public + */ +export interface ScanRange { + /** + *

Specifies the start of the byte range. This parameter is optional. Valid values: + * non-negative integers. The default value is 0. If only start is supplied, it + * means scan from that point to the end of the file. For example, start=50 means scan + * from byte 50 until the end of the file.

+ * @public + */ + Start?: number | undefined; + /** + *

Specifies the end of the byte range. This parameter is optional. Valid values: + * non-negative integers. The default value is one less than the size of the object being + * queried. If only the End parameter is supplied, it is interpreted to mean scan the last N + * bytes of the file. For example, end=50 means scan the + * last 50 bytes.

+ * @public + */ + End?: number | undefined; +} +/** + * + *

Amazon S3 Select is no longer available to new customers. Existing customers of Amazon S3 Select can continue to use the feature as usual. Learn more + *

+ *
+ *

Request to filter the contents of an Amazon S3 object based on a simple Structured Query + * Language (SQL) statement. In the request, along with the SQL expression, you must specify a + * data serialization format (JSON or CSV) of the object. Amazon S3 uses this to parse object data + * into records. It returns only records that match the specified SQL expression. You must + * also specify the data serialization format for the response. For more information, see + * S3Select API Documentation.

+ * @public + */ +export interface SelectObjectContentRequest { + /** + *

The S3 bucket.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

The object key.

+ * @public + */ + Key: string | undefined; + /** + *

The server-side encryption (SSE) algorithm used to encrypt the object. This parameter is needed only when the object was created + * using a checksum algorithm. For more information, + * see Protecting data using SSE-C keys in the + * Amazon S3 User Guide.

+ * @public + */ + SSECustomerAlgorithm?: string | undefined; + /** + *

The server-side encryption (SSE) customer managed key. This parameter is needed only when the object was created using a checksum algorithm. + * For more information, see + * Protecting data using SSE-C keys in the + * Amazon S3 User Guide.

+ * @public + */ + SSECustomerKey?: string | undefined; + /** + *

The MD5 server-side encryption (SSE) customer managed key. This parameter is needed only when the object was created using a checksum + * algorithm. For more information, + * see Protecting data using SSE-C keys in the + * Amazon S3 User Guide.

+ * @public + */ + SSECustomerKeyMD5?: string | undefined; + /** + *

The expression that is used to query the object.

+ * @public + */ + Expression: string | undefined; + /** + *

The type of the provided expression (for example, SQL).

+ * @public + */ + ExpressionType: ExpressionType | undefined; + /** + *

Specifies if periodic request progress information should be enabled.

+ * @public + */ + RequestProgress?: RequestProgress | undefined; + /** + *

Describes the format of the data in the object that is being queried.

+ * @public + */ + InputSerialization: InputSerialization | undefined; + /** + *

Describes the format of the data that you want Amazon S3 to return in response.

+ * @public + */ + OutputSerialization: OutputSerialization | undefined; + /** + *

Specifies the byte range of the object to get the records from. A record is processed + * when its first byte is contained by the range. This parameter is optional, but when + * specified, it must not be empty. See RFC 2616, Section 14.35.1 about how to specify the + * start and end of the range.

+ *

+ * ScanRange may be used in the following ways:

  - start=50, end=100: process only the records starting between the bytes 50 and 100 (inclusive, counting from zero)
  - start=50: process only the records starting after the byte 50
  - end=50: process only the records within the last 50 bytes of the file

+ * @public + */ + ScanRange?: ScanRange | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + * @public + */ +export interface UploadPartOutput { + /** + *
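// A sketch of issuing a SelectObjectContentRequest and draining the event
// stream. The bucket, key, and SQL are assumptions; record frames may split
// records, so production code should buffer across Records events rather
// than decode frame by frame.
import { S3Client, SelectObjectContentCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-east-1" });
const { Payload } = await s3.send(
  new SelectObjectContentCommand({
    Bucket: "example-bucket",
    Key: "data/users.csv",
    Expression: "SELECT * FROM S3Object s LIMIT 10",
    ExpressionType: "SQL",
    InputSerialization: { CSV: { FileHeaderInfo: "USE" } },
    OutputSerialization: { JSON: { RecordDelimiter: "\n" } },
  })
);

const decoder = new TextDecoder();
for await (const event of Payload ?? []) {
  if (event.Records?.Payload) {
    process.stdout.write(decoder.decode(event.Records.Payload));
  } else if (event.End) {
    break; // the request is only complete once an EndEvent arrives
  }
}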

The server-side encryption algorithm used when you store this object in Amazon S3 (for + * example, AES256, aws:kms).

+ * @public + */ + ServerSideEncryption?: ServerSideEncryption | undefined; + /** + *

Entity tag for the uploaded object.

+ * @public + */ + ETag?: string | undefined; + /** + *

The Base64 encoded, 32-bit CRC32 checksum of the object. This checksum is only present if the checksum was uploaded + * with the object. When you use an API operation on an object that was uploaded using multipart uploads, this value may not be a direct checksum value of the full object. Instead, it's a calculation based on the checksum values of each individual part. For more information about how checksums are calculated + * with multipart uploads, see + * Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32?: string | undefined; + /** + *

The Base64 encoded, 32-bit CRC32C checksum of the object. This checksum is only present if the checksum was uploaded + * with the object. When you use an API operation on an object that was uploaded using multipart uploads, this value may not be a direct checksum value of the full object. Instead, it's a calculation based on the checksum values of each individual part. For more information about how checksums are calculated + * with multipart uploads, see + * Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32C?: string | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is + * the same data that was originally sent. This header specifies the Base64 encoded, 64-bit + * CRC64NVME checksum of the part. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC64NVME?: string | undefined; + /** + *

The Base64 encoded, 160-bit SHA1 digest of the object. This will only be present if the checksum was uploaded + * with the object. When you use the API operation on an object that was uploaded using multipart uploads, this value may not be a direct checksum value of the full object. Instead, it's a calculation based on the checksum values of each individual part. For more information about how checksums are calculated + * with multipart uploads, see + * Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA1?: string | undefined; + /** + *

The Base64 encoded, 256-bit SHA256 digest of the object. This will only be present if the checksum was uploaded + * with the object. When you use an API operation on an object that was uploaded using multipart uploads, this value may not be a direct checksum value of the full object. Instead, it's a calculation based on the checksum values of each individual part. For more information about how checksums are calculated + * with multipart uploads, see + * Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA256?: string | undefined; + /** + *

If server-side encryption with a customer-provided encryption key was requested, the + * response will include this header to confirm the encryption algorithm that's used.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerAlgorithm?: string | undefined; + /** + *

If server-side encryption with a customer-provided encryption key was requested, the + * response will include this header to provide the round-trip message integrity verification + * of the customer-provided encryption key.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerKeyMD5?: string | undefined; + /** + *

If present, indicates the ID of the KMS key that was used for object encryption.

+ * @public + */ + SSEKMSKeyId?: string | undefined; + /** + *

Indicates whether the multipart upload uses an S3 Bucket Key for server-side encryption + * with Key Management Service (KMS) keys (SSE-KMS).

+ * @public + */ + BucketKeyEnabled?: boolean | undefined; + /** + *

If present, indicates that the requester was successfully charged for the + * request.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestCharged?: RequestCharged | undefined; +} +/** + * @public + */ +export interface UploadPartRequest { + /** + *

Object data.

+ * @public + */ + Body?: StreamingBlobTypes | undefined; + /** + *

The name of the bucket to which the multipart upload was initiated.

+ *

+ * Directory buckets - + * When you use this operation with a directory bucket, you must use virtual-hosted-style requests in the format + * Bucket-name.s3express-zone-id.region-code.amazonaws.com. Path-style requests are not supported. Directory bucket names must be unique in the chosen Zone (Availability Zone or Local Zone). Bucket names must follow the format + * bucket-base-name--zone-id--x-s3 (for example, + * amzn-s3-demo-bucket--usw2-az1--x-s3). For information about bucket naming + * restrictions, see Directory bucket naming + * rules in the Amazon S3 User Guide.

+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ * + *

Object Lambda access points are not supported by directory buckets.

+ *
+ *

+ * S3 on Outposts - When you use this action with S3 on Outposts, you must direct requests to the S3 on Outposts hostname. The S3 on Outposts hostname takes the + * form + * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use this action with S3 on Outposts, the destination bucket must be the Outposts access point ARN or the access point alias. For more information about S3 on Outposts, see What is S3 on Outposts? in the Amazon S3 User Guide.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

Size of the body in bytes. This parameter is useful when the size of the body cannot be + * determined automatically.

+ * @public + */ + ContentLength?: number | undefined; + /** + *

The Base64 encoded 128-bit MD5 digest of the part data. This parameter is auto-populated + * when using the command from the CLI. This parameter is required if object lock parameters + * are specified.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + ContentMD5?: string | undefined; + /** + *

Indicates the algorithm used to create the checksum for the object when you use the SDK. This header will not provide any + * additional functionality if you don't use the SDK. When you send this header, there must be a corresponding x-amz-checksum or + * x-amz-trailer header sent. Otherwise, Amazon S3 fails the request with the HTTP status code 400 Bad Request. For more + * information, see Checking object integrity in + * the Amazon S3 User Guide.

+ *

If you provide an individual checksum, Amazon S3 ignores any provided + * ChecksumAlgorithm parameter.

+ *

This checksum algorithm must be the same for all parts and it must match the checksum value + * supplied in the CreateMultipartUpload request.

+ * @public + */ + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is the same data that was originally sent. + * This header specifies the Base64 encoded, 32-bit CRC32 checksum of the object. For more information, see + * Checking object integrity in the + * Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32?: string | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is the same data that was originally sent. + * This header specifies the Base64 encoded, 32-bit CRC32C checksum of the object. For more information, see + * Checking object integrity in the + * Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32C?: string | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is + * the same data that was originally sent. This header specifies the Base64 encoded, 64-bit + * CRC64NVME checksum of the part. For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC64NVME?: string | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is the same data that was originally sent. + * This header specifies the Base64 encoded, 160-bit SHA1 digest of the object. For more information, see + * Checking object integrity in the + * Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA1?: string | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is the same data that was originally sent. + * This header specifies the Base64 encoded, 256-bit SHA256 digest of the object. For more information, see + * Checking object integrity in the + * Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA256?: string | undefined; + /** + *

Object key for which the multipart upload was initiated.

+ * @public + */ + Key: string | undefined; + /** + *

Part number of part being uploaded. This is a positive integer between 1 and + * 10,000.

+ * @public + */ + PartNumber: number | undefined; + /** + *

Upload ID identifying the multipart upload whose part is being uploaded.

+ * @public + */ + UploadId: string | undefined; + /** + *

Specifies the algorithm to use when encrypting the object (for example, AES256).

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerAlgorithm?: string | undefined; + /** + *

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This + * value is used to store the object and then it is discarded; Amazon S3 does not store the + * encryption key. The key must be appropriate for use with the algorithm specified in the + * x-amz-server-side-encryption-customer-algorithm header. This must be the + * same encryption key specified in the initiate multipart upload request.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerKey?: string | undefined; + /** + *

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses + * this header for a message integrity check to ensure that the encryption key was transmitted + * without error.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerKeyMD5?: string | undefined; + /** + *

Confirms that the requester knows that they will be charged for the request. Bucket + * owners need not specify this parameter in their requests. If either the source or + * destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding + * charges to copy the object. For information about downloading objects from Requester Pays + * buckets, see Downloading Objects in + * Requester Pays Buckets in the Amazon S3 User Guide.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestPayer?: RequestPayer | undefined; + /** + *

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

+ * @public + */ + ExpectedBucketOwner?: string | undefined; +} +/** + *
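// A sketch of UploadPartRequest within the minimal multipart flow; the
// bucket, key, and part payload are assumptions.
import {
  S3Client,
  CreateMultipartUploadCommand,
  UploadPartCommand,
} from "@aws-sdk/client-s3";

const s3 = new S3Client({ region: "us-east-1" });
const { UploadId } = await s3.send(
  new CreateMultipartUploadCommand({ Bucket: "example-bucket", Key: "backups/site.tar" })
);

const { ETag } = await s3.send(
  new UploadPartCommand({
    Bucket: "example-bucket",
    Key: "backups/site.tar",
    UploadId,
    PartNumber: 1,                        // positive integer, 1..10,000
    Body: Buffer.alloc(5 * 1024 * 1024),  // every part except the last must be at least 5 MiB
  })
);
// Collect { ETag, PartNumber } pairs for the later CompleteMultipartUpload call.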

Container for all response elements.

+ * @public + */ +export interface CopyPartResult { + /** + *

Entity tag of the object.

+ * @public + */ + ETag?: string | undefined; + /** + *

Date and time at which the object was uploaded.

+ * @public + */ + LastModified?: Date | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is the same data that was originally sent. + * This header specifies the Base64 encoded, 32-bit CRC32 checksum of the part. For more information, see + * Checking object integrity in the + * Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32?: string | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is the same data that was originally sent. + * This header specifies the Base64 encoded, 32-bit CRC32C checksum of the part. For more information, see + * Checking object integrity in the + * Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC32C?: string | undefined; + /** + *

The Base64 encoded, 64-bit CRC64NVME checksum of the part. This checksum is present + * if the multipart upload request was created with the CRC64NVME checksum algorithm, or if the object was uploaded without a checksum (and Amazon S3 added the default checksum, CRC64NVME, to the uploaded object). For more information, see Checking object integrity in the Amazon S3 User Guide.

+ * @public + */ + ChecksumCRC64NVME?: string | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is the same data that was originally sent. + * This header specifies the Base64 encoded, 160-bit SHA1 checksum of the part. For more information, see + * Checking object integrity in the + * Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA1?: string | undefined; + /** + *

This header can be used as a data integrity check to verify that the data received is the same data that was originally sent. + * This header specifies the Base64 encoded, 256-bit SHA256 checksum of the part. For more information, see + * Checking object integrity in the + * Amazon S3 User Guide.

+ * @public + */ + ChecksumSHA256?: string | undefined; +} +/** + * @public + */ +export interface UploadPartCopyOutput { + /** + *

The version of the source object that was copied, if you have enabled versioning on the + * source bucket.

+ * + *

This functionality is not supported when the source object is in a directory bucket.

+ *
+ * @public + */ + CopySourceVersionId?: string | undefined; + /** + *

Container for all response elements.

+ * @public + */ + CopyPartResult?: CopyPartResult | undefined; + /** + *

The server-side encryption algorithm used when you store this object in Amazon S3 (for + * example, AES256, aws:kms).

+ * @public + */ + ServerSideEncryption?: ServerSideEncryption | undefined; + /** + *

If server-side encryption with a customer-provided encryption key was requested, the + * response will include this header to confirm the encryption algorithm that's used.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerAlgorithm?: string | undefined; + /** + *

If server-side encryption with a customer-provided encryption key was requested, the + * response will include this header to provide the round-trip message integrity verification + * of the customer-provided encryption key.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + SSECustomerKeyMD5?: string | undefined; + /** + *

If present, indicates the ID of the KMS key that was used for object encryption.

+ * @public + */ + SSEKMSKeyId?: string | undefined; + /** + *

Indicates whether the multipart upload uses an S3 Bucket Key for server-side encryption + * with Key Management Service (KMS) keys (SSE-KMS).

+ * @public + */ + BucketKeyEnabled?: boolean | undefined; + /** + *

If present, indicates that the requester was successfully charged for the + * request.

+ * + *

This functionality is not supported for directory buckets.

+ *
+ * @public + */ + RequestCharged?: RequestCharged | undefined; +} +/** + * @public + */ +export interface UploadPartCopyRequest { + /** + *

The bucket name.

+ *

+ * Directory buckets - When you use this operation with a directory bucket, you must use virtual-hosted-style requests in the format + * Bucket-name.s3express-zone-id.region-code.amazonaws.com. Path-style requests are not supported. Directory bucket names must be unique in the chosen Zone (Availability Zone or Local Zone). Bucket names must follow the format + * bucket-base-name--zone-id--x-s3 (for example, + * amzn-s3-demo-bucket--usw2-az1--x-s3). For information about bucket naming + * restrictions, see Directory bucket naming + * rules in the Amazon S3 User Guide.

+ * + *

Copying objects across different Amazon Web Services Regions isn't supported when the source or destination bucket is in Amazon Web Services Local Zones. The source and destination buckets must have the same parent Amazon Web Services Region. Otherwise, + * you get an HTTP 400 Bad Request error with the error code InvalidRequest.

+ *
+ *

+ * Access points - When you use this action with an access point for general purpose buckets, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When you use this action with an access point for directory buckets, you must provide the access point name in place of the bucket name. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

+ * + *

Object Lambda access points are not supported by directory buckets.

+ *
+ *

+ * S3 on Outposts - When you use this action with S3 on Outposts, you must direct requests to the S3 on Outposts hostname. The S3 on Outposts hostname takes the + * form + * AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use this action with S3 on Outposts, the destination bucket must be the Outposts access point ARN or the access point alias. For more information about S3 on Outposts, see What is S3 on Outposts? in the Amazon S3 User Guide.

+ *

Note: To supply the Multi-region Access Point (MRAP) to Bucket, you need to install the "@aws-sdk/signature-v4-crt" package to your project dependencies. + * For more information, please go to https://github.com/aws/aws-sdk-js-v3#known-issues

+ * @public + */ + Bucket: string | undefined; + /** + *

Specifies the source object for the copy operation. You specify the value in one of two + * formats, depending on whether you want to access the source object through an access point:

+ *
  - For objects not accessed through an access point, specify the name of the source bucket and key of the source object, separated by a slash (/). For example, to copy the object reports/january.pdf from the bucket awsexamplebucket, use awsexamplebucket/reports/january.pdf. The value must be URL-encoded.
  - For objects accessed through access points, specify the Amazon Resource Name (ARN) of the object as accessed through the access point, in the format arn:aws:s3:<Region>:<account-id>:accesspoint/<access-point-name>/object/<key>. For example, to copy the object reports/january.pdf through access point my-access-point owned by account 123456789012 in Region us-west-2, use the URL encoding of arn:aws:s3:us-west-2:123456789012:accesspoint/my-access-point/object/reports/january.pdf. The value must be URL-encoded.
      - Amazon S3 supports copy operations using access points only when the source and destination buckets are in the same Amazon Web Services Region.
      - Access points are not supported by directory buckets.
  - Alternatively, for objects accessed through Amazon S3 on Outposts, specify the ARN of the object as accessed in the format arn:aws:s3-outposts:<Region>:<account-id>:outpost/<outpost-id>/object/<key>. For example, to copy the object reports/january.pdf through outpost my-outpost owned by account 123456789012 in Region us-west-2, use the URL encoding of arn:aws:s3-outposts:us-west-2:123456789012:outpost/my-outpost/object/reports/january.pdf. The value must be URL-encoded.
+ *

If your bucket has versioning enabled, you could have multiple versions of the same + * object. By default, x-amz-copy-source identifies the current version of the + * source object to copy. To copy a specific version of the source object, append + * ?versionId=<version-id> to the x-amz-copy-source request + * header (for example, x-amz-copy-source: + * /awsexamplebucket/reports/january.pdf?versionId=QUpfdndhfd8438MNFDN93jdnJFkdmqnh893).

+ *

If the current version is a delete marker and you don't specify a versionId in the + * x-amz-copy-source request header, Amazon S3 returns a 404 Not Found + * error, because the object does not exist. If you specify versionId in the + * x-amz-copy-source and the versionId is a delete marker, Amazon S3 returns an + * HTTP 400 Bad Request error, because you are not allowed to specify a delete + * marker as a version for the x-amz-copy-source.

+ * + *

+ * Directory buckets - + * S3 Versioning isn't enabled and supported for directory buckets.

+ *
+ * @public + */ + CopySource: string | undefined; + /** + *

Copies the object if its entity tag (ETag) matches the specified tag.

+ *

If both of the x-amz-copy-source-if-match and + * x-amz-copy-source-if-unmodified-since headers are present in the request as + * follows:

+ *

+ * x-amz-copy-source-if-match condition evaluates to true, + * and;

+ *

+ * x-amz-copy-source-if-unmodified-since condition evaluates to + * false;

+ *

Amazon S3 returns 200 OK and copies the data. + *

+ * @public + */ + CopySourceIfMatch?: string | undefined; + /** + *

Copies the object if it has been modified since the specified time.

+ *

If both of the x-amz-copy-source-if-none-match and + * x-amz-copy-source-if-modified-since headers are present in the request as + * follows:

+ *

+ * x-amz-copy-source-if-none-match condition evaluates to false, + * and;

+ *

+ * x-amz-copy-source-if-modified-since condition evaluates to + * true;

+ *

Amazon S3 returns 412 Precondition Failed response code. + *

+ * @public + */ + CopySourceIfModifiedSince?: Date | undefined; + /** + *

+     * Copies the object if its entity tag (ETag) is different than the specified ETag.
+     *
+     * If both of the x-amz-copy-source-if-none-match and
+     * x-amz-copy-source-if-modified-since headers are present in the request as
+     * follows:
+     *
+     * - x-amz-copy-source-if-none-match condition evaluates to false, and;
+     *
+     * - x-amz-copy-source-if-modified-since condition evaluates to true;
+     *
+     * Amazon S3 returns a 412 Precondition Failed response code.
+     * @public
+     */
+    CopySourceIfNoneMatch?: string | undefined;
+    /**
+     * Copies the object if it hasn't been modified since the specified time.
+     *
+     * If both of the x-amz-copy-source-if-match and
+     * x-amz-copy-source-if-unmodified-since headers are present in the request as
+     * follows:
+     *
+     * - x-amz-copy-source-if-match condition evaluates to true, and;
+     *
+     * - x-amz-copy-source-if-unmodified-since condition evaluates to false;
+     *
+     * Amazon S3 returns 200 OK and copies the data.
+     * @public
+     */
+    CopySourceIfUnmodifiedSince?: Date | undefined;
+    /**
+     * The range of bytes to copy from the source object. The range value must use the form
+     * bytes=first-last, where the first and last are the zero-based byte offsets to copy. For
+     * example, bytes=0-9 indicates that you want to copy the first 10 bytes of the source. You
+     * can copy a range only if the source object is greater than 5 MB.
+     * @public
+     */
+    CopySourceRange?: string | undefined;
+    /**
+     * Object key for which the multipart upload was initiated.
+     * @public
+     */
+    Key: string | undefined;
+    /**
+     * Part number of the part being copied. This is a positive integer between 1 and
+     * 10,000.
+     * @public
+     */
+    PartNumber: number | undefined;
+    /**
+     * Upload ID identifying the multipart upload whose part is being copied.
+     * @public
+     */
+    UploadId: string | undefined;
+    /**
+     * Specifies the algorithm to use when encrypting the object (for example, AES256).
+     *
+     * Note: This functionality is not supported when the destination bucket is a directory bucket.
+     * @public
+     */
+    SSECustomerAlgorithm?: string | undefined;
+    /**
+     * Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This
+     * value is used to store the object and then it is discarded; Amazon S3 does not store the
+     * encryption key. The key must be appropriate for use with the algorithm specified in the
+     * x-amz-server-side-encryption-customer-algorithm header. This must be the
+     * same encryption key specified in the initiate multipart upload request.
+     *
+     * Note: This functionality is not supported when the destination bucket is a directory bucket.
+     * @public
+     */
+    SSECustomerKey?: string | undefined;
+    /**
+     * Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses
+     * this header for a message integrity check to ensure that the encryption key was transmitted
+     * without error.
+     *
+     * Note: This functionality is not supported when the destination bucket is a directory bucket.
+     * @public
+     */
+    SSECustomerKeyMD5?: string | undefined;
+    /**
+     * Specifies the algorithm to use when decrypting the source object (for example,
+     * AES256).
+     *
+     * Note: This functionality is not supported when the source object is in a directory bucket.
+     * @public
+     */
+    CopySourceSSECustomerAlgorithm?: string | undefined;
+    /**
+     * Specifies the customer-provided encryption key for Amazon S3 to use to decrypt the source
+     * object. The encryption key provided in this header must be one that was used when the
+     * source object was created.
+     *
+     * Note: This functionality is not supported when the source object is in a directory bucket.
+     * @public
+     */
+    CopySourceSSECustomerKey?: string | undefined;
+    /**
+     * Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses
+     * this header for a message integrity check to ensure that the encryption key was transmitted
+     * without error.
+     *
+     * Note: This functionality is not supported when the source object is in a directory bucket.
+     * @public
+     */
+    CopySourceSSECustomerKeyMD5?: string | undefined;
+    /**
+     * Confirms that the requester knows that they will be charged for the request. Bucket
+     * owners need not specify this parameter in their requests. If either the source or
+     * destination S3 bucket has Requester Pays enabled, the requester will pay for corresponding
+     * charges to copy the object. For information about downloading objects from Requester Pays
+     * buckets, see Downloading Objects in Requester Pays Buckets in the Amazon S3 User Guide.
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    RequestPayer?: RequestPayer | undefined;
+    /**
+     * The account ID of the expected destination bucket owner. If the account ID that you
+     * provide does not match the actual owner of the destination bucket, the request fails with
+     * the HTTP status code 403 Forbidden (access denied).
+     * @public
+     */
+    ExpectedBucketOwner?: string | undefined;
+    /**
+     * The account ID of the expected source bucket owner. If the account ID that you provide
+     * does not match the actual owner of the source bucket, the request fails with the HTTP
+     * status code 403 Forbidden (access denied).
+     * @public
+     */
+    ExpectedSourceBucketOwner?: string | undefined;
+}
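As an aside: a minimal sketch of how the UploadPartCopyRequest fields above fit together when one part of a multipart copy is taken from a byte range of the source object. The bucket names, object key, and upload ID are placeholders, and the optional SSE-C headers are omitted for brevity.

```typescript
import { S3Client, UploadPartCopyCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});

// Copy the first 5 MiB of the source object as part 1 of an existing
// multipart upload; the UploadId comes from a prior CreateMultipartUpload call.
const response = await client.send(
  new UploadPartCopyCommand({
    Bucket: "destination-bucket",          // placeholder destination bucket
    Key: "target-object",                  // key the multipart upload was initiated for
    UploadId: "example-upload-id",         // placeholder upload ID
    PartNumber: 1,                         // positive integer between 1 and 10,000
    CopySource: "source-bucket/source-object",
    CopySourceRange: "bytes=0-5242879",    // zero-based, inclusive byte offsets
    CopySourceIfNoneMatch: '"stale-etag"', // copy only if the source ETag differs
  })
);
console.log(response.CopyPartResult?.ETag);
```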

+/**
+ * @public
+ */
+export interface WriteGetObjectResponseRequest {
+    /**
+     * Route prefix to the HTTP URL generated.
+     * @public
+     */
+    RequestRoute: string | undefined;
+    /**
+     * A single use encrypted token that maps WriteGetObjectResponse to the end
+     * user GetObject request.
+     * @public
+     */
+    RequestToken: string | undefined;
+    /**
+     * The object data.
+     * @public
+     */
+    Body?: StreamingBlobTypes | undefined;
+    /**
+     * The integer status code for an HTTP response of a corresponding GetObject
+     * request. The following is a list of status codes:
+     *
+     * - 200 - OK
+     * - 206 - Partial Content
+     * - 304 - Not Modified
+     * - 400 - Bad Request
+     * - 401 - Unauthorized
+     * - 403 - Forbidden
+     * - 404 - Not Found
+     * - 405 - Method Not Allowed
+     * - 409 - Conflict
+     * - 411 - Length Required
+     * - 412 - Precondition Failed
+     * - 416 - Range Not Satisfiable
+     * - 500 - Internal Server Error
+     * - 503 - Service Unavailable
+     * @public
+     */
+    StatusCode?: number | undefined;

+    /**
+     * A string that uniquely identifies an error condition. Returned in the Code tag
+     * of the error XML response for a corresponding GetObject call. Cannot be used
+     * with a successful StatusCode header or when the transformed object is provided
+     * in the body. All error codes from S3 are sentence-cased. The regular expression (regex)
+     * value is "^[A-Z][a-zA-Z]+$".
+     * @public
+     */
+    ErrorCode?: string | undefined;
+    /**
+     * Contains a generic description of the error condition. Returned in the Message
+     * tag of the error XML response for a corresponding GetObject call. Cannot be
+     * used with a successful StatusCode header or when the transformed object is
+     * provided in the body.
+     * @public
+     */
+    ErrorMessage?: string | undefined;
+    /**
+     * Indicates that a range of bytes was specified.
+     * @public
+     */
+    AcceptRanges?: string | undefined;
+    /**
+     * Specifies caching behavior along the request/reply chain.
+     * @public
+     */
+    CacheControl?: string | undefined;
+    /**
+     * Specifies presentational information for the object.
+     * @public
+     */
+    ContentDisposition?: string | undefined;
+    /**
+     * Specifies what content encodings have been applied to the object and thus what decoding
+     * mechanisms must be applied to obtain the media-type referenced by the Content-Type header
+     * field.
+     * @public
+     */
+    ContentEncoding?: string | undefined;
+    /**
+     * The language the content is in.
+     * @public
+     */
+    ContentLanguage?: string | undefined;
+    /**
+     * The size of the content body in bytes.
+     * @public
+     */
+    ContentLength?: number | undefined;
+    /**
+     * The portion of the object returned in the response.
+     * @public
+     */
+    ContentRange?: string | undefined;
+    /**
+     * A standard MIME type describing the format of the object data.
+     * @public
+     */
+    ContentType?: string | undefined;
+    /**
+     * This header can be used as a data integrity check to verify that the data received is
+     * the same data that was originally sent. This specifies the Base64 encoded, 32-bit CRC32
+     * checksum of the object returned by the Object Lambda function. This may not match the
+     * checksum for the object stored in Amazon S3. Amazon S3 will perform validation of the checksum
+     * values only when the original GetObject request required checksum validation. For more
+     * information about checksums, see Checking object integrity in the Amazon S3 User Guide.
+     *
+     * Only one checksum header can be specified at a time. If you supply multiple checksum
+     * headers, this request will fail.
+     * @public
+     */
+    ChecksumCRC32?: string | undefined;
+    /**
+     * This header can be used as a data integrity check to verify that the data received is
+     * the same data that was originally sent. This specifies the Base64 encoded, 32-bit CRC32C
+     * checksum of the object returned by the Object Lambda function. This may not match the
+     * checksum for the object stored in Amazon S3. Amazon S3 will perform validation of the checksum
+     * values only when the original GetObject request required checksum validation. For more
+     * information about checksums, see Checking object integrity in the Amazon S3 User Guide.
+     *
+     * Only one checksum header can be specified at a time. If you supply multiple checksum
+     * headers, this request will fail.
+     * @public
+     */
+    ChecksumCRC32C?: string | undefined;
+    /**
+     * This header can be used as a data integrity check to verify that the data received is
+     * the same data that was originally sent. This header specifies the Base64 encoded, 64-bit
+     * CRC64NVME checksum of the part. For more information, see Checking object integrity in
+     * the Amazon S3 User Guide.
+     * @public
+     */
+    ChecksumCRC64NVME?: string | undefined;
+    /**
+     * This header can be used as a data integrity check to verify that the data received is
+     * the same data that was originally sent. This specifies the Base64 encoded, 160-bit SHA1
+     * digest of the object returned by the Object Lambda function. This may not match the
+     * checksum for the object stored in Amazon S3. Amazon S3 will perform validation of the checksum
+     * values only when the original GetObject request required checksum validation. For more
+     * information about checksums, see Checking object integrity in the Amazon S3 User Guide.
+     *
+     * Only one checksum header can be specified at a time. If you supply multiple checksum
+     * headers, this request will fail.
+     * @public
+     */
+    ChecksumSHA1?: string | undefined;
+    /**
+     * This header can be used as a data integrity check to verify that the data received is
+     * the same data that was originally sent. This specifies the Base64 encoded, 256-bit SHA256
+     * digest of the object returned by the Object Lambda function. This may not match the
+     * checksum for the object stored in Amazon S3. Amazon S3 will perform validation of the checksum
+     * values only when the original GetObject request required checksum validation. For more
+     * information about checksums, see Checking object integrity in the Amazon S3 User Guide.
+     *
+     * Only one checksum header can be specified at a time. If you supply multiple checksum
+     * headers, this request will fail.
+     * @public
+     */
+    ChecksumSHA256?: string | undefined;

+    /**
+     * Specifies whether an object stored in Amazon S3 is (true) or is not
+     * (false) a delete marker. To learn more about delete markers, see Working with delete markers.
+     * @public
+     */
+    DeleteMarker?: boolean | undefined;
+    /**
+     * An opaque identifier assigned by a web server to a specific version of a resource found
+     * at a URL.
+     * @public
+     */
+    ETag?: string | undefined;
+    /**
+     * The date and time at which the object is no longer cacheable.
+     * @public
+     */
+    Expires?: Date | undefined;
+    /**
+     * If the object expiration is configured (see PUT Bucket lifecycle), the response includes
+     * this header. It includes the expiry-date and rule-id key-value
+     * pairs that provide the object expiration information. The value of the rule-id
+     * is URL-encoded.
+     * @public
+     */
+    Expiration?: string | undefined;
+    /**
+     * The date and time that the object was last modified.
+     * @public
+     */
+    LastModified?: Date | undefined;
+    /**
+     * Set to the number of metadata entries not returned in x-amz-meta headers.
+     * This can happen if you create metadata using an API like SOAP that supports more flexible
+     * metadata than the REST API. For example, using SOAP, you can create metadata whose values
+     * are not legal HTTP headers.
+     * @public
+     */
+    MissingMeta?: number | undefined;
+    /**
+     * A map of metadata to store with the object in S3.
+     * @public
+     */
+    Metadata?: Record<string, string> | undefined;
+    /**
+     * Indicates whether an object stored in Amazon S3 has Object Lock enabled. For more information
+     * about S3 Object Lock, see Object Lock.
+     * @public
+     */
+    ObjectLockMode?: ObjectLockMode | undefined;
+    /**
+     * Indicates whether an object stored in Amazon S3 has an active legal hold.
+     * @public
+     */
+    ObjectLockLegalHoldStatus?: ObjectLockLegalHoldStatus | undefined;
+    /**
+     * The date and time when Object Lock is configured to expire.
+     * @public
+     */
+    ObjectLockRetainUntilDate?: Date | undefined;
+    /**
+     * The count of parts this object has.
+     * @public
+     */
+    PartsCount?: number | undefined;
+    /**
+     * Indicates if the request involves a bucket that is either a source or destination in a
+     * replication rule. For more information about S3 Replication, see Replication.
+     * @public
+     */
+    ReplicationStatus?: ReplicationStatus | undefined;
+    /**
+     * If present, indicates that the requester was successfully charged for the
+     * request.
+     *
+     * Note: This functionality is not supported for directory buckets.
+     * @public
+     */
+    RequestCharged?: RequestCharged | undefined;
+    /**
+     * Provides information about the object restoration operation and expiration time of the
+     * restored object copy.
+     * @public
+     */
+    Restore?: string | undefined;
+    /**
+     * The server-side encryption algorithm used when storing the requested object in Amazon S3 (for
+     * example, AES256, aws:kms).
+     * @public
+     */
+    ServerSideEncryption?: ServerSideEncryption | undefined;
+    /**
+     * Encryption algorithm used if server-side encryption with a customer-provided encryption
+     * key was specified for the object stored in Amazon S3.
+     * @public
+     */
+    SSECustomerAlgorithm?: string | undefined;
+    /**
+     * If present, specifies the ID (Key ID, Key ARN, or Key Alias) of the Amazon Web Services Key
+     * Management Service (Amazon Web Services KMS) symmetric encryption customer managed key that
+     * was used for the object stored in Amazon S3.
+     * @public
+     */
+    SSEKMSKeyId?: string | undefined;
+    /**
+     * 128-bit MD5 digest of the customer-provided encryption key used in Amazon S3 to encrypt data
+     * stored in S3. For more information, see Protecting data using server-side encryption with
+     * customer-provided encryption keys (SSE-C).
+     * @public
+     */
+    SSECustomerKeyMD5?: string | undefined;
+    /**
+     * Provides storage class information of the object. Amazon S3 returns this header for all
+     * objects except for S3 Standard storage class objects.
+     *
+     * For more information, see Storage Classes.
+     * @public
+     */
+    StorageClass?: StorageClass | undefined;
+    /**
+     * The number of tags, if any, on the object.
+     * @public
+     */
+    TagCount?: number | undefined;
+    /**
+     * An ID used to reference a specific version of the object.
+     * @public
+     */
+    VersionId?: string | undefined;
+    /**
+     * Indicates whether the object stored in Amazon S3 uses an S3 bucket key for server-side
+     * encryption with Amazon Web Services KMS (SSE-KMS).
+     * @public
+     */
+    BucketKeyEnabled?: boolean | undefined;
+}
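For orientation: this request shape feeds WriteGetObjectResponseCommand, which an S3 Object Lambda function calls to hand a (possibly transformed) object back to the original GetObject caller. A minimal handler sketch, assuming a Node.js 18+ Lambda runtime; the getObjectContext fields are the standard Object Lambda event shape, and the uppercase transform is a placeholder.

```typescript
import { S3Client, WriteGetObjectResponseCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});

export const handler = async (event: any) => {
  // Object Lambda supplies a presigned URL for the original object plus the
  // route/token pair that maps this response back to the caller's GetObject.
  const { outputRoute, outputToken, inputS3Url } = event.getObjectContext;

  const original = await fetch(inputS3Url);
  const transformed = (await original.text()).toUpperCase(); // placeholder transform

  await client.send(
    new WriteGetObjectResponseCommand({
      RequestRoute: outputRoute, // maps to the RequestRoute field above
      RequestToken: outputToken, // single-use token, maps to RequestToken above
      Body: transformed,
      StatusCode: 200,
    })
  );
  return { statusCode: 200 };
};
```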

+/**
+ * @internal
+ */
+export declare const PutBucketEncryptionRequestFilterSensitiveLog: (obj: PutBucketEncryptionRequest) => any;
+/**
+ * @internal
+ */
+export declare const PutBucketInventoryConfigurationRequestFilterSensitiveLog: (obj: PutBucketInventoryConfigurationRequest) => any;
+/**
+ * @internal
+ */
+export declare const PutObjectOutputFilterSensitiveLog: (obj: PutObjectOutput) => any;
+/**
+ * @internal
+ */
+export declare const PutObjectRequestFilterSensitiveLog: (obj: PutObjectRequest) => any;
+/**
+ * @internal
+ */
+export declare const EncryptionFilterSensitiveLog: (obj: Encryption) => any;
+/**
+ * @internal
+ */
+export declare const S3LocationFilterSensitiveLog: (obj: S3Location) => any;
+/**
+ * @internal
+ */
+export declare const OutputLocationFilterSensitiveLog: (obj: OutputLocation) => any;
+/**
+ * @internal
+ */
+export declare const RestoreRequestFilterSensitiveLog: (obj: RestoreRequest) => any;
+/**
+ * @internal
+ */
+export declare const RestoreObjectRequestFilterSensitiveLog: (obj: RestoreObjectRequest) => any;
+/**
+ * @internal
+ */
+export declare const SelectObjectContentEventStreamFilterSensitiveLog: (obj: SelectObjectContentEventStream) => any;
+/**
+ * @internal
+ */
+export declare const SelectObjectContentOutputFilterSensitiveLog: (obj: SelectObjectContentOutput) => any;
+/**
+ * @internal
+ */
+export declare const SelectObjectContentRequestFilterSensitiveLog: (obj: SelectObjectContentRequest) => any;
+/**
+ * @internal
+ */
+export declare const UploadPartOutputFilterSensitiveLog: (obj: UploadPartOutput) => any;
+/**
+ * @internal
+ */
+export declare const UploadPartRequestFilterSensitiveLog: (obj: UploadPartRequest) => any;
+/**
+ * @internal
+ */
+export declare const UploadPartCopyOutputFilterSensitiveLog: (obj: UploadPartCopyOutput) => any;
+/**
+ * @internal
+ */
+export declare const UploadPartCopyRequestFilterSensitiveLog: (obj: UploadPartCopyRequest) => any;
+/**
+ * @internal
+ */
+export declare const WriteGetObjectResponseRequestFilterSensitiveLog: (obj: WriteGetObjectResponseRequest) => any;
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/pagination/Interfaces.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/pagination/Interfaces.d.ts
new file mode 100644
index 00000000..ea3155da
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/pagination/Interfaces.d.ts
@@ -0,0 +1,8 @@
+import { PaginationConfiguration } from "@smithy/types";
+import { S3Client } from "../S3Client";
+/**
+ * @public
+ */
+export interface S3PaginationConfiguration extends PaginationConfiguration {
+    client: S3Client;
+}
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/pagination/ListBucketsPaginator.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/pagination/ListBucketsPaginator.d.ts
new file mode 100644
index 00000000..32ba8009
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/pagination/ListBucketsPaginator.d.ts
@@ -0,0 +1,7 @@
+import { Paginator } from "@smithy/types";
+import { ListBucketsCommandInput, ListBucketsCommandOutput } from "../commands/ListBucketsCommand";
+import { S3PaginationConfiguration } from "./Interfaces";
+/**
+ * @public
+ */
+export declare const paginateListBuckets: (config: S3PaginationConfiguration, input: ListBucketsCommandInput, ...rest: any[]) => Paginator<ListBucketsCommandOutput>;
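The paginators introduced here (and in the sibling files that follow) wrap their List* commands behind an async iterator that threads continuation tokens between requests. A minimal consumption sketch, assuming an SDK version that exports paginateListBuckets from the package root:

```typescript
import { S3Client, paginateListBuckets } from "@aws-sdk/client-s3";

const client = new S3Client({});

// Each iteration yields one ListBucketsCommandOutput page; the paginator
// issues follow-up requests with the continuation token automatically.
for await (const page of paginateListBuckets({ client, pageSize: 50 }, {})) {
  for (const bucket of page.Buckets ?? []) {
    console.log(bucket.Name);
  }
}
```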
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/pagination/ListDirectoryBucketsPaginator.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/pagination/ListDirectoryBucketsPaginator.d.ts
new file mode 100644
index 00000000..4003b928
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/pagination/ListDirectoryBucketsPaginator.d.ts
@@ -0,0 +1,7 @@
+import { Paginator } from "@smithy/types";
+import { ListDirectoryBucketsCommandInput, ListDirectoryBucketsCommandOutput } from "../commands/ListDirectoryBucketsCommand";
+import { S3PaginationConfiguration } from "./Interfaces";
+/**
+ * @public
+ */
+export declare const paginateListDirectoryBuckets: (config: S3PaginationConfiguration, input: ListDirectoryBucketsCommandInput, ...rest: any[]) => Paginator<ListDirectoryBucketsCommandOutput>;
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/pagination/ListObjectsV2Paginator.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/pagination/ListObjectsV2Paginator.d.ts
new file mode 100644
index 00000000..fd6301e4
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/pagination/ListObjectsV2Paginator.d.ts
@@ -0,0 +1,7 @@
+import { Paginator } from "@smithy/types";
+import { ListObjectsV2CommandInput, ListObjectsV2CommandOutput } from "../commands/ListObjectsV2Command";
+import { S3PaginationConfiguration } from "./Interfaces";
+/**
+ * @public
+ */
+export declare const paginateListObjectsV2: (config: S3PaginationConfiguration, input: ListObjectsV2CommandInput, ...rest: any[]) => Paginator<ListObjectsV2CommandOutput>;
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/pagination/ListPartsPaginator.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/pagination/ListPartsPaginator.d.ts
new file mode 100644
index 00000000..1baf3a1d
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/pagination/ListPartsPaginator.d.ts
@@ -0,0 +1,7 @@
+import { Paginator } from "@smithy/types";
+import { ListPartsCommandInput, ListPartsCommandOutput } from "../commands/ListPartsCommand";
+import { S3PaginationConfiguration } from "./Interfaces";
+/**
+ * @public
+ */
+export declare const paginateListParts: (config: S3PaginationConfiguration, input: ListPartsCommandInput, ...rest: any[]) => Paginator<ListPartsCommandOutput>;
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/pagination/index.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/pagination/index.d.ts
new file mode 100644
index 00000000..9438ebe6
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/pagination/index.d.ts
@@ -0,0 +1,5 @@
+export * from "./Interfaces";
+export * from "./ListBucketsPaginator";
+export * from "./ListDirectoryBucketsPaginator";
+export * from "./ListObjectsV2Paginator";
+export * from "./ListPartsPaginator";
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/protocols/Aws_restXml.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/protocols/Aws_restXml.d.ts
new file mode 100644
index 00000000..0b30f3d4
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/protocols/Aws_restXml.d.ts
@@ -0,0 +1,884 @@
+import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@smithy/protocol-http";
+import { EventStreamSerdeContext as __EventStreamSerdeContext, SdkStreamSerdeContext as __SdkStreamSerdeContext, SerdeContext as __SerdeContext } from "@smithy/types";
+import { AbortMultipartUploadCommandInput, AbortMultipartUploadCommandOutput } from "../commands/AbortMultipartUploadCommand";
+import { CompleteMultipartUploadCommandInput, CompleteMultipartUploadCommandOutput } from "../commands/CompleteMultipartUploadCommand";
+import { CopyObjectCommandInput, CopyObjectCommandOutput } from "../commands/CopyObjectCommand";
+import { CreateBucketCommandInput, CreateBucketCommandOutput } from "../commands/CreateBucketCommand";
+import { CreateBucketMetadataTableConfigurationCommandInput, CreateBucketMetadataTableConfigurationCommandOutput } from "../commands/CreateBucketMetadataTableConfigurationCommand";
+import { CreateMultipartUploadCommandInput, CreateMultipartUploadCommandOutput } from "../commands/CreateMultipartUploadCommand";
+import { CreateSessionCommandInput, CreateSessionCommandOutput } from "../commands/CreateSessionCommand";
+import { DeleteBucketAnalyticsConfigurationCommandInput, DeleteBucketAnalyticsConfigurationCommandOutput } from "../commands/DeleteBucketAnalyticsConfigurationCommand";
+import { DeleteBucketCommandInput, DeleteBucketCommandOutput } from "../commands/DeleteBucketCommand";
+import { DeleteBucketCorsCommandInput, DeleteBucketCorsCommandOutput } from "../commands/DeleteBucketCorsCommand";
+import { DeleteBucketEncryptionCommandInput, DeleteBucketEncryptionCommandOutput } from "../commands/DeleteBucketEncryptionCommand";
+import { DeleteBucketIntelligentTieringConfigurationCommandInput, DeleteBucketIntelligentTieringConfigurationCommandOutput } from "../commands/DeleteBucketIntelligentTieringConfigurationCommand";
+import { DeleteBucketInventoryConfigurationCommandInput, DeleteBucketInventoryConfigurationCommandOutput } from "../commands/DeleteBucketInventoryConfigurationCommand";
+import { DeleteBucketLifecycleCommandInput, DeleteBucketLifecycleCommandOutput } from "../commands/DeleteBucketLifecycleCommand";
+import { DeleteBucketMetadataTableConfigurationCommandInput, DeleteBucketMetadataTableConfigurationCommandOutput } from "../commands/DeleteBucketMetadataTableConfigurationCommand";
+import { DeleteBucketMetricsConfigurationCommandInput, DeleteBucketMetricsConfigurationCommandOutput } from "../commands/DeleteBucketMetricsConfigurationCommand";
+import { DeleteBucketOwnershipControlsCommandInput, DeleteBucketOwnershipControlsCommandOutput } from "../commands/DeleteBucketOwnershipControlsCommand";
+import { DeleteBucketPolicyCommandInput, DeleteBucketPolicyCommandOutput } from "../commands/DeleteBucketPolicyCommand";
+import { DeleteBucketReplicationCommandInput, DeleteBucketReplicationCommandOutput } from "../commands/DeleteBucketReplicationCommand";
+import { DeleteBucketTaggingCommandInput, DeleteBucketTaggingCommandOutput } from "../commands/DeleteBucketTaggingCommand";
+import { DeleteBucketWebsiteCommandInput, DeleteBucketWebsiteCommandOutput } from "../commands/DeleteBucketWebsiteCommand";
+import { DeleteObjectCommandInput, DeleteObjectCommandOutput } from "../commands/DeleteObjectCommand";
+import { DeleteObjectsCommandInput, DeleteObjectsCommandOutput } from "../commands/DeleteObjectsCommand";
+import { DeleteObjectTaggingCommandInput, DeleteObjectTaggingCommandOutput } from "../commands/DeleteObjectTaggingCommand";
+import { DeletePublicAccessBlockCommandInput, DeletePublicAccessBlockCommandOutput } from "../commands/DeletePublicAccessBlockCommand";
+import { GetBucketAccelerateConfigurationCommandInput, GetBucketAccelerateConfigurationCommandOutput } from "../commands/GetBucketAccelerateConfigurationCommand";
+import { GetBucketAclCommandInput, GetBucketAclCommandOutput } from "../commands/GetBucketAclCommand";
+import { GetBucketAnalyticsConfigurationCommandInput, GetBucketAnalyticsConfigurationCommandOutput } from "../commands/GetBucketAnalyticsConfigurationCommand";
+import { GetBucketCorsCommandInput, GetBucketCorsCommandOutput } from "../commands/GetBucketCorsCommand";
+import { GetBucketEncryptionCommandInput, GetBucketEncryptionCommandOutput } from "../commands/GetBucketEncryptionCommand";
+import { GetBucketIntelligentTieringConfigurationCommandInput, GetBucketIntelligentTieringConfigurationCommandOutput } from "../commands/GetBucketIntelligentTieringConfigurationCommand";
+import { GetBucketInventoryConfigurationCommandInput, GetBucketInventoryConfigurationCommandOutput } from "../commands/GetBucketInventoryConfigurationCommand";
+import { GetBucketLifecycleConfigurationCommandInput, GetBucketLifecycleConfigurationCommandOutput } from "../commands/GetBucketLifecycleConfigurationCommand";
+import { GetBucketLocationCommandInput, GetBucketLocationCommandOutput } from "../commands/GetBucketLocationCommand";
+import { GetBucketLoggingCommandInput, GetBucketLoggingCommandOutput } from "../commands/GetBucketLoggingCommand";
+import { GetBucketMetadataTableConfigurationCommandInput, GetBucketMetadataTableConfigurationCommandOutput } from "../commands/GetBucketMetadataTableConfigurationCommand";
+import { GetBucketMetricsConfigurationCommandInput, GetBucketMetricsConfigurationCommandOutput } from "../commands/GetBucketMetricsConfigurationCommand";
+import { GetBucketNotificationConfigurationCommandInput, GetBucketNotificationConfigurationCommandOutput } from "../commands/GetBucketNotificationConfigurationCommand";
+import { GetBucketOwnershipControlsCommandInput, GetBucketOwnershipControlsCommandOutput } from "../commands/GetBucketOwnershipControlsCommand";
+import { GetBucketPolicyCommandInput, GetBucketPolicyCommandOutput } from "../commands/GetBucketPolicyCommand";
+import { GetBucketPolicyStatusCommandInput, GetBucketPolicyStatusCommandOutput } from "../commands/GetBucketPolicyStatusCommand";
+import { GetBucketReplicationCommandInput, GetBucketReplicationCommandOutput } from "../commands/GetBucketReplicationCommand";
+import { GetBucketRequestPaymentCommandInput, GetBucketRequestPaymentCommandOutput } from "../commands/GetBucketRequestPaymentCommand";
+import { GetBucketTaggingCommandInput, GetBucketTaggingCommandOutput } from "../commands/GetBucketTaggingCommand";
+import { GetBucketVersioningCommandInput, GetBucketVersioningCommandOutput } from "../commands/GetBucketVersioningCommand";
+import { GetBucketWebsiteCommandInput, GetBucketWebsiteCommandOutput } from "../commands/GetBucketWebsiteCommand";
+import { GetObjectAclCommandInput, GetObjectAclCommandOutput } from "../commands/GetObjectAclCommand";
+import { GetObjectAttributesCommandInput, GetObjectAttributesCommandOutput } from "../commands/GetObjectAttributesCommand";
+import { GetObjectCommandInput, GetObjectCommandOutput } from "../commands/GetObjectCommand";
+import { GetObjectLegalHoldCommandInput, GetObjectLegalHoldCommandOutput } from "../commands/GetObjectLegalHoldCommand";
+import { GetObjectLockConfigurationCommandInput, GetObjectLockConfigurationCommandOutput } from "../commands/GetObjectLockConfigurationCommand";
+import { GetObjectRetentionCommandInput, GetObjectRetentionCommandOutput } from "../commands/GetObjectRetentionCommand";
+import { GetObjectTaggingCommandInput, GetObjectTaggingCommandOutput } from "../commands/GetObjectTaggingCommand";
+import { GetObjectTorrentCommandInput, GetObjectTorrentCommandOutput } from "../commands/GetObjectTorrentCommand";
+import { GetPublicAccessBlockCommandInput, GetPublicAccessBlockCommandOutput } from "../commands/GetPublicAccessBlockCommand";
+import { HeadBucketCommandInput, HeadBucketCommandOutput } from "../commands/HeadBucketCommand";
+import { HeadObjectCommandInput, HeadObjectCommandOutput } from "../commands/HeadObjectCommand";
+import { ListBucketAnalyticsConfigurationsCommandInput, ListBucketAnalyticsConfigurationsCommandOutput } from "../commands/ListBucketAnalyticsConfigurationsCommand";
+import { ListBucketIntelligentTieringConfigurationsCommandInput, ListBucketIntelligentTieringConfigurationsCommandOutput } from "../commands/ListBucketIntelligentTieringConfigurationsCommand";
+import { ListBucketInventoryConfigurationsCommandInput, ListBucketInventoryConfigurationsCommandOutput } from "../commands/ListBucketInventoryConfigurationsCommand";
+import { ListBucketMetricsConfigurationsCommandInput, ListBucketMetricsConfigurationsCommandOutput } from "../commands/ListBucketMetricsConfigurationsCommand";
+import { ListBucketsCommandInput, ListBucketsCommandOutput } from "../commands/ListBucketsCommand";
+import { ListDirectoryBucketsCommandInput, ListDirectoryBucketsCommandOutput } from "../commands/ListDirectoryBucketsCommand";
+import { ListMultipartUploadsCommandInput, ListMultipartUploadsCommandOutput } from "../commands/ListMultipartUploadsCommand";
+import { ListObjectsCommandInput, ListObjectsCommandOutput } from "../commands/ListObjectsCommand";
+import { ListObjectsV2CommandInput, ListObjectsV2CommandOutput } from "../commands/ListObjectsV2Command";
+import { ListObjectVersionsCommandInput, ListObjectVersionsCommandOutput } from "../commands/ListObjectVersionsCommand";
+import { ListPartsCommandInput, ListPartsCommandOutput } from "../commands/ListPartsCommand";
+import { PutBucketAccelerateConfigurationCommandInput, PutBucketAccelerateConfigurationCommandOutput } from "../commands/PutBucketAccelerateConfigurationCommand";
+import { PutBucketAclCommandInput, PutBucketAclCommandOutput } from "../commands/PutBucketAclCommand";
+import { PutBucketAnalyticsConfigurationCommandInput, PutBucketAnalyticsConfigurationCommandOutput } from "../commands/PutBucketAnalyticsConfigurationCommand";
+import { PutBucketCorsCommandInput, PutBucketCorsCommandOutput } from "../commands/PutBucketCorsCommand";
+import { PutBucketEncryptionCommandInput, PutBucketEncryptionCommandOutput } from "../commands/PutBucketEncryptionCommand";
+import { PutBucketIntelligentTieringConfigurationCommandInput, PutBucketIntelligentTieringConfigurationCommandOutput } from "../commands/PutBucketIntelligentTieringConfigurationCommand";
+import { PutBucketInventoryConfigurationCommandInput, PutBucketInventoryConfigurationCommandOutput } from "../commands/PutBucketInventoryConfigurationCommand";
+import { PutBucketLifecycleConfigurationCommandInput, PutBucketLifecycleConfigurationCommandOutput } from "../commands/PutBucketLifecycleConfigurationCommand";
+import { PutBucketLoggingCommandInput, PutBucketLoggingCommandOutput } from "../commands/PutBucketLoggingCommand";
+import { PutBucketMetricsConfigurationCommandInput, PutBucketMetricsConfigurationCommandOutput } from "../commands/PutBucketMetricsConfigurationCommand";
+import { PutBucketNotificationConfigurationCommandInput, PutBucketNotificationConfigurationCommandOutput } from "../commands/PutBucketNotificationConfigurationCommand";
+import { PutBucketOwnershipControlsCommandInput, PutBucketOwnershipControlsCommandOutput } from "../commands/PutBucketOwnershipControlsCommand";
+import { PutBucketPolicyCommandInput, PutBucketPolicyCommandOutput } from "../commands/PutBucketPolicyCommand";
+import { PutBucketReplicationCommandInput, PutBucketReplicationCommandOutput } from "../commands/PutBucketReplicationCommand";
"../commands/PutBucketReplicationCommand"; +import { PutBucketRequestPaymentCommandInput, PutBucketRequestPaymentCommandOutput } from "../commands/PutBucketRequestPaymentCommand"; +import { PutBucketTaggingCommandInput, PutBucketTaggingCommandOutput } from "../commands/PutBucketTaggingCommand"; +import { PutBucketVersioningCommandInput, PutBucketVersioningCommandOutput } from "../commands/PutBucketVersioningCommand"; +import { PutBucketWebsiteCommandInput, PutBucketWebsiteCommandOutput } from "../commands/PutBucketWebsiteCommand"; +import { PutObjectAclCommandInput, PutObjectAclCommandOutput } from "../commands/PutObjectAclCommand"; +import { PutObjectCommandInput, PutObjectCommandOutput } from "../commands/PutObjectCommand"; +import { PutObjectLegalHoldCommandInput, PutObjectLegalHoldCommandOutput } from "../commands/PutObjectLegalHoldCommand"; +import { PutObjectLockConfigurationCommandInput, PutObjectLockConfigurationCommandOutput } from "../commands/PutObjectLockConfigurationCommand"; +import { PutObjectRetentionCommandInput, PutObjectRetentionCommandOutput } from "../commands/PutObjectRetentionCommand"; +import { PutObjectTaggingCommandInput, PutObjectTaggingCommandOutput } from "../commands/PutObjectTaggingCommand"; +import { PutPublicAccessBlockCommandInput, PutPublicAccessBlockCommandOutput } from "../commands/PutPublicAccessBlockCommand"; +import { RestoreObjectCommandInput, RestoreObjectCommandOutput } from "../commands/RestoreObjectCommand"; +import { SelectObjectContentCommandInput, SelectObjectContentCommandOutput } from "../commands/SelectObjectContentCommand"; +import { UploadPartCommandInput, UploadPartCommandOutput } from "../commands/UploadPartCommand"; +import { UploadPartCopyCommandInput, UploadPartCopyCommandOutput } from "../commands/UploadPartCopyCommand"; +import { WriteGetObjectResponseCommandInput, WriteGetObjectResponseCommandOutput } from "../commands/WriteGetObjectResponseCommand"; +/** + * serializeAws_restXmlAbortMultipartUploadCommand + */ +export declare const se_AbortMultipartUploadCommand: (input: AbortMultipartUploadCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_restXmlCompleteMultipartUploadCommand + */ +export declare const se_CompleteMultipartUploadCommand: (input: CompleteMultipartUploadCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_restXmlCopyObjectCommand + */ +export declare const se_CopyObjectCommand: (input: CopyObjectCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_restXmlCreateBucketCommand + */ +export declare const se_CreateBucketCommand: (input: CreateBucketCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_restXmlCreateBucketMetadataTableConfigurationCommand + */ +export declare const se_CreateBucketMetadataTableConfigurationCommand: (input: CreateBucketMetadataTableConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_restXmlCreateMultipartUploadCommand + */ +export declare const se_CreateMultipartUploadCommand: (input: CreateMultipartUploadCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_restXmlCreateSessionCommand + */ +export declare const se_CreateSessionCommand: (input: CreateSessionCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_restXmlDeleteBucketCommand + */ +export declare const se_DeleteBucketCommand: (input: DeleteBucketCommandInput, context: 
+/**
+ * serializeAws_restXmlDeleteBucketAnalyticsConfigurationCommand
+ */
+export declare const se_DeleteBucketAnalyticsConfigurationCommand: (input: DeleteBucketAnalyticsConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlDeleteBucketCorsCommand
+ */
+export declare const se_DeleteBucketCorsCommand: (input: DeleteBucketCorsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlDeleteBucketEncryptionCommand
+ */
+export declare const se_DeleteBucketEncryptionCommand: (input: DeleteBucketEncryptionCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlDeleteBucketIntelligentTieringConfigurationCommand
+ */
+export declare const se_DeleteBucketIntelligentTieringConfigurationCommand: (input: DeleteBucketIntelligentTieringConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlDeleteBucketInventoryConfigurationCommand
+ */
+export declare const se_DeleteBucketInventoryConfigurationCommand: (input: DeleteBucketInventoryConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlDeleteBucketLifecycleCommand
+ */
+export declare const se_DeleteBucketLifecycleCommand: (input: DeleteBucketLifecycleCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlDeleteBucketMetadataTableConfigurationCommand
+ */
+export declare const se_DeleteBucketMetadataTableConfigurationCommand: (input: DeleteBucketMetadataTableConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlDeleteBucketMetricsConfigurationCommand
+ */
+export declare const se_DeleteBucketMetricsConfigurationCommand: (input: DeleteBucketMetricsConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlDeleteBucketOwnershipControlsCommand
+ */
+export declare const se_DeleteBucketOwnershipControlsCommand: (input: DeleteBucketOwnershipControlsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlDeleteBucketPolicyCommand
+ */
+export declare const se_DeleteBucketPolicyCommand: (input: DeleteBucketPolicyCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlDeleteBucketReplicationCommand
+ */
+export declare const se_DeleteBucketReplicationCommand: (input: DeleteBucketReplicationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlDeleteBucketTaggingCommand
+ */
+export declare const se_DeleteBucketTaggingCommand: (input: DeleteBucketTaggingCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlDeleteBucketWebsiteCommand
+ */
+export declare const se_DeleteBucketWebsiteCommand: (input: DeleteBucketWebsiteCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlDeleteObjectCommand
+ */
+export declare const se_DeleteObjectCommand: (input: DeleteObjectCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlDeleteObjectsCommand
+ */
+export declare const se_DeleteObjectsCommand: (input: DeleteObjectsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlDeleteObjectTaggingCommand
+ */
+export declare const se_DeleteObjectTaggingCommand: (input: DeleteObjectTaggingCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlDeletePublicAccessBlockCommand
+ */
+export declare const se_DeletePublicAccessBlockCommand: (input: DeletePublicAccessBlockCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketAccelerateConfigurationCommand
+ */
+export declare const se_GetBucketAccelerateConfigurationCommand: (input: GetBucketAccelerateConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketAclCommand
+ */
+export declare const se_GetBucketAclCommand: (input: GetBucketAclCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketAnalyticsConfigurationCommand
+ */
+export declare const se_GetBucketAnalyticsConfigurationCommand: (input: GetBucketAnalyticsConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketCorsCommand
+ */
+export declare const se_GetBucketCorsCommand: (input: GetBucketCorsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketEncryptionCommand
+ */
+export declare const se_GetBucketEncryptionCommand: (input: GetBucketEncryptionCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketIntelligentTieringConfigurationCommand
+ */
+export declare const se_GetBucketIntelligentTieringConfigurationCommand: (input: GetBucketIntelligentTieringConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketInventoryConfigurationCommand
+ */
+export declare const se_GetBucketInventoryConfigurationCommand: (input: GetBucketInventoryConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketLifecycleConfigurationCommand
+ */
+export declare const se_GetBucketLifecycleConfigurationCommand: (input: GetBucketLifecycleConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketLocationCommand
+ */
+export declare const se_GetBucketLocationCommand: (input: GetBucketLocationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketLoggingCommand
+ */
+export declare const se_GetBucketLoggingCommand: (input: GetBucketLoggingCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketMetadataTableConfigurationCommand
+ */
+export declare const se_GetBucketMetadataTableConfigurationCommand: (input: GetBucketMetadataTableConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketMetricsConfigurationCommand
+ */
+export declare const se_GetBucketMetricsConfigurationCommand: (input: GetBucketMetricsConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketNotificationConfigurationCommand
+ */
+export declare const se_GetBucketNotificationConfigurationCommand: (input: GetBucketNotificationConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketOwnershipControlsCommand
+ */
+export declare const se_GetBucketOwnershipControlsCommand: (input: GetBucketOwnershipControlsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketPolicyCommand
+ */
+export declare const se_GetBucketPolicyCommand: (input: GetBucketPolicyCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketPolicyStatusCommand
+ */
+export declare const se_GetBucketPolicyStatusCommand: (input: GetBucketPolicyStatusCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketReplicationCommand
+ */
+export declare const se_GetBucketReplicationCommand: (input: GetBucketReplicationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketRequestPaymentCommand
+ */
+export declare const se_GetBucketRequestPaymentCommand: (input: GetBucketRequestPaymentCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketTaggingCommand
+ */
+export declare const se_GetBucketTaggingCommand: (input: GetBucketTaggingCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketVersioningCommand
+ */
+export declare const se_GetBucketVersioningCommand: (input: GetBucketVersioningCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetBucketWebsiteCommand
+ */
+export declare const se_GetBucketWebsiteCommand: (input: GetBucketWebsiteCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetObjectCommand
+ */
+export declare const se_GetObjectCommand: (input: GetObjectCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetObjectAclCommand
+ */
+export declare const se_GetObjectAclCommand: (input: GetObjectAclCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetObjectAttributesCommand
+ */
+export declare const se_GetObjectAttributesCommand: (input: GetObjectAttributesCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetObjectLegalHoldCommand
+ */
+export declare const se_GetObjectLegalHoldCommand: (input: GetObjectLegalHoldCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetObjectLockConfigurationCommand
+ */
+export declare const se_GetObjectLockConfigurationCommand: (input: GetObjectLockConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetObjectRetentionCommand
+ */
+export declare const se_GetObjectRetentionCommand: (input: GetObjectRetentionCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetObjectTaggingCommand
+ */
+export declare const se_GetObjectTaggingCommand: (input: GetObjectTaggingCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetObjectTorrentCommand
+ */
+export declare const se_GetObjectTorrentCommand: (input: GetObjectTorrentCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlGetPublicAccessBlockCommand
+ */
+export declare const se_GetPublicAccessBlockCommand: (input: GetPublicAccessBlockCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlHeadBucketCommand
+ */
+export declare const se_HeadBucketCommand: (input: HeadBucketCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlHeadObjectCommand
+ */
+export declare const se_HeadObjectCommand: (input: HeadObjectCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlListBucketAnalyticsConfigurationsCommand
+ */
+export declare const se_ListBucketAnalyticsConfigurationsCommand: (input: ListBucketAnalyticsConfigurationsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlListBucketIntelligentTieringConfigurationsCommand
+ */
+export declare const se_ListBucketIntelligentTieringConfigurationsCommand: (input: ListBucketIntelligentTieringConfigurationsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlListBucketInventoryConfigurationsCommand
+ */
+export declare const se_ListBucketInventoryConfigurationsCommand: (input: ListBucketInventoryConfigurationsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlListBucketMetricsConfigurationsCommand
+ */
+export declare const se_ListBucketMetricsConfigurationsCommand: (input: ListBucketMetricsConfigurationsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlListBucketsCommand
+ */
+export declare const se_ListBucketsCommand: (input: ListBucketsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlListDirectoryBucketsCommand
+ */
+export declare const se_ListDirectoryBucketsCommand: (input: ListDirectoryBucketsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlListMultipartUploadsCommand
+ */
+export declare const se_ListMultipartUploadsCommand: (input: ListMultipartUploadsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlListObjectsCommand
+ */
+export declare const se_ListObjectsCommand: (input: ListObjectsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlListObjectsV2Command
+ */
+export declare const se_ListObjectsV2Command: (input: ListObjectsV2CommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlListObjectVersionsCommand
+ */
+export declare const se_ListObjectVersionsCommand: (input: ListObjectVersionsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlListPartsCommand
+ */
+export declare const se_ListPartsCommand: (input: ListPartsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutBucketAccelerateConfigurationCommand
+ */
+export declare const se_PutBucketAccelerateConfigurationCommand: (input: PutBucketAccelerateConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutBucketAclCommand
+ */
+export declare const se_PutBucketAclCommand: (input: PutBucketAclCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutBucketAnalyticsConfigurationCommand
+ */
+export declare const se_PutBucketAnalyticsConfigurationCommand: (input: PutBucketAnalyticsConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutBucketCorsCommand
+ */
+export declare const se_PutBucketCorsCommand: (input: PutBucketCorsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutBucketEncryptionCommand
+ */
+export declare const se_PutBucketEncryptionCommand: (input: PutBucketEncryptionCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutBucketIntelligentTieringConfigurationCommand
+ */
+export declare const se_PutBucketIntelligentTieringConfigurationCommand: (input: PutBucketIntelligentTieringConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutBucketInventoryConfigurationCommand
+ */
+export declare const se_PutBucketInventoryConfigurationCommand: (input: PutBucketInventoryConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutBucketLifecycleConfigurationCommand
+ */
+export declare const se_PutBucketLifecycleConfigurationCommand: (input: PutBucketLifecycleConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutBucketLoggingCommand
+ */
+export declare const se_PutBucketLoggingCommand: (input: PutBucketLoggingCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutBucketMetricsConfigurationCommand
+ */
+export declare const se_PutBucketMetricsConfigurationCommand: (input: PutBucketMetricsConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutBucketNotificationConfigurationCommand
+ */
+export declare const se_PutBucketNotificationConfigurationCommand: (input: PutBucketNotificationConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutBucketOwnershipControlsCommand
+ */
+export declare const se_PutBucketOwnershipControlsCommand: (input: PutBucketOwnershipControlsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutBucketPolicyCommand
+ */
+export declare const se_PutBucketPolicyCommand: (input: PutBucketPolicyCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutBucketReplicationCommand
+ */
+export declare const se_PutBucketReplicationCommand: (input: PutBucketReplicationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutBucketRequestPaymentCommand
+ */
+export declare const se_PutBucketRequestPaymentCommand: (input: PutBucketRequestPaymentCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutBucketTaggingCommand
+ */
+export declare const se_PutBucketTaggingCommand: (input: PutBucketTaggingCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutBucketVersioningCommand
+ */
+export declare const se_PutBucketVersioningCommand: (input: PutBucketVersioningCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutBucketWebsiteCommand
+ */
+export declare const se_PutBucketWebsiteCommand: (input: PutBucketWebsiteCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutObjectCommand
+ */
+export declare const se_PutObjectCommand: (input: PutObjectCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutObjectAclCommand
+ */
+export declare const se_PutObjectAclCommand: (input: PutObjectAclCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutObjectLegalHoldCommand
+ */
+export declare const se_PutObjectLegalHoldCommand: (input: PutObjectLegalHoldCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutObjectLockConfigurationCommand
+ */
+export declare const se_PutObjectLockConfigurationCommand: (input: PutObjectLockConfigurationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutObjectRetentionCommand
+ */
+export declare const se_PutObjectRetentionCommand: (input: PutObjectRetentionCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutObjectTaggingCommand
+ */
+export declare const se_PutObjectTaggingCommand: (input: PutObjectTaggingCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlPutPublicAccessBlockCommand
+ */
+export declare const se_PutPublicAccessBlockCommand: (input: PutPublicAccessBlockCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlRestoreObjectCommand
+ */
+export declare const se_RestoreObjectCommand: (input: RestoreObjectCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlSelectObjectContentCommand
+ */
+export declare const se_SelectObjectContentCommand: (input: SelectObjectContentCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlUploadPartCommand
+ */
+export declare const se_UploadPartCommand: (input: UploadPartCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlUploadPartCopyCommand
+ */
+export declare const se_UploadPartCopyCommand: (input: UploadPartCopyCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restXmlWriteGetObjectResponseCommand
+ */
+export declare const se_WriteGetObjectResponseCommand: (input: WriteGetObjectResponseCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * deserializeAws_restXmlAbortMultipartUploadCommand
+ */
+export declare const de_AbortMultipartUploadCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<AbortMultipartUploadCommandOutput>;
+/**
+ * deserializeAws_restXmlCompleteMultipartUploadCommand
+ */
+export declare const de_CompleteMultipartUploadCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<CompleteMultipartUploadCommandOutput>;
+/**
+ * deserializeAws_restXmlCopyObjectCommand
+ */
+export declare const de_CopyObjectCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<CopyObjectCommandOutput>;
+/**
+ * deserializeAws_restXmlCreateBucketCommand
+ */
+export declare const de_CreateBucketCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<CreateBucketCommandOutput>;
+/**
+ * deserializeAws_restXmlCreateBucketMetadataTableConfigurationCommand
+ */
+export declare const de_CreateBucketMetadataTableConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<CreateBucketMetadataTableConfigurationCommandOutput>;
+/**
+ * deserializeAws_restXmlCreateMultipartUploadCommand
+ */
+export declare const de_CreateMultipartUploadCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<CreateMultipartUploadCommandOutput>;
+/**
+ * deserializeAws_restXmlCreateSessionCommand
+ */
+export declare const de_CreateSessionCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<CreateSessionCommandOutput>;
+/**
+ * deserializeAws_restXmlDeleteBucketCommand
+ */
+export declare const de_DeleteBucketCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<DeleteBucketCommandOutput>;
+/**
+ * deserializeAws_restXmlDeleteBucketAnalyticsConfigurationCommand
+ */
+export declare const de_DeleteBucketAnalyticsConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<DeleteBucketAnalyticsConfigurationCommandOutput>;
+/**
+ * deserializeAws_restXmlDeleteBucketCorsCommand
+ */
+export declare const de_DeleteBucketCorsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<DeleteBucketCorsCommandOutput>;
+/**
+ * deserializeAws_restXmlDeleteBucketEncryptionCommand
+ */
+export declare const de_DeleteBucketEncryptionCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<DeleteBucketEncryptionCommandOutput>;
+/**
+ * deserializeAws_restXmlDeleteBucketIntelligentTieringConfigurationCommand
+ */
+export declare const de_DeleteBucketIntelligentTieringConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<DeleteBucketIntelligentTieringConfigurationCommandOutput>;
+/**
+ * deserializeAws_restXmlDeleteBucketInventoryConfigurationCommand
+ */
+export declare const de_DeleteBucketInventoryConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<DeleteBucketInventoryConfigurationCommandOutput>;
+/**
+ * deserializeAws_restXmlDeleteBucketLifecycleCommand
+ */
+export declare const de_DeleteBucketLifecycleCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<DeleteBucketLifecycleCommandOutput>;
+/**
+ * deserializeAws_restXmlDeleteBucketMetadataTableConfigurationCommand
+ */
+export declare const de_DeleteBucketMetadataTableConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<DeleteBucketMetadataTableConfigurationCommandOutput>;
+/**
+ * deserializeAws_restXmlDeleteBucketMetricsConfigurationCommand
+ */
+export declare const de_DeleteBucketMetricsConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<DeleteBucketMetricsConfigurationCommandOutput>;
+/**
+ * deserializeAws_restXmlDeleteBucketOwnershipControlsCommand
+ */
+export declare const de_DeleteBucketOwnershipControlsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<DeleteBucketOwnershipControlsCommandOutput>;
+/**
+ * deserializeAws_restXmlDeleteBucketPolicyCommand
+ */
+export declare const de_DeleteBucketPolicyCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<DeleteBucketPolicyCommandOutput>;
+/**
+ * deserializeAws_restXmlDeleteBucketReplicationCommand
+ */
+export declare const de_DeleteBucketReplicationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<DeleteBucketReplicationCommandOutput>;
+/**
+ * deserializeAws_restXmlDeleteBucketTaggingCommand
+ */
+export declare const de_DeleteBucketTaggingCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<DeleteBucketTaggingCommandOutput>;
+/**
+ * deserializeAws_restXmlDeleteBucketWebsiteCommand
+ */
+export declare const de_DeleteBucketWebsiteCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<DeleteBucketWebsiteCommandOutput>;
+/**
+ * deserializeAws_restXmlDeleteObjectCommand
+ */
+export declare const de_DeleteObjectCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<DeleteObjectCommandOutput>;
+/**
+ * deserializeAws_restXmlDeleteObjectsCommand
+ */
+export declare const de_DeleteObjectsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<DeleteObjectsCommandOutput>;
+/**
+ * deserializeAws_restXmlDeleteObjectTaggingCommand
+ */
+export declare const de_DeleteObjectTaggingCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<DeleteObjectTaggingCommandOutput>;
+/**
+ * deserializeAws_restXmlDeletePublicAccessBlockCommand
+ */
+export declare const de_DeletePublicAccessBlockCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<DeletePublicAccessBlockCommandOutput>;
+/**
+ * deserializeAws_restXmlGetBucketAccelerateConfigurationCommand
+ */
+export declare const de_GetBucketAccelerateConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketAccelerateConfigurationCommandOutput>;
+/**
+ * deserializeAws_restXmlGetBucketAclCommand
+ */
+export declare const de_GetBucketAclCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketAclCommandOutput>;
+/**
+ * deserializeAws_restXmlGetBucketAnalyticsConfigurationCommand
+ */
+export declare const de_GetBucketAnalyticsConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketAnalyticsConfigurationCommandOutput>;
+/**
+ * deserializeAws_restXmlGetBucketCorsCommand
+ */
+export declare const de_GetBucketCorsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketCorsCommandOutput>;
deserializeAws_restXmlGetBucketEncryptionCommand + */ +export declare const de_GetBucketEncryptionCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketEncryptionCommandOutput>; +/** + * deserializeAws_restXmlGetBucketIntelligentTieringConfigurationCommand + */ +export declare const de_GetBucketIntelligentTieringConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketIntelligentTieringConfigurationCommandOutput>; +/** + * deserializeAws_restXmlGetBucketInventoryConfigurationCommand + */ +export declare const de_GetBucketInventoryConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketInventoryConfigurationCommandOutput>; +/** + * deserializeAws_restXmlGetBucketLifecycleConfigurationCommand + */ +export declare const de_GetBucketLifecycleConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketLifecycleConfigurationCommandOutput>; +/** + * deserializeAws_restXmlGetBucketLocationCommand + */ +export declare const de_GetBucketLocationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketLocationCommandOutput>; +/** + * deserializeAws_restXmlGetBucketLoggingCommand + */ +export declare const de_GetBucketLoggingCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketLoggingCommandOutput>; +/** + * deserializeAws_restXmlGetBucketMetadataTableConfigurationCommand + */ +export declare const de_GetBucketMetadataTableConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketMetadataTableConfigurationCommandOutput>; +/** + * deserializeAws_restXmlGetBucketMetricsConfigurationCommand + */ +export declare const de_GetBucketMetricsConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketMetricsConfigurationCommandOutput>; +/** + * deserializeAws_restXmlGetBucketNotificationConfigurationCommand + */ +export declare const de_GetBucketNotificationConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketNotificationConfigurationCommandOutput>; +/** + * deserializeAws_restXmlGetBucketOwnershipControlsCommand + */ +export declare const de_GetBucketOwnershipControlsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketOwnershipControlsCommandOutput>; +/** + * deserializeAws_restXmlGetBucketPolicyCommand + */ +export declare const de_GetBucketPolicyCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketPolicyCommandOutput>; +/** + * deserializeAws_restXmlGetBucketPolicyStatusCommand + */ +export declare const de_GetBucketPolicyStatusCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketPolicyStatusCommandOutput>; +/** + * deserializeAws_restXmlGetBucketReplicationCommand + */ +export declare const de_GetBucketReplicationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketReplicationCommandOutput>; +/** + * deserializeAws_restXmlGetBucketRequestPaymentCommand + */ +export declare const de_GetBucketRequestPaymentCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketRequestPaymentCommandOutput>; +/** + * deserializeAws_restXmlGetBucketTaggingCommand + */ +export declare const de_GetBucketTaggingCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketTaggingCommandOutput>; +/** + * deserializeAws_restXmlGetBucketVersioningCommand + */ +export declare const de_GetBucketVersioningCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketVersioningCommandOutput>; +/** + * deserializeAws_restXmlGetBucketWebsiteCommand + */ +export declare const de_GetBucketWebsiteCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetBucketWebsiteCommandOutput>; +/** + * deserializeAws_restXmlGetObjectCommand + */ +export declare const de_GetObjectCommand: (output: __HttpResponse, context: __SerdeContext & __SdkStreamSerdeContext) => Promise<GetObjectCommandOutput>; +/** + * deserializeAws_restXmlGetObjectAclCommand + */ +export declare const de_GetObjectAclCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetObjectAclCommandOutput>; +/** + *
deserializeAws_restXmlGetObjectAttributesCommand + */ +export declare const de_GetObjectAttributesCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetObjectAttributesCommandOutput>; +/** + * deserializeAws_restXmlGetObjectLegalHoldCommand + */ +export declare const de_GetObjectLegalHoldCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetObjectLegalHoldCommandOutput>; +/** + * deserializeAws_restXmlGetObjectLockConfigurationCommand + */ +export declare const de_GetObjectLockConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetObjectLockConfigurationCommandOutput>; +/** + * deserializeAws_restXmlGetObjectRetentionCommand + */ +export declare const de_GetObjectRetentionCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetObjectRetentionCommandOutput>; +/** + * deserializeAws_restXmlGetObjectTaggingCommand + */ +export declare const de_GetObjectTaggingCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetObjectTaggingCommandOutput>; +/** + * deserializeAws_restXmlGetObjectTorrentCommand + */ +export declare const de_GetObjectTorrentCommand: (output: __HttpResponse, context: __SerdeContext & __SdkStreamSerdeContext) => Promise<GetObjectTorrentCommandOutput>; +/** + * deserializeAws_restXmlGetPublicAccessBlockCommand + */ +export declare const de_GetPublicAccessBlockCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetPublicAccessBlockCommandOutput>; +/** + * deserializeAws_restXmlHeadBucketCommand + */ +export declare const de_HeadBucketCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<HeadBucketCommandOutput>; +/** + * deserializeAws_restXmlHeadObjectCommand + */ +export declare const de_HeadObjectCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<HeadObjectCommandOutput>; +/** + * deserializeAws_restXmlListBucketAnalyticsConfigurationsCommand + */ +export declare const de_ListBucketAnalyticsConfigurationsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<ListBucketAnalyticsConfigurationsCommandOutput>; +/** + * deserializeAws_restXmlListBucketIntelligentTieringConfigurationsCommand + */ +export declare const de_ListBucketIntelligentTieringConfigurationsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<ListBucketIntelligentTieringConfigurationsCommandOutput>; +/** + * deserializeAws_restXmlListBucketInventoryConfigurationsCommand + */ +export declare const de_ListBucketInventoryConfigurationsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<ListBucketInventoryConfigurationsCommandOutput>; +/** + * deserializeAws_restXmlListBucketMetricsConfigurationsCommand + */ +export declare const de_ListBucketMetricsConfigurationsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<ListBucketMetricsConfigurationsCommandOutput>; +/** + * deserializeAws_restXmlListBucketsCommand + */ +export declare const de_ListBucketsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<ListBucketsCommandOutput>; +/** + * deserializeAws_restXmlListDirectoryBucketsCommand + */ +export declare const de_ListDirectoryBucketsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<ListDirectoryBucketsCommandOutput>; +/** + * deserializeAws_restXmlListMultipartUploadsCommand + */ +export declare const de_ListMultipartUploadsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<ListMultipartUploadsCommandOutput>; +/** + * deserializeAws_restXmlListObjectsCommand + */ +export declare const de_ListObjectsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<ListObjectsCommandOutput>; +/** + * deserializeAws_restXmlListObjectsV2Command + */ +export declare const de_ListObjectsV2Command: (output: __HttpResponse, context: __SerdeContext) => Promise<ListObjectsV2CommandOutput>; +/** + * deserializeAws_restXmlListObjectVersionsCommand + */ +export declare const de_ListObjectVersionsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<ListObjectVersionsCommandOutput>; +/** + * deserializeAws_restXmlListPartsCommand + */ +export declare const de_ListPartsCommand: (output:
__HttpResponse, context: __SerdeContext) => Promise<ListPartsCommandOutput>; +/** + * deserializeAws_restXmlPutBucketAccelerateConfigurationCommand + */ +export declare const de_PutBucketAccelerateConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutBucketAccelerateConfigurationCommandOutput>; +/** + * deserializeAws_restXmlPutBucketAclCommand + */ +export declare const de_PutBucketAclCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutBucketAclCommandOutput>; +/** + * deserializeAws_restXmlPutBucketAnalyticsConfigurationCommand + */ +export declare const de_PutBucketAnalyticsConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutBucketAnalyticsConfigurationCommandOutput>; +/** + * deserializeAws_restXmlPutBucketCorsCommand + */ +export declare const de_PutBucketCorsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutBucketCorsCommandOutput>; +/** + * deserializeAws_restXmlPutBucketEncryptionCommand + */ +export declare const de_PutBucketEncryptionCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutBucketEncryptionCommandOutput>; +/** + * deserializeAws_restXmlPutBucketIntelligentTieringConfigurationCommand + */ +export declare const de_PutBucketIntelligentTieringConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutBucketIntelligentTieringConfigurationCommandOutput>; +/** + * deserializeAws_restXmlPutBucketInventoryConfigurationCommand + */ +export declare const de_PutBucketInventoryConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutBucketInventoryConfigurationCommandOutput>; +/** + * deserializeAws_restXmlPutBucketLifecycleConfigurationCommand + */ +export declare const de_PutBucketLifecycleConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutBucketLifecycleConfigurationCommandOutput>; +/** + * deserializeAws_restXmlPutBucketLoggingCommand + */ +export declare const de_PutBucketLoggingCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutBucketLoggingCommandOutput>; +/** + * deserializeAws_restXmlPutBucketMetricsConfigurationCommand + */ +export declare const de_PutBucketMetricsConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutBucketMetricsConfigurationCommandOutput>; +/** + * deserializeAws_restXmlPutBucketNotificationConfigurationCommand + */ +export declare const de_PutBucketNotificationConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutBucketNotificationConfigurationCommandOutput>; +/** + * deserializeAws_restXmlPutBucketOwnershipControlsCommand + */ +export declare const de_PutBucketOwnershipControlsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutBucketOwnershipControlsCommandOutput>; +/** + * deserializeAws_restXmlPutBucketPolicyCommand + */ +export declare const de_PutBucketPolicyCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutBucketPolicyCommandOutput>; +/** + * deserializeAws_restXmlPutBucketReplicationCommand + */ +export declare const de_PutBucketReplicationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutBucketReplicationCommandOutput>; +/** + * deserializeAws_restXmlPutBucketRequestPaymentCommand + */ +export declare const de_PutBucketRequestPaymentCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutBucketRequestPaymentCommandOutput>; +/** + * deserializeAws_restXmlPutBucketTaggingCommand + */ +export declare const de_PutBucketTaggingCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutBucketTaggingCommandOutput>; +/** + * deserializeAws_restXmlPutBucketVersioningCommand + */ +export declare const de_PutBucketVersioningCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutBucketVersioningCommandOutput>; +/** + * deserializeAws_restXmlPutBucketWebsiteCommand + */ +export declare const de_PutBucketWebsiteCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutBucketWebsiteCommandOutput>; +/** + * deserializeAws_restXmlPutObjectCommand + */ +export declare const de_PutObjectCommand: (output: __HttpResponse, context:
__SerdeContext) => Promise<PutObjectCommandOutput>; +/** + * deserializeAws_restXmlPutObjectAclCommand + */ +export declare const de_PutObjectAclCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutObjectAclCommandOutput>; +/** + * deserializeAws_restXmlPutObjectLegalHoldCommand + */ +export declare const de_PutObjectLegalHoldCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutObjectLegalHoldCommandOutput>; +/** + * deserializeAws_restXmlPutObjectLockConfigurationCommand + */ +export declare const de_PutObjectLockConfigurationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutObjectLockConfigurationCommandOutput>; +/** + * deserializeAws_restXmlPutObjectRetentionCommand + */ +export declare const de_PutObjectRetentionCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutObjectRetentionCommandOutput>; +/** + * deserializeAws_restXmlPutObjectTaggingCommand + */ +export declare const de_PutObjectTaggingCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutObjectTaggingCommandOutput>; +/** + * deserializeAws_restXmlPutPublicAccessBlockCommand + */ +export declare const de_PutPublicAccessBlockCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<PutPublicAccessBlockCommandOutput>; +/** + * deserializeAws_restXmlRestoreObjectCommand + */ +export declare const de_RestoreObjectCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<RestoreObjectCommandOutput>; +/** + * deserializeAws_restXmlSelectObjectContentCommand + */ +export declare const de_SelectObjectContentCommand: (output: __HttpResponse, context: __SerdeContext & __EventStreamSerdeContext) => Promise<SelectObjectContentCommandOutput>; +/** + * deserializeAws_restXmlUploadPartCommand + */ +export declare const de_UploadPartCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<UploadPartCommandOutput>; +/** + * deserializeAws_restXmlUploadPartCopyCommand + */ +export declare const de_UploadPartCopyCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<UploadPartCopyCommandOutput>; +/** + * deserializeAws_restXmlWriteGetObjectResponseCommand + */ +export declare const de_WriteGetObjectResponseCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<WriteGetObjectResponseCommandOutput>;
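The se_/de_ pairs declared above are internal protocol glue: each generated Command class carries a reference to its serializer and deserializer, and S3Client.send() drives them. Application code never calls them directly; a minimal sketch of the public path they back (the bucket and key names here are hypothetical):

import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3";

// send() serializes the command into an __HttpRequest via se_GetObjectCommand
// and parses the __HttpResponse back into typed output via de_GetObjectCommand.
const s3 = new S3Client({ region: "us-east-1" });
const out = await s3.send(
  new GetObjectCommand({ Bucket: "my-bucket", Key: "contacts/card.vcf" })
);
// GetObjectCommandOutput.Body is an SDK stream; transformToString() collects it.
const text = await out.Body?.transformToString();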
boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + getAwsChunkedEncodingStream: import("@smithy/types").GetAwsChunkedEncodingStream | import("@smithy/types").GetAwsChunkedEncodingStream; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + signingEscapePath: boolean; + useArnRegion: boolean | import("@smithy/types").Provider; + sdkStreamMixin: import("@smithy/types").SdkStreamMixinInjector; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + requestChecksumCalculation?: import("@aws-sdk/middleware-flexible-checksums").RequestChecksumCalculation | import("@smithy/types").Provider | undefined; + responseChecksumValidation?: import("@aws-sdk/middleware-flexible-checksums").ResponseChecksumValidation | import("@smithy/types").Provider | undefined; + requestStreamBufferSize?: number | false | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").S3HttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | typeof import("@aws-sdk/signature-v4-multi-region").SignatureV4MultiRegion; + sigv4aSigningRegionSet?: string[] | import("@smithy/types").Provider | undefined; + forcePathStyle?: (boolean & (boolean | import("@smithy/types").Provider)) | undefined; + useAccelerateEndpoint?: (boolean & (boolean | import("@smithy/types").Provider)) | undefined; + disableMultiregionAccessPoints?: (boolean & (boolean | import("@smithy/types").Provider)) | undefined; + followRegionRedirects?: boolean | undefined; + s3ExpressIdentityProvider?: import("@aws-sdk/middleware-sdk-s3").S3ExpressIdentityProvider | undefined; 
+ bucketEndpoint?: boolean | undefined; + useGlobalEndpoint?: boolean | import("@smithy/types").Provider<boolean> | undefined; + disableS3ExpressSessionAuth?: boolean | import("@smithy/types").Provider<boolean> | undefined; +};
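getRuntimeConfig supplies the per-platform defaults (hashers, stream handling, credential providers) that an S3ClientConfig can override. For the MinIO migration this diff performs, the overrides that matter are endpoint, forcePathStyle, and credentials. A sketch, assuming MINIO_* environment variables that are not defined anywhere in this diff:

import { S3Client } from "@aws-sdk/client-s3";

// Point the client at an S3-compatible MinIO server instead of AWS.
const s3 = new S3Client({
  region: "us-east-1", // MinIO accepts any region value, but one is still required
  endpoint: process.env.MINIO_ENDPOINT, // e.g. "http://localhost:9000" (hypothetical variable)
  forcePathStyle: true, // MinIO serves buckets at path-style URLs, not virtual-host subdomains
  credentials: {
    accessKeyId: process.env.MINIO_ACCESS_KEY!,
    secretAccessKey: process.env.MINIO_SECRET_KEY!,
  },
});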
import("@aws-sdk/types").RetryStrategy | import("@aws-sdk/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@aws-sdk/types").Endpoint | import("@aws-sdk/types").Provider | import("@aws-sdk/types").EndpointV2 | import("@aws-sdk/types").Provider) & (string | import("@aws-sdk/types").Provider | import("@aws-sdk/types").Endpoint | import("@aws-sdk/types").Provider | import("@aws-sdk/types").EndpointV2 | import("@aws-sdk/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@aws-sdk/types").Logger | undefined; + }) => import("@aws-sdk/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").S3HttpAuthSchemeProvider; + credentials?: import("@aws-sdk/types").AwsCredentialIdentity | import("@aws-sdk/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@aws-sdk/types").RequestSigner | ((authScheme?: import("@aws-sdk/types").AuthScheme | undefined) => Promise) | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@aws-sdk/types").RequestSigner) | typeof import("@aws-sdk/signature-v4-multi-region").SignatureV4MultiRegion; + forcePathStyle?: (boolean & (boolean | import("@aws-sdk/types").Provider)) | undefined; + useAccelerateEndpoint?: (boolean & (boolean | import("@aws-sdk/types").Provider)) | undefined; + disableMultiregionAccessPoints?: (boolean & (boolean | import("@aws-sdk/types").Provider)) | undefined; + followRegionRedirects?: boolean | undefined; + s3ExpressIdentityProvider?: import("@aws-sdk/middleware-sdk-s3").S3ExpressIdentityProvider | undefined; + bucketEndpoint?: boolean | undefined; + useGlobalEndpoint?: boolean | import("@aws-sdk/types").Provider | undefined; +}; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/runtimeConfig.native.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/runtimeConfig.native.d.ts new file mode 100644 index 00000000..49788d9f --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/runtimeConfig.native.d.ts @@ -0,0 +1,69 @@ +/// +import { S3ClientConfig } from "./S3Client"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: S3ClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: import("@smithy/types").NodeHttpHandlerOptions | import("@smithy/types").FetchHttpHandlerOptions | Record | import("@smithy/protocol-http").HttpHandler | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + 
defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise<import("@smithy/types").UserAgent>; + streamHasher: import("@smithy/types").StreamHasher | import("@smithy/types").StreamHasher; + md5: import("@smithy/types").HashConstructor; + sha1: import("@smithy/types").HashConstructor; + getAwsChunkedEncodingStream: import("@smithy/types").GetAwsChunkedEncodingStream | import("@smithy/types").GetAwsChunkedEncodingStream; + credentialDefaultProvider: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | ((_: unknown) => () => Promise<never>); + maxAttempts: number | import("@smithy/types").Provider<number>; + retryMode: string | import("@smithy/types").Provider<string>; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + eventStreamSerdeProvider: import("@smithy/types").EventStreamSerdeProvider; + defaultsMode: import("@smithy/smithy-client").DefaultsMode | import("@smithy/types").Provider<import("@smithy/smithy-client").DefaultsMode>; + signingEscapePath: boolean; + useArnRegion: boolean | import("@smithy/types").Provider<boolean>; + sdkStreamMixin: import("@smithy/types").SdkStreamMixinInjector; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider<string | undefined> | undefined; + requestChecksumCalculation?: import("@aws-sdk/middleware-flexible-checksums").RequestChecksumCalculation | import("@smithy/types").Provider<import("@aws-sdk/middleware-flexible-checksums").RequestChecksumCalculation> | undefined; + responseChecksumValidation?: import("@aws-sdk/middleware-flexible-checksums").ResponseChecksumValidation | import("@smithy/types").Provider<import("@aws-sdk/middleware-flexible-checksums").ResponseChecksumValidation> | undefined; + requestStreamBufferSize?: number | false | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider<import("@smithy/types").Endpoint> | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider<import("@smithy/types").EndpointV2>) & (string | import("@smithy/types").Provider<string> | import("@smithy/types").Endpoint | import("@smithy/types").Provider<import("@smithy/types").Endpoint> | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider<import("@smithy/types").EndpointV2>)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider<string[]> | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").S3HttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise<import("@smithy/types").RequestSigner>) | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | typeof import("@aws-sdk/signature-v4-multi-region").SignatureV4MultiRegion; + sigv4aSigningRegionSet?: string[] | import("@smithy/types").Provider<string[]> | undefined; + forcePathStyle?: (boolean & (boolean | import("@smithy/types").Provider<boolean>)) | undefined; + useAccelerateEndpoint?: (boolean & (boolean | import("@smithy/types").Provider<boolean>)) | undefined; +
disableMultiregionAccessPoints?: (boolean & (boolean | import("@smithy/types").Provider<boolean>)) | undefined; + followRegionRedirects?: boolean | undefined; + s3ExpressIdentityProvider?: import("@aws-sdk/middleware-sdk-s3").S3ExpressIdentityProvider | undefined; + bucketEndpoint?: boolean | undefined; + useGlobalEndpoint?: boolean | import("@smithy/types").Provider<boolean> | undefined; + disableS3ExpressSessionAuth?: boolean | import("@smithy/types").Provider<boolean> | undefined; +}; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/runtimeConfig.shared.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/runtimeConfig.shared.d.ts new file mode 100644 index 00000000..3a6fc9ad --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/runtimeConfig.shared.d.ts @@ -0,0 +1,28 @@ +/// +import { SignatureV4MultiRegion } from "@aws-sdk/signature-v4-multi-region"; +import { S3ClientConfig } from "./S3Client"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: S3ClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + getAwsChunkedEncodingStream: import("@smithy/types").GetAwsChunkedEncodingStream | import("@smithy/types").GetAwsChunkedEncodingStream; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").S3HttpAuthSchemeProvider; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + logger: import("@smithy/types").Logger; + sdkStreamMixin: import("@smithy/types").SdkStreamMixinInjector; + serviceId: string; + signerConstructor: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | typeof SignatureV4MultiRegion; + signingEscapePath: boolean; + urlParser: import("@smithy/types").UrlParser; + useArnRegion: boolean | import("@smithy/types").Provider<boolean>; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/runtimeExtensions.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/runtimeExtensions.d.ts new file mode 100644 index 00000000..6daa2a5e --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/runtimeExtensions.d.ts @@ -0,0 +1,17 @@ +import { S3ExtensionConfiguration } from "./extensionConfiguration"; +/** + * @public + */ +export interface RuntimeExtension { + configure(extensionConfiguration: S3ExtensionConfiguration): void; +} +/** + * @public + */ +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +/** + * @internal + */ +export declare const resolveRuntimeExtensions: (runtimeConfig: any, extensions: RuntimeExtension[]) => any;
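The ts3.4/S3.d.ts file below types the aggregated client: one method per operation, each with a promise overload plus two Node-style callback overloads, all delegating to S3Client.send(). A sketch of both calling styles:

import { S3 } from "@aws-sdk/client-s3";

const s3 = new S3({ region: "us-east-1" });

// Promise overload: listBuckets(args, options?) resolves with ListBucketsCommandOutput.
const { Buckets } = await s3.listBuckets({});

// Callback overload: listBuckets(args, cb) returns void and reports via (err, data).
s3.listBuckets({}, (err, data) => {
  if (err) console.error(err);
  else console.log(data?.Buckets?.length);
});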
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/S3.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/S3.d.ts new file mode 100644 index 00000000..c79abde5 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/S3.d.ts @@ -0,0 +1,1751 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + AbortMultipartUploadCommandInput, + AbortMultipartUploadCommandOutput, +} from "./commands/AbortMultipartUploadCommand"; +import { + CompleteMultipartUploadCommandInput, + CompleteMultipartUploadCommandOutput, +} from "./commands/CompleteMultipartUploadCommand"; +import { + CopyObjectCommandInput, + CopyObjectCommandOutput, +} from "./commands/CopyObjectCommand"; +import { + CreateBucketCommandInput, + CreateBucketCommandOutput, +} from "./commands/CreateBucketCommand"; +import { + CreateBucketMetadataTableConfigurationCommandInput, + CreateBucketMetadataTableConfigurationCommandOutput, +} from "./commands/CreateBucketMetadataTableConfigurationCommand"; +import { + CreateMultipartUploadCommandInput, + CreateMultipartUploadCommandOutput, +} from "./commands/CreateMultipartUploadCommand"; +import { + CreateSessionCommandInput, + CreateSessionCommandOutput, +} from "./commands/CreateSessionCommand"; +import { + DeleteBucketAnalyticsConfigurationCommandInput, + DeleteBucketAnalyticsConfigurationCommandOutput, +} from "./commands/DeleteBucketAnalyticsConfigurationCommand"; +import { + DeleteBucketCommandInput, + DeleteBucketCommandOutput, +} from "./commands/DeleteBucketCommand"; +import { + DeleteBucketCorsCommandInput, + DeleteBucketCorsCommandOutput, +} from "./commands/DeleteBucketCorsCommand"; +import { + DeleteBucketEncryptionCommandInput, + DeleteBucketEncryptionCommandOutput, +} from "./commands/DeleteBucketEncryptionCommand"; +import { + DeleteBucketIntelligentTieringConfigurationCommandInput, + DeleteBucketIntelligentTieringConfigurationCommandOutput, +} from "./commands/DeleteBucketIntelligentTieringConfigurationCommand"; +import { + DeleteBucketInventoryConfigurationCommandInput, + DeleteBucketInventoryConfigurationCommandOutput, +} from "./commands/DeleteBucketInventoryConfigurationCommand"; +import { + DeleteBucketLifecycleCommandInput, + DeleteBucketLifecycleCommandOutput, +} from "./commands/DeleteBucketLifecycleCommand"; +import { + DeleteBucketMetadataTableConfigurationCommandInput, + DeleteBucketMetadataTableConfigurationCommandOutput, +} from "./commands/DeleteBucketMetadataTableConfigurationCommand"; +import { + DeleteBucketMetricsConfigurationCommandInput, + DeleteBucketMetricsConfigurationCommandOutput, +} from "./commands/DeleteBucketMetricsConfigurationCommand"; +import { + DeleteBucketOwnershipControlsCommandInput, + DeleteBucketOwnershipControlsCommandOutput, +} from "./commands/DeleteBucketOwnershipControlsCommand"; +import { + DeleteBucketPolicyCommandInput, + DeleteBucketPolicyCommandOutput, +} from "./commands/DeleteBucketPolicyCommand"; +import { + DeleteBucketReplicationCommandInput, + DeleteBucketReplicationCommandOutput, +} from "./commands/DeleteBucketReplicationCommand"; +import { + DeleteBucketTaggingCommandInput, + DeleteBucketTaggingCommandOutput, +} from "./commands/DeleteBucketTaggingCommand"; +import { + DeleteBucketWebsiteCommandInput, + DeleteBucketWebsiteCommandOutput, +} from "./commands/DeleteBucketWebsiteCommand"; +import { + DeleteObjectCommandInput, + DeleteObjectCommandOutput, +} from "./commands/DeleteObjectCommand"; +import { + DeleteObjectsCommandInput, + DeleteObjectsCommandOutput, +} from "./commands/DeleteObjectsCommand"; +import { + DeleteObjectTaggingCommandInput, + DeleteObjectTaggingCommandOutput, +} from "./commands/DeleteObjectTaggingCommand"; +import { + DeletePublicAccessBlockCommandInput, + DeletePublicAccessBlockCommandOutput, +} from "./commands/DeletePublicAccessBlockCommand"; +import { + GetBucketAccelerateConfigurationCommandInput, + GetBucketAccelerateConfigurationCommandOutput, +} from "./commands/GetBucketAccelerateConfigurationCommand"; +import { +
GetBucketAclCommandInput, + GetBucketAclCommandOutput, +} from "./commands/GetBucketAclCommand"; +import { + GetBucketAnalyticsConfigurationCommandInput, + GetBucketAnalyticsConfigurationCommandOutput, +} from "./commands/GetBucketAnalyticsConfigurationCommand"; +import { + GetBucketCorsCommandInput, + GetBucketCorsCommandOutput, +} from "./commands/GetBucketCorsCommand"; +import { + GetBucketEncryptionCommandInput, + GetBucketEncryptionCommandOutput, +} from "./commands/GetBucketEncryptionCommand"; +import { + GetBucketIntelligentTieringConfigurationCommandInput, + GetBucketIntelligentTieringConfigurationCommandOutput, +} from "./commands/GetBucketIntelligentTieringConfigurationCommand"; +import { + GetBucketInventoryConfigurationCommandInput, + GetBucketInventoryConfigurationCommandOutput, +} from "./commands/GetBucketInventoryConfigurationCommand"; +import { + GetBucketLifecycleConfigurationCommandInput, + GetBucketLifecycleConfigurationCommandOutput, +} from "./commands/GetBucketLifecycleConfigurationCommand"; +import { + GetBucketLocationCommandInput, + GetBucketLocationCommandOutput, +} from "./commands/GetBucketLocationCommand"; +import { + GetBucketLoggingCommandInput, + GetBucketLoggingCommandOutput, +} from "./commands/GetBucketLoggingCommand"; +import { + GetBucketMetadataTableConfigurationCommandInput, + GetBucketMetadataTableConfigurationCommandOutput, +} from "./commands/GetBucketMetadataTableConfigurationCommand"; +import { + GetBucketMetricsConfigurationCommandInput, + GetBucketMetricsConfigurationCommandOutput, +} from "./commands/GetBucketMetricsConfigurationCommand"; +import { + GetBucketNotificationConfigurationCommandInput, + GetBucketNotificationConfigurationCommandOutput, +} from "./commands/GetBucketNotificationConfigurationCommand"; +import { + GetBucketOwnershipControlsCommandInput, + GetBucketOwnershipControlsCommandOutput, +} from "./commands/GetBucketOwnershipControlsCommand"; +import { + GetBucketPolicyCommandInput, + GetBucketPolicyCommandOutput, +} from "./commands/GetBucketPolicyCommand"; +import { + GetBucketPolicyStatusCommandInput, + GetBucketPolicyStatusCommandOutput, +} from "./commands/GetBucketPolicyStatusCommand"; +import { + GetBucketReplicationCommandInput, + GetBucketReplicationCommandOutput, +} from "./commands/GetBucketReplicationCommand"; +import { + GetBucketRequestPaymentCommandInput, + GetBucketRequestPaymentCommandOutput, +} from "./commands/GetBucketRequestPaymentCommand"; +import { + GetBucketTaggingCommandInput, + GetBucketTaggingCommandOutput, +} from "./commands/GetBucketTaggingCommand"; +import { + GetBucketVersioningCommandInput, + GetBucketVersioningCommandOutput, +} from "./commands/GetBucketVersioningCommand"; +import { + GetBucketWebsiteCommandInput, + GetBucketWebsiteCommandOutput, +} from "./commands/GetBucketWebsiteCommand"; +import { + GetObjectAclCommandInput, + GetObjectAclCommandOutput, +} from "./commands/GetObjectAclCommand"; +import { + GetObjectAttributesCommandInput, + GetObjectAttributesCommandOutput, +} from "./commands/GetObjectAttributesCommand"; +import { + GetObjectCommandInput, + GetObjectCommandOutput, +} from "./commands/GetObjectCommand"; +import { + GetObjectLegalHoldCommandInput, + GetObjectLegalHoldCommandOutput, +} from "./commands/GetObjectLegalHoldCommand"; +import { + GetObjectLockConfigurationCommandInput, + GetObjectLockConfigurationCommandOutput, +} from "./commands/GetObjectLockConfigurationCommand"; +import { + GetObjectRetentionCommandInput, + GetObjectRetentionCommandOutput, +} from 
"./commands/GetObjectRetentionCommand"; +import { + GetObjectTaggingCommandInput, + GetObjectTaggingCommandOutput, +} from "./commands/GetObjectTaggingCommand"; +import { + GetObjectTorrentCommandInput, + GetObjectTorrentCommandOutput, +} from "./commands/GetObjectTorrentCommand"; +import { + GetPublicAccessBlockCommandInput, + GetPublicAccessBlockCommandOutput, +} from "./commands/GetPublicAccessBlockCommand"; +import { + HeadBucketCommandInput, + HeadBucketCommandOutput, +} from "./commands/HeadBucketCommand"; +import { + HeadObjectCommandInput, + HeadObjectCommandOutput, +} from "./commands/HeadObjectCommand"; +import { + ListBucketAnalyticsConfigurationsCommandInput, + ListBucketAnalyticsConfigurationsCommandOutput, +} from "./commands/ListBucketAnalyticsConfigurationsCommand"; +import { + ListBucketIntelligentTieringConfigurationsCommandInput, + ListBucketIntelligentTieringConfigurationsCommandOutput, +} from "./commands/ListBucketIntelligentTieringConfigurationsCommand"; +import { + ListBucketInventoryConfigurationsCommandInput, + ListBucketInventoryConfigurationsCommandOutput, +} from "./commands/ListBucketInventoryConfigurationsCommand"; +import { + ListBucketMetricsConfigurationsCommandInput, + ListBucketMetricsConfigurationsCommandOutput, +} from "./commands/ListBucketMetricsConfigurationsCommand"; +import { + ListBucketsCommandInput, + ListBucketsCommandOutput, +} from "./commands/ListBucketsCommand"; +import { + ListDirectoryBucketsCommandInput, + ListDirectoryBucketsCommandOutput, +} from "./commands/ListDirectoryBucketsCommand"; +import { + ListMultipartUploadsCommandInput, + ListMultipartUploadsCommandOutput, +} from "./commands/ListMultipartUploadsCommand"; +import { + ListObjectsCommandInput, + ListObjectsCommandOutput, +} from "./commands/ListObjectsCommand"; +import { + ListObjectsV2CommandInput, + ListObjectsV2CommandOutput, +} from "./commands/ListObjectsV2Command"; +import { + ListObjectVersionsCommandInput, + ListObjectVersionsCommandOutput, +} from "./commands/ListObjectVersionsCommand"; +import { + ListPartsCommandInput, + ListPartsCommandOutput, +} from "./commands/ListPartsCommand"; +import { + PutBucketAccelerateConfigurationCommandInput, + PutBucketAccelerateConfigurationCommandOutput, +} from "./commands/PutBucketAccelerateConfigurationCommand"; +import { + PutBucketAclCommandInput, + PutBucketAclCommandOutput, +} from "./commands/PutBucketAclCommand"; +import { + PutBucketAnalyticsConfigurationCommandInput, + PutBucketAnalyticsConfigurationCommandOutput, +} from "./commands/PutBucketAnalyticsConfigurationCommand"; +import { + PutBucketCorsCommandInput, + PutBucketCorsCommandOutput, +} from "./commands/PutBucketCorsCommand"; +import { + PutBucketEncryptionCommandInput, + PutBucketEncryptionCommandOutput, +} from "./commands/PutBucketEncryptionCommand"; +import { + PutBucketIntelligentTieringConfigurationCommandInput, + PutBucketIntelligentTieringConfigurationCommandOutput, +} from "./commands/PutBucketIntelligentTieringConfigurationCommand"; +import { + PutBucketInventoryConfigurationCommandInput, + PutBucketInventoryConfigurationCommandOutput, +} from "./commands/PutBucketInventoryConfigurationCommand"; +import { + PutBucketLifecycleConfigurationCommandInput, + PutBucketLifecycleConfigurationCommandOutput, +} from "./commands/PutBucketLifecycleConfigurationCommand"; +import { + PutBucketLoggingCommandInput, + PutBucketLoggingCommandOutput, +} from "./commands/PutBucketLoggingCommand"; +import { + PutBucketMetricsConfigurationCommandInput, + 
PutBucketMetricsConfigurationCommandOutput, +} from "./commands/PutBucketMetricsConfigurationCommand"; +import { + PutBucketNotificationConfigurationCommandInput, + PutBucketNotificationConfigurationCommandOutput, +} from "./commands/PutBucketNotificationConfigurationCommand"; +import { + PutBucketOwnershipControlsCommandInput, + PutBucketOwnershipControlsCommandOutput, +} from "./commands/PutBucketOwnershipControlsCommand"; +import { + PutBucketPolicyCommandInput, + PutBucketPolicyCommandOutput, +} from "./commands/PutBucketPolicyCommand"; +import { + PutBucketReplicationCommandInput, + PutBucketReplicationCommandOutput, +} from "./commands/PutBucketReplicationCommand"; +import { + PutBucketRequestPaymentCommandInput, + PutBucketRequestPaymentCommandOutput, +} from "./commands/PutBucketRequestPaymentCommand"; +import { + PutBucketTaggingCommandInput, + PutBucketTaggingCommandOutput, +} from "./commands/PutBucketTaggingCommand"; +import { + PutBucketVersioningCommandInput, + PutBucketVersioningCommandOutput, +} from "./commands/PutBucketVersioningCommand"; +import { + PutBucketWebsiteCommandInput, + PutBucketWebsiteCommandOutput, +} from "./commands/PutBucketWebsiteCommand"; +import { + PutObjectAclCommandInput, + PutObjectAclCommandOutput, +} from "./commands/PutObjectAclCommand"; +import { + PutObjectCommandInput, + PutObjectCommandOutput, +} from "./commands/PutObjectCommand"; +import { + PutObjectLegalHoldCommandInput, + PutObjectLegalHoldCommandOutput, +} from "./commands/PutObjectLegalHoldCommand"; +import { + PutObjectLockConfigurationCommandInput, + PutObjectLockConfigurationCommandOutput, +} from "./commands/PutObjectLockConfigurationCommand"; +import { + PutObjectRetentionCommandInput, + PutObjectRetentionCommandOutput, +} from "./commands/PutObjectRetentionCommand"; +import { + PutObjectTaggingCommandInput, + PutObjectTaggingCommandOutput, +} from "./commands/PutObjectTaggingCommand"; +import { + PutPublicAccessBlockCommandInput, + PutPublicAccessBlockCommandOutput, +} from "./commands/PutPublicAccessBlockCommand"; +import { + RestoreObjectCommandInput, + RestoreObjectCommandOutput, +} from "./commands/RestoreObjectCommand"; +import { + SelectObjectContentCommandInput, + SelectObjectContentCommandOutput, +} from "./commands/SelectObjectContentCommand"; +import { + UploadPartCommandInput, + UploadPartCommandOutput, +} from "./commands/UploadPartCommand"; +import { + UploadPartCopyCommandInput, + UploadPartCopyCommandOutput, +} from "./commands/UploadPartCopyCommand"; +import { + WriteGetObjectResponseCommandInput, + WriteGetObjectResponseCommandOutput, +} from "./commands/WriteGetObjectResponseCommand"; +import { S3Client } from "./S3Client"; +export interface S3 { + abortMultipartUpload( + args: AbortMultipartUploadCommandInput, + options?: __HttpHandlerOptions + ): Promise<AbortMultipartUploadCommandOutput>; + abortMultipartUpload( + args: AbortMultipartUploadCommandInput, + cb: (err: any, data?: AbortMultipartUploadCommandOutput) => void + ): void; + abortMultipartUpload( + args: AbortMultipartUploadCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: AbortMultipartUploadCommandOutput) => void + ): void; + completeMultipartUpload( + args: CompleteMultipartUploadCommandInput, + options?: __HttpHandlerOptions + ): Promise<CompleteMultipartUploadCommandOutput>; + completeMultipartUpload( + args: CompleteMultipartUploadCommandInput, + cb: (err: any, data?: CompleteMultipartUploadCommandOutput) => void + ): void; + completeMultipartUpload( + args: CompleteMultipartUploadCommandInput, + options: __HttpHandlerOptions, + cb: (err: any,
data?: CompleteMultipartUploadCommandOutput) => void + ): void; + copyObject( + args: CopyObjectCommandInput, + options?: __HttpHandlerOptions + ): Promise<CopyObjectCommandOutput>; + copyObject( + args: CopyObjectCommandInput, + cb: (err: any, data?: CopyObjectCommandOutput) => void + ): void; + copyObject( + args: CopyObjectCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CopyObjectCommandOutput) => void + ): void; + createBucket( + args: CreateBucketCommandInput, + options?: __HttpHandlerOptions + ): Promise<CreateBucketCommandOutput>; + createBucket( + args: CreateBucketCommandInput, + cb: (err: any, data?: CreateBucketCommandOutput) => void + ): void; + createBucket( + args: CreateBucketCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateBucketCommandOutput) => void + ): void; + createBucketMetadataTableConfiguration( + args: CreateBucketMetadataTableConfigurationCommandInput, + options?: __HttpHandlerOptions + ): Promise<CreateBucketMetadataTableConfigurationCommandOutput>; + createBucketMetadataTableConfiguration( + args: CreateBucketMetadataTableConfigurationCommandInput, + cb: ( + err: any, + data?: CreateBucketMetadataTableConfigurationCommandOutput + ) => void + ): void; + createBucketMetadataTableConfiguration( + args: CreateBucketMetadataTableConfigurationCommandInput, + options: __HttpHandlerOptions, + cb: ( + err: any, + data?: CreateBucketMetadataTableConfigurationCommandOutput + ) => void + ): void; + createMultipartUpload( + args: CreateMultipartUploadCommandInput, + options?: __HttpHandlerOptions + ): Promise<CreateMultipartUploadCommandOutput>; + createMultipartUpload( + args: CreateMultipartUploadCommandInput, + cb: (err: any, data?: CreateMultipartUploadCommandOutput) => void + ): void; + createMultipartUpload( + args: CreateMultipartUploadCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateMultipartUploadCommandOutput) => void + ): void; + createSession( + args: CreateSessionCommandInput, + options?: __HttpHandlerOptions + ): Promise<CreateSessionCommandOutput>; + createSession( + args: CreateSessionCommandInput, + cb: (err: any, data?: CreateSessionCommandOutput) => void + ): void; + createSession( + args: CreateSessionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateSessionCommandOutput) => void + ): void; + deleteBucket( + args: DeleteBucketCommandInput, + options?: __HttpHandlerOptions + ): Promise<DeleteBucketCommandOutput>; + deleteBucket( + args: DeleteBucketCommandInput, + cb: (err: any, data?: DeleteBucketCommandOutput) => void + ): void; + deleteBucket( + args: DeleteBucketCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteBucketCommandOutput) => void + ): void; + deleteBucketAnalyticsConfiguration( + args: DeleteBucketAnalyticsConfigurationCommandInput, + options?: __HttpHandlerOptions + ): Promise<DeleteBucketAnalyticsConfigurationCommandOutput>; + deleteBucketAnalyticsConfiguration( + args: DeleteBucketAnalyticsConfigurationCommandInput, + cb: ( + err: any, + data?: DeleteBucketAnalyticsConfigurationCommandOutput + ) => void + ): void; + deleteBucketAnalyticsConfiguration( + args: DeleteBucketAnalyticsConfigurationCommandInput, + options: __HttpHandlerOptions, + cb: ( + err: any, + data?: DeleteBucketAnalyticsConfigurationCommandOutput + ) => void + ): void; + deleteBucketCors( + args: DeleteBucketCorsCommandInput, + options?: __HttpHandlerOptions + ): Promise<DeleteBucketCorsCommandOutput>; + deleteBucketCors( + args: DeleteBucketCorsCommandInput, + cb: (err: any, data?: DeleteBucketCorsCommandOutput) => void + ): void; + deleteBucketCors( + args: DeleteBucketCorsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteBucketCorsCommandOutput) => void + ): void; +
deleteBucketEncryption( + args: DeleteBucketEncryptionCommandInput, + options?: __HttpHandlerOptions + ): Promise<DeleteBucketEncryptionCommandOutput>; + deleteBucketEncryption( + args: DeleteBucketEncryptionCommandInput, + cb: (err: any, data?: DeleteBucketEncryptionCommandOutput) => void + ): void; + deleteBucketEncryption( + args: DeleteBucketEncryptionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteBucketEncryptionCommandOutput) => void + ): void; + deleteBucketIntelligentTieringConfiguration( + args: DeleteBucketIntelligentTieringConfigurationCommandInput, + options?: __HttpHandlerOptions + ): Promise<DeleteBucketIntelligentTieringConfigurationCommandOutput>; + deleteBucketIntelligentTieringConfiguration( + args: DeleteBucketIntelligentTieringConfigurationCommandInput, + cb: ( + err: any, + data?: DeleteBucketIntelligentTieringConfigurationCommandOutput + ) => void + ): void; + deleteBucketIntelligentTieringConfiguration( + args: DeleteBucketIntelligentTieringConfigurationCommandInput, + options: __HttpHandlerOptions, + cb: ( + err: any, + data?: DeleteBucketIntelligentTieringConfigurationCommandOutput + ) => void + ): void; + deleteBucketInventoryConfiguration( + args: DeleteBucketInventoryConfigurationCommandInput, + options?: __HttpHandlerOptions + ): Promise<DeleteBucketInventoryConfigurationCommandOutput>; + deleteBucketInventoryConfiguration( + args: DeleteBucketInventoryConfigurationCommandInput, + cb: ( + err: any, + data?: DeleteBucketInventoryConfigurationCommandOutput + ) => void + ): void; + deleteBucketInventoryConfiguration( + args: DeleteBucketInventoryConfigurationCommandInput, + options: __HttpHandlerOptions, + cb: ( + err: any, + data?: DeleteBucketInventoryConfigurationCommandOutput + ) => void + ): void; + deleteBucketLifecycle( + args: DeleteBucketLifecycleCommandInput, + options?: __HttpHandlerOptions + ): Promise<DeleteBucketLifecycleCommandOutput>; + deleteBucketLifecycle( + args: DeleteBucketLifecycleCommandInput, + cb: (err: any, data?: DeleteBucketLifecycleCommandOutput) => void + ): void; + deleteBucketLifecycle( + args: DeleteBucketLifecycleCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteBucketLifecycleCommandOutput) => void + ): void; + deleteBucketMetadataTableConfiguration( + args: DeleteBucketMetadataTableConfigurationCommandInput, + options?: __HttpHandlerOptions + ): Promise<DeleteBucketMetadataTableConfigurationCommandOutput>; + deleteBucketMetadataTableConfiguration( + args: DeleteBucketMetadataTableConfigurationCommandInput, + cb: ( + err: any, + data?: DeleteBucketMetadataTableConfigurationCommandOutput + ) => void + ): void; + deleteBucketMetadataTableConfiguration( + args: DeleteBucketMetadataTableConfigurationCommandInput, + options: __HttpHandlerOptions, + cb: ( + err: any, + data?: DeleteBucketMetadataTableConfigurationCommandOutput + ) => void + ): void; + deleteBucketMetricsConfiguration( + args: DeleteBucketMetricsConfigurationCommandInput, + options?: __HttpHandlerOptions + ): Promise<DeleteBucketMetricsConfigurationCommandOutput>; + deleteBucketMetricsConfiguration( + args: DeleteBucketMetricsConfigurationCommandInput, + cb: (err: any, data?: DeleteBucketMetricsConfigurationCommandOutput) => void + ): void; + deleteBucketMetricsConfiguration( + args: DeleteBucketMetricsConfigurationCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteBucketMetricsConfigurationCommandOutput) => void + ): void; + deleteBucketOwnershipControls( + args: DeleteBucketOwnershipControlsCommandInput, + options?: __HttpHandlerOptions + ): Promise<DeleteBucketOwnershipControlsCommandOutput>; + deleteBucketOwnershipControls( + args: DeleteBucketOwnershipControlsCommandInput, + cb: (err: any, data?: DeleteBucketOwnershipControlsCommandOutput) => void + ): void; + deleteBucketOwnershipControls( + args:
DeleteBucketOwnershipControlsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteBucketOwnershipControlsCommandOutput) => void + ): void; + deleteBucketPolicy( + args: DeleteBucketPolicyCommandInput, + options?: __HttpHandlerOptions + ): Promise<DeleteBucketPolicyCommandOutput>; + deleteBucketPolicy( + args: DeleteBucketPolicyCommandInput, + cb: (err: any, data?: DeleteBucketPolicyCommandOutput) => void + ): void; + deleteBucketPolicy( + args: DeleteBucketPolicyCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteBucketPolicyCommandOutput) => void + ): void; + deleteBucketReplication( + args: DeleteBucketReplicationCommandInput, + options?: __HttpHandlerOptions + ): Promise<DeleteBucketReplicationCommandOutput>; + deleteBucketReplication( + args: DeleteBucketReplicationCommandInput, + cb: (err: any, data?: DeleteBucketReplicationCommandOutput) => void + ): void; + deleteBucketReplication( + args: DeleteBucketReplicationCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteBucketReplicationCommandOutput) => void + ): void; + deleteBucketTagging( + args: DeleteBucketTaggingCommandInput, + options?: __HttpHandlerOptions + ): Promise<DeleteBucketTaggingCommandOutput>; + deleteBucketTagging( + args: DeleteBucketTaggingCommandInput, + cb: (err: any, data?: DeleteBucketTaggingCommandOutput) => void + ): void; + deleteBucketTagging( + args: DeleteBucketTaggingCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteBucketTaggingCommandOutput) => void + ): void; + deleteBucketWebsite( + args: DeleteBucketWebsiteCommandInput, + options?: __HttpHandlerOptions + ): Promise<DeleteBucketWebsiteCommandOutput>; + deleteBucketWebsite( + args: DeleteBucketWebsiteCommandInput, + cb: (err: any, data?: DeleteBucketWebsiteCommandOutput) => void + ): void; + deleteBucketWebsite( + args: DeleteBucketWebsiteCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteBucketWebsiteCommandOutput) => void + ): void; + deleteObject( + args: DeleteObjectCommandInput, + options?: __HttpHandlerOptions + ): Promise<DeleteObjectCommandOutput>; + deleteObject( + args: DeleteObjectCommandInput, + cb: (err: any, data?: DeleteObjectCommandOutput) => void + ): void; + deleteObject( + args: DeleteObjectCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteObjectCommandOutput) => void + ): void; + deleteObjects( + args: DeleteObjectsCommandInput, + options?: __HttpHandlerOptions + ): Promise<DeleteObjectsCommandOutput>; + deleteObjects( + args: DeleteObjectsCommandInput, + cb: (err: any, data?: DeleteObjectsCommandOutput) => void + ): void; + deleteObjects( + args: DeleteObjectsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteObjectsCommandOutput) => void + ): void; + deleteObjectTagging( + args: DeleteObjectTaggingCommandInput, + options?: __HttpHandlerOptions + ): Promise<DeleteObjectTaggingCommandOutput>; + deleteObjectTagging( + args: DeleteObjectTaggingCommandInput, + cb: (err: any, data?: DeleteObjectTaggingCommandOutput) => void + ): void; + deleteObjectTagging( + args: DeleteObjectTaggingCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteObjectTaggingCommandOutput) => void + ): void; + deletePublicAccessBlock( + args: DeletePublicAccessBlockCommandInput, + options?: __HttpHandlerOptions + ): Promise<DeletePublicAccessBlockCommandOutput>; + deletePublicAccessBlock( + args: DeletePublicAccessBlockCommandInput, + cb: (err: any, data?: DeletePublicAccessBlockCommandOutput) => void + ): void; + deletePublicAccessBlock( + args: DeletePublicAccessBlockCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeletePublicAccessBlockCommandOutput) => void + ): void; +
getBucketAccelerateConfiguration( + args: GetBucketAccelerateConfigurationCommandInput, + options?: __HttpHandlerOptions + ): Promise<GetBucketAccelerateConfigurationCommandOutput>; + getBucketAccelerateConfiguration( + args: GetBucketAccelerateConfigurationCommandInput, + cb: (err: any, data?: GetBucketAccelerateConfigurationCommandOutput) => void + ): void; + getBucketAccelerateConfiguration( + args: GetBucketAccelerateConfigurationCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetBucketAccelerateConfigurationCommandOutput) => void + ): void; + getBucketAcl( + args: GetBucketAclCommandInput, + options?: __HttpHandlerOptions + ): Promise<GetBucketAclCommandOutput>; + getBucketAcl( + args: GetBucketAclCommandInput, + cb: (err: any, data?: GetBucketAclCommandOutput) => void + ): void; + getBucketAcl( + args: GetBucketAclCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetBucketAclCommandOutput) => void + ): void; + getBucketAnalyticsConfiguration( + args: GetBucketAnalyticsConfigurationCommandInput, + options?: __HttpHandlerOptions + ): Promise<GetBucketAnalyticsConfigurationCommandOutput>; + getBucketAnalyticsConfiguration( + args: GetBucketAnalyticsConfigurationCommandInput, + cb: (err: any, data?: GetBucketAnalyticsConfigurationCommandOutput) => void + ): void; + getBucketAnalyticsConfiguration( + args: GetBucketAnalyticsConfigurationCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetBucketAnalyticsConfigurationCommandOutput) => void + ): void; + getBucketCors( + args: GetBucketCorsCommandInput, + options?: __HttpHandlerOptions + ): Promise<GetBucketCorsCommandOutput>; + getBucketCors( + args: GetBucketCorsCommandInput, + cb: (err: any, data?: GetBucketCorsCommandOutput) => void + ): void; + getBucketCors( + args: GetBucketCorsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetBucketCorsCommandOutput) => void + ): void; + getBucketEncryption( + args: GetBucketEncryptionCommandInput, + options?: __HttpHandlerOptions + ): Promise<GetBucketEncryptionCommandOutput>; + getBucketEncryption( + args: GetBucketEncryptionCommandInput, + cb: (err: any, data?: GetBucketEncryptionCommandOutput) => void + ): void; + getBucketEncryption( + args: GetBucketEncryptionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetBucketEncryptionCommandOutput) => void + ): void; + getBucketIntelligentTieringConfiguration( + args: GetBucketIntelligentTieringConfigurationCommandInput, + options?: __HttpHandlerOptions + ): Promise<GetBucketIntelligentTieringConfigurationCommandOutput>; + getBucketIntelligentTieringConfiguration( + args: GetBucketIntelligentTieringConfigurationCommandInput, + cb: ( + err: any, + data?: GetBucketIntelligentTieringConfigurationCommandOutput + ) => void + ): void; + getBucketIntelligentTieringConfiguration( + args: GetBucketIntelligentTieringConfigurationCommandInput, + options: __HttpHandlerOptions, + cb: ( + err: any, + data?: GetBucketIntelligentTieringConfigurationCommandOutput + ) => void + ): void; + getBucketInventoryConfiguration( + args: GetBucketInventoryConfigurationCommandInput, + options?: __HttpHandlerOptions + ): Promise<GetBucketInventoryConfigurationCommandOutput>; + getBucketInventoryConfiguration( + args: GetBucketInventoryConfigurationCommandInput, + cb: (err: any, data?: GetBucketInventoryConfigurationCommandOutput) => void + ): void; + getBucketInventoryConfiguration( + args: GetBucketInventoryConfigurationCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetBucketInventoryConfigurationCommandOutput) => void + ): void; + getBucketLifecycleConfiguration( + args: GetBucketLifecycleConfigurationCommandInput, + options?: __HttpHandlerOptions + ): Promise<GetBucketLifecycleConfigurationCommandOutput>; + getBucketLifecycleConfiguration( + args:
+    args: GetBucketLifecycleConfigurationCommandInput,
+    cb: (err: any, data?: GetBucketLifecycleConfigurationCommandOutput) => void
+  ): void;
+  getBucketLifecycleConfiguration(
+    args: GetBucketLifecycleConfigurationCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetBucketLifecycleConfigurationCommandOutput) => void
+  ): void;
+  getBucketLocation(
+    args: GetBucketLocationCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetBucketLocationCommandOutput>;
+  getBucketLocation(
+    args: GetBucketLocationCommandInput,
+    cb: (err: any, data?: GetBucketLocationCommandOutput) => void
+  ): void;
+  getBucketLocation(
+    args: GetBucketLocationCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetBucketLocationCommandOutput) => void
+  ): void;
+  getBucketLogging(
+    args: GetBucketLoggingCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetBucketLoggingCommandOutput>;
+  getBucketLogging(
+    args: GetBucketLoggingCommandInput,
+    cb: (err: any, data?: GetBucketLoggingCommandOutput) => void
+  ): void;
+  getBucketLogging(
+    args: GetBucketLoggingCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetBucketLoggingCommandOutput) => void
+  ): void;
+  getBucketMetadataTableConfiguration(
+    args: GetBucketMetadataTableConfigurationCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetBucketMetadataTableConfigurationCommandOutput>;
+  getBucketMetadataTableConfiguration(
+    args: GetBucketMetadataTableConfigurationCommandInput,
+    cb: (
+      err: any,
+      data?: GetBucketMetadataTableConfigurationCommandOutput
+    ) => void
+  ): void;
+  getBucketMetadataTableConfiguration(
+    args: GetBucketMetadataTableConfigurationCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (
+      err: any,
+      data?: GetBucketMetadataTableConfigurationCommandOutput
+    ) => void
+  ): void;
+  getBucketMetricsConfiguration(
+    args: GetBucketMetricsConfigurationCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetBucketMetricsConfigurationCommandOutput>;
+  getBucketMetricsConfiguration(
+    args: GetBucketMetricsConfigurationCommandInput,
+    cb: (err: any, data?: GetBucketMetricsConfigurationCommandOutput) => void
+  ): void;
+  getBucketMetricsConfiguration(
+    args: GetBucketMetricsConfigurationCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetBucketMetricsConfigurationCommandOutput) => void
+  ): void;
+  getBucketNotificationConfiguration(
+    args: GetBucketNotificationConfigurationCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetBucketNotificationConfigurationCommandOutput>;
+  getBucketNotificationConfiguration(
+    args: GetBucketNotificationConfigurationCommandInput,
+    cb: (
+      err: any,
+      data?: GetBucketNotificationConfigurationCommandOutput
+    ) => void
+  ): void;
+  getBucketNotificationConfiguration(
+    args: GetBucketNotificationConfigurationCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (
+      err: any,
+      data?: GetBucketNotificationConfigurationCommandOutput
+    ) => void
+  ): void;
+  getBucketOwnershipControls(
+    args: GetBucketOwnershipControlsCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetBucketOwnershipControlsCommandOutput>;
+  getBucketOwnershipControls(
+    args: GetBucketOwnershipControlsCommandInput,
+    cb: (err: any, data?: GetBucketOwnershipControlsCommandOutput) => void
+  ): void;
+  getBucketOwnershipControls(
+    args: GetBucketOwnershipControlsCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetBucketOwnershipControlsCommandOutput) => void
+  ): void;
+  getBucketPolicy(
+    args: GetBucketPolicyCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetBucketPolicyCommandOutput>;
+  getBucketPolicy(
+    args: GetBucketPolicyCommandInput,
+    cb: (err: any, data?: GetBucketPolicyCommandOutput) => void
+  ): void;
+  getBucketPolicy(
+    args: GetBucketPolicyCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetBucketPolicyCommandOutput) => void
+  ): void;
+  getBucketPolicyStatus(
+    args: GetBucketPolicyStatusCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetBucketPolicyStatusCommandOutput>;
+  getBucketPolicyStatus(
+    args: GetBucketPolicyStatusCommandInput,
+    cb: (err: any, data?: GetBucketPolicyStatusCommandOutput) => void
+  ): void;
+  getBucketPolicyStatus(
+    args: GetBucketPolicyStatusCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetBucketPolicyStatusCommandOutput) => void
+  ): void;
+  getBucketReplication(
+    args: GetBucketReplicationCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetBucketReplicationCommandOutput>;
+  getBucketReplication(
+    args: GetBucketReplicationCommandInput,
+    cb: (err: any, data?: GetBucketReplicationCommandOutput) => void
+  ): void;
+  getBucketReplication(
+    args: GetBucketReplicationCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetBucketReplicationCommandOutput) => void
+  ): void;
+  getBucketRequestPayment(
+    args: GetBucketRequestPaymentCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetBucketRequestPaymentCommandOutput>;
+  getBucketRequestPayment(
+    args: GetBucketRequestPaymentCommandInput,
+    cb: (err: any, data?: GetBucketRequestPaymentCommandOutput) => void
+  ): void;
+  getBucketRequestPayment(
+    args: GetBucketRequestPaymentCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetBucketRequestPaymentCommandOutput) => void
+  ): void;
+  getBucketTagging(
+    args: GetBucketTaggingCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetBucketTaggingCommandOutput>;
+  getBucketTagging(
+    args: GetBucketTaggingCommandInput,
+    cb: (err: any, data?: GetBucketTaggingCommandOutput) => void
+  ): void;
+  getBucketTagging(
+    args: GetBucketTaggingCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetBucketTaggingCommandOutput) => void
+  ): void;
+  getBucketVersioning(
+    args: GetBucketVersioningCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetBucketVersioningCommandOutput>;
+  getBucketVersioning(
+    args: GetBucketVersioningCommandInput,
+    cb: (err: any, data?: GetBucketVersioningCommandOutput) => void
+  ): void;
+  getBucketVersioning(
+    args: GetBucketVersioningCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetBucketVersioningCommandOutput) => void
+  ): void;
+  getBucketWebsite(
+    args: GetBucketWebsiteCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetBucketWebsiteCommandOutput>;
+  getBucketWebsite(
+    args: GetBucketWebsiteCommandInput,
+    cb: (err: any, data?: GetBucketWebsiteCommandOutput) => void
+  ): void;
+  getBucketWebsite(
+    args: GetBucketWebsiteCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetBucketWebsiteCommandOutput) => void
+  ): void;
+  getObject(
+    args: GetObjectCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetObjectCommandOutput>;
+  getObject(
+    args: GetObjectCommandInput,
+    cb: (err: any, data?: GetObjectCommandOutput) => void
+  ): void;
+  getObject(
+    args: GetObjectCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetObjectCommandOutput) => void
+  ): void;
+  getObjectAcl(
+    args: GetObjectAclCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetObjectAclCommandOutput>;
+  getObjectAcl(
+    args: GetObjectAclCommandInput,
+    cb: (err: any, data?: GetObjectAclCommandOutput) => void
+  ): void;
+  getObjectAcl(
+    args: GetObjectAclCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetObjectAclCommandOutput) => void
+  ): void;
+  getObjectAttributes(
+    args: GetObjectAttributesCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetObjectAttributesCommandOutput>;
+  getObjectAttributes(
+    args: GetObjectAttributesCommandInput,
+    cb: (err: any, data?: GetObjectAttributesCommandOutput) => void
+  ): void;
+  getObjectAttributes(
+    args: GetObjectAttributesCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetObjectAttributesCommandOutput) => void
+  ): void;
+  getObjectLegalHold(
+    args: GetObjectLegalHoldCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetObjectLegalHoldCommandOutput>;
+  getObjectLegalHold(
+    args: GetObjectLegalHoldCommandInput,
+    cb: (err: any, data?: GetObjectLegalHoldCommandOutput) => void
+  ): void;
+  getObjectLegalHold(
+    args: GetObjectLegalHoldCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetObjectLegalHoldCommandOutput) => void
+  ): void;
+  getObjectLockConfiguration(
+    args: GetObjectLockConfigurationCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetObjectLockConfigurationCommandOutput>;
+  getObjectLockConfiguration(
+    args: GetObjectLockConfigurationCommandInput,
+    cb: (err: any, data?: GetObjectLockConfigurationCommandOutput) => void
+  ): void;
+  getObjectLockConfiguration(
+    args: GetObjectLockConfigurationCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetObjectLockConfigurationCommandOutput) => void
+  ): void;
+  getObjectRetention(
+    args: GetObjectRetentionCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetObjectRetentionCommandOutput>;
+  getObjectRetention(
+    args: GetObjectRetentionCommandInput,
+    cb: (err: any, data?: GetObjectRetentionCommandOutput) => void
+  ): void;
+  getObjectRetention(
+    args: GetObjectRetentionCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetObjectRetentionCommandOutput) => void
+  ): void;
+  getObjectTagging(
+    args: GetObjectTaggingCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetObjectTaggingCommandOutput>;
+  getObjectTagging(
+    args: GetObjectTaggingCommandInput,
+    cb: (err: any, data?: GetObjectTaggingCommandOutput) => void
+  ): void;
+  getObjectTagging(
+    args: GetObjectTaggingCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetObjectTaggingCommandOutput) => void
+  ): void;
+  getObjectTorrent(
+    args: GetObjectTorrentCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetObjectTorrentCommandOutput>;
+  getObjectTorrent(
+    args: GetObjectTorrentCommandInput,
+    cb: (err: any, data?: GetObjectTorrentCommandOutput) => void
+  ): void;
+  getObjectTorrent(
+    args: GetObjectTorrentCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetObjectTorrentCommandOutput) => void
+  ): void;
+  getPublicAccessBlock(
+    args: GetPublicAccessBlockCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetPublicAccessBlockCommandOutput>;
+  getPublicAccessBlock(
+    args: GetPublicAccessBlockCommandInput,
+    cb: (err: any, data?: GetPublicAccessBlockCommandOutput) => void
+  ): void;
+  getPublicAccessBlock(
+    args: GetPublicAccessBlockCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetPublicAccessBlockCommandOutput) => void
+  ): void;
+  headBucket(
+    args: HeadBucketCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<HeadBucketCommandOutput>;
+  headBucket(
+    args: HeadBucketCommandInput,
+    cb: (err: any, data?: HeadBucketCommandOutput) => void
+  ): void;
+  headBucket(
+    args: HeadBucketCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: HeadBucketCommandOutput) => void
+  ): void;
+  headObject(
+    args: HeadObjectCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<HeadObjectCommandOutput>;
+  headObject(
+    args: HeadObjectCommandInput,
+    cb: (err: any, data?: HeadObjectCommandOutput) => void
+  ): void;
+  headObject(
+    args: HeadObjectCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: HeadObjectCommandOutput) => void
+  ): void;
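+  // (Editorial aside, not generated SDK code.) headObject is the usual
+  // existence/metadata probe before a full getObject. A minimal sketch,
+  // assuming a configured client `s3` and hypothetical names; in SDK v3 a
+  // missing key rejects with an error whose name is "NotFound":
+  //
+  //   try {
+  //     const head = await s3.headObject({ Bucket: "user-files", Key: "contacts.vcf" });
+  //     console.log(head.ContentLength, head.ContentType, head.ETag);
+  //   } catch (err: any) {
+  //     if (err?.name !== "NotFound") throw err;
+  //   }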
+  listBucketAnalyticsConfigurations(
+    args: ListBucketAnalyticsConfigurationsCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<ListBucketAnalyticsConfigurationsCommandOutput>;
+  listBucketAnalyticsConfigurations(
+    args: ListBucketAnalyticsConfigurationsCommandInput,
+    cb: (
+      err: any,
+      data?: ListBucketAnalyticsConfigurationsCommandOutput
+    ) => void
+  ): void;
+  listBucketAnalyticsConfigurations(
+    args: ListBucketAnalyticsConfigurationsCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (
+      err: any,
+      data?: ListBucketAnalyticsConfigurationsCommandOutput
+    ) => void
+  ): void;
+  listBucketIntelligentTieringConfigurations(
+    args: ListBucketIntelligentTieringConfigurationsCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<ListBucketIntelligentTieringConfigurationsCommandOutput>;
+  listBucketIntelligentTieringConfigurations(
+    args: ListBucketIntelligentTieringConfigurationsCommandInput,
+    cb: (
+      err: any,
+      data?: ListBucketIntelligentTieringConfigurationsCommandOutput
+    ) => void
+  ): void;
+  listBucketIntelligentTieringConfigurations(
+    args: ListBucketIntelligentTieringConfigurationsCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (
+      err: any,
+      data?: ListBucketIntelligentTieringConfigurationsCommandOutput
+    ) => void
+  ): void;
+  listBucketInventoryConfigurations(
+    args: ListBucketInventoryConfigurationsCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<ListBucketInventoryConfigurationsCommandOutput>;
+  listBucketInventoryConfigurations(
+    args: ListBucketInventoryConfigurationsCommandInput,
+    cb: (
+      err: any,
+      data?: ListBucketInventoryConfigurationsCommandOutput
+    ) => void
+  ): void;
+  listBucketInventoryConfigurations(
+    args: ListBucketInventoryConfigurationsCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (
+      err: any,
+      data?: ListBucketInventoryConfigurationsCommandOutput
+    ) => void
+  ): void;
+  listBucketMetricsConfigurations(
+    args: ListBucketMetricsConfigurationsCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<ListBucketMetricsConfigurationsCommandOutput>;
+  listBucketMetricsConfigurations(
+    args: ListBucketMetricsConfigurationsCommandInput,
+    cb: (err: any, data?: ListBucketMetricsConfigurationsCommandOutput) => void
+  ): void;
+  listBucketMetricsConfigurations(
+    args: ListBucketMetricsConfigurationsCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: ListBucketMetricsConfigurationsCommandOutput) => void
+  ): void;
+  listBuckets(): Promise<ListBucketsCommandOutput>;
+  listBuckets(
+    args: ListBucketsCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<ListBucketsCommandOutput>;
+  listBuckets(
+    args: ListBucketsCommandInput,
+    cb: (err: any, data?: ListBucketsCommandOutput) => void
+  ): void;
+  listBuckets(
+    args: ListBucketsCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: ListBucketsCommandOutput) => void
+  ): void;
+  listDirectoryBuckets(): Promise<ListDirectoryBucketsCommandOutput>;
+  listDirectoryBuckets(
+    args: ListDirectoryBucketsCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<ListDirectoryBucketsCommandOutput>;
+  listDirectoryBuckets(
+    args: ListDirectoryBucketsCommandInput,
+    cb: (err: any, data?: ListDirectoryBucketsCommandOutput) => void
+  ): void;
+  listDirectoryBuckets(
+    args: ListDirectoryBucketsCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: ListDirectoryBucketsCommandOutput) => void
+  ): void;
+  listMultipartUploads(
+    args: ListMultipartUploadsCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<ListMultipartUploadsCommandOutput>;
+  listMultipartUploads(
+    args: ListMultipartUploadsCommandInput,
+    cb: (err: any, data?: ListMultipartUploadsCommandOutput) => void
+  ): void;
+  listMultipartUploads(
+    args: ListMultipartUploadsCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: ListMultipartUploadsCommandOutput) => void
+  ): void;
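+  // (Editorial aside, not generated SDK code.) listObjectsV2 below returns at
+  // most 1000 keys per call, so callers loop on NextContinuationToken. A
+  // minimal sketch, assuming a configured client `s3` and a hypothetical
+  // bucket name:
+  //
+  //   let token: string | undefined;
+  //   do {
+  //     const page = await s3.listObjectsV2({ Bucket: "user-files", ContinuationToken: token });
+  //     for (const obj of page.Contents ?? []) console.log(obj.Key, obj.Size);
+  //     token = page.NextContinuationToken;
+  //   } while (token);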
+  listObjects(
+    args: ListObjectsCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<ListObjectsCommandOutput>;
+  listObjects(
+    args: ListObjectsCommandInput,
+    cb: (err: any, data?: ListObjectsCommandOutput) => void
+  ): void;
+  listObjects(
+    args: ListObjectsCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: ListObjectsCommandOutput) => void
+  ): void;
+  listObjectsV2(
+    args: ListObjectsV2CommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<ListObjectsV2CommandOutput>;
+  listObjectsV2(
+    args: ListObjectsV2CommandInput,
+    cb: (err: any, data?: ListObjectsV2CommandOutput) => void
+  ): void;
+  listObjectsV2(
+    args: ListObjectsV2CommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: ListObjectsV2CommandOutput) => void
+  ): void;
+  listObjectVersions(
+    args: ListObjectVersionsCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<ListObjectVersionsCommandOutput>;
+  listObjectVersions(
+    args: ListObjectVersionsCommandInput,
+    cb: (err: any, data?: ListObjectVersionsCommandOutput) => void
+  ): void;
+  listObjectVersions(
+    args: ListObjectVersionsCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: ListObjectVersionsCommandOutput) => void
+  ): void;
+  listParts(
+    args: ListPartsCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<ListPartsCommandOutput>;
+  listParts(
+    args: ListPartsCommandInput,
+    cb: (err: any, data?: ListPartsCommandOutput) => void
+  ): void;
+  listParts(
+    args: ListPartsCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: ListPartsCommandOutput) => void
+  ): void;
+  putBucketAccelerateConfiguration(
+    args: PutBucketAccelerateConfigurationCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutBucketAccelerateConfigurationCommandOutput>;
+  putBucketAccelerateConfiguration(
+    args: PutBucketAccelerateConfigurationCommandInput,
+    cb: (err: any, data?: PutBucketAccelerateConfigurationCommandOutput) => void
+  ): void;
+  putBucketAccelerateConfiguration(
+    args: PutBucketAccelerateConfigurationCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutBucketAccelerateConfigurationCommandOutput) => void
+  ): void;
+  putBucketAcl(
+    args: PutBucketAclCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutBucketAclCommandOutput>;
+  putBucketAcl(
+    args: PutBucketAclCommandInput,
+    cb: (err: any, data?: PutBucketAclCommandOutput) => void
+  ): void;
+  putBucketAcl(
+    args: PutBucketAclCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutBucketAclCommandOutput) => void
+  ): void;
+  putBucketAnalyticsConfiguration(
+    args: PutBucketAnalyticsConfigurationCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutBucketAnalyticsConfigurationCommandOutput>;
+  putBucketAnalyticsConfiguration(
+    args: PutBucketAnalyticsConfigurationCommandInput,
+    cb: (err: any, data?: PutBucketAnalyticsConfigurationCommandOutput) => void
+  ): void;
+  putBucketAnalyticsConfiguration(
+    args: PutBucketAnalyticsConfigurationCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutBucketAnalyticsConfigurationCommandOutput) => void
+  ): void;
+  putBucketCors(
+    args: PutBucketCorsCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutBucketCorsCommandOutput>;
+  putBucketCors(
+    args: PutBucketCorsCommandInput,
+    cb: (err: any, data?: PutBucketCorsCommandOutput) => void
+  ): void;
+  putBucketCors(
+    args: PutBucketCorsCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutBucketCorsCommandOutput) => void
+  ): void;
+  putBucketEncryption(
+    args: PutBucketEncryptionCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutBucketEncryptionCommandOutput>;
+  putBucketEncryption(
+    args: PutBucketEncryptionCommandInput,
+    cb: (err: any, data?: PutBucketEncryptionCommandOutput) => void
+  ): void;
+  putBucketEncryption(
+    args: PutBucketEncryptionCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutBucketEncryptionCommandOutput) => void
+  ): void;
+  putBucketIntelligentTieringConfiguration(
+    args: PutBucketIntelligentTieringConfigurationCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutBucketIntelligentTieringConfigurationCommandOutput>;
+  putBucketIntelligentTieringConfiguration(
+    args: PutBucketIntelligentTieringConfigurationCommandInput,
+    cb: (
+      err: any,
+      data?: PutBucketIntelligentTieringConfigurationCommandOutput
+    ) => void
+  ): void;
+  putBucketIntelligentTieringConfiguration(
+    args: PutBucketIntelligentTieringConfigurationCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (
+      err: any,
+      data?: PutBucketIntelligentTieringConfigurationCommandOutput
+    ) => void
+  ): void;
+  putBucketInventoryConfiguration(
+    args: PutBucketInventoryConfigurationCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutBucketInventoryConfigurationCommandOutput>;
+  putBucketInventoryConfiguration(
+    args: PutBucketInventoryConfigurationCommandInput,
+    cb: (err: any, data?: PutBucketInventoryConfigurationCommandOutput) => void
+  ): void;
+  putBucketInventoryConfiguration(
+    args: PutBucketInventoryConfigurationCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutBucketInventoryConfigurationCommandOutput) => void
+  ): void;
+  putBucketLifecycleConfiguration(
+    args: PutBucketLifecycleConfigurationCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutBucketLifecycleConfigurationCommandOutput>;
+  putBucketLifecycleConfiguration(
+    args: PutBucketLifecycleConfigurationCommandInput,
+    cb: (err: any, data?: PutBucketLifecycleConfigurationCommandOutput) => void
+  ): void;
+  putBucketLifecycleConfiguration(
+    args: PutBucketLifecycleConfigurationCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutBucketLifecycleConfigurationCommandOutput) => void
+  ): void;
+  putBucketLogging(
+    args: PutBucketLoggingCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutBucketLoggingCommandOutput>;
+  putBucketLogging(
+    args: PutBucketLoggingCommandInput,
+    cb: (err: any, data?: PutBucketLoggingCommandOutput) => void
+  ): void;
+  putBucketLogging(
+    args: PutBucketLoggingCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutBucketLoggingCommandOutput) => void
+  ): void;
+  putBucketMetricsConfiguration(
+    args: PutBucketMetricsConfigurationCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutBucketMetricsConfigurationCommandOutput>;
+  putBucketMetricsConfiguration(
+    args: PutBucketMetricsConfigurationCommandInput,
+    cb: (err: any, data?: PutBucketMetricsConfigurationCommandOutput) => void
+  ): void;
+  putBucketMetricsConfiguration(
+    args: PutBucketMetricsConfigurationCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutBucketMetricsConfigurationCommandOutput) => void
+  ): void;
+  putBucketNotificationConfiguration(
+    args: PutBucketNotificationConfigurationCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutBucketNotificationConfigurationCommandOutput>;
+  putBucketNotificationConfiguration(
+    args: PutBucketNotificationConfigurationCommandInput,
+    cb: (
+      err: any,
+      data?: PutBucketNotificationConfigurationCommandOutput
+    ) => void
+  ): void;
+  putBucketNotificationConfiguration(
+    args: PutBucketNotificationConfigurationCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (
+      err: any,
+      data?: PutBucketNotificationConfigurationCommandOutput
+    ) => void
+  ): void;
+  putBucketOwnershipControls(
+    args: PutBucketOwnershipControlsCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutBucketOwnershipControlsCommandOutput>;
+  putBucketOwnershipControls(
+    args: PutBucketOwnershipControlsCommandInput,
+    cb: (err: any, data?: PutBucketOwnershipControlsCommandOutput) => void
+  ): void;
+  putBucketOwnershipControls(
+    args: PutBucketOwnershipControlsCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutBucketOwnershipControlsCommandOutput) => void
+  ): void;
+  putBucketPolicy(
+    args: PutBucketPolicyCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutBucketPolicyCommandOutput>;
+  putBucketPolicy(
+    args: PutBucketPolicyCommandInput,
+    cb: (err: any, data?: PutBucketPolicyCommandOutput) => void
+  ): void;
+  putBucketPolicy(
+    args: PutBucketPolicyCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutBucketPolicyCommandOutput) => void
+  ): void;
+  putBucketReplication(
+    args: PutBucketReplicationCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutBucketReplicationCommandOutput>;
+  putBucketReplication(
+    args: PutBucketReplicationCommandInput,
+    cb: (err: any, data?: PutBucketReplicationCommandOutput) => void
+  ): void;
+  putBucketReplication(
+    args: PutBucketReplicationCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutBucketReplicationCommandOutput) => void
+  ): void;
+  putBucketRequestPayment(
+    args: PutBucketRequestPaymentCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutBucketRequestPaymentCommandOutput>;
+  putBucketRequestPayment(
+    args: PutBucketRequestPaymentCommandInput,
+    cb: (err: any, data?: PutBucketRequestPaymentCommandOutput) => void
+  ): void;
+  putBucketRequestPayment(
+    args: PutBucketRequestPaymentCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutBucketRequestPaymentCommandOutput) => void
+  ): void;
+  putBucketTagging(
+    args: PutBucketTaggingCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutBucketTaggingCommandOutput>;
+  putBucketTagging(
+    args: PutBucketTaggingCommandInput,
+    cb: (err: any, data?: PutBucketTaggingCommandOutput) => void
+  ): void;
+  putBucketTagging(
+    args: PutBucketTaggingCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutBucketTaggingCommandOutput) => void
+  ): void;
+  putBucketVersioning(
+    args: PutBucketVersioningCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutBucketVersioningCommandOutput>;
+  putBucketVersioning(
+    args: PutBucketVersioningCommandInput,
+    cb: (err: any, data?: PutBucketVersioningCommandOutput) => void
+  ): void;
+  putBucketVersioning(
+    args: PutBucketVersioningCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutBucketVersioningCommandOutput) => void
+  ): void;
+  putBucketWebsite(
+    args: PutBucketWebsiteCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutBucketWebsiteCommandOutput>;
+  putBucketWebsite(
+    args: PutBucketWebsiteCommandInput,
+    cb: (err: any, data?: PutBucketWebsiteCommandOutput) => void
+  ): void;
+  putBucketWebsite(
+    args: PutBucketWebsiteCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutBucketWebsiteCommandOutput) => void
+  ): void;
+  putObject(
+    args: PutObjectCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutObjectCommandOutput>;
+  putObject(
+    args: PutObjectCommandInput,
+    cb: (err: any, data?: PutObjectCommandOutput) => void
+  ): void;
+  putObject(
+    args: PutObjectCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutObjectCommandOutput) => void
+  ): void;
+  putObjectAcl(
+    args: PutObjectAclCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutObjectAclCommandOutput>;
+  putObjectAcl(
+    args: PutObjectAclCommandInput,
+    cb: (err: any, data?: PutObjectAclCommandOutput) => void
+  ): void;
+  putObjectAcl(
+    args: PutObjectAclCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutObjectAclCommandOutput) => void
+  ): void;
+  putObjectLegalHold(
+    args: PutObjectLegalHoldCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutObjectLegalHoldCommandOutput>;
+  putObjectLegalHold(
+    args: PutObjectLegalHoldCommandInput,
+    cb: (err: any, data?: PutObjectLegalHoldCommandOutput) => void
+  ): void;
+  putObjectLegalHold(
+    args: PutObjectLegalHoldCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutObjectLegalHoldCommandOutput) => void
+  ): void;
+  putObjectLockConfiguration(
+    args: PutObjectLockConfigurationCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutObjectLockConfigurationCommandOutput>;
+  putObjectLockConfiguration(
+    args: PutObjectLockConfigurationCommandInput,
+    cb: (err: any, data?: PutObjectLockConfigurationCommandOutput) => void
+  ): void;
+  putObjectLockConfiguration(
+    args: PutObjectLockConfigurationCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutObjectLockConfigurationCommandOutput) => void
+  ): void;
+  putObjectRetention(
+    args: PutObjectRetentionCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutObjectRetentionCommandOutput>;
+  putObjectRetention(
+    args: PutObjectRetentionCommandInput,
+    cb: (err: any, data?: PutObjectRetentionCommandOutput) => void
+  ): void;
+  putObjectRetention(
+    args: PutObjectRetentionCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutObjectRetentionCommandOutput) => void
+  ): void;
+  putObjectTagging(
+    args: PutObjectTaggingCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutObjectTaggingCommandOutput>;
+  putObjectTagging(
+    args: PutObjectTaggingCommandInput,
+    cb: (err: any, data?: PutObjectTaggingCommandOutput) => void
+  ): void;
+  putObjectTagging(
+    args: PutObjectTaggingCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutObjectTaggingCommandOutput) => void
+  ): void;
+  putPublicAccessBlock(
+    args: PutPublicAccessBlockCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<PutPublicAccessBlockCommandOutput>;
+  putPublicAccessBlock(
+    args: PutPublicAccessBlockCommandInput,
+    cb: (err: any, data?: PutPublicAccessBlockCommandOutput) => void
+  ): void;
+  putPublicAccessBlock(
+    args: PutPublicAccessBlockCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: PutPublicAccessBlockCommandOutput) => void
+  ): void;
+  restoreObject(
+    args: RestoreObjectCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<RestoreObjectCommandOutput>;
+  restoreObject(
+    args: RestoreObjectCommandInput,
+    cb: (err: any, data?: RestoreObjectCommandOutput) => void
+  ): void;
+  restoreObject(
+    args: RestoreObjectCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: RestoreObjectCommandOutput) => void
+  ): void;
+  selectObjectContent(
+    args: SelectObjectContentCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<SelectObjectContentCommandOutput>;
+  selectObjectContent(
+    args: SelectObjectContentCommandInput,
+    cb: (err: any, data?: SelectObjectContentCommandOutput) => void
+  ): void;
+  selectObjectContent(
+    args: SelectObjectContentCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: SelectObjectContentCommandOutput) => void
+  ): void;
+  uploadPart(
+    args: UploadPartCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<UploadPartCommandOutput>;
+  uploadPart(
+    args: UploadPartCommandInput,
+    cb: (err: any, data?: UploadPartCommandOutput) => void
+  ): void;
+  uploadPart(
+    args: UploadPartCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: UploadPartCommandOutput) => void
+  ): void;
+  uploadPartCopy(
+    args: UploadPartCopyCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<UploadPartCopyCommandOutput>;
+  uploadPartCopy(
+    args: UploadPartCopyCommandInput,
+    cb: (err: any, data?: UploadPartCopyCommandOutput) => void
+  ): void;
+  uploadPartCopy(
+    args: UploadPartCopyCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: UploadPartCopyCommandOutput) => void
+  ): void;
+  writeGetObjectResponse(
+    args: WriteGetObjectResponseCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<WriteGetObjectResponseCommandOutput>;
+  writeGetObjectResponse(
+    args: WriteGetObjectResponseCommandInput,
+    cb: (err: any, data?: WriteGetObjectResponseCommandOutput) => void
+  ): void;
+  writeGetObjectResponse(
+    args: WriteGetObjectResponseCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: WriteGetObjectResponseCommandOutput) => void
+  ): void;
+}
+export declare class S3 extends S3Client implements S3 {}
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/S3Client.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/S3Client.d.ts
new file mode 100644
index 00000000..5fa9d249
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/S3Client.d.ts
@@ -0,0 +1,732 @@
+import {
+  FlexibleChecksumsInputConfig,
+  FlexibleChecksumsResolvedConfig,
+} from "@aws-sdk/middleware-flexible-checksums";
+import {
+  HostHeaderInputConfig,
+  HostHeaderResolvedConfig,
+} from "@aws-sdk/middleware-host-header";
+import { S3InputConfig, S3ResolvedConfig } from "@aws-sdk/middleware-sdk-s3";
+import {
+  UserAgentInputConfig,
+  UserAgentResolvedConfig,
+} from "@aws-sdk/middleware-user-agent";
+import { GetAwsChunkedEncodingStream } from "@aws-sdk/types";
+import {
+  RegionInputConfig,
+  RegionResolvedConfig,
+} from "@smithy/config-resolver";
+import {
+  EventStreamSerdeInputConfig,
+  EventStreamSerdeResolvedConfig,
+} from "@smithy/eventstream-serde-config-resolver";
+import {
+  EndpointInputConfig,
+  EndpointResolvedConfig,
+} from "@smithy/middleware-endpoint";
+import {
+  RetryInputConfig,
+  RetryResolvedConfig,
+} from "@smithy/middleware-retry";
+import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http";
+import {
+  Client as __Client,
+  DefaultsMode as __DefaultsMode,
+  SmithyConfiguration as __SmithyConfiguration,
+  SmithyResolvedConfiguration as __SmithyResolvedConfiguration,
+} from "@smithy/smithy-client";
+import {
+  AwsCredentialIdentityProvider,
+  BodyLengthCalculator as __BodyLengthCalculator,
+  CheckOptionalClientConfig as __CheckOptionalClientConfig,
+  ChecksumConstructor as __ChecksumConstructor,
+  Decoder as __Decoder,
+  Encoder as __Encoder,
+  EventStreamSerdeProvider as __EventStreamSerdeProvider,
+  HashConstructor as __HashConstructor,
+  HttpHandlerOptions as __HttpHandlerOptions,
+  Logger as __Logger,
+  Provider as __Provider,
+  Provider,
+  SdkStreamMixinInjector as __SdkStreamMixinInjector,
+  StreamCollector as __StreamCollector,
+  StreamHasher as __StreamHasher,
+  UrlParser as __UrlParser,
+  UserAgent as __UserAgent,
+} from "@smithy/types";
+import { Readable } from "stream";
+import {
+  HttpAuthSchemeInputConfig,
+  HttpAuthSchemeResolvedConfig,
+} from "./auth/httpAuthSchemeProvider";
+import {
+  AbortMultipartUploadCommandInput,
+  AbortMultipartUploadCommandOutput,
+} from "./commands/AbortMultipartUploadCommand";
+import {
+  CompleteMultipartUploadCommandInput,
+  CompleteMultipartUploadCommandOutput,
+} from "./commands/CompleteMultipartUploadCommand";
+import {
+  CopyObjectCommandInput,
+  CopyObjectCommandOutput,
+} from "./commands/CopyObjectCommand";
+import {
+  CreateBucketCommandInput,
+  CreateBucketCommandOutput,
+} from "./commands/CreateBucketCommand";
+import {
+  CreateBucketMetadataTableConfigurationCommandInput,
+  CreateBucketMetadataTableConfigurationCommandOutput,
+} from
"./commands/CreateBucketMetadataTableConfigurationCommand"; +import { + CreateMultipartUploadCommandInput, + CreateMultipartUploadCommandOutput, +} from "./commands/CreateMultipartUploadCommand"; +import { + CreateSessionCommandInput, + CreateSessionCommandOutput, +} from "./commands/CreateSessionCommand"; +import { + DeleteBucketAnalyticsConfigurationCommandInput, + DeleteBucketAnalyticsConfigurationCommandOutput, +} from "./commands/DeleteBucketAnalyticsConfigurationCommand"; +import { + DeleteBucketCommandInput, + DeleteBucketCommandOutput, +} from "./commands/DeleteBucketCommand"; +import { + DeleteBucketCorsCommandInput, + DeleteBucketCorsCommandOutput, +} from "./commands/DeleteBucketCorsCommand"; +import { + DeleteBucketEncryptionCommandInput, + DeleteBucketEncryptionCommandOutput, +} from "./commands/DeleteBucketEncryptionCommand"; +import { + DeleteBucketIntelligentTieringConfigurationCommandInput, + DeleteBucketIntelligentTieringConfigurationCommandOutput, +} from "./commands/DeleteBucketIntelligentTieringConfigurationCommand"; +import { + DeleteBucketInventoryConfigurationCommandInput, + DeleteBucketInventoryConfigurationCommandOutput, +} from "./commands/DeleteBucketInventoryConfigurationCommand"; +import { + DeleteBucketLifecycleCommandInput, + DeleteBucketLifecycleCommandOutput, +} from "./commands/DeleteBucketLifecycleCommand"; +import { + DeleteBucketMetadataTableConfigurationCommandInput, + DeleteBucketMetadataTableConfigurationCommandOutput, +} from "./commands/DeleteBucketMetadataTableConfigurationCommand"; +import { + DeleteBucketMetricsConfigurationCommandInput, + DeleteBucketMetricsConfigurationCommandOutput, +} from "./commands/DeleteBucketMetricsConfigurationCommand"; +import { + DeleteBucketOwnershipControlsCommandInput, + DeleteBucketOwnershipControlsCommandOutput, +} from "./commands/DeleteBucketOwnershipControlsCommand"; +import { + DeleteBucketPolicyCommandInput, + DeleteBucketPolicyCommandOutput, +} from "./commands/DeleteBucketPolicyCommand"; +import { + DeleteBucketReplicationCommandInput, + DeleteBucketReplicationCommandOutput, +} from "./commands/DeleteBucketReplicationCommand"; +import { + DeleteBucketTaggingCommandInput, + DeleteBucketTaggingCommandOutput, +} from "./commands/DeleteBucketTaggingCommand"; +import { + DeleteBucketWebsiteCommandInput, + DeleteBucketWebsiteCommandOutput, +} from "./commands/DeleteBucketWebsiteCommand"; +import { + DeleteObjectCommandInput, + DeleteObjectCommandOutput, +} from "./commands/DeleteObjectCommand"; +import { + DeleteObjectsCommandInput, + DeleteObjectsCommandOutput, +} from "./commands/DeleteObjectsCommand"; +import { + DeleteObjectTaggingCommandInput, + DeleteObjectTaggingCommandOutput, +} from "./commands/DeleteObjectTaggingCommand"; +import { + DeletePublicAccessBlockCommandInput, + DeletePublicAccessBlockCommandOutput, +} from "./commands/DeletePublicAccessBlockCommand"; +import { + GetBucketAccelerateConfigurationCommandInput, + GetBucketAccelerateConfigurationCommandOutput, +} from "./commands/GetBucketAccelerateConfigurationCommand"; +import { + GetBucketAclCommandInput, + GetBucketAclCommandOutput, +} from "./commands/GetBucketAclCommand"; +import { + GetBucketAnalyticsConfigurationCommandInput, + GetBucketAnalyticsConfigurationCommandOutput, +} from "./commands/GetBucketAnalyticsConfigurationCommand"; +import { + GetBucketCorsCommandInput, + GetBucketCorsCommandOutput, +} from "./commands/GetBucketCorsCommand"; +import { + GetBucketEncryptionCommandInput, + GetBucketEncryptionCommandOutput, +} from 
"./commands/GetBucketEncryptionCommand"; +import { + GetBucketIntelligentTieringConfigurationCommandInput, + GetBucketIntelligentTieringConfigurationCommandOutput, +} from "./commands/GetBucketIntelligentTieringConfigurationCommand"; +import { + GetBucketInventoryConfigurationCommandInput, + GetBucketInventoryConfigurationCommandOutput, +} from "./commands/GetBucketInventoryConfigurationCommand"; +import { + GetBucketLifecycleConfigurationCommandInput, + GetBucketLifecycleConfigurationCommandOutput, +} from "./commands/GetBucketLifecycleConfigurationCommand"; +import { + GetBucketLocationCommandInput, + GetBucketLocationCommandOutput, +} from "./commands/GetBucketLocationCommand"; +import { + GetBucketLoggingCommandInput, + GetBucketLoggingCommandOutput, +} from "./commands/GetBucketLoggingCommand"; +import { + GetBucketMetadataTableConfigurationCommandInput, + GetBucketMetadataTableConfigurationCommandOutput, +} from "./commands/GetBucketMetadataTableConfigurationCommand"; +import { + GetBucketMetricsConfigurationCommandInput, + GetBucketMetricsConfigurationCommandOutput, +} from "./commands/GetBucketMetricsConfigurationCommand"; +import { + GetBucketNotificationConfigurationCommandInput, + GetBucketNotificationConfigurationCommandOutput, +} from "./commands/GetBucketNotificationConfigurationCommand"; +import { + GetBucketOwnershipControlsCommandInput, + GetBucketOwnershipControlsCommandOutput, +} from "./commands/GetBucketOwnershipControlsCommand"; +import { + GetBucketPolicyCommandInput, + GetBucketPolicyCommandOutput, +} from "./commands/GetBucketPolicyCommand"; +import { + GetBucketPolicyStatusCommandInput, + GetBucketPolicyStatusCommandOutput, +} from "./commands/GetBucketPolicyStatusCommand"; +import { + GetBucketReplicationCommandInput, + GetBucketReplicationCommandOutput, +} from "./commands/GetBucketReplicationCommand"; +import { + GetBucketRequestPaymentCommandInput, + GetBucketRequestPaymentCommandOutput, +} from "./commands/GetBucketRequestPaymentCommand"; +import { + GetBucketTaggingCommandInput, + GetBucketTaggingCommandOutput, +} from "./commands/GetBucketTaggingCommand"; +import { + GetBucketVersioningCommandInput, + GetBucketVersioningCommandOutput, +} from "./commands/GetBucketVersioningCommand"; +import { + GetBucketWebsiteCommandInput, + GetBucketWebsiteCommandOutput, +} from "./commands/GetBucketWebsiteCommand"; +import { + GetObjectAclCommandInput, + GetObjectAclCommandOutput, +} from "./commands/GetObjectAclCommand"; +import { + GetObjectAttributesCommandInput, + GetObjectAttributesCommandOutput, +} from "./commands/GetObjectAttributesCommand"; +import { + GetObjectCommandInput, + GetObjectCommandOutput, +} from "./commands/GetObjectCommand"; +import { + GetObjectLegalHoldCommandInput, + GetObjectLegalHoldCommandOutput, +} from "./commands/GetObjectLegalHoldCommand"; +import { + GetObjectLockConfigurationCommandInput, + GetObjectLockConfigurationCommandOutput, +} from "./commands/GetObjectLockConfigurationCommand"; +import { + GetObjectRetentionCommandInput, + GetObjectRetentionCommandOutput, +} from "./commands/GetObjectRetentionCommand"; +import { + GetObjectTaggingCommandInput, + GetObjectTaggingCommandOutput, +} from "./commands/GetObjectTaggingCommand"; +import { + GetObjectTorrentCommandInput, + GetObjectTorrentCommandOutput, +} from "./commands/GetObjectTorrentCommand"; +import { + GetPublicAccessBlockCommandInput, + GetPublicAccessBlockCommandOutput, +} from "./commands/GetPublicAccessBlockCommand"; +import { + HeadBucketCommandInput, + 
HeadBucketCommandOutput, +} from "./commands/HeadBucketCommand"; +import { + HeadObjectCommandInput, + HeadObjectCommandOutput, +} from "./commands/HeadObjectCommand"; +import { + ListBucketAnalyticsConfigurationsCommandInput, + ListBucketAnalyticsConfigurationsCommandOutput, +} from "./commands/ListBucketAnalyticsConfigurationsCommand"; +import { + ListBucketIntelligentTieringConfigurationsCommandInput, + ListBucketIntelligentTieringConfigurationsCommandOutput, +} from "./commands/ListBucketIntelligentTieringConfigurationsCommand"; +import { + ListBucketInventoryConfigurationsCommandInput, + ListBucketInventoryConfigurationsCommandOutput, +} from "./commands/ListBucketInventoryConfigurationsCommand"; +import { + ListBucketMetricsConfigurationsCommandInput, + ListBucketMetricsConfigurationsCommandOutput, +} from "./commands/ListBucketMetricsConfigurationsCommand"; +import { + ListBucketsCommandInput, + ListBucketsCommandOutput, +} from "./commands/ListBucketsCommand"; +import { + ListDirectoryBucketsCommandInput, + ListDirectoryBucketsCommandOutput, +} from "./commands/ListDirectoryBucketsCommand"; +import { + ListMultipartUploadsCommandInput, + ListMultipartUploadsCommandOutput, +} from "./commands/ListMultipartUploadsCommand"; +import { + ListObjectsCommandInput, + ListObjectsCommandOutput, +} from "./commands/ListObjectsCommand"; +import { + ListObjectsV2CommandInput, + ListObjectsV2CommandOutput, +} from "./commands/ListObjectsV2Command"; +import { + ListObjectVersionsCommandInput, + ListObjectVersionsCommandOutput, +} from "./commands/ListObjectVersionsCommand"; +import { + ListPartsCommandInput, + ListPartsCommandOutput, +} from "./commands/ListPartsCommand"; +import { + PutBucketAccelerateConfigurationCommandInput, + PutBucketAccelerateConfigurationCommandOutput, +} from "./commands/PutBucketAccelerateConfigurationCommand"; +import { + PutBucketAclCommandInput, + PutBucketAclCommandOutput, +} from "./commands/PutBucketAclCommand"; +import { + PutBucketAnalyticsConfigurationCommandInput, + PutBucketAnalyticsConfigurationCommandOutput, +} from "./commands/PutBucketAnalyticsConfigurationCommand"; +import { + PutBucketCorsCommandInput, + PutBucketCorsCommandOutput, +} from "./commands/PutBucketCorsCommand"; +import { + PutBucketEncryptionCommandInput, + PutBucketEncryptionCommandOutput, +} from "./commands/PutBucketEncryptionCommand"; +import { + PutBucketIntelligentTieringConfigurationCommandInput, + PutBucketIntelligentTieringConfigurationCommandOutput, +} from "./commands/PutBucketIntelligentTieringConfigurationCommand"; +import { + PutBucketInventoryConfigurationCommandInput, + PutBucketInventoryConfigurationCommandOutput, +} from "./commands/PutBucketInventoryConfigurationCommand"; +import { + PutBucketLifecycleConfigurationCommandInput, + PutBucketLifecycleConfigurationCommandOutput, +} from "./commands/PutBucketLifecycleConfigurationCommand"; +import { + PutBucketLoggingCommandInput, + PutBucketLoggingCommandOutput, +} from "./commands/PutBucketLoggingCommand"; +import { + PutBucketMetricsConfigurationCommandInput, + PutBucketMetricsConfigurationCommandOutput, +} from "./commands/PutBucketMetricsConfigurationCommand"; +import { + PutBucketNotificationConfigurationCommandInput, + PutBucketNotificationConfigurationCommandOutput, +} from "./commands/PutBucketNotificationConfigurationCommand"; +import { + PutBucketOwnershipControlsCommandInput, + PutBucketOwnershipControlsCommandOutput, +} from "./commands/PutBucketOwnershipControlsCommand"; +import { + PutBucketPolicyCommandInput, + 
PutBucketPolicyCommandOutput, +} from "./commands/PutBucketPolicyCommand"; +import { + PutBucketReplicationCommandInput, + PutBucketReplicationCommandOutput, +} from "./commands/PutBucketReplicationCommand"; +import { + PutBucketRequestPaymentCommandInput, + PutBucketRequestPaymentCommandOutput, +} from "./commands/PutBucketRequestPaymentCommand"; +import { + PutBucketTaggingCommandInput, + PutBucketTaggingCommandOutput, +} from "./commands/PutBucketTaggingCommand"; +import { + PutBucketVersioningCommandInput, + PutBucketVersioningCommandOutput, +} from "./commands/PutBucketVersioningCommand"; +import { + PutBucketWebsiteCommandInput, + PutBucketWebsiteCommandOutput, +} from "./commands/PutBucketWebsiteCommand"; +import { + PutObjectAclCommandInput, + PutObjectAclCommandOutput, +} from "./commands/PutObjectAclCommand"; +import { + PutObjectCommandInput, + PutObjectCommandOutput, +} from "./commands/PutObjectCommand"; +import { + PutObjectLegalHoldCommandInput, + PutObjectLegalHoldCommandOutput, +} from "./commands/PutObjectLegalHoldCommand"; +import { + PutObjectLockConfigurationCommandInput, + PutObjectLockConfigurationCommandOutput, +} from "./commands/PutObjectLockConfigurationCommand"; +import { + PutObjectRetentionCommandInput, + PutObjectRetentionCommandOutput, +} from "./commands/PutObjectRetentionCommand"; +import { + PutObjectTaggingCommandInput, + PutObjectTaggingCommandOutput, +} from "./commands/PutObjectTaggingCommand"; +import { + PutPublicAccessBlockCommandInput, + PutPublicAccessBlockCommandOutput, +} from "./commands/PutPublicAccessBlockCommand"; +import { + RestoreObjectCommandInput, + RestoreObjectCommandOutput, +} from "./commands/RestoreObjectCommand"; +import { + SelectObjectContentCommandInput, + SelectObjectContentCommandOutput, +} from "./commands/SelectObjectContentCommand"; +import { + UploadPartCommandInput, + UploadPartCommandOutput, +} from "./commands/UploadPartCommand"; +import { + UploadPartCopyCommandInput, + UploadPartCopyCommandOutput, +} from "./commands/UploadPartCopyCommand"; +import { + WriteGetObjectResponseCommandInput, + WriteGetObjectResponseCommandOutput, +} from "./commands/WriteGetObjectResponseCommand"; +import { + ClientInputEndpointParameters, + ClientResolvedEndpointParameters, + EndpointParameters, +} from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +export type ServiceInputTypes = + | AbortMultipartUploadCommandInput + | CompleteMultipartUploadCommandInput + | CopyObjectCommandInput + | CreateBucketCommandInput + | CreateBucketMetadataTableConfigurationCommandInput + | CreateMultipartUploadCommandInput + | CreateSessionCommandInput + | DeleteBucketAnalyticsConfigurationCommandInput + | DeleteBucketCommandInput + | DeleteBucketCorsCommandInput + | DeleteBucketEncryptionCommandInput + | DeleteBucketIntelligentTieringConfigurationCommandInput + | DeleteBucketInventoryConfigurationCommandInput + | DeleteBucketLifecycleCommandInput + | DeleteBucketMetadataTableConfigurationCommandInput + | DeleteBucketMetricsConfigurationCommandInput + | DeleteBucketOwnershipControlsCommandInput + | DeleteBucketPolicyCommandInput + | DeleteBucketReplicationCommandInput + | DeleteBucketTaggingCommandInput + | DeleteBucketWebsiteCommandInput + | DeleteObjectCommandInput + | DeleteObjectTaggingCommandInput + | DeleteObjectsCommandInput + | DeletePublicAccessBlockCommandInput + | GetBucketAccelerateConfigurationCommandInput + | GetBucketAclCommandInput + | 
GetBucketAnalyticsConfigurationCommandInput + | GetBucketCorsCommandInput + | GetBucketEncryptionCommandInput + | GetBucketIntelligentTieringConfigurationCommandInput + | GetBucketInventoryConfigurationCommandInput + | GetBucketLifecycleConfigurationCommandInput + | GetBucketLocationCommandInput + | GetBucketLoggingCommandInput + | GetBucketMetadataTableConfigurationCommandInput + | GetBucketMetricsConfigurationCommandInput + | GetBucketNotificationConfigurationCommandInput + | GetBucketOwnershipControlsCommandInput + | GetBucketPolicyCommandInput + | GetBucketPolicyStatusCommandInput + | GetBucketReplicationCommandInput + | GetBucketRequestPaymentCommandInput + | GetBucketTaggingCommandInput + | GetBucketVersioningCommandInput + | GetBucketWebsiteCommandInput + | GetObjectAclCommandInput + | GetObjectAttributesCommandInput + | GetObjectCommandInput + | GetObjectLegalHoldCommandInput + | GetObjectLockConfigurationCommandInput + | GetObjectRetentionCommandInput + | GetObjectTaggingCommandInput + | GetObjectTorrentCommandInput + | GetPublicAccessBlockCommandInput + | HeadBucketCommandInput + | HeadObjectCommandInput + | ListBucketAnalyticsConfigurationsCommandInput + | ListBucketIntelligentTieringConfigurationsCommandInput + | ListBucketInventoryConfigurationsCommandInput + | ListBucketMetricsConfigurationsCommandInput + | ListBucketsCommandInput + | ListDirectoryBucketsCommandInput + | ListMultipartUploadsCommandInput + | ListObjectVersionsCommandInput + | ListObjectsCommandInput + | ListObjectsV2CommandInput + | ListPartsCommandInput + | PutBucketAccelerateConfigurationCommandInput + | PutBucketAclCommandInput + | PutBucketAnalyticsConfigurationCommandInput + | PutBucketCorsCommandInput + | PutBucketEncryptionCommandInput + | PutBucketIntelligentTieringConfigurationCommandInput + | PutBucketInventoryConfigurationCommandInput + | PutBucketLifecycleConfigurationCommandInput + | PutBucketLoggingCommandInput + | PutBucketMetricsConfigurationCommandInput + | PutBucketNotificationConfigurationCommandInput + | PutBucketOwnershipControlsCommandInput + | PutBucketPolicyCommandInput + | PutBucketReplicationCommandInput + | PutBucketRequestPaymentCommandInput + | PutBucketTaggingCommandInput + | PutBucketVersioningCommandInput + | PutBucketWebsiteCommandInput + | PutObjectAclCommandInput + | PutObjectCommandInput + | PutObjectLegalHoldCommandInput + | PutObjectLockConfigurationCommandInput + | PutObjectRetentionCommandInput + | PutObjectTaggingCommandInput + | PutPublicAccessBlockCommandInput + | RestoreObjectCommandInput + | SelectObjectContentCommandInput + | UploadPartCommandInput + | UploadPartCopyCommandInput + | WriteGetObjectResponseCommandInput; +export type ServiceOutputTypes = + | AbortMultipartUploadCommandOutput + | CompleteMultipartUploadCommandOutput + | CopyObjectCommandOutput + | CreateBucketCommandOutput + | CreateBucketMetadataTableConfigurationCommandOutput + | CreateMultipartUploadCommandOutput + | CreateSessionCommandOutput + | DeleteBucketAnalyticsConfigurationCommandOutput + | DeleteBucketCommandOutput + | DeleteBucketCorsCommandOutput + | DeleteBucketEncryptionCommandOutput + | DeleteBucketIntelligentTieringConfigurationCommandOutput + | DeleteBucketInventoryConfigurationCommandOutput + | DeleteBucketLifecycleCommandOutput + | DeleteBucketMetadataTableConfigurationCommandOutput + | DeleteBucketMetricsConfigurationCommandOutput + | DeleteBucketOwnershipControlsCommandOutput + | DeleteBucketPolicyCommandOutput + | DeleteBucketReplicationCommandOutput + | 
DeleteBucketTaggingCommandOutput + | DeleteBucketWebsiteCommandOutput + | DeleteObjectCommandOutput + | DeleteObjectTaggingCommandOutput + | DeleteObjectsCommandOutput + | DeletePublicAccessBlockCommandOutput + | GetBucketAccelerateConfigurationCommandOutput + | GetBucketAclCommandOutput + | GetBucketAnalyticsConfigurationCommandOutput + | GetBucketCorsCommandOutput + | GetBucketEncryptionCommandOutput + | GetBucketIntelligentTieringConfigurationCommandOutput + | GetBucketInventoryConfigurationCommandOutput + | GetBucketLifecycleConfigurationCommandOutput + | GetBucketLocationCommandOutput + | GetBucketLoggingCommandOutput + | GetBucketMetadataTableConfigurationCommandOutput + | GetBucketMetricsConfigurationCommandOutput + | GetBucketNotificationConfigurationCommandOutput + | GetBucketOwnershipControlsCommandOutput + | GetBucketPolicyCommandOutput + | GetBucketPolicyStatusCommandOutput + | GetBucketReplicationCommandOutput + | GetBucketRequestPaymentCommandOutput + | GetBucketTaggingCommandOutput + | GetBucketVersioningCommandOutput + | GetBucketWebsiteCommandOutput + | GetObjectAclCommandOutput + | GetObjectAttributesCommandOutput + | GetObjectCommandOutput + | GetObjectLegalHoldCommandOutput + | GetObjectLockConfigurationCommandOutput + | GetObjectRetentionCommandOutput + | GetObjectTaggingCommandOutput + | GetObjectTorrentCommandOutput + | GetPublicAccessBlockCommandOutput + | HeadBucketCommandOutput + | HeadObjectCommandOutput + | ListBucketAnalyticsConfigurationsCommandOutput + | ListBucketIntelligentTieringConfigurationsCommandOutput + | ListBucketInventoryConfigurationsCommandOutput + | ListBucketMetricsConfigurationsCommandOutput + | ListBucketsCommandOutput + | ListDirectoryBucketsCommandOutput + | ListMultipartUploadsCommandOutput + | ListObjectVersionsCommandOutput + | ListObjectsCommandOutput + | ListObjectsV2CommandOutput + | ListPartsCommandOutput + | PutBucketAccelerateConfigurationCommandOutput + | PutBucketAclCommandOutput + | PutBucketAnalyticsConfigurationCommandOutput + | PutBucketCorsCommandOutput + | PutBucketEncryptionCommandOutput + | PutBucketIntelligentTieringConfigurationCommandOutput + | PutBucketInventoryConfigurationCommandOutput + | PutBucketLifecycleConfigurationCommandOutput + | PutBucketLoggingCommandOutput + | PutBucketMetricsConfigurationCommandOutput + | PutBucketNotificationConfigurationCommandOutput + | PutBucketOwnershipControlsCommandOutput + | PutBucketPolicyCommandOutput + | PutBucketReplicationCommandOutput + | PutBucketRequestPaymentCommandOutput + | PutBucketTaggingCommandOutput + | PutBucketVersioningCommandOutput + | PutBucketWebsiteCommandOutput + | PutObjectAclCommandOutput + | PutObjectCommandOutput + | PutObjectLegalHoldCommandOutput + | PutObjectLockConfigurationCommandOutput + | PutObjectRetentionCommandOutput + | PutObjectTaggingCommandOutput + | PutPublicAccessBlockCommandOutput + | RestoreObjectCommandOutput + | SelectObjectContentCommandOutput + | UploadPartCommandOutput + | UploadPartCopyCommandOutput + | WriteGetObjectResponseCommandOutput; +export interface ClientDefaults + extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + requestHandler?: __HttpHandlerUserInput; + sha256?: __ChecksumConstructor | __HashConstructor; + urlParser?: __UrlParser; + bodyLengthChecker?: __BodyLengthCalculator; + streamCollector?: __StreamCollector; + base64Decoder?: __Decoder; + base64Encoder?: __Encoder; + utf8Decoder?: __Decoder; + utf8Encoder?: __Encoder; + runtime?: string; + disableHostPrefix?: boolean; + serviceId?: string; + 
useDualstackEndpoint?: boolean | __Provider<boolean>;
+  useFipsEndpoint?: boolean | __Provider<boolean>;
+  region?: string | __Provider<string>;
+  profile?: string;
+  defaultUserAgentProvider?: Provider<__UserAgent>;
+  streamHasher?: __StreamHasher<Readable> | __StreamHasher<Blob>;
+  md5?: __ChecksumConstructor | __HashConstructor;
+  sha1?: __ChecksumConstructor | __HashConstructor;
+  getAwsChunkedEncodingStream?: GetAwsChunkedEncodingStream;
+  credentialDefaultProvider?: (input: any) => AwsCredentialIdentityProvider;
+  maxAttempts?: number | __Provider<number>;
+  retryMode?: string | __Provider<string>;
+  logger?: __Logger;
+  extensions?: RuntimeExtension[];
+  eventStreamSerdeProvider?: __EventStreamSerdeProvider;
+  defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>;
+  signingEscapePath?: boolean;
+  useArnRegion?: boolean | Provider<boolean>;
+  sdkStreamMixin?: __SdkStreamMixinInjector;
+}
+export type S3ClientConfigType = Partial<
+  __SmithyConfiguration<__HttpHandlerOptions>
+> &
+  ClientDefaults &
+  UserAgentInputConfig &
+  FlexibleChecksumsInputConfig &
+  RetryInputConfig &
+  RegionInputConfig &
+  HostHeaderInputConfig &
+  EndpointInputConfig &
+  EventStreamSerdeInputConfig &
+  HttpAuthSchemeInputConfig &
+  S3InputConfig &
+  ClientInputEndpointParameters;
+export interface S3ClientConfig extends S3ClientConfigType {}
+export type S3ClientResolvedConfigType =
+  __SmithyResolvedConfiguration<__HttpHandlerOptions> &
+    Required<ClientDefaults> &
+    RuntimeExtensionsConfig &
+    UserAgentResolvedConfig &
+    FlexibleChecksumsResolvedConfig &
+    RetryResolvedConfig &
+    RegionResolvedConfig &
+    HostHeaderResolvedConfig &
+    EndpointResolvedConfig &
+    EventStreamSerdeResolvedConfig &
+    HttpAuthSchemeResolvedConfig &
+    S3ResolvedConfig &
+    ClientResolvedEndpointParameters;
+export interface S3ClientResolvedConfig extends S3ClientResolvedConfigType {}
+export declare class S3Client extends __Client<
+  __HttpHandlerOptions,
+  ServiceInputTypes,
+  ServiceOutputTypes,
+  S3ClientResolvedConfig
+> {
+  readonly config: S3ClientResolvedConfig;
+  constructor(...[configuration]: __CheckOptionalClientConfig<S3ClientConfig>);
+  destroy(): void;
+}
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts
new file mode 100644
index 00000000..9a004494
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts
@@ -0,0 +1,32 @@
+import {
+  AwsCredentialIdentity,
+  AwsCredentialIdentityProvider,
+  HttpAuthScheme,
+} from "@smithy/types";
+import { S3HttpAuthSchemeProvider } from "./httpAuthSchemeProvider";
+export interface HttpAuthExtensionConfiguration {
+  setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void;
+  httpAuthSchemes(): HttpAuthScheme[];
+  setHttpAuthSchemeProvider(
+    httpAuthSchemeProvider: S3HttpAuthSchemeProvider
+  ): void;
+  httpAuthSchemeProvider(): S3HttpAuthSchemeProvider;
+  setCredentials(
+    credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider
+  ): void;
+  credentials():
+    | AwsCredentialIdentity
+    | AwsCredentialIdentityProvider
+    | undefined;
+}
+export type HttpAuthRuntimeConfig = Partial<{
+  httpAuthSchemes: HttpAuthScheme[];
+  httpAuthSchemeProvider: S3HttpAuthSchemeProvider;
+  credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider;
+}>;
+export declare const getHttpAuthExtensionConfiguration: (
+  runtimeConfig: HttpAuthRuntimeConfig
+) => HttpAuthExtensionConfiguration;
+export declare const resolveHttpAuthRuntimeConfig: (
+  config: HttpAuthExtensionConfiguration
+) => HttpAuthRuntimeConfig;
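(Editorial aside.) The S3ClientConfig surface above is what this migration feeds a MinIO endpoint into. The following is a minimal sketch only, not code from this diff: the MINIO_ENDPOINT / MINIO_ACCESS_KEY / MINIO_SECRET_KEY variable names and bucket/key values are hypothetical; region, endpoint, forcePathStyle (via S3InputConfig), and credentials are real S3ClientConfig fields.

import { GetObjectCommand, S3Client } from "@aws-sdk/client-s3";

const s3 = new S3Client({
  region: "us-east-1", // MinIO largely ignores the region, but the SDK requires one
  endpoint: process.env.MINIO_ENDPOINT, // e.g. "http://localhost:9000" (assumed env var)
  forcePathStyle: true, // MinIO addresses buckets by path, not by subdomain
  credentials: {
    accessKeyId: process.env.MINIO_ACCESS_KEY!,
    secretAccessKey: process.env.MINIO_SECRET_KEY!,
  },
});

async function readObject(bucket: string, key: string): Promise<string | undefined> {
  // Each *Command class (e.g. the AbortMultipartUploadCommand declared below)
  // is dispatched through the generic send() inherited from __Client.
  const out = await s3.send(new GetObjectCommand({ Bucket: bucket, Key: key }));
  return out.Body?.transformToString(); // sdkStreamMixin adds transformToString()
}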
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts
new file mode 100644
index 00000000..1052d4ec
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts
@@ -0,0 +1,58 @@
+import {
+  AwsSdkSigV4AAuthInputConfig,
+  AwsSdkSigV4AAuthResolvedConfig,
+  AwsSdkSigV4APreviouslyResolved,
+  AwsSdkSigV4AuthInputConfig,
+  AwsSdkSigV4AuthResolvedConfig,
+  AwsSdkSigV4PreviouslyResolved,
+} from "@aws-sdk/core";
+import {
+  HandlerExecutionContext,
+  HttpAuthScheme,
+  HttpAuthSchemeParameters,
+  HttpAuthSchemeParametersProvider,
+  HttpAuthSchemeProvider,
+  Provider,
+} from "@smithy/types";
+import { EndpointParameters } from "../endpoint/EndpointParameters";
+import { S3ClientResolvedConfig } from "../S3Client";
+interface _S3HttpAuthSchemeParameters extends HttpAuthSchemeParameters {
+  region?: string;
+}
+export interface S3HttpAuthSchemeParameters
+  extends _S3HttpAuthSchemeParameters,
+    EndpointParameters {
+  region?: string;
+}
+export interface S3HttpAuthSchemeParametersProvider
+  extends HttpAuthSchemeParametersProvider<
+    S3ClientResolvedConfig,
+    HandlerExecutionContext,
+    S3HttpAuthSchemeParameters,
+    object
+  > {}
+export declare const defaultS3HttpAuthSchemeParametersProvider: S3HttpAuthSchemeParametersProvider;
+export interface S3HttpAuthSchemeProvider
+  extends HttpAuthSchemeProvider<S3HttpAuthSchemeParameters> {}
+export declare const defaultS3HttpAuthSchemeProvider: S3HttpAuthSchemeProvider;
+export interface HttpAuthSchemeInputConfig
+  extends AwsSdkSigV4AuthInputConfig,
+    AwsSdkSigV4AAuthInputConfig {
+  authSchemePreference?: string[] | Provider<string[]>;
+  httpAuthSchemes?: HttpAuthScheme[];
+  httpAuthSchemeProvider?: S3HttpAuthSchemeProvider;
+}
+export interface HttpAuthSchemeResolvedConfig
+  extends AwsSdkSigV4AuthResolvedConfig,
+    AwsSdkSigV4AAuthResolvedConfig {
+  readonly authSchemePreference: Provider<string[]>;
+  readonly httpAuthSchemes: HttpAuthScheme[];
+  readonly httpAuthSchemeProvider: S3HttpAuthSchemeProvider;
+}
+export declare const resolveHttpAuthSchemeConfig: <T>(
+  config: T &
+    HttpAuthSchemeInputConfig &
+    AwsSdkSigV4PreviouslyResolved &
+    AwsSdkSigV4APreviouslyResolved
+) => T & HttpAuthSchemeResolvedConfig;
+export {};
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/AbortMultipartUploadCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/AbortMultipartUploadCommand.d.ts
new file mode 100644
index 00000000..3cfb7f28
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/AbortMultipartUploadCommand.d.ts
@@ -0,0 +1,51 @@
+import { Command as $Command } from "@smithy/smithy-client";
+import { MetadataBearer as __MetadataBearer } from "@smithy/types";
+import {
+  AbortMultipartUploadOutput,
+  AbortMultipartUploadRequest,
+} from "../models/models_0";
+import {
+  S3ClientResolvedConfig,
+  ServiceInputTypes,
+  ServiceOutputTypes,
+} from "../S3Client";
+export { __MetadataBearer };
+export { $Command };
+export interface AbortMultipartUploadCommandInput
+  extends AbortMultipartUploadRequest {}
+export interface AbortMultipartUploadCommandOutput
+  extends AbortMultipartUploadOutput,
+    __MetadataBearer {}
+declare const AbortMultipartUploadCommand_base: {
+  new (
+    input: AbortMultipartUploadCommandInput
+  ): import("@smithy/smithy-client").CommandImpl<
+    AbortMultipartUploadCommandInput,
+    AbortMultipartUploadCommandOutput,
+    S3ClientResolvedConfig,
ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: AbortMultipartUploadCommandInput + ): import("@smithy/smithy-client").CommandImpl< + AbortMultipartUploadCommandInput, + AbortMultipartUploadCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class AbortMultipartUploadCommand extends AbortMultipartUploadCommand_base { + protected static __types: { + api: { + input: AbortMultipartUploadRequest; + output: AbortMultipartUploadOutput; + }; + sdk: { + input: AbortMultipartUploadCommandInput; + output: AbortMultipartUploadCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/CompleteMultipartUploadCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/CompleteMultipartUploadCommand.d.ts new file mode 100644 index 00000000..81fcfafc --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/CompleteMultipartUploadCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + CompleteMultipartUploadOutput, + CompleteMultipartUploadRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface CompleteMultipartUploadCommandInput + extends CompleteMultipartUploadRequest {} +export interface CompleteMultipartUploadCommandOutput + extends CompleteMultipartUploadOutput, + __MetadataBearer {} +declare const CompleteMultipartUploadCommand_base: { + new ( + input: CompleteMultipartUploadCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CompleteMultipartUploadCommandInput, + CompleteMultipartUploadCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: CompleteMultipartUploadCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CompleteMultipartUploadCommandInput, + CompleteMultipartUploadCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class CompleteMultipartUploadCommand extends CompleteMultipartUploadCommand_base { + protected static __types: { + api: { + input: CompleteMultipartUploadRequest; + output: CompleteMultipartUploadOutput; + }; + sdk: { + input: CompleteMultipartUploadCommandInput; + output: CompleteMultipartUploadCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/CopyObjectCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/CopyObjectCommand.d.ts new file mode 100644 index 00000000..e240ec51 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/CopyObjectCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { CopyObjectOutput, CopyObjectRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface CopyObjectCommandInput extends CopyObjectRequest {} +export interface CopyObjectCommandOutput + extends 
CopyObjectOutput, + __MetadataBearer {} +declare const CopyObjectCommand_base: { + new ( + input: CopyObjectCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CopyObjectCommandInput, + CopyObjectCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: CopyObjectCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CopyObjectCommandInput, + CopyObjectCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class CopyObjectCommand extends CopyObjectCommand_base { + protected static __types: { + api: { + input: CopyObjectRequest; + output: CopyObjectOutput; + }; + sdk: { + input: CopyObjectCommandInput; + output: CopyObjectCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/CreateBucketCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/CreateBucketCommand.d.ts new file mode 100644 index 00000000..077b42df --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/CreateBucketCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { CreateBucketOutput, CreateBucketRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface CreateBucketCommandInput extends CreateBucketRequest {} +export interface CreateBucketCommandOutput + extends CreateBucketOutput, + __MetadataBearer {} +declare const CreateBucketCommand_base: { + new ( + input: CreateBucketCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateBucketCommandInput, + CreateBucketCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: CreateBucketCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateBucketCommandInput, + CreateBucketCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class CreateBucketCommand extends CreateBucketCommand_base { + protected static __types: { + api: { + input: CreateBucketRequest; + output: CreateBucketOutput; + }; + sdk: { + input: CreateBucketCommandInput; + output: CreateBucketCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/CreateBucketMetadataTableConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/CreateBucketMetadataTableConfigurationCommand.d.ts new file mode 100644 index 00000000..7c6bebad --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/CreateBucketMetadataTableConfigurationCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { CreateBucketMetadataTableConfigurationRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface CreateBucketMetadataTableConfigurationCommandInput + extends 
CreateBucketMetadataTableConfigurationRequest {} +export interface CreateBucketMetadataTableConfigurationCommandOutput + extends __MetadataBearer {} +declare const CreateBucketMetadataTableConfigurationCommand_base: { + new ( + input: CreateBucketMetadataTableConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateBucketMetadataTableConfigurationCommandInput, + CreateBucketMetadataTableConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: CreateBucketMetadataTableConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateBucketMetadataTableConfigurationCommandInput, + CreateBucketMetadataTableConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class CreateBucketMetadataTableConfigurationCommand extends CreateBucketMetadataTableConfigurationCommand_base { + protected static __types: { + api: { + input: CreateBucketMetadataTableConfigurationRequest; + output: {}; + }; + sdk: { + input: CreateBucketMetadataTableConfigurationCommandInput; + output: CreateBucketMetadataTableConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/CreateMultipartUploadCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/CreateMultipartUploadCommand.d.ts new file mode 100644 index 00000000..5cae25e4 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/CreateMultipartUploadCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + CreateMultipartUploadOutput, + CreateMultipartUploadRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface CreateMultipartUploadCommandInput + extends CreateMultipartUploadRequest {} +export interface CreateMultipartUploadCommandOutput + extends CreateMultipartUploadOutput, + __MetadataBearer {} +declare const CreateMultipartUploadCommand_base: { + new ( + input: CreateMultipartUploadCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateMultipartUploadCommandInput, + CreateMultipartUploadCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: CreateMultipartUploadCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateMultipartUploadCommandInput, + CreateMultipartUploadCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class CreateMultipartUploadCommand extends CreateMultipartUploadCommand_base { + protected static __types: { + api: { + input: CreateMultipartUploadRequest; + output: CreateMultipartUploadOutput; + }; + sdk: { + input: CreateMultipartUploadCommandInput; + output: CreateMultipartUploadCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/CreateSessionCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/CreateSessionCommand.d.ts new file mode 100644 index 00000000..d9e7aec8 --- /dev/null +++ 
b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/CreateSessionCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { CreateSessionOutput, CreateSessionRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface CreateSessionCommandInput extends CreateSessionRequest {} +export interface CreateSessionCommandOutput + extends CreateSessionOutput, + __MetadataBearer {} +declare const CreateSessionCommand_base: { + new ( + input: CreateSessionCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateSessionCommandInput, + CreateSessionCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: CreateSessionCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateSessionCommandInput, + CreateSessionCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class CreateSessionCommand extends CreateSessionCommand_base { + protected static __types: { + api: { + input: CreateSessionRequest; + output: CreateSessionOutput; + }; + sdk: { + input: CreateSessionCommandInput; + output: CreateSessionCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketAnalyticsConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketAnalyticsConfigurationCommand.d.ts new file mode 100644 index 00000000..baa37e0f --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketAnalyticsConfigurationCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketAnalyticsConfigurationRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteBucketAnalyticsConfigurationCommandInput + extends DeleteBucketAnalyticsConfigurationRequest {} +export interface DeleteBucketAnalyticsConfigurationCommandOutput + extends __MetadataBearer {} +declare const DeleteBucketAnalyticsConfigurationCommand_base: { + new ( + input: DeleteBucketAnalyticsConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketAnalyticsConfigurationCommandInput, + DeleteBucketAnalyticsConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteBucketAnalyticsConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketAnalyticsConfigurationCommandInput, + DeleteBucketAnalyticsConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteBucketAnalyticsConfigurationCommand extends DeleteBucketAnalyticsConfigurationCommand_base { + protected static __types: { + api: { + input: DeleteBucketAnalyticsConfigurationRequest; + output: {}; + }; + sdk: { + input: 
DeleteBucketAnalyticsConfigurationCommandInput; + output: DeleteBucketAnalyticsConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketCommand.d.ts new file mode 100644 index 00000000..7cb2f99a --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketCommand.d.ts @@ -0,0 +1,45 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteBucketCommandInput extends DeleteBucketRequest {} +export interface DeleteBucketCommandOutput extends __MetadataBearer {} +declare const DeleteBucketCommand_base: { + new ( + input: DeleteBucketCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketCommandInput, + DeleteBucketCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteBucketCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketCommandInput, + DeleteBucketCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteBucketCommand extends DeleteBucketCommand_base { + protected static __types: { + api: { + input: DeleteBucketRequest; + output: {}; + }; + sdk: { + input: DeleteBucketCommandInput; + output: DeleteBucketCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketCorsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketCorsCommand.d.ts new file mode 100644 index 00000000..51fe8596 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketCorsCommand.d.ts @@ -0,0 +1,45 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketCorsRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteBucketCorsCommandInput extends DeleteBucketCorsRequest {} +export interface DeleteBucketCorsCommandOutput extends __MetadataBearer {} +declare const DeleteBucketCorsCommand_base: { + new ( + input: DeleteBucketCorsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketCorsCommandInput, + DeleteBucketCorsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteBucketCorsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketCorsCommandInput, + DeleteBucketCorsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteBucketCorsCommand extends DeleteBucketCorsCommand_base { + protected static __types: { + api: { + input: DeleteBucketCorsRequest; + output: {}; + }; + sdk: { + input: DeleteBucketCorsCommandInput; + output: 
DeleteBucketCorsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketEncryptionCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketEncryptionCommand.d.ts new file mode 100644 index 00000000..c65fc973 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketEncryptionCommand.d.ts @@ -0,0 +1,46 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketEncryptionRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteBucketEncryptionCommandInput + extends DeleteBucketEncryptionRequest {} +export interface DeleteBucketEncryptionCommandOutput extends __MetadataBearer {} +declare const DeleteBucketEncryptionCommand_base: { + new ( + input: DeleteBucketEncryptionCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketEncryptionCommandInput, + DeleteBucketEncryptionCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteBucketEncryptionCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketEncryptionCommandInput, + DeleteBucketEncryptionCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteBucketEncryptionCommand extends DeleteBucketEncryptionCommand_base { + protected static __types: { + api: { + input: DeleteBucketEncryptionRequest; + output: {}; + }; + sdk: { + input: DeleteBucketEncryptionCommandInput; + output: DeleteBucketEncryptionCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketIntelligentTieringConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketIntelligentTieringConfigurationCommand.d.ts new file mode 100644 index 00000000..e5fe3556 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketIntelligentTieringConfigurationCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketIntelligentTieringConfigurationRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteBucketIntelligentTieringConfigurationCommandInput + extends DeleteBucketIntelligentTieringConfigurationRequest {} +export interface DeleteBucketIntelligentTieringConfigurationCommandOutput + extends __MetadataBearer {} +declare const DeleteBucketIntelligentTieringConfigurationCommand_base: { + new ( + input: DeleteBucketIntelligentTieringConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketIntelligentTieringConfigurationCommandInput, + DeleteBucketIntelligentTieringConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteBucketIntelligentTieringConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + 
DeleteBucketIntelligentTieringConfigurationCommandInput, + DeleteBucketIntelligentTieringConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteBucketIntelligentTieringConfigurationCommand extends DeleteBucketIntelligentTieringConfigurationCommand_base { + protected static __types: { + api: { + input: DeleteBucketIntelligentTieringConfigurationRequest; + output: {}; + }; + sdk: { + input: DeleteBucketIntelligentTieringConfigurationCommandInput; + output: DeleteBucketIntelligentTieringConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketInventoryConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketInventoryConfigurationCommand.d.ts new file mode 100644 index 00000000..528d8114 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketInventoryConfigurationCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketInventoryConfigurationRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteBucketInventoryConfigurationCommandInput + extends DeleteBucketInventoryConfigurationRequest {} +export interface DeleteBucketInventoryConfigurationCommandOutput + extends __MetadataBearer {} +declare const DeleteBucketInventoryConfigurationCommand_base: { + new ( + input: DeleteBucketInventoryConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketInventoryConfigurationCommandInput, + DeleteBucketInventoryConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteBucketInventoryConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketInventoryConfigurationCommandInput, + DeleteBucketInventoryConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteBucketInventoryConfigurationCommand extends DeleteBucketInventoryConfigurationCommand_base { + protected static __types: { + api: { + input: DeleteBucketInventoryConfigurationRequest; + output: {}; + }; + sdk: { + input: DeleteBucketInventoryConfigurationCommandInput; + output: DeleteBucketInventoryConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketLifecycleCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketLifecycleCommand.d.ts new file mode 100644 index 00000000..c24155ab --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketLifecycleCommand.d.ts @@ -0,0 +1,46 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketLifecycleRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command 
}; +export interface DeleteBucketLifecycleCommandInput + extends DeleteBucketLifecycleRequest {} +export interface DeleteBucketLifecycleCommandOutput extends __MetadataBearer {} +declare const DeleteBucketLifecycleCommand_base: { + new ( + input: DeleteBucketLifecycleCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketLifecycleCommandInput, + DeleteBucketLifecycleCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteBucketLifecycleCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketLifecycleCommandInput, + DeleteBucketLifecycleCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteBucketLifecycleCommand extends DeleteBucketLifecycleCommand_base { + protected static __types: { + api: { + input: DeleteBucketLifecycleRequest; + output: {}; + }; + sdk: { + input: DeleteBucketLifecycleCommandInput; + output: DeleteBucketLifecycleCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketMetadataTableConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketMetadataTableConfigurationCommand.d.ts new file mode 100644 index 00000000..d6d5bf80 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketMetadataTableConfigurationCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketMetadataTableConfigurationRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteBucketMetadataTableConfigurationCommandInput + extends DeleteBucketMetadataTableConfigurationRequest {} +export interface DeleteBucketMetadataTableConfigurationCommandOutput + extends __MetadataBearer {} +declare const DeleteBucketMetadataTableConfigurationCommand_base: { + new ( + input: DeleteBucketMetadataTableConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketMetadataTableConfigurationCommandInput, + DeleteBucketMetadataTableConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteBucketMetadataTableConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketMetadataTableConfigurationCommandInput, + DeleteBucketMetadataTableConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteBucketMetadataTableConfigurationCommand extends DeleteBucketMetadataTableConfigurationCommand_base { + protected static __types: { + api: { + input: DeleteBucketMetadataTableConfigurationRequest; + output: {}; + }; + sdk: { + input: DeleteBucketMetadataTableConfigurationCommandInput; + output: DeleteBucketMetadataTableConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketMetricsConfigurationCommand.d.ts 
b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketMetricsConfigurationCommand.d.ts new file mode 100644 index 00000000..e502ab82 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketMetricsConfigurationCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketMetricsConfigurationRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteBucketMetricsConfigurationCommandInput + extends DeleteBucketMetricsConfigurationRequest {} +export interface DeleteBucketMetricsConfigurationCommandOutput + extends __MetadataBearer {} +declare const DeleteBucketMetricsConfigurationCommand_base: { + new ( + input: DeleteBucketMetricsConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketMetricsConfigurationCommandInput, + DeleteBucketMetricsConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteBucketMetricsConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketMetricsConfigurationCommandInput, + DeleteBucketMetricsConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteBucketMetricsConfigurationCommand extends DeleteBucketMetricsConfigurationCommand_base { + protected static __types: { + api: { + input: DeleteBucketMetricsConfigurationRequest; + output: {}; + }; + sdk: { + input: DeleteBucketMetricsConfigurationCommandInput; + output: DeleteBucketMetricsConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketOwnershipControlsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketOwnershipControlsCommand.d.ts new file mode 100644 index 00000000..ef0f6f58 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketOwnershipControlsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketOwnershipControlsRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteBucketOwnershipControlsCommandInput + extends DeleteBucketOwnershipControlsRequest {} +export interface DeleteBucketOwnershipControlsCommandOutput + extends __MetadataBearer {} +declare const DeleteBucketOwnershipControlsCommand_base: { + new ( + input: DeleteBucketOwnershipControlsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketOwnershipControlsCommandInput, + DeleteBucketOwnershipControlsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteBucketOwnershipControlsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketOwnershipControlsCommandInput, + DeleteBucketOwnershipControlsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + 
getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteBucketOwnershipControlsCommand extends DeleteBucketOwnershipControlsCommand_base { + protected static __types: { + api: { + input: DeleteBucketOwnershipControlsRequest; + output: {}; + }; + sdk: { + input: DeleteBucketOwnershipControlsCommandInput; + output: DeleteBucketOwnershipControlsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketPolicyCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketPolicyCommand.d.ts new file mode 100644 index 00000000..bdc466dd --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketPolicyCommand.d.ts @@ -0,0 +1,46 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketPolicyRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteBucketPolicyCommandInput + extends DeleteBucketPolicyRequest {} +export interface DeleteBucketPolicyCommandOutput extends __MetadataBearer {} +declare const DeleteBucketPolicyCommand_base: { + new ( + input: DeleteBucketPolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketPolicyCommandInput, + DeleteBucketPolicyCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteBucketPolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketPolicyCommandInput, + DeleteBucketPolicyCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteBucketPolicyCommand extends DeleteBucketPolicyCommand_base { + protected static __types: { + api: { + input: DeleteBucketPolicyRequest; + output: {}; + }; + sdk: { + input: DeleteBucketPolicyCommandInput; + output: DeleteBucketPolicyCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketReplicationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketReplicationCommand.d.ts new file mode 100644 index 00000000..3579ebc4 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketReplicationCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketReplicationRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteBucketReplicationCommandInput + extends DeleteBucketReplicationRequest {} +export interface DeleteBucketReplicationCommandOutput + extends __MetadataBearer {} +declare const DeleteBucketReplicationCommand_base: { + new ( + input: DeleteBucketReplicationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketReplicationCommandInput, + DeleteBucketReplicationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteBucketReplicationCommandInput + ): 
import("@smithy/smithy-client").CommandImpl< + DeleteBucketReplicationCommandInput, + DeleteBucketReplicationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteBucketReplicationCommand extends DeleteBucketReplicationCommand_base { + protected static __types: { + api: { + input: DeleteBucketReplicationRequest; + output: {}; + }; + sdk: { + input: DeleteBucketReplicationCommandInput; + output: DeleteBucketReplicationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketTaggingCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketTaggingCommand.d.ts new file mode 100644 index 00000000..d9026f48 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketTaggingCommand.d.ts @@ -0,0 +1,46 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketTaggingRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteBucketTaggingCommandInput + extends DeleteBucketTaggingRequest {} +export interface DeleteBucketTaggingCommandOutput extends __MetadataBearer {} +declare const DeleteBucketTaggingCommand_base: { + new ( + input: DeleteBucketTaggingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketTaggingCommandInput, + DeleteBucketTaggingCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteBucketTaggingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketTaggingCommandInput, + DeleteBucketTaggingCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteBucketTaggingCommand extends DeleteBucketTaggingCommand_base { + protected static __types: { + api: { + input: DeleteBucketTaggingRequest; + output: {}; + }; + sdk: { + input: DeleteBucketTaggingCommandInput; + output: DeleteBucketTaggingCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketWebsiteCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketWebsiteCommand.d.ts new file mode 100644 index 00000000..2f7405ac --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteBucketWebsiteCommand.d.ts @@ -0,0 +1,46 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteBucketWebsiteRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteBucketWebsiteCommandInput + extends DeleteBucketWebsiteRequest {} +export interface DeleteBucketWebsiteCommandOutput extends __MetadataBearer {} +declare const DeleteBucketWebsiteCommand_base: { + new ( + input: DeleteBucketWebsiteCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketWebsiteCommandInput, + 
DeleteBucketWebsiteCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteBucketWebsiteCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBucketWebsiteCommandInput, + DeleteBucketWebsiteCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteBucketWebsiteCommand extends DeleteBucketWebsiteCommand_base { + protected static __types: { + api: { + input: DeleteBucketWebsiteRequest; + output: {}; + }; + sdk: { + input: DeleteBucketWebsiteCommandInput; + output: DeleteBucketWebsiteCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteObjectCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteObjectCommand.d.ts new file mode 100644 index 00000000..d10b190c --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteObjectCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteObjectOutput, DeleteObjectRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteObjectCommandInput extends DeleteObjectRequest {} +export interface DeleteObjectCommandOutput + extends DeleteObjectOutput, + __MetadataBearer {} +declare const DeleteObjectCommand_base: { + new ( + input: DeleteObjectCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteObjectCommandInput, + DeleteObjectCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteObjectCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteObjectCommandInput, + DeleteObjectCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteObjectCommand extends DeleteObjectCommand_base { + protected static __types: { + api: { + input: DeleteObjectRequest; + output: DeleteObjectOutput; + }; + sdk: { + input: DeleteObjectCommandInput; + output: DeleteObjectCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteObjectTaggingCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteObjectTaggingCommand.d.ts new file mode 100644 index 00000000..6c1905e3 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteObjectTaggingCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DeleteObjectTaggingOutput, + DeleteObjectTaggingRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteObjectTaggingCommandInput + extends DeleteObjectTaggingRequest {} +export interface DeleteObjectTaggingCommandOutput + extends DeleteObjectTaggingOutput, + __MetadataBearer {} +declare const DeleteObjectTaggingCommand_base: { + new ( + input: 
DeleteObjectTaggingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteObjectTaggingCommandInput, + DeleteObjectTaggingCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteObjectTaggingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteObjectTaggingCommandInput, + DeleteObjectTaggingCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteObjectTaggingCommand extends DeleteObjectTaggingCommand_base { + protected static __types: { + api: { + input: DeleteObjectTaggingRequest; + output: DeleteObjectTaggingOutput; + }; + sdk: { + input: DeleteObjectTaggingCommandInput; + output: DeleteObjectTaggingCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteObjectsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteObjectsCommand.d.ts new file mode 100644 index 00000000..6de1b152 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeleteObjectsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeleteObjectsOutput, DeleteObjectsRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteObjectsCommandInput extends DeleteObjectsRequest {} +export interface DeleteObjectsCommandOutput + extends DeleteObjectsOutput, + __MetadataBearer {} +declare const DeleteObjectsCommand_base: { + new ( + input: DeleteObjectsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteObjectsCommandInput, + DeleteObjectsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteObjectsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteObjectsCommandInput, + DeleteObjectsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteObjectsCommand extends DeleteObjectsCommand_base { + protected static __types: { + api: { + input: DeleteObjectsRequest; + output: DeleteObjectsOutput; + }; + sdk: { + input: DeleteObjectsCommandInput; + output: DeleteObjectsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeletePublicAccessBlockCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeletePublicAccessBlockCommand.d.ts new file mode 100644 index 00000000..e762f1c2 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/DeletePublicAccessBlockCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DeletePublicAccessBlockRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface DeletePublicAccessBlockCommandInput + extends DeletePublicAccessBlockRequest {} +export interface 
DeletePublicAccessBlockCommandOutput + extends __MetadataBearer {} +declare const DeletePublicAccessBlockCommand_base: { + new ( + input: DeletePublicAccessBlockCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeletePublicAccessBlockCommandInput, + DeletePublicAccessBlockCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeletePublicAccessBlockCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeletePublicAccessBlockCommandInput, + DeletePublicAccessBlockCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeletePublicAccessBlockCommand extends DeletePublicAccessBlockCommand_base { + protected static __types: { + api: { + input: DeletePublicAccessBlockRequest; + output: {}; + }; + sdk: { + input: DeletePublicAccessBlockCommandInput; + output: DeletePublicAccessBlockCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketAccelerateConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketAccelerateConfigurationCommand.d.ts new file mode 100644 index 00000000..f6de8548 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketAccelerateConfigurationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetBucketAccelerateConfigurationOutput, + GetBucketAccelerateConfigurationRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketAccelerateConfigurationCommandInput + extends GetBucketAccelerateConfigurationRequest {} +export interface GetBucketAccelerateConfigurationCommandOutput + extends GetBucketAccelerateConfigurationOutput, + __MetadataBearer {} +declare const GetBucketAccelerateConfigurationCommand_base: { + new ( + input: GetBucketAccelerateConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketAccelerateConfigurationCommandInput, + GetBucketAccelerateConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketAccelerateConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketAccelerateConfigurationCommandInput, + GetBucketAccelerateConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetBucketAccelerateConfigurationCommand extends GetBucketAccelerateConfigurationCommand_base { + protected static __types: { + api: { + input: GetBucketAccelerateConfigurationRequest; + output: GetBucketAccelerateConfigurationOutput; + }; + sdk: { + input: GetBucketAccelerateConfigurationCommandInput; + output: GetBucketAccelerateConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketAclCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketAclCommand.d.ts new file mode 100644 index 00000000..8984c279 --- /dev/null +++ 
b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketAclCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketAclOutput, GetBucketAclRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketAclCommandInput extends GetBucketAclRequest {} +export interface GetBucketAclCommandOutput + extends GetBucketAclOutput, + __MetadataBearer {} +declare const GetBucketAclCommand_base: { + new ( + input: GetBucketAclCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketAclCommandInput, + GetBucketAclCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketAclCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketAclCommandInput, + GetBucketAclCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetBucketAclCommand extends GetBucketAclCommand_base { + protected static __types: { + api: { + input: GetBucketAclRequest; + output: GetBucketAclOutput; + }; + sdk: { + input: GetBucketAclCommandInput; + output: GetBucketAclCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketAnalyticsConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketAnalyticsConfigurationCommand.d.ts new file mode 100644 index 00000000..68e9d9f1 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketAnalyticsConfigurationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetBucketAnalyticsConfigurationOutput, + GetBucketAnalyticsConfigurationRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketAnalyticsConfigurationCommandInput + extends GetBucketAnalyticsConfigurationRequest {} +export interface GetBucketAnalyticsConfigurationCommandOutput + extends GetBucketAnalyticsConfigurationOutput, + __MetadataBearer {} +declare const GetBucketAnalyticsConfigurationCommand_base: { + new ( + input: GetBucketAnalyticsConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketAnalyticsConfigurationCommandInput, + GetBucketAnalyticsConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketAnalyticsConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketAnalyticsConfigurationCommandInput, + GetBucketAnalyticsConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetBucketAnalyticsConfigurationCommand extends GetBucketAnalyticsConfigurationCommand_base { + protected static __types: { + api: { + input: GetBucketAnalyticsConfigurationRequest; + output: GetBucketAnalyticsConfigurationOutput; + 
}; + sdk: { + input: GetBucketAnalyticsConfigurationCommandInput; + output: GetBucketAnalyticsConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketCorsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketCorsCommand.d.ts new file mode 100644 index 00000000..c1877774 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketCorsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetBucketCorsOutput, GetBucketCorsRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketCorsCommandInput extends GetBucketCorsRequest {} +export interface GetBucketCorsCommandOutput + extends GetBucketCorsOutput, + __MetadataBearer {} +declare const GetBucketCorsCommand_base: { + new ( + input: GetBucketCorsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketCorsCommandInput, + GetBucketCorsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketCorsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketCorsCommandInput, + GetBucketCorsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetBucketCorsCommand extends GetBucketCorsCommand_base { + protected static __types: { + api: { + input: GetBucketCorsRequest; + output: GetBucketCorsOutput; + }; + sdk: { + input: GetBucketCorsCommandInput; + output: GetBucketCorsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketEncryptionCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketEncryptionCommand.d.ts new file mode 100644 index 00000000..41a8b812 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketEncryptionCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetBucketEncryptionOutput, + GetBucketEncryptionRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketEncryptionCommandInput + extends GetBucketEncryptionRequest {} +export interface GetBucketEncryptionCommandOutput + extends GetBucketEncryptionOutput, + __MetadataBearer {} +declare const GetBucketEncryptionCommand_base: { + new ( + input: GetBucketEncryptionCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketEncryptionCommandInput, + GetBucketEncryptionCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketEncryptionCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketEncryptionCommandInput, + GetBucketEncryptionCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class 
GetBucketEncryptionCommand extends GetBucketEncryptionCommand_base { + protected static __types: { + api: { + input: GetBucketEncryptionRequest; + output: GetBucketEncryptionOutput; + }; + sdk: { + input: GetBucketEncryptionCommandInput; + output: GetBucketEncryptionCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketIntelligentTieringConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketIntelligentTieringConfigurationCommand.d.ts new file mode 100644 index 00000000..34ffe225 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketIntelligentTieringConfigurationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetBucketIntelligentTieringConfigurationOutput, + GetBucketIntelligentTieringConfigurationRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketIntelligentTieringConfigurationCommandInput + extends GetBucketIntelligentTieringConfigurationRequest {} +export interface GetBucketIntelligentTieringConfigurationCommandOutput + extends GetBucketIntelligentTieringConfigurationOutput, + __MetadataBearer {} +declare const GetBucketIntelligentTieringConfigurationCommand_base: { + new ( + input: GetBucketIntelligentTieringConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketIntelligentTieringConfigurationCommandInput, + GetBucketIntelligentTieringConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketIntelligentTieringConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketIntelligentTieringConfigurationCommandInput, + GetBucketIntelligentTieringConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetBucketIntelligentTieringConfigurationCommand extends GetBucketIntelligentTieringConfigurationCommand_base { + protected static __types: { + api: { + input: GetBucketIntelligentTieringConfigurationRequest; + output: GetBucketIntelligentTieringConfigurationOutput; + }; + sdk: { + input: GetBucketIntelligentTieringConfigurationCommandInput; + output: GetBucketIntelligentTieringConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketInventoryConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketInventoryConfigurationCommand.d.ts new file mode 100644 index 00000000..718dea38 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketInventoryConfigurationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetBucketInventoryConfigurationOutput, + GetBucketInventoryConfigurationRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketInventoryConfigurationCommandInput + 
extends GetBucketInventoryConfigurationRequest {} +export interface GetBucketInventoryConfigurationCommandOutput + extends GetBucketInventoryConfigurationOutput, + __MetadataBearer {} +declare const GetBucketInventoryConfigurationCommand_base: { + new ( + input: GetBucketInventoryConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketInventoryConfigurationCommandInput, + GetBucketInventoryConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketInventoryConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketInventoryConfigurationCommandInput, + GetBucketInventoryConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetBucketInventoryConfigurationCommand extends GetBucketInventoryConfigurationCommand_base { + protected static __types: { + api: { + input: GetBucketInventoryConfigurationRequest; + output: GetBucketInventoryConfigurationOutput; + }; + sdk: { + input: GetBucketInventoryConfigurationCommandInput; + output: GetBucketInventoryConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketLifecycleConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketLifecycleConfigurationCommand.d.ts new file mode 100644 index 00000000..2af00f83 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketLifecycleConfigurationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetBucketLifecycleConfigurationOutput, + GetBucketLifecycleConfigurationRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketLifecycleConfigurationCommandInput + extends GetBucketLifecycleConfigurationRequest {} +export interface GetBucketLifecycleConfigurationCommandOutput + extends GetBucketLifecycleConfigurationOutput, + __MetadataBearer {} +declare const GetBucketLifecycleConfigurationCommand_base: { + new ( + input: GetBucketLifecycleConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketLifecycleConfigurationCommandInput, + GetBucketLifecycleConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketLifecycleConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketLifecycleConfigurationCommandInput, + GetBucketLifecycleConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetBucketLifecycleConfigurationCommand extends GetBucketLifecycleConfigurationCommand_base { + protected static __types: { + api: { + input: GetBucketLifecycleConfigurationRequest; + output: GetBucketLifecycleConfigurationOutput; + }; + sdk: { + input: GetBucketLifecycleConfigurationCommandInput; + output: GetBucketLifecycleConfigurationCommandOutput; + }; + }; +} diff --git 
a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketLocationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketLocationCommand.d.ts new file mode 100644 index 00000000..8466fe16 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketLocationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetBucketLocationOutput, + GetBucketLocationRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketLocationCommandInput + extends GetBucketLocationRequest {} +export interface GetBucketLocationCommandOutput + extends GetBucketLocationOutput, + __MetadataBearer {} +declare const GetBucketLocationCommand_base: { + new ( + input: GetBucketLocationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketLocationCommandInput, + GetBucketLocationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketLocationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketLocationCommandInput, + GetBucketLocationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetBucketLocationCommand extends GetBucketLocationCommand_base { + protected static __types: { + api: { + input: GetBucketLocationRequest; + output: GetBucketLocationOutput; + }; + sdk: { + input: GetBucketLocationCommandInput; + output: GetBucketLocationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketLoggingCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketLoggingCommand.d.ts new file mode 100644 index 00000000..cb89ef9d --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketLoggingCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetBucketLoggingOutput, + GetBucketLoggingRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketLoggingCommandInput extends GetBucketLoggingRequest {} +export interface GetBucketLoggingCommandOutput + extends GetBucketLoggingOutput, + __MetadataBearer {} +declare const GetBucketLoggingCommand_base: { + new ( + input: GetBucketLoggingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketLoggingCommandInput, + GetBucketLoggingCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketLoggingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketLoggingCommandInput, + GetBucketLoggingCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetBucketLoggingCommand extends GetBucketLoggingCommand_base { + protected static __types: { + api: { + input: 
GetBucketLoggingRequest; + output: GetBucketLoggingOutput; + }; + sdk: { + input: GetBucketLoggingCommandInput; + output: GetBucketLoggingCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketMetadataTableConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketMetadataTableConfigurationCommand.d.ts new file mode 100644 index 00000000..b94b89cb --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketMetadataTableConfigurationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetBucketMetadataTableConfigurationOutput, + GetBucketMetadataTableConfigurationRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketMetadataTableConfigurationCommandInput + extends GetBucketMetadataTableConfigurationRequest {} +export interface GetBucketMetadataTableConfigurationCommandOutput + extends GetBucketMetadataTableConfigurationOutput, + __MetadataBearer {} +declare const GetBucketMetadataTableConfigurationCommand_base: { + new ( + input: GetBucketMetadataTableConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketMetadataTableConfigurationCommandInput, + GetBucketMetadataTableConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketMetadataTableConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketMetadataTableConfigurationCommandInput, + GetBucketMetadataTableConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetBucketMetadataTableConfigurationCommand extends GetBucketMetadataTableConfigurationCommand_base { + protected static __types: { + api: { + input: GetBucketMetadataTableConfigurationRequest; + output: GetBucketMetadataTableConfigurationOutput; + }; + sdk: { + input: GetBucketMetadataTableConfigurationCommandInput; + output: GetBucketMetadataTableConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketMetricsConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketMetricsConfigurationCommand.d.ts new file mode 100644 index 00000000..f6ffc9a3 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketMetricsConfigurationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetBucketMetricsConfigurationOutput, + GetBucketMetricsConfigurationRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketMetricsConfigurationCommandInput + extends GetBucketMetricsConfigurationRequest {} +export interface GetBucketMetricsConfigurationCommandOutput + extends GetBucketMetricsConfigurationOutput, + __MetadataBearer {} +declare const GetBucketMetricsConfigurationCommand_base: { + new ( + input: 
GetBucketMetricsConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketMetricsConfigurationCommandInput, + GetBucketMetricsConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketMetricsConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketMetricsConfigurationCommandInput, + GetBucketMetricsConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetBucketMetricsConfigurationCommand extends GetBucketMetricsConfigurationCommand_base { + protected static __types: { + api: { + input: GetBucketMetricsConfigurationRequest; + output: GetBucketMetricsConfigurationOutput; + }; + sdk: { + input: GetBucketMetricsConfigurationCommandInput; + output: GetBucketMetricsConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketNotificationConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketNotificationConfigurationCommand.d.ts new file mode 100644 index 00000000..11a45e95 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketNotificationConfigurationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetBucketNotificationConfigurationRequest, + NotificationConfiguration, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketNotificationConfigurationCommandInput + extends GetBucketNotificationConfigurationRequest {} +export interface GetBucketNotificationConfigurationCommandOutput + extends NotificationConfiguration, + __MetadataBearer {} +declare const GetBucketNotificationConfigurationCommand_base: { + new ( + input: GetBucketNotificationConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketNotificationConfigurationCommandInput, + GetBucketNotificationConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketNotificationConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketNotificationConfigurationCommandInput, + GetBucketNotificationConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetBucketNotificationConfigurationCommand extends GetBucketNotificationConfigurationCommand_base { + protected static __types: { + api: { + input: GetBucketNotificationConfigurationRequest; + output: NotificationConfiguration; + }; + sdk: { + input: GetBucketNotificationConfigurationCommandInput; + output: GetBucketNotificationConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketOwnershipControlsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketOwnershipControlsCommand.d.ts new file mode 100644 index 00000000..ff556b97 --- /dev/null +++ 
b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketOwnershipControlsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetBucketOwnershipControlsOutput, + GetBucketOwnershipControlsRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketOwnershipControlsCommandInput + extends GetBucketOwnershipControlsRequest {} +export interface GetBucketOwnershipControlsCommandOutput + extends GetBucketOwnershipControlsOutput, + __MetadataBearer {} +declare const GetBucketOwnershipControlsCommand_base: { + new ( + input: GetBucketOwnershipControlsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketOwnershipControlsCommandInput, + GetBucketOwnershipControlsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketOwnershipControlsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketOwnershipControlsCommandInput, + GetBucketOwnershipControlsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetBucketOwnershipControlsCommand extends GetBucketOwnershipControlsCommand_base { + protected static __types: { + api: { + input: GetBucketOwnershipControlsRequest; + output: GetBucketOwnershipControlsOutput; + }; + sdk: { + input: GetBucketOwnershipControlsCommandInput; + output: GetBucketOwnershipControlsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketPolicyCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketPolicyCommand.d.ts new file mode 100644 index 00000000..d8a1e9db --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketPolicyCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetBucketPolicyOutput, + GetBucketPolicyRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketPolicyCommandInput extends GetBucketPolicyRequest {} +export interface GetBucketPolicyCommandOutput + extends GetBucketPolicyOutput, + __MetadataBearer {} +declare const GetBucketPolicyCommand_base: { + new ( + input: GetBucketPolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketPolicyCommandInput, + GetBucketPolicyCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketPolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketPolicyCommandInput, + GetBucketPolicyCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetBucketPolicyCommand extends GetBucketPolicyCommand_base { + protected static __types: { + api: { + input: GetBucketPolicyRequest; + output: GetBucketPolicyOutput; + }; + sdk: { + input: 
GetBucketPolicyCommandInput; + output: GetBucketPolicyCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketPolicyStatusCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketPolicyStatusCommand.d.ts new file mode 100644 index 00000000..be8a477e --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketPolicyStatusCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetBucketPolicyStatusOutput, + GetBucketPolicyStatusRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketPolicyStatusCommandInput + extends GetBucketPolicyStatusRequest {} +export interface GetBucketPolicyStatusCommandOutput + extends GetBucketPolicyStatusOutput, + __MetadataBearer {} +declare const GetBucketPolicyStatusCommand_base: { + new ( + input: GetBucketPolicyStatusCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketPolicyStatusCommandInput, + GetBucketPolicyStatusCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketPolicyStatusCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketPolicyStatusCommandInput, + GetBucketPolicyStatusCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetBucketPolicyStatusCommand extends GetBucketPolicyStatusCommand_base { + protected static __types: { + api: { + input: GetBucketPolicyStatusRequest; + output: GetBucketPolicyStatusOutput; + }; + sdk: { + input: GetBucketPolicyStatusCommandInput; + output: GetBucketPolicyStatusCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketReplicationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketReplicationCommand.d.ts new file mode 100644 index 00000000..f1b5a962 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketReplicationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetBucketReplicationOutput, + GetBucketReplicationRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketReplicationCommandInput + extends GetBucketReplicationRequest {} +export interface GetBucketReplicationCommandOutput + extends GetBucketReplicationOutput, + __MetadataBearer {} +declare const GetBucketReplicationCommand_base: { + new ( + input: GetBucketReplicationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketReplicationCommandInput, + GetBucketReplicationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketReplicationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketReplicationCommandInput, + GetBucketReplicationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + 
getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetBucketReplicationCommand extends GetBucketReplicationCommand_base { + protected static __types: { + api: { + input: GetBucketReplicationRequest; + output: GetBucketReplicationOutput; + }; + sdk: { + input: GetBucketReplicationCommandInput; + output: GetBucketReplicationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketRequestPaymentCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketRequestPaymentCommand.d.ts new file mode 100644 index 00000000..326ef922 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketRequestPaymentCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetBucketRequestPaymentOutput, + GetBucketRequestPaymentRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketRequestPaymentCommandInput + extends GetBucketRequestPaymentRequest {} +export interface GetBucketRequestPaymentCommandOutput + extends GetBucketRequestPaymentOutput, + __MetadataBearer {} +declare const GetBucketRequestPaymentCommand_base: { + new ( + input: GetBucketRequestPaymentCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketRequestPaymentCommandInput, + GetBucketRequestPaymentCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketRequestPaymentCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketRequestPaymentCommandInput, + GetBucketRequestPaymentCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetBucketRequestPaymentCommand extends GetBucketRequestPaymentCommand_base { + protected static __types: { + api: { + input: GetBucketRequestPaymentRequest; + output: GetBucketRequestPaymentOutput; + }; + sdk: { + input: GetBucketRequestPaymentCommandInput; + output: GetBucketRequestPaymentCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketTaggingCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketTaggingCommand.d.ts new file mode 100644 index 00000000..a6511dec --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketTaggingCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetBucketTaggingOutput, + GetBucketTaggingRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketTaggingCommandInput extends GetBucketTaggingRequest {} +export interface GetBucketTaggingCommandOutput + extends GetBucketTaggingOutput, + __MetadataBearer {} +declare const GetBucketTaggingCommand_base: { + new ( + input: GetBucketTaggingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketTaggingCommandInput, + 
GetBucketTaggingCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketTaggingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketTaggingCommandInput, + GetBucketTaggingCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetBucketTaggingCommand extends GetBucketTaggingCommand_base { + protected static __types: { + api: { + input: GetBucketTaggingRequest; + output: GetBucketTaggingOutput; + }; + sdk: { + input: GetBucketTaggingCommandInput; + output: GetBucketTaggingCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketVersioningCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketVersioningCommand.d.ts new file mode 100644 index 00000000..2fe6aca1 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketVersioningCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetBucketVersioningOutput, + GetBucketVersioningRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketVersioningCommandInput + extends GetBucketVersioningRequest {} +export interface GetBucketVersioningCommandOutput + extends GetBucketVersioningOutput, + __MetadataBearer {} +declare const GetBucketVersioningCommand_base: { + new ( + input: GetBucketVersioningCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketVersioningCommandInput, + GetBucketVersioningCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketVersioningCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketVersioningCommandInput, + GetBucketVersioningCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetBucketVersioningCommand extends GetBucketVersioningCommand_base { + protected static __types: { + api: { + input: GetBucketVersioningRequest; + output: GetBucketVersioningOutput; + }; + sdk: { + input: GetBucketVersioningCommandInput; + output: GetBucketVersioningCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketWebsiteCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketWebsiteCommand.d.ts new file mode 100644 index 00000000..f632f942 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetBucketWebsiteCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetBucketWebsiteOutput, + GetBucketWebsiteRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetBucketWebsiteCommandInput extends GetBucketWebsiteRequest {} +export interface GetBucketWebsiteCommandOutput + extends 
GetBucketWebsiteOutput, + __MetadataBearer {} +declare const GetBucketWebsiteCommand_base: { + new ( + input: GetBucketWebsiteCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketWebsiteCommandInput, + GetBucketWebsiteCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetBucketWebsiteCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetBucketWebsiteCommandInput, + GetBucketWebsiteCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetBucketWebsiteCommand extends GetBucketWebsiteCommand_base { + protected static __types: { + api: { + input: GetBucketWebsiteRequest; + output: GetBucketWebsiteOutput; + }; + sdk: { + input: GetBucketWebsiteCommandInput; + output: GetBucketWebsiteCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectAclCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectAclCommand.d.ts new file mode 100644 index 00000000..877f47ea --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectAclCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetObjectAclOutput, GetObjectAclRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetObjectAclCommandInput extends GetObjectAclRequest {} +export interface GetObjectAclCommandOutput + extends GetObjectAclOutput, + __MetadataBearer {} +declare const GetObjectAclCommand_base: { + new ( + input: GetObjectAclCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetObjectAclCommandInput, + GetObjectAclCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetObjectAclCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetObjectAclCommandInput, + GetObjectAclCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetObjectAclCommand extends GetObjectAclCommand_base { + protected static __types: { + api: { + input: GetObjectAclRequest; + output: GetObjectAclOutput; + }; + sdk: { + input: GetObjectAclCommandInput; + output: GetObjectAclCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectAttributesCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectAttributesCommand.d.ts new file mode 100644 index 00000000..9cffc41d --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectAttributesCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetObjectAttributesOutput, + GetObjectAttributesRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetObjectAttributesCommandInput + extends 
GetObjectAttributesRequest {} +export interface GetObjectAttributesCommandOutput + extends GetObjectAttributesOutput, + __MetadataBearer {} +declare const GetObjectAttributesCommand_base: { + new ( + input: GetObjectAttributesCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetObjectAttributesCommandInput, + GetObjectAttributesCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetObjectAttributesCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetObjectAttributesCommandInput, + GetObjectAttributesCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetObjectAttributesCommand extends GetObjectAttributesCommand_base { + protected static __types: { + api: { + input: GetObjectAttributesRequest; + output: GetObjectAttributesOutput; + }; + sdk: { + input: GetObjectAttributesCommandInput; + output: GetObjectAttributesCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectCommand.d.ts new file mode 100644 index 00000000..4db12f0c --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectCommand.d.ts @@ -0,0 +1,52 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { + MetadataBearer as __MetadataBearer, + StreamingBlobPayloadOutputTypes, +} from "@smithy/types"; +import { GetObjectOutput, GetObjectRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetObjectCommandInput extends GetObjectRequest {} +export interface GetObjectCommandOutput + extends Pick<GetObjectOutput, Exclude<keyof GetObjectOutput, "Body">>, + __MetadataBearer { + Body?: StreamingBlobPayloadOutputTypes; +} +declare const GetObjectCommand_base: { + new ( + input: GetObjectCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetObjectCommandInput, + GetObjectCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetObjectCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetObjectCommandInput, + GetObjectCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetObjectCommand extends GetObjectCommand_base { + protected static __types: { + api: { + input: GetObjectRequest; + output: GetObjectOutput; + }; + sdk: { + input: GetObjectCommandInput; + output: GetObjectCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectLegalHoldCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectLegalHoldCommand.d.ts new file mode 100644 index 00000000..325627df --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectLegalHoldCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetObjectLegalHoldOutput, + GetObjectLegalHoldRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from
"../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetObjectLegalHoldCommandInput + extends GetObjectLegalHoldRequest {} +export interface GetObjectLegalHoldCommandOutput + extends GetObjectLegalHoldOutput, + __MetadataBearer {} +declare const GetObjectLegalHoldCommand_base: { + new ( + input: GetObjectLegalHoldCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetObjectLegalHoldCommandInput, + GetObjectLegalHoldCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetObjectLegalHoldCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetObjectLegalHoldCommandInput, + GetObjectLegalHoldCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetObjectLegalHoldCommand extends GetObjectLegalHoldCommand_base { + protected static __types: { + api: { + input: GetObjectLegalHoldRequest; + output: GetObjectLegalHoldOutput; + }; + sdk: { + input: GetObjectLegalHoldCommandInput; + output: GetObjectLegalHoldCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectLockConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectLockConfigurationCommand.d.ts new file mode 100644 index 00000000..b0b91e46 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectLockConfigurationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetObjectLockConfigurationOutput, + GetObjectLockConfigurationRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetObjectLockConfigurationCommandInput + extends GetObjectLockConfigurationRequest {} +export interface GetObjectLockConfigurationCommandOutput + extends GetObjectLockConfigurationOutput, + __MetadataBearer {} +declare const GetObjectLockConfigurationCommand_base: { + new ( + input: GetObjectLockConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetObjectLockConfigurationCommandInput, + GetObjectLockConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetObjectLockConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetObjectLockConfigurationCommandInput, + GetObjectLockConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetObjectLockConfigurationCommand extends GetObjectLockConfigurationCommand_base { + protected static __types: { + api: { + input: GetObjectLockConfigurationRequest; + output: GetObjectLockConfigurationOutput; + }; + sdk: { + input: GetObjectLockConfigurationCommandInput; + output: GetObjectLockConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectRetentionCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectRetentionCommand.d.ts new file mode 100644 index 00000000..613160c9 --- /dev/null +++ 
b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectRetentionCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetObjectRetentionOutput, + GetObjectRetentionRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetObjectRetentionCommandInput + extends GetObjectRetentionRequest {} +export interface GetObjectRetentionCommandOutput + extends GetObjectRetentionOutput, + __MetadataBearer {} +declare const GetObjectRetentionCommand_base: { + new ( + input: GetObjectRetentionCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetObjectRetentionCommandInput, + GetObjectRetentionCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetObjectRetentionCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetObjectRetentionCommandInput, + GetObjectRetentionCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetObjectRetentionCommand extends GetObjectRetentionCommand_base { + protected static __types: { + api: { + input: GetObjectRetentionRequest; + output: GetObjectRetentionOutput; + }; + sdk: { + input: GetObjectRetentionCommandInput; + output: GetObjectRetentionCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectTaggingCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectTaggingCommand.d.ts new file mode 100644 index 00000000..245432e3 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectTaggingCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetObjectTaggingOutput, + GetObjectTaggingRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetObjectTaggingCommandInput extends GetObjectTaggingRequest {} +export interface GetObjectTaggingCommandOutput + extends GetObjectTaggingOutput, + __MetadataBearer {} +declare const GetObjectTaggingCommand_base: { + new ( + input: GetObjectTaggingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetObjectTaggingCommandInput, + GetObjectTaggingCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetObjectTaggingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetObjectTaggingCommandInput, + GetObjectTaggingCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetObjectTaggingCommand extends GetObjectTaggingCommand_base { + protected static __types: { + api: { + input: GetObjectTaggingRequest; + output: GetObjectTaggingOutput; + }; + sdk: { + input: GetObjectTaggingCommandInput; + output: GetObjectTaggingCommandOutput; + }; + }; +} diff --git 
a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectTorrentCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectTorrentCommand.d.ts new file mode 100644 index 00000000..4deaba38 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetObjectTorrentCommand.d.ts @@ -0,0 +1,58 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { + MetadataBearer as __MetadataBearer, + StreamingBlobPayloadOutputTypes, +} from "@smithy/types"; +import { + GetObjectTorrentOutput, + GetObjectTorrentRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetObjectTorrentCommandInput extends GetObjectTorrentRequest {} +export interface GetObjectTorrentCommandOutput + extends Pick< + GetObjectTorrentOutput, + Exclude<keyof GetObjectTorrentOutput, "Body"> + >, + __MetadataBearer { + Body?: StreamingBlobPayloadOutputTypes; +} +declare const GetObjectTorrentCommand_base: { + new ( + input: GetObjectTorrentCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetObjectTorrentCommandInput, + GetObjectTorrentCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetObjectTorrentCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetObjectTorrentCommandInput, + GetObjectTorrentCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetObjectTorrentCommand extends GetObjectTorrentCommand_base { + protected static __types: { + api: { + input: GetObjectTorrentRequest; + output: GetObjectTorrentOutput; + }; + sdk: { + input: GetObjectTorrentCommandInput; + output: GetObjectTorrentCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetPublicAccessBlockCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetPublicAccessBlockCommand.d.ts new file mode 100644 index 00000000..45eafb8a --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/GetPublicAccessBlockCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetPublicAccessBlockOutput, + GetPublicAccessBlockRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface GetPublicAccessBlockCommandInput + extends GetPublicAccessBlockRequest {} +export interface GetPublicAccessBlockCommandOutput + extends GetPublicAccessBlockOutput, + __MetadataBearer {} +declare const GetPublicAccessBlockCommand_base: { + new ( + input: GetPublicAccessBlockCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetPublicAccessBlockCommandInput, + GetPublicAccessBlockCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetPublicAccessBlockCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetPublicAccessBlockCommandInput, + GetPublicAccessBlockCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions;
+}; +export declare class GetPublicAccessBlockCommand extends GetPublicAccessBlockCommand_base { + protected static __types: { + api: { + input: GetPublicAccessBlockRequest; + output: GetPublicAccessBlockOutput; + }; + sdk: { + input: GetPublicAccessBlockCommandInput; + output: GetPublicAccessBlockCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/HeadBucketCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/HeadBucketCommand.d.ts new file mode 100644 index 00000000..ebdef919 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/HeadBucketCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { HeadBucketOutput, HeadBucketRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface HeadBucketCommandInput extends HeadBucketRequest {} +export interface HeadBucketCommandOutput + extends HeadBucketOutput, + __MetadataBearer {} +declare const HeadBucketCommand_base: { + new ( + input: HeadBucketCommandInput + ): import("@smithy/smithy-client").CommandImpl< + HeadBucketCommandInput, + HeadBucketCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: HeadBucketCommandInput + ): import("@smithy/smithy-client").CommandImpl< + HeadBucketCommandInput, + HeadBucketCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class HeadBucketCommand extends HeadBucketCommand_base { + protected static __types: { + api: { + input: HeadBucketRequest; + output: HeadBucketOutput; + }; + sdk: { + input: HeadBucketCommandInput; + output: HeadBucketCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/HeadObjectCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/HeadObjectCommand.d.ts new file mode 100644 index 00000000..3158fcb3 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/HeadObjectCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { HeadObjectOutput, HeadObjectRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface HeadObjectCommandInput extends HeadObjectRequest {} +export interface HeadObjectCommandOutput + extends HeadObjectOutput, + __MetadataBearer {} +declare const HeadObjectCommand_base: { + new ( + input: HeadObjectCommandInput + ): import("@smithy/smithy-client").CommandImpl< + HeadObjectCommandInput, + HeadObjectCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: HeadObjectCommandInput + ): import("@smithy/smithy-client").CommandImpl< + HeadObjectCommandInput, + HeadObjectCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class HeadObjectCommand extends HeadObjectCommand_base 
{ + protected static __types: { + api: { + input: HeadObjectRequest; + output: HeadObjectOutput; + }; + sdk: { + input: HeadObjectCommandInput; + output: HeadObjectCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListBucketAnalyticsConfigurationsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListBucketAnalyticsConfigurationsCommand.d.ts new file mode 100644 index 00000000..7669f781 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListBucketAnalyticsConfigurationsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + ListBucketAnalyticsConfigurationsOutput, + ListBucketAnalyticsConfigurationsRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface ListBucketAnalyticsConfigurationsCommandInput + extends ListBucketAnalyticsConfigurationsRequest {} +export interface ListBucketAnalyticsConfigurationsCommandOutput + extends ListBucketAnalyticsConfigurationsOutput, + __MetadataBearer {} +declare const ListBucketAnalyticsConfigurationsCommand_base: { + new ( + input: ListBucketAnalyticsConfigurationsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListBucketAnalyticsConfigurationsCommandInput, + ListBucketAnalyticsConfigurationsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ListBucketAnalyticsConfigurationsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListBucketAnalyticsConfigurationsCommandInput, + ListBucketAnalyticsConfigurationsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListBucketAnalyticsConfigurationsCommand extends ListBucketAnalyticsConfigurationsCommand_base { + protected static __types: { + api: { + input: ListBucketAnalyticsConfigurationsRequest; + output: ListBucketAnalyticsConfigurationsOutput; + }; + sdk: { + input: ListBucketAnalyticsConfigurationsCommandInput; + output: ListBucketAnalyticsConfigurationsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListBucketIntelligentTieringConfigurationsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListBucketIntelligentTieringConfigurationsCommand.d.ts new file mode 100644 index 00000000..0034ec8d --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListBucketIntelligentTieringConfigurationsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + ListBucketIntelligentTieringConfigurationsOutput, + ListBucketIntelligentTieringConfigurationsRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface ListBucketIntelligentTieringConfigurationsCommandInput + extends ListBucketIntelligentTieringConfigurationsRequest {} +export interface ListBucketIntelligentTieringConfigurationsCommandOutput + extends ListBucketIntelligentTieringConfigurationsOutput, + 
__MetadataBearer {} +declare const ListBucketIntelligentTieringConfigurationsCommand_base: { + new ( + input: ListBucketIntelligentTieringConfigurationsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListBucketIntelligentTieringConfigurationsCommandInput, + ListBucketIntelligentTieringConfigurationsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ListBucketIntelligentTieringConfigurationsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListBucketIntelligentTieringConfigurationsCommandInput, + ListBucketIntelligentTieringConfigurationsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListBucketIntelligentTieringConfigurationsCommand extends ListBucketIntelligentTieringConfigurationsCommand_base { + protected static __types: { + api: { + input: ListBucketIntelligentTieringConfigurationsRequest; + output: ListBucketIntelligentTieringConfigurationsOutput; + }; + sdk: { + input: ListBucketIntelligentTieringConfigurationsCommandInput; + output: ListBucketIntelligentTieringConfigurationsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListBucketInventoryConfigurationsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListBucketInventoryConfigurationsCommand.d.ts new file mode 100644 index 00000000..6f9840a1 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListBucketInventoryConfigurationsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + ListBucketInventoryConfigurationsOutput, + ListBucketInventoryConfigurationsRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface ListBucketInventoryConfigurationsCommandInput + extends ListBucketInventoryConfigurationsRequest {} +export interface ListBucketInventoryConfigurationsCommandOutput + extends ListBucketInventoryConfigurationsOutput, + __MetadataBearer {} +declare const ListBucketInventoryConfigurationsCommand_base: { + new ( + input: ListBucketInventoryConfigurationsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListBucketInventoryConfigurationsCommandInput, + ListBucketInventoryConfigurationsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ListBucketInventoryConfigurationsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListBucketInventoryConfigurationsCommandInput, + ListBucketInventoryConfigurationsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListBucketInventoryConfigurationsCommand extends ListBucketInventoryConfigurationsCommand_base { + protected static __types: { + api: { + input: ListBucketInventoryConfigurationsRequest; + output: ListBucketInventoryConfigurationsOutput; + }; + sdk: { + input: ListBucketInventoryConfigurationsCommandInput; + output: ListBucketInventoryConfigurationsCommandOutput; + }; + }; +} diff --git 
a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListBucketMetricsConfigurationsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListBucketMetricsConfigurationsCommand.d.ts new file mode 100644 index 00000000..2166dc75 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListBucketMetricsConfigurationsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + ListBucketMetricsConfigurationsOutput, + ListBucketMetricsConfigurationsRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface ListBucketMetricsConfigurationsCommandInput + extends ListBucketMetricsConfigurationsRequest {} +export interface ListBucketMetricsConfigurationsCommandOutput + extends ListBucketMetricsConfigurationsOutput, + __MetadataBearer {} +declare const ListBucketMetricsConfigurationsCommand_base: { + new ( + input: ListBucketMetricsConfigurationsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListBucketMetricsConfigurationsCommandInput, + ListBucketMetricsConfigurationsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ListBucketMetricsConfigurationsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListBucketMetricsConfigurationsCommandInput, + ListBucketMetricsConfigurationsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListBucketMetricsConfigurationsCommand extends ListBucketMetricsConfigurationsCommand_base { + protected static __types: { + api: { + input: ListBucketMetricsConfigurationsRequest; + output: ListBucketMetricsConfigurationsOutput; + }; + sdk: { + input: ListBucketMetricsConfigurationsCommandInput; + output: ListBucketMetricsConfigurationsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListBucketsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListBucketsCommand.d.ts new file mode 100644 index 00000000..ab0696ee --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListBucketsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListBucketsOutput, ListBucketsRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface ListBucketsCommandInput extends ListBucketsRequest {} +export interface ListBucketsCommandOutput + extends ListBucketsOutput, + __MetadataBearer {} +declare const ListBucketsCommand_base: { + new ( + input: ListBucketsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListBucketsCommandInput, + ListBucketsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [ListBucketsCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + ListBucketsCommandInput, + ListBucketsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + 
getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListBucketsCommand extends ListBucketsCommand_base { + protected static __types: { + api: { + input: ListBucketsRequest; + output: ListBucketsOutput; + }; + sdk: { + input: ListBucketsCommandInput; + output: ListBucketsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListDirectoryBucketsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListDirectoryBucketsCommand.d.ts new file mode 100644 index 00000000..0b0103c7 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListDirectoryBucketsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + ListDirectoryBucketsOutput, + ListDirectoryBucketsRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface ListDirectoryBucketsCommandInput + extends ListDirectoryBucketsRequest {} +export interface ListDirectoryBucketsCommandOutput + extends ListDirectoryBucketsOutput, + __MetadataBearer {} +declare const ListDirectoryBucketsCommand_base: { + new ( + input: ListDirectoryBucketsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListDirectoryBucketsCommandInput, + ListDirectoryBucketsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [ListDirectoryBucketsCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + ListDirectoryBucketsCommandInput, + ListDirectoryBucketsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListDirectoryBucketsCommand extends ListDirectoryBucketsCommand_base { + protected static __types: { + api: { + input: ListDirectoryBucketsRequest; + output: ListDirectoryBucketsOutput; + }; + sdk: { + input: ListDirectoryBucketsCommandInput; + output: ListDirectoryBucketsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListMultipartUploadsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListMultipartUploadsCommand.d.ts new file mode 100644 index 00000000..58439e63 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListMultipartUploadsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + ListMultipartUploadsOutput, + ListMultipartUploadsRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface ListMultipartUploadsCommandInput + extends ListMultipartUploadsRequest {} +export interface ListMultipartUploadsCommandOutput + extends ListMultipartUploadsOutput, + __MetadataBearer {} +declare const ListMultipartUploadsCommand_base: { + new ( + input: ListMultipartUploadsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListMultipartUploadsCommandInput, + ListMultipartUploadsCommandOutput, + S3ClientResolvedConfig, + 
ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ListMultipartUploadsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListMultipartUploadsCommandInput, + ListMultipartUploadsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListMultipartUploadsCommand extends ListMultipartUploadsCommand_base { + protected static __types: { + api: { + input: ListMultipartUploadsRequest; + output: ListMultipartUploadsOutput; + }; + sdk: { + input: ListMultipartUploadsCommandInput; + output: ListMultipartUploadsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListObjectVersionsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListObjectVersionsCommand.d.ts new file mode 100644 index 00000000..fd67b617 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListObjectVersionsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + ListObjectVersionsOutput, + ListObjectVersionsRequest, +} from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface ListObjectVersionsCommandInput + extends ListObjectVersionsRequest {} +export interface ListObjectVersionsCommandOutput + extends ListObjectVersionsOutput, + __MetadataBearer {} +declare const ListObjectVersionsCommand_base: { + new ( + input: ListObjectVersionsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListObjectVersionsCommandInput, + ListObjectVersionsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ListObjectVersionsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListObjectVersionsCommandInput, + ListObjectVersionsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListObjectVersionsCommand extends ListObjectVersionsCommand_base { + protected static __types: { + api: { + input: ListObjectVersionsRequest; + output: ListObjectVersionsOutput; + }; + sdk: { + input: ListObjectVersionsCommandInput; + output: ListObjectVersionsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListObjectsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListObjectsCommand.d.ts new file mode 100644 index 00000000..ba186092 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListObjectsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListObjectsOutput, ListObjectsRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface ListObjectsCommandInput extends ListObjectsRequest {} +export interface ListObjectsCommandOutput + extends ListObjectsOutput, + __MetadataBearer {} +declare const ListObjectsCommand_base: { + new ( + input: 
ListObjectsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListObjectsCommandInput, + ListObjectsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ListObjectsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListObjectsCommandInput, + ListObjectsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListObjectsCommand extends ListObjectsCommand_base { + protected static __types: { + api: { + input: ListObjectsRequest; + output: ListObjectsOutput; + }; + sdk: { + input: ListObjectsCommandInput; + output: ListObjectsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListObjectsV2Command.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListObjectsV2Command.d.ts new file mode 100644 index 00000000..02261c9a --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListObjectsV2Command.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListObjectsV2Output, ListObjectsV2Request } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface ListObjectsV2CommandInput extends ListObjectsV2Request {} +export interface ListObjectsV2CommandOutput + extends ListObjectsV2Output, + __MetadataBearer {} +declare const ListObjectsV2Command_base: { + new ( + input: ListObjectsV2CommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListObjectsV2CommandInput, + ListObjectsV2CommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ListObjectsV2CommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListObjectsV2CommandInput, + ListObjectsV2CommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListObjectsV2Command extends ListObjectsV2Command_base { + protected static __types: { + api: { + input: ListObjectsV2Request; + output: ListObjectsV2Output; + }; + sdk: { + input: ListObjectsV2CommandInput; + output: ListObjectsV2CommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListPartsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListPartsCommand.d.ts new file mode 100644 index 00000000..ff3cebfc --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/ListPartsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListPartsOutput, ListPartsRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface ListPartsCommandInput extends ListPartsRequest {} +export interface ListPartsCommandOutput + extends ListPartsOutput, + __MetadataBearer {} +declare const ListPartsCommand_base: { + new ( + input: ListPartsCommandInput + ): 
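[For orientation, a minimal sketch of driving the ListObjectsV2Command declared above from application code — the listing counterpart of the storage routes this PR introduces. This is illustrative only, not part of the vendored typings or of this repo: the MinIO endpoint, the MINIO_* environment variable names, and the bucket/prefix values are placeholder assumptions.]

import { S3Client, ListObjectsV2Command } from "@aws-sdk/client-s3";

// Hypothetical MinIO-backed client; every value here is a placeholder.
const s3 = new S3Client({
  region: "us-east-1", // MinIO ignores the region, but the SDK requires one
  endpoint: process.env.MINIO_ENDPOINT, // e.g. "http://localhost:9000" (assumed env var)
  forcePathStyle: true, // MinIO serves buckets path-style, not as virtual-host subdomains
  credentials: {
    accessKeyId: process.env.MINIO_ACCESS_KEY ?? "", // assumed env var names
    secretAccessKey: process.env.MINIO_SECRET_KEY ?? "",
  },
});

// Collect every key under a prefix; each page holds at most 1000 objects,
// and NextContinuationToken feeds the next request until it comes back empty.
async function listAllKeys(bucket: string, prefix: string): Promise<string[]> {
  const keys: string[] = [];
  let ContinuationToken: string | undefined;
  do {
    const page = await s3.send(
      new ListObjectsV2Command({ Bucket: bucket, Prefix: prefix, ContinuationToken })
    );
    for (const obj of page.Contents ?? []) {
      if (obj.Key) keys.push(obj.Key);
    }
    ContinuationToken = page.NextContinuationToken;
  } while (ContinuationToken);
  return keys;
}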
import("@smithy/smithy-client").CommandImpl< + ListPartsCommandInput, + ListPartsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ListPartsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListPartsCommandInput, + ListPartsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListPartsCommand extends ListPartsCommand_base { + protected static __types: { + api: { + input: ListPartsRequest; + output: ListPartsOutput; + }; + sdk: { + input: ListPartsCommandInput; + output: ListPartsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketAccelerateConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketAccelerateConfigurationCommand.d.ts new file mode 100644 index 00000000..d2c09a4b --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketAccelerateConfigurationCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketAccelerateConfigurationRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutBucketAccelerateConfigurationCommandInput + extends PutBucketAccelerateConfigurationRequest {} +export interface PutBucketAccelerateConfigurationCommandOutput + extends __MetadataBearer {} +declare const PutBucketAccelerateConfigurationCommand_base: { + new ( + input: PutBucketAccelerateConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketAccelerateConfigurationCommandInput, + PutBucketAccelerateConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutBucketAccelerateConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketAccelerateConfigurationCommandInput, + PutBucketAccelerateConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutBucketAccelerateConfigurationCommand extends PutBucketAccelerateConfigurationCommand_base { + protected static __types: { + api: { + input: PutBucketAccelerateConfigurationRequest; + output: {}; + }; + sdk: { + input: PutBucketAccelerateConfigurationCommandInput; + output: PutBucketAccelerateConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketAclCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketAclCommand.d.ts new file mode 100644 index 00000000..c5f9c3d8 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketAclCommand.d.ts @@ -0,0 +1,45 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketAclRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface 
PutBucketAclCommandInput extends PutBucketAclRequest {} +export interface PutBucketAclCommandOutput extends __MetadataBearer {} +declare const PutBucketAclCommand_base: { + new ( + input: PutBucketAclCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketAclCommandInput, + PutBucketAclCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutBucketAclCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketAclCommandInput, + PutBucketAclCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutBucketAclCommand extends PutBucketAclCommand_base { + protected static __types: { + api: { + input: PutBucketAclRequest; + output: {}; + }; + sdk: { + input: PutBucketAclCommandInput; + output: PutBucketAclCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketAnalyticsConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketAnalyticsConfigurationCommand.d.ts new file mode 100644 index 00000000..96a07ba2 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketAnalyticsConfigurationCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketAnalyticsConfigurationRequest } from "../models/models_0"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutBucketAnalyticsConfigurationCommandInput + extends PutBucketAnalyticsConfigurationRequest {} +export interface PutBucketAnalyticsConfigurationCommandOutput + extends __MetadataBearer {} +declare const PutBucketAnalyticsConfigurationCommand_base: { + new ( + input: PutBucketAnalyticsConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketAnalyticsConfigurationCommandInput, + PutBucketAnalyticsConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutBucketAnalyticsConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketAnalyticsConfigurationCommandInput, + PutBucketAnalyticsConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutBucketAnalyticsConfigurationCommand extends PutBucketAnalyticsConfigurationCommand_base { + protected static __types: { + api: { + input: PutBucketAnalyticsConfigurationRequest; + output: {}; + }; + sdk: { + input: PutBucketAnalyticsConfigurationCommandInput; + output: PutBucketAnalyticsConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketCorsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketCorsCommand.d.ts new file mode 100644 index 00000000..011695f5 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketCorsCommand.d.ts @@ -0,0 +1,45 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { 
PutBucketCorsRequest } from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutBucketCorsCommandInput extends PutBucketCorsRequest {} +export interface PutBucketCorsCommandOutput extends __MetadataBearer {} +declare const PutBucketCorsCommand_base: { + new ( + input: PutBucketCorsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketCorsCommandInput, + PutBucketCorsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutBucketCorsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketCorsCommandInput, + PutBucketCorsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutBucketCorsCommand extends PutBucketCorsCommand_base { + protected static __types: { + api: { + input: PutBucketCorsRequest; + output: {}; + }; + sdk: { + input: PutBucketCorsCommandInput; + output: PutBucketCorsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketEncryptionCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketEncryptionCommand.d.ts new file mode 100644 index 00000000..afd8e976 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketEncryptionCommand.d.ts @@ -0,0 +1,46 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketEncryptionRequest } from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutBucketEncryptionCommandInput + extends PutBucketEncryptionRequest {} +export interface PutBucketEncryptionCommandOutput extends __MetadataBearer {} +declare const PutBucketEncryptionCommand_base: { + new ( + input: PutBucketEncryptionCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketEncryptionCommandInput, + PutBucketEncryptionCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutBucketEncryptionCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketEncryptionCommandInput, + PutBucketEncryptionCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutBucketEncryptionCommand extends PutBucketEncryptionCommand_base { + protected static __types: { + api: { + input: PutBucketEncryptionRequest; + output: {}; + }; + sdk: { + input: PutBucketEncryptionCommandInput; + output: PutBucketEncryptionCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketIntelligentTieringConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketIntelligentTieringConfigurationCommand.d.ts new file mode 100644 index 00000000..01b185bb --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketIntelligentTieringConfigurationCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { 
MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketIntelligentTieringConfigurationRequest } from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutBucketIntelligentTieringConfigurationCommandInput + extends PutBucketIntelligentTieringConfigurationRequest {} +export interface PutBucketIntelligentTieringConfigurationCommandOutput + extends __MetadataBearer {} +declare const PutBucketIntelligentTieringConfigurationCommand_base: { + new ( + input: PutBucketIntelligentTieringConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketIntelligentTieringConfigurationCommandInput, + PutBucketIntelligentTieringConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutBucketIntelligentTieringConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketIntelligentTieringConfigurationCommandInput, + PutBucketIntelligentTieringConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutBucketIntelligentTieringConfigurationCommand extends PutBucketIntelligentTieringConfigurationCommand_base { + protected static __types: { + api: { + input: PutBucketIntelligentTieringConfigurationRequest; + output: {}; + }; + sdk: { + input: PutBucketIntelligentTieringConfigurationCommandInput; + output: PutBucketIntelligentTieringConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketInventoryConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketInventoryConfigurationCommand.d.ts new file mode 100644 index 00000000..776b81f7 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketInventoryConfigurationCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketInventoryConfigurationRequest } from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutBucketInventoryConfigurationCommandInput + extends PutBucketInventoryConfigurationRequest {} +export interface PutBucketInventoryConfigurationCommandOutput + extends __MetadataBearer {} +declare const PutBucketInventoryConfigurationCommand_base: { + new ( + input: PutBucketInventoryConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketInventoryConfigurationCommandInput, + PutBucketInventoryConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutBucketInventoryConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketInventoryConfigurationCommandInput, + PutBucketInventoryConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutBucketInventoryConfigurationCommand extends PutBucketInventoryConfigurationCommand_base { + 
protected static __types: { + api: { + input: PutBucketInventoryConfigurationRequest; + output: {}; + }; + sdk: { + input: PutBucketInventoryConfigurationCommandInput; + output: PutBucketInventoryConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketLifecycleConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketLifecycleConfigurationCommand.d.ts new file mode 100644 index 00000000..f3a569b1 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketLifecycleConfigurationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + PutBucketLifecycleConfigurationOutput, + PutBucketLifecycleConfigurationRequest, +} from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutBucketLifecycleConfigurationCommandInput + extends PutBucketLifecycleConfigurationRequest {} +export interface PutBucketLifecycleConfigurationCommandOutput + extends PutBucketLifecycleConfigurationOutput, + __MetadataBearer {} +declare const PutBucketLifecycleConfigurationCommand_base: { + new ( + input: PutBucketLifecycleConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketLifecycleConfigurationCommandInput, + PutBucketLifecycleConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutBucketLifecycleConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketLifecycleConfigurationCommandInput, + PutBucketLifecycleConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutBucketLifecycleConfigurationCommand extends PutBucketLifecycleConfigurationCommand_base { + protected static __types: { + api: { + input: PutBucketLifecycleConfigurationRequest; + output: PutBucketLifecycleConfigurationOutput; + }; + sdk: { + input: PutBucketLifecycleConfigurationCommandInput; + output: PutBucketLifecycleConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketLoggingCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketLoggingCommand.d.ts new file mode 100644 index 00000000..595a33f8 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketLoggingCommand.d.ts @@ -0,0 +1,45 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketLoggingRequest } from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutBucketLoggingCommandInput extends PutBucketLoggingRequest {} +export interface PutBucketLoggingCommandOutput extends __MetadataBearer {} +declare const PutBucketLoggingCommand_base: { + new ( + input: PutBucketLoggingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketLoggingCommandInput, + PutBucketLoggingCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + 
ServiceOutputTypes + >; + new ( + __0_0: PutBucketLoggingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketLoggingCommandInput, + PutBucketLoggingCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutBucketLoggingCommand extends PutBucketLoggingCommand_base { + protected static __types: { + api: { + input: PutBucketLoggingRequest; + output: {}; + }; + sdk: { + input: PutBucketLoggingCommandInput; + output: PutBucketLoggingCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketMetricsConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketMetricsConfigurationCommand.d.ts new file mode 100644 index 00000000..212aab81 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketMetricsConfigurationCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketMetricsConfigurationRequest } from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutBucketMetricsConfigurationCommandInput + extends PutBucketMetricsConfigurationRequest {} +export interface PutBucketMetricsConfigurationCommandOutput + extends __MetadataBearer {} +declare const PutBucketMetricsConfigurationCommand_base: { + new ( + input: PutBucketMetricsConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketMetricsConfigurationCommandInput, + PutBucketMetricsConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutBucketMetricsConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketMetricsConfigurationCommandInput, + PutBucketMetricsConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutBucketMetricsConfigurationCommand extends PutBucketMetricsConfigurationCommand_base { + protected static __types: { + api: { + input: PutBucketMetricsConfigurationRequest; + output: {}; + }; + sdk: { + input: PutBucketMetricsConfigurationCommandInput; + output: PutBucketMetricsConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketNotificationConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketNotificationConfigurationCommand.d.ts new file mode 100644 index 00000000..019674ee --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketNotificationConfigurationCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketNotificationConfigurationRequest } from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutBucketNotificationConfigurationCommandInput + extends 
PutBucketNotificationConfigurationRequest {} +export interface PutBucketNotificationConfigurationCommandOutput + extends __MetadataBearer {} +declare const PutBucketNotificationConfigurationCommand_base: { + new ( + input: PutBucketNotificationConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketNotificationConfigurationCommandInput, + PutBucketNotificationConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutBucketNotificationConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketNotificationConfigurationCommandInput, + PutBucketNotificationConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutBucketNotificationConfigurationCommand extends PutBucketNotificationConfigurationCommand_base { + protected static __types: { + api: { + input: PutBucketNotificationConfigurationRequest; + output: {}; + }; + sdk: { + input: PutBucketNotificationConfigurationCommandInput; + output: PutBucketNotificationConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketOwnershipControlsCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketOwnershipControlsCommand.d.ts new file mode 100644 index 00000000..7c2fab2d --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketOwnershipControlsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketOwnershipControlsRequest } from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutBucketOwnershipControlsCommandInput + extends PutBucketOwnershipControlsRequest {} +export interface PutBucketOwnershipControlsCommandOutput + extends __MetadataBearer {} +declare const PutBucketOwnershipControlsCommand_base: { + new ( + input: PutBucketOwnershipControlsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketOwnershipControlsCommandInput, + PutBucketOwnershipControlsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutBucketOwnershipControlsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketOwnershipControlsCommandInput, + PutBucketOwnershipControlsCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutBucketOwnershipControlsCommand extends PutBucketOwnershipControlsCommand_base { + protected static __types: { + api: { + input: PutBucketOwnershipControlsRequest; + output: {}; + }; + sdk: { + input: PutBucketOwnershipControlsCommandInput; + output: PutBucketOwnershipControlsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketPolicyCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketPolicyCommand.d.ts new file mode 100644 index 00000000..0377307d --- /dev/null +++ 
b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketPolicyCommand.d.ts @@ -0,0 +1,45 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketPolicyRequest } from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutBucketPolicyCommandInput extends PutBucketPolicyRequest {} +export interface PutBucketPolicyCommandOutput extends __MetadataBearer {} +declare const PutBucketPolicyCommand_base: { + new ( + input: PutBucketPolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketPolicyCommandInput, + PutBucketPolicyCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutBucketPolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketPolicyCommandInput, + PutBucketPolicyCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutBucketPolicyCommand extends PutBucketPolicyCommand_base { + protected static __types: { + api: { + input: PutBucketPolicyRequest; + output: {}; + }; + sdk: { + input: PutBucketPolicyCommandInput; + output: PutBucketPolicyCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketReplicationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketReplicationCommand.d.ts new file mode 100644 index 00000000..45c1c3b8 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketReplicationCommand.d.ts @@ -0,0 +1,46 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketReplicationRequest } from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutBucketReplicationCommandInput + extends PutBucketReplicationRequest {} +export interface PutBucketReplicationCommandOutput extends __MetadataBearer {} +declare const PutBucketReplicationCommand_base: { + new ( + input: PutBucketReplicationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketReplicationCommandInput, + PutBucketReplicationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutBucketReplicationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketReplicationCommandInput, + PutBucketReplicationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutBucketReplicationCommand extends PutBucketReplicationCommand_base { + protected static __types: { + api: { + input: PutBucketReplicationRequest; + output: {}; + }; + sdk: { + input: PutBucketReplicationCommandInput; + output: PutBucketReplicationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketRequestPaymentCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketRequestPaymentCommand.d.ts new 
file mode 100644 index 00000000..2a6c73f8 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketRequestPaymentCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketRequestPaymentRequest } from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutBucketRequestPaymentCommandInput + extends PutBucketRequestPaymentRequest {} +export interface PutBucketRequestPaymentCommandOutput + extends __MetadataBearer {} +declare const PutBucketRequestPaymentCommand_base: { + new ( + input: PutBucketRequestPaymentCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketRequestPaymentCommandInput, + PutBucketRequestPaymentCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutBucketRequestPaymentCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketRequestPaymentCommandInput, + PutBucketRequestPaymentCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutBucketRequestPaymentCommand extends PutBucketRequestPaymentCommand_base { + protected static __types: { + api: { + input: PutBucketRequestPaymentRequest; + output: {}; + }; + sdk: { + input: PutBucketRequestPaymentCommandInput; + output: PutBucketRequestPaymentCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketTaggingCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketTaggingCommand.d.ts new file mode 100644 index 00000000..427e3020 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketTaggingCommand.d.ts @@ -0,0 +1,45 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketTaggingRequest } from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutBucketTaggingCommandInput extends PutBucketTaggingRequest {} +export interface PutBucketTaggingCommandOutput extends __MetadataBearer {} +declare const PutBucketTaggingCommand_base: { + new ( + input: PutBucketTaggingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketTaggingCommandInput, + PutBucketTaggingCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutBucketTaggingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketTaggingCommandInput, + PutBucketTaggingCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutBucketTaggingCommand extends PutBucketTaggingCommand_base { + protected static __types: { + api: { + input: PutBucketTaggingRequest; + output: {}; + }; + sdk: { + input: PutBucketTaggingCommandInput; + output: PutBucketTaggingCommandOutput; + }; + }; +} diff --git 
a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketVersioningCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketVersioningCommand.d.ts new file mode 100644 index 00000000..1e990821 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketVersioningCommand.d.ts @@ -0,0 +1,46 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketVersioningRequest } from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutBucketVersioningCommandInput + extends PutBucketVersioningRequest {} +export interface PutBucketVersioningCommandOutput extends __MetadataBearer {} +declare const PutBucketVersioningCommand_base: { + new ( + input: PutBucketVersioningCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketVersioningCommandInput, + PutBucketVersioningCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutBucketVersioningCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketVersioningCommandInput, + PutBucketVersioningCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutBucketVersioningCommand extends PutBucketVersioningCommand_base { + protected static __types: { + api: { + input: PutBucketVersioningRequest; + output: {}; + }; + sdk: { + input: PutBucketVersioningCommandInput; + output: PutBucketVersioningCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketWebsiteCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketWebsiteCommand.d.ts new file mode 100644 index 00000000..12b277ea --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutBucketWebsiteCommand.d.ts @@ -0,0 +1,45 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutBucketWebsiteRequest } from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutBucketWebsiteCommandInput extends PutBucketWebsiteRequest {} +export interface PutBucketWebsiteCommandOutput extends __MetadataBearer {} +declare const PutBucketWebsiteCommand_base: { + new ( + input: PutBucketWebsiteCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketWebsiteCommandInput, + PutBucketWebsiteCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutBucketWebsiteCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutBucketWebsiteCommandInput, + PutBucketWebsiteCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutBucketWebsiteCommand extends PutBucketWebsiteCommand_base { + protected static __types: { + api: { + input: PutBucketWebsiteRequest; + output: {}; + }; + sdk: { + input: PutBucketWebsiteCommandInput; + output: 
PutBucketWebsiteCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutObjectAclCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutObjectAclCommand.d.ts new file mode 100644 index 00000000..d13c5f98 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutObjectAclCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutObjectAclOutput, PutObjectAclRequest } from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutObjectAclCommandInput extends PutObjectAclRequest {} +export interface PutObjectAclCommandOutput + extends PutObjectAclOutput, + __MetadataBearer {} +declare const PutObjectAclCommand_base: { + new ( + input: PutObjectAclCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutObjectAclCommandInput, + PutObjectAclCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutObjectAclCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutObjectAclCommandInput, + PutObjectAclCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutObjectAclCommand extends PutObjectAclCommand_base { + protected static __types: { + api: { + input: PutObjectAclRequest; + output: PutObjectAclOutput; + }; + sdk: { + input: PutObjectAclCommandInput; + output: PutObjectAclCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutObjectCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutObjectCommand.d.ts new file mode 100644 index 00000000..11a568fe --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutObjectCommand.d.ts @@ -0,0 +1,53 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { + MetadataBearer as __MetadataBearer, + StreamingBlobPayloadInputTypes, +} from "@smithy/types"; +import { PutObjectOutput, PutObjectRequest } from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutObjectCommandInput + extends Pick<PutObjectRequest, Exclude<keyof PutObjectRequest, "Body">> { + Body?: StreamingBlobPayloadInputTypes; +} +export interface PutObjectCommandOutput + extends PutObjectOutput, + __MetadataBearer {} +declare const PutObjectCommand_base: { + new ( + input: PutObjectCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutObjectCommandInput, + PutObjectCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutObjectCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutObjectCommandInput, + PutObjectCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutObjectCommand extends PutObjectCommand_base { + protected static __types: { + api: { + input: PutObjectRequest; + output: PutObjectOutput; + }; + sdk: { + input: PutObjectCommandInput; + output: PutObjectCommandOutput; + }; + };
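[The PutObjectCommandInput shape declared above is worth a note: it is PutObjectRequest with Body narrowed to StreamingBlobPayloadInputTypes, so uploads accept a string, Buffer, Uint8Array, or stream. Below, a minimal upload sketch against the same hypothetical MinIO client as in the listing example; the bucket, key, and file path are placeholders, not values from this repo.]

import { readFile } from "node:fs/promises";
import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3";

// Hypothetical client; same placeholder MinIO settings as the listing sketch.
const s3 = new S3Client({
  region: "us-east-1",
  endpoint: process.env.MINIO_ENDPOINT, // assumed env var
  forcePathStyle: true,
  credentials: {
    accessKeyId: process.env.MINIO_ACCESS_KEY ?? "", // assumed env var names
    secretAccessKey: process.env.MINIO_SECRET_KEY ?? "",
  },
});

// Upload a vCard file; reading into a Buffer keeps the content length known up front.
async function uploadVcf(bucket: string, key: string, filePath: string): Promise<void> {
  await s3.send(
    new PutObjectCommand({
      Bucket: bucket,
      Key: key,
      Body: await readFile(filePath), // Body also accepts strings and streams
      ContentType: "text/vcard",
    })
  );
}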
+} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutObjectLegalHoldCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutObjectLegalHoldCommand.d.ts new file mode 100644 index 00000000..e38efc94 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutObjectLegalHoldCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + PutObjectLegalHoldOutput, + PutObjectLegalHoldRequest, +} from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutObjectLegalHoldCommandInput + extends PutObjectLegalHoldRequest {} +export interface PutObjectLegalHoldCommandOutput + extends PutObjectLegalHoldOutput, + __MetadataBearer {} +declare const PutObjectLegalHoldCommand_base: { + new ( + input: PutObjectLegalHoldCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutObjectLegalHoldCommandInput, + PutObjectLegalHoldCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutObjectLegalHoldCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutObjectLegalHoldCommandInput, + PutObjectLegalHoldCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutObjectLegalHoldCommand extends PutObjectLegalHoldCommand_base { + protected static __types: { + api: { + input: PutObjectLegalHoldRequest; + output: PutObjectLegalHoldOutput; + }; + sdk: { + input: PutObjectLegalHoldCommandInput; + output: PutObjectLegalHoldCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutObjectLockConfigurationCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutObjectLockConfigurationCommand.d.ts new file mode 100644 index 00000000..97183b4e --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutObjectLockConfigurationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + PutObjectLockConfigurationOutput, + PutObjectLockConfigurationRequest, +} from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutObjectLockConfigurationCommandInput + extends PutObjectLockConfigurationRequest {} +export interface PutObjectLockConfigurationCommandOutput + extends PutObjectLockConfigurationOutput, + __MetadataBearer {} +declare const PutObjectLockConfigurationCommand_base: { + new ( + input: PutObjectLockConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutObjectLockConfigurationCommandInput, + PutObjectLockConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutObjectLockConfigurationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutObjectLockConfigurationCommandInput, + PutObjectLockConfigurationCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): 
import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutObjectLockConfigurationCommand extends PutObjectLockConfigurationCommand_base { + protected static __types: { + api: { + input: PutObjectLockConfigurationRequest; + output: PutObjectLockConfigurationOutput; + }; + sdk: { + input: PutObjectLockConfigurationCommandInput; + output: PutObjectLockConfigurationCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutObjectRetentionCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutObjectRetentionCommand.d.ts new file mode 100644 index 00000000..eed2b2f7 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutObjectRetentionCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + PutObjectRetentionOutput, + PutObjectRetentionRequest, +} from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutObjectRetentionCommandInput + extends PutObjectRetentionRequest {} +export interface PutObjectRetentionCommandOutput + extends PutObjectRetentionOutput, + __MetadataBearer {} +declare const PutObjectRetentionCommand_base: { + new ( + input: PutObjectRetentionCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutObjectRetentionCommandInput, + PutObjectRetentionCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutObjectRetentionCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutObjectRetentionCommandInput, + PutObjectRetentionCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutObjectRetentionCommand extends PutObjectRetentionCommand_base { + protected static __types: { + api: { + input: PutObjectRetentionRequest; + output: PutObjectRetentionOutput; + }; + sdk: { + input: PutObjectRetentionCommandInput; + output: PutObjectRetentionCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutObjectTaggingCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutObjectTaggingCommand.d.ts new file mode 100644 index 00000000..b058eeef --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutObjectTaggingCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + PutObjectTaggingOutput, + PutObjectTaggingRequest, +} from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutObjectTaggingCommandInput extends PutObjectTaggingRequest {} +export interface PutObjectTaggingCommandOutput + extends PutObjectTaggingOutput, + __MetadataBearer {} +declare const PutObjectTaggingCommand_base: { + new ( + input: PutObjectTaggingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutObjectTaggingCommandInput, + PutObjectTaggingCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: 
PutObjectTaggingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutObjectTaggingCommandInput, + PutObjectTaggingCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutObjectTaggingCommand extends PutObjectTaggingCommand_base { + protected static __types: { + api: { + input: PutObjectTaggingRequest; + output: PutObjectTaggingOutput; + }; + sdk: { + input: PutObjectTaggingCommandInput; + output: PutObjectTaggingCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutPublicAccessBlockCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutPublicAccessBlockCommand.d.ts new file mode 100644 index 00000000..47659a8d --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/PutPublicAccessBlockCommand.d.ts @@ -0,0 +1,46 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { PutPublicAccessBlockRequest } from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface PutPublicAccessBlockCommandInput + extends PutPublicAccessBlockRequest {} +export interface PutPublicAccessBlockCommandOutput extends __MetadataBearer {} +declare const PutPublicAccessBlockCommand_base: { + new ( + input: PutPublicAccessBlockCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutPublicAccessBlockCommandInput, + PutPublicAccessBlockCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutPublicAccessBlockCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutPublicAccessBlockCommandInput, + PutPublicAccessBlockCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutPublicAccessBlockCommand extends PutPublicAccessBlockCommand_base { + protected static __types: { + api: { + input: PutPublicAccessBlockRequest; + output: {}; + }; + sdk: { + input: PutPublicAccessBlockCommandInput; + output: PutPublicAccessBlockCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/RestoreObjectCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/RestoreObjectCommand.d.ts new file mode 100644 index 00000000..6a5274bd --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/RestoreObjectCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { RestoreObjectOutput, RestoreObjectRequest } from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface RestoreObjectCommandInput extends RestoreObjectRequest {} +export interface RestoreObjectCommandOutput + extends RestoreObjectOutput, + __MetadataBearer {} +declare const RestoreObjectCommand_base: { + new ( + input: RestoreObjectCommandInput + ): import("@smithy/smithy-client").CommandImpl< + RestoreObjectCommandInput, + 
RestoreObjectCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: RestoreObjectCommandInput + ): import("@smithy/smithy-client").CommandImpl< + RestoreObjectCommandInput, + RestoreObjectCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class RestoreObjectCommand extends RestoreObjectCommand_base { + protected static __types: { + api: { + input: RestoreObjectRequest; + output: RestoreObjectOutput; + }; + sdk: { + input: RestoreObjectCommandInput; + output: RestoreObjectCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/SelectObjectContentCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/SelectObjectContentCommand.d.ts new file mode 100644 index 00000000..5ca56e12 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/SelectObjectContentCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + SelectObjectContentOutput, + SelectObjectContentRequest, +} from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface SelectObjectContentCommandInput + extends SelectObjectContentRequest {} +export interface SelectObjectContentCommandOutput + extends SelectObjectContentOutput, + __MetadataBearer {} +declare const SelectObjectContentCommand_base: { + new ( + input: SelectObjectContentCommandInput + ): import("@smithy/smithy-client").CommandImpl< + SelectObjectContentCommandInput, + SelectObjectContentCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: SelectObjectContentCommandInput + ): import("@smithy/smithy-client").CommandImpl< + SelectObjectContentCommandInput, + SelectObjectContentCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class SelectObjectContentCommand extends SelectObjectContentCommand_base { + protected static __types: { + api: { + input: SelectObjectContentRequest; + output: SelectObjectContentOutput; + }; + sdk: { + input: SelectObjectContentCommandInput; + output: SelectObjectContentCommandOutput; + }; + }; +}
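SelectObjectContentCommand is the streaming S3 Select entry point: the output's `Payload` is an async iterable of events rather than a plain response body. A minimal sketch of scanning a CSV object with SQL follows; the client config, bucket, and key are illustrative assumptions, not part of this change:

```ts
import { S3Client, SelectObjectContentCommand } from "@aws-sdk/client-s3";

// Sketch only: region, bucket, and key are assumptions.
const s3 = new S3Client({ region: "us-east-1" });

export async function selectCsvRows(): Promise<string> {
  const { Payload } = await s3.send(
    new SelectObjectContentCommand({
      Bucket: "example-bucket",
      Key: "contacts.csv",
      Expression: "SELECT * FROM S3Object s LIMIT 10",
      ExpressionType: "SQL",
      InputSerialization: { CSV: { FileHeaderInfo: "USE" } },
      OutputSerialization: { JSON: {} },
    })
  );
  let out = "";
  // Payload is an async iterable of SelectObjectContent events;
  // Records events carry the matched bytes.
  for await (const event of Payload ?? []) {
    if (event.Records?.Payload) {
      out += Buffer.from(event.Records.Payload).toString("utf8");
    }
  }
  return out;
}
```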
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/UploadPartCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/UploadPartCommand.d.ts new file mode 100644 index 00000000..51f8ac52 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/UploadPartCommand.d.ts @@ -0,0 +1,53 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { + MetadataBearer as __MetadataBearer, + StreamingBlobPayloadInputTypes, +} from "@smithy/types"; +import { UploadPartOutput, UploadPartRequest } from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface UploadPartCommandInput + extends Pick<UploadPartRequest, Exclude<keyof UploadPartRequest, "Body">> { + Body?: StreamingBlobPayloadInputTypes; +} +export interface UploadPartCommandOutput + extends UploadPartOutput, + __MetadataBearer {} +declare const UploadPartCommand_base: { + new ( + input: UploadPartCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UploadPartCommandInput, + UploadPartCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UploadPartCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UploadPartCommandInput, + UploadPartCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UploadPartCommand extends UploadPartCommand_base { + protected static __types: { + api: { + input: UploadPartRequest; + output: UploadPartOutput; + }; + sdk: { + input: UploadPartCommandInput; + output: UploadPartCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/UploadPartCopyCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/UploadPartCopyCommand.d.ts new file mode 100644 index 00000000..f82870d3 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/UploadPartCopyCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + UploadPartCopyOutput, + UploadPartCopyRequest, +} from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface UploadPartCopyCommandInput extends UploadPartCopyRequest {} +export interface UploadPartCopyCommandOutput + extends UploadPartCopyOutput, + __MetadataBearer {} +declare const UploadPartCopyCommand_base: { + new ( + input: UploadPartCopyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UploadPartCopyCommandInput, + UploadPartCopyCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UploadPartCopyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UploadPartCopyCommandInput, + UploadPartCopyCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UploadPartCopyCommand extends UploadPartCopyCommand_base { + protected static __types: { + api: { + input: UploadPartCopyRequest; + output: UploadPartCopyOutput; + }; + sdk: { + input: UploadPartCopyCommandInput; + output: UploadPartCopyCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/WriteGetObjectResponseCommand.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/WriteGetObjectResponseCommand.d.ts new file mode 100644 index 00000000..e9099a08 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/WriteGetObjectResponseCommand.d.ts @@ -0,0 +1,54 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { + MetadataBearer as __MetadataBearer, + StreamingBlobPayloadInputTypes, +} from "@smithy/types"; +import { WriteGetObjectResponseRequest } from "../models/models_1"; +import { + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../S3Client"; +export { __MetadataBearer }; +export { $Command }; +export interface WriteGetObjectResponseCommandInput + extends Pick< + WriteGetObjectResponseRequest, + Exclude<keyof WriteGetObjectResponseRequest, "Body"> +
> { + Body?: StreamingBlobPayloadInputTypes; +} +export interface WriteGetObjectResponseCommandOutput extends __MetadataBearer {} +declare const WriteGetObjectResponseCommand_base: { + new ( + input: WriteGetObjectResponseCommandInput + ): import("@smithy/smithy-client").CommandImpl< + WriteGetObjectResponseCommandInput, + WriteGetObjectResponseCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: WriteGetObjectResponseCommandInput + ): import("@smithy/smithy-client").CommandImpl< + WriteGetObjectResponseCommandInput, + WriteGetObjectResponseCommandOutput, + S3ClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class WriteGetObjectResponseCommand extends WriteGetObjectResponseCommand_base { + protected static __types: { + api: { + input: WriteGetObjectResponseRequest; + output: {}; + }; + sdk: { + input: WriteGetObjectResponseCommandInput; + output: WriteGetObjectResponseCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/index.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/index.d.ts new file mode 100644 index 00000000..a25a95d9 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/commands/index.d.ts @@ -0,0 +1,98 @@ +export * from "./AbortMultipartUploadCommand"; +export * from "./CompleteMultipartUploadCommand"; +export * from "./CopyObjectCommand"; +export * from "./CreateBucketCommand"; +export * from "./CreateBucketMetadataTableConfigurationCommand"; +export * from "./CreateMultipartUploadCommand"; +export * from "./CreateSessionCommand"; +export * from "./DeleteBucketAnalyticsConfigurationCommand"; +export * from "./DeleteBucketCommand"; +export * from "./DeleteBucketCorsCommand"; +export * from "./DeleteBucketEncryptionCommand"; +export * from "./DeleteBucketIntelligentTieringConfigurationCommand"; +export * from "./DeleteBucketInventoryConfigurationCommand"; +export * from "./DeleteBucketLifecycleCommand"; +export * from "./DeleteBucketMetadataTableConfigurationCommand"; +export * from "./DeleteBucketMetricsConfigurationCommand"; +export * from "./DeleteBucketOwnershipControlsCommand"; +export * from "./DeleteBucketPolicyCommand"; +export * from "./DeleteBucketReplicationCommand"; +export * from "./DeleteBucketTaggingCommand"; +export * from "./DeleteBucketWebsiteCommand"; +export * from "./DeleteObjectCommand"; +export * from "./DeleteObjectTaggingCommand"; +export * from "./DeleteObjectsCommand"; +export * from "./DeletePublicAccessBlockCommand"; +export * from "./GetBucketAccelerateConfigurationCommand"; +export * from "./GetBucketAclCommand"; +export * from "./GetBucketAnalyticsConfigurationCommand"; +export * from "./GetBucketCorsCommand"; +export * from "./GetBucketEncryptionCommand"; +export * from "./GetBucketIntelligentTieringConfigurationCommand"; +export * from "./GetBucketInventoryConfigurationCommand"; +export * from "./GetBucketLifecycleConfigurationCommand"; +export * from "./GetBucketLocationCommand"; +export * from "./GetBucketLoggingCommand"; +export * from "./GetBucketMetadataTableConfigurationCommand"; +export * from "./GetBucketMetricsConfigurationCommand"; +export * from "./GetBucketNotificationConfigurationCommand"; +export * from "./GetBucketOwnershipControlsCommand"; +export * from "./GetBucketPolicyCommand"; +export * from "./GetBucketPolicyStatusCommand"; +export * from 
"./GetBucketReplicationCommand"; +export * from "./GetBucketRequestPaymentCommand"; +export * from "./GetBucketTaggingCommand"; +export * from "./GetBucketVersioningCommand"; +export * from "./GetBucketWebsiteCommand"; +export * from "./GetObjectAclCommand"; +export * from "./GetObjectAttributesCommand"; +export * from "./GetObjectCommand"; +export * from "./GetObjectLegalHoldCommand"; +export * from "./GetObjectLockConfigurationCommand"; +export * from "./GetObjectRetentionCommand"; +export * from "./GetObjectTaggingCommand"; +export * from "./GetObjectTorrentCommand"; +export * from "./GetPublicAccessBlockCommand"; +export * from "./HeadBucketCommand"; +export * from "./HeadObjectCommand"; +export * from "./ListBucketAnalyticsConfigurationsCommand"; +export * from "./ListBucketIntelligentTieringConfigurationsCommand"; +export * from "./ListBucketInventoryConfigurationsCommand"; +export * from "./ListBucketMetricsConfigurationsCommand"; +export * from "./ListBucketsCommand"; +export * from "./ListDirectoryBucketsCommand"; +export * from "./ListMultipartUploadsCommand"; +export * from "./ListObjectVersionsCommand"; +export * from "./ListObjectsCommand"; +export * from "./ListObjectsV2Command"; +export * from "./ListPartsCommand"; +export * from "./PutBucketAccelerateConfigurationCommand"; +export * from "./PutBucketAclCommand"; +export * from "./PutBucketAnalyticsConfigurationCommand"; +export * from "./PutBucketCorsCommand"; +export * from "./PutBucketEncryptionCommand"; +export * from "./PutBucketIntelligentTieringConfigurationCommand"; +export * from "./PutBucketInventoryConfigurationCommand"; +export * from "./PutBucketLifecycleConfigurationCommand"; +export * from "./PutBucketLoggingCommand"; +export * from "./PutBucketMetricsConfigurationCommand"; +export * from "./PutBucketNotificationConfigurationCommand"; +export * from "./PutBucketOwnershipControlsCommand"; +export * from "./PutBucketPolicyCommand"; +export * from "./PutBucketReplicationCommand"; +export * from "./PutBucketRequestPaymentCommand"; +export * from "./PutBucketTaggingCommand"; +export * from "./PutBucketVersioningCommand"; +export * from "./PutBucketWebsiteCommand"; +export * from "./PutObjectAclCommand"; +export * from "./PutObjectCommand"; +export * from "./PutObjectLegalHoldCommand"; +export * from "./PutObjectLockConfigurationCommand"; +export * from "./PutObjectRetentionCommand"; +export * from "./PutObjectTaggingCommand"; +export * from "./PutPublicAccessBlockCommand"; +export * from "./RestoreObjectCommand"; +export * from "./SelectObjectContentCommand"; +export * from "./UploadPartCommand"; +export * from "./UploadPartCopyCommand"; +export * from "./WriteGetObjectResponseCommand"; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/endpoint/EndpointParameters.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/endpoint/EndpointParameters.d.ts new file mode 100644 index 00000000..cefd9c7f --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/endpoint/EndpointParameters.d.ts @@ -0,0 +1,94 @@ +import { + Endpoint, + EndpointParameters as __EndpointParameters, + EndpointV2, + Provider, +} from "@smithy/types"; +export interface ClientInputEndpointParameters { + region?: string | Provider; + useFipsEndpoint?: boolean | Provider; + useDualstackEndpoint?: boolean | Provider; + endpoint?: + | string + | Provider + | Endpoint + | Provider + | EndpointV2 + | Provider; + forcePathStyle?: boolean | Provider; + useAccelerateEndpoint?: boolean | Provider; + useGlobalEndpoint?: boolean | 
+export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: <T>( + options: T & ClientInputEndpointParameters +) => T & + ClientInputEndpointParameters & { + defaultSigningName: string; + }; +export declare const commonParams: { + readonly ForcePathStyle: { + readonly type: "clientContextParams"; + readonly name: "forcePathStyle"; + }; + readonly UseArnRegion: { + readonly type: "clientContextParams"; + readonly name: "useArnRegion"; + }; + readonly DisableMultiRegionAccessPoints: { + readonly type: "clientContextParams"; + readonly name: "disableMultiregionAccessPoints"; + }; + readonly Accelerate: { + readonly type: "clientContextParams"; + readonly name: "useAccelerateEndpoint"; + }; + readonly DisableS3ExpressSessionAuth: { + readonly type: "clientContextParams"; + readonly name: "disableS3ExpressSessionAuth"; + }; + readonly UseGlobalEndpoint: { + readonly type: "builtInParams"; + readonly name: "useGlobalEndpoint"; + }; + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Bucket?: string; + Region?: string; + UseFIPS?: boolean; + UseDualStack?: boolean; + Endpoint?: string; + ForcePathStyle?: boolean; + Accelerate?: boolean; + UseGlobalEndpoint?: boolean; + UseObjectLambdaEndpoint?: boolean; + Key?: string; + Prefix?: string; + CopySource?: string; + DisableAccessPoints?: boolean; + DisableMultiRegionAccessPoints?: boolean; + UseArnRegion?: boolean; + UseS3ExpressControlEndpoint?: boolean; + DisableS3ExpressSessionAuth?: boolean; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/endpoint/endpointResolver.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/endpoint/endpointResolver.d.ts new file mode 100644 index 00000000..59099254 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/endpoint/endpointResolver.d.ts @@ -0,0 +1,8 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: ( + endpointParams: EndpointParameters, + context?: { + logger?: Logger; + } +) => EndpointV2; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/endpoint/ruleset.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/endpoint/ruleset.d.ts new file mode 100644 index 00000000..4b238994 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/extensionConfiguration.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/extensionConfiguration.d.ts new file mode 100644 index 00000000..b559effd --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { 
HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +export interface S3ExtensionConfiguration + extends HttpHandlerExtensionConfiguration, + DefaultExtensionConfiguration, + AwsRegionExtensionConfiguration, + HttpAuthExtensionConfiguration {} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..86b01703 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/index.d.ts @@ -0,0 +1,10 @@ +export * from "./S3Client"; +export * from "./S3"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export { RuntimeExtension } from "./runtimeExtensions"; +export { S3ExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./pagination"; +export * from "./waiters"; +export * from "./models"; +export { S3ServiceException } from "./models/S3ServiceException"; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/models/S3ServiceException.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/models/S3ServiceException.d.ts new file mode 100644 index 00000000..04a0d0a1 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/models/S3ServiceException.d.ts @@ -0,0 +1,9 @@ +import { + ServiceException as __ServiceException, + ServiceExceptionOptions as __ServiceExceptionOptions, +} from "@smithy/smithy-client"; +export { __ServiceExceptionOptions }; +export { __ServiceException }; +export declare class S3ServiceException extends __ServiceException { + constructor(options: __ServiceExceptionOptions); +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/models/index.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/models/index.d.ts new file mode 100644 index 00000000..ae1cfffa --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/models/index.d.ts @@ -0,0 +1,2 @@ +export * from "./models_0"; +export * from "./models_1"; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/models/models_0.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/models/models_0.d.ts new file mode 100644 index 00000000..84e9350a --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/models/models_0.d.ts @@ -0,0 +1,2105 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { StreamingBlobTypes } from "@smithy/types"; +import { S3ServiceException as __BaseException } from "./S3ServiceException"; +export interface AbortIncompleteMultipartUpload { + DaysAfterInitiation?: number | undefined; +} +export declare const RequestCharged: { + readonly requester: "requester"; +}; +export type RequestCharged = + (typeof RequestCharged)[keyof typeof RequestCharged]; +export interface AbortMultipartUploadOutput { + RequestCharged?: RequestCharged | undefined; +} +export declare const RequestPayer: { + readonly requester: "requester"; +}; +export type RequestPayer = (typeof RequestPayer)[keyof typeof RequestPayer]; +export interface AbortMultipartUploadRequest { + Bucket: string | undefined; + Key: string | undefined; + UploadId: string | undefined; + RequestPayer?: RequestPayer | undefined; + ExpectedBucketOwner?: string | undefined; + IfMatchInitiatedTime?: Date | undefined; +} +export declare class NoSuchUpload extends __BaseException { + readonly name: 
"NoSuchUpload"; + readonly $fault: "client"; + constructor(opts: __ExceptionOptionType); +} +export declare const BucketAccelerateStatus: { + readonly Enabled: "Enabled"; + readonly Suspended: "Suspended"; +}; +export type BucketAccelerateStatus = + (typeof BucketAccelerateStatus)[keyof typeof BucketAccelerateStatus]; +export interface AccelerateConfiguration { + Status?: BucketAccelerateStatus | undefined; +} +export declare const Type: { + readonly AmazonCustomerByEmail: "AmazonCustomerByEmail"; + readonly CanonicalUser: "CanonicalUser"; + readonly Group: "Group"; +}; +export type Type = (typeof Type)[keyof typeof Type]; +export interface Grantee { + DisplayName?: string | undefined; + EmailAddress?: string | undefined; + ID?: string | undefined; + URI?: string | undefined; + Type: Type | undefined; +} +export declare const Permission: { + readonly FULL_CONTROL: "FULL_CONTROL"; + readonly READ: "READ"; + readonly READ_ACP: "READ_ACP"; + readonly WRITE: "WRITE"; + readonly WRITE_ACP: "WRITE_ACP"; +}; +export type Permission = (typeof Permission)[keyof typeof Permission]; +export interface Grant { + Grantee?: Grantee | undefined; + Permission?: Permission | undefined; +} +export interface Owner { + DisplayName?: string | undefined; + ID?: string | undefined; +} +export interface AccessControlPolicy { + Grants?: Grant[] | undefined; + Owner?: Owner | undefined; +} +export declare const OwnerOverride: { + readonly Destination: "Destination"; +}; +export type OwnerOverride = (typeof OwnerOverride)[keyof typeof OwnerOverride]; +export interface AccessControlTranslation { + Owner: OwnerOverride | undefined; +} +export declare const ChecksumType: { + readonly COMPOSITE: "COMPOSITE"; + readonly FULL_OBJECT: "FULL_OBJECT"; +}; +export type ChecksumType = (typeof ChecksumType)[keyof typeof ChecksumType]; +export declare const ServerSideEncryption: { + readonly AES256: "AES256"; + readonly aws_kms: "aws:kms"; + readonly aws_kms_dsse: "aws:kms:dsse"; +}; +export type ServerSideEncryption = + (typeof ServerSideEncryption)[keyof typeof ServerSideEncryption]; +export interface CompleteMultipartUploadOutput { + Location?: string | undefined; + Bucket?: string | undefined; + Key?: string | undefined; + Expiration?: string | undefined; + ETag?: string | undefined; + ChecksumCRC32?: string | undefined; + ChecksumCRC32C?: string | undefined; + ChecksumCRC64NVME?: string | undefined; + ChecksumSHA1?: string | undefined; + ChecksumSHA256?: string | undefined; + ChecksumType?: ChecksumType | undefined; + ServerSideEncryption?: ServerSideEncryption | undefined; + VersionId?: string | undefined; + SSEKMSKeyId?: string | undefined; + BucketKeyEnabled?: boolean | undefined; + RequestCharged?: RequestCharged | undefined; +} +export interface CompletedPart { + ETag?: string | undefined; + ChecksumCRC32?: string | undefined; + ChecksumCRC32C?: string | undefined; + ChecksumCRC64NVME?: string | undefined; + ChecksumSHA1?: string | undefined; + ChecksumSHA256?: string | undefined; + PartNumber?: number | undefined; +} +export interface CompletedMultipartUpload { + Parts?: CompletedPart[] | undefined; +} +export interface CompleteMultipartUploadRequest { + Bucket: string | undefined; + Key: string | undefined; + MultipartUpload?: CompletedMultipartUpload | undefined; + UploadId: string | undefined; + ChecksumCRC32?: string | undefined; + ChecksumCRC32C?: string | undefined; + ChecksumCRC64NVME?: string | undefined; + ChecksumSHA1?: string | undefined; + ChecksumSHA256?: string | undefined; + ChecksumType?: ChecksumType | 
+export interface CompleteMultipartUploadRequest { + Bucket: string | undefined; + Key: string | undefined; + MultipartUpload?: CompletedMultipartUpload | undefined; + UploadId: string | undefined; + ChecksumCRC32?: string | undefined; + ChecksumCRC32C?: string | undefined; + ChecksumCRC64NVME?: string | undefined; + ChecksumSHA1?: string | undefined; + ChecksumSHA256?: string | undefined; + ChecksumType?: ChecksumType | undefined; + MpuObjectSize?: number | undefined; + RequestPayer?: RequestPayer | undefined; + ExpectedBucketOwner?: string | undefined; + IfMatch?: string | undefined; + IfNoneMatch?: string | undefined; + SSECustomerAlgorithm?: string | undefined; + SSECustomerKey?: string | undefined; + SSECustomerKeyMD5?: string | undefined; +} +export interface CopyObjectResult { + ETag?: string | undefined; + LastModified?: Date | undefined; + ChecksumType?: ChecksumType | undefined; + ChecksumCRC32?: string | undefined; + ChecksumCRC32C?: string | undefined; + ChecksumCRC64NVME?: string | undefined; + ChecksumSHA1?: string | undefined; + ChecksumSHA256?: string | undefined; +} +export interface CopyObjectOutput { + CopyObjectResult?: CopyObjectResult | undefined; + Expiration?: string | undefined; + CopySourceVersionId?: string | undefined; + VersionId?: string | undefined; + ServerSideEncryption?: ServerSideEncryption | undefined; + SSECustomerAlgorithm?: string | undefined; + SSECustomerKeyMD5?: string | undefined; + SSEKMSKeyId?: string | undefined; + SSEKMSEncryptionContext?: string | undefined; + BucketKeyEnabled?: boolean | undefined; + RequestCharged?: RequestCharged | undefined; +} +export declare const ObjectCannedACL: { + readonly authenticated_read: "authenticated-read"; + readonly aws_exec_read: "aws-exec-read"; + readonly bucket_owner_full_control: "bucket-owner-full-control"; + readonly bucket_owner_read: "bucket-owner-read"; + readonly private: "private"; + readonly public_read: "public-read"; + readonly public_read_write: "public-read-write"; +}; +export type ObjectCannedACL = + (typeof ObjectCannedACL)[keyof typeof ObjectCannedACL]; +export declare const ChecksumAlgorithm: { + readonly CRC32: "CRC32"; + readonly CRC32C: "CRC32C"; + readonly CRC64NVME: "CRC64NVME"; + readonly SHA1: "SHA1"; + readonly SHA256: "SHA256"; +}; +export type ChecksumAlgorithm = + (typeof ChecksumAlgorithm)[keyof typeof ChecksumAlgorithm]; +export declare const MetadataDirective: { + readonly COPY: "COPY"; + readonly REPLACE: "REPLACE"; +}; +export type MetadataDirective = + (typeof MetadataDirective)[keyof typeof MetadataDirective]; +export declare const ObjectLockLegalHoldStatus: { + readonly OFF: "OFF"; + readonly ON: "ON"; +}; +export type ObjectLockLegalHoldStatus = + (typeof ObjectLockLegalHoldStatus)[keyof typeof ObjectLockLegalHoldStatus]; +export declare const ObjectLockMode: { + readonly COMPLIANCE: "COMPLIANCE"; + readonly GOVERNANCE: "GOVERNANCE"; +}; +export type ObjectLockMode = + (typeof ObjectLockMode)[keyof typeof ObjectLockMode]; +export declare const StorageClass: { + readonly DEEP_ARCHIVE: "DEEP_ARCHIVE"; + readonly EXPRESS_ONEZONE: "EXPRESS_ONEZONE"; + readonly GLACIER: "GLACIER"; + readonly GLACIER_IR: "GLACIER_IR"; + readonly INTELLIGENT_TIERING: "INTELLIGENT_TIERING"; + readonly ONEZONE_IA: "ONEZONE_IA"; + readonly OUTPOSTS: "OUTPOSTS"; + readonly REDUCED_REDUNDANCY: "REDUCED_REDUNDANCY"; + readonly SNOW: "SNOW"; + readonly STANDARD: "STANDARD"; + readonly STANDARD_IA: "STANDARD_IA"; +}; +export type StorageClass = (typeof StorageClass)[keyof typeof StorageClass]; +export declare const TaggingDirective: { + readonly COPY: "COPY"; + readonly REPLACE: "REPLACE"; +}; +export type TaggingDirective = + (typeof TaggingDirective)[keyof typeof TaggingDirective]; +export interface CopyObjectRequest { + ACL?: ObjectCannedACL | undefined; + Bucket: string | undefined; + CacheControl?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + ContentDisposition?: string | 
undefined; + ContentEncoding?: string | undefined; + ContentLanguage?: string | undefined; + ContentType?: string | undefined; + CopySource: string | undefined; + CopySourceIfMatch?: string | undefined; + CopySourceIfModifiedSince?: Date | undefined; + CopySourceIfNoneMatch?: string | undefined; + CopySourceIfUnmodifiedSince?: Date | undefined; + Expires?: Date | undefined; + GrantFullControl?: string | undefined; + GrantRead?: string | undefined; + GrantReadACP?: string | undefined; + GrantWriteACP?: string | undefined; + Key: string | undefined; + Metadata?: Record<string, string> | undefined; + MetadataDirective?: MetadataDirective | undefined; + TaggingDirective?: TaggingDirective | undefined; + ServerSideEncryption?: ServerSideEncryption | undefined; + StorageClass?: StorageClass | undefined; + WebsiteRedirectLocation?: string | undefined; + SSECustomerAlgorithm?: string | undefined; + SSECustomerKey?: string | undefined; + SSECustomerKeyMD5?: string | undefined; + SSEKMSKeyId?: string | undefined; + SSEKMSEncryptionContext?: string | undefined; + BucketKeyEnabled?: boolean | undefined; + CopySourceSSECustomerAlgorithm?: string | undefined; + CopySourceSSECustomerKey?: string | undefined; + CopySourceSSECustomerKeyMD5?: string | undefined; + RequestPayer?: RequestPayer | undefined; + Tagging?: string | undefined; + ObjectLockMode?: ObjectLockMode | undefined; + ObjectLockRetainUntilDate?: Date | undefined; + ObjectLockLegalHoldStatus?: ObjectLockLegalHoldStatus | undefined; + ExpectedBucketOwner?: string | undefined; + ExpectedSourceBucketOwner?: string | undefined; +} +export declare class ObjectNotInActiveTierError extends __BaseException { + readonly name: "ObjectNotInActiveTierError"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType<ObjectNotInActiveTierError, __BaseException> + ); +} +export declare class BucketAlreadyExists extends __BaseException { + readonly name: "BucketAlreadyExists"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType<BucketAlreadyExists, __BaseException> + ); +} +export declare class BucketAlreadyOwnedByYou extends __BaseException { + readonly name: "BucketAlreadyOwnedByYou"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType<BucketAlreadyOwnedByYou, __BaseException> + ); +} +export interface CreateBucketOutput { + Location?: string | undefined; +} +export declare const BucketCannedACL: { + readonly authenticated_read: "authenticated-read"; + readonly private: "private"; + readonly public_read: "public-read"; + readonly public_read_write: "public-read-write"; +}; +export type BucketCannedACL = + (typeof BucketCannedACL)[keyof typeof BucketCannedACL]; +export declare const DataRedundancy: { + readonly SingleAvailabilityZone: "SingleAvailabilityZone"; + readonly SingleLocalZone: "SingleLocalZone"; +}; +export type DataRedundancy = + (typeof DataRedundancy)[keyof typeof DataRedundancy]; +export declare const BucketType: { + readonly Directory: "Directory"; +}; +export type BucketType = (typeof BucketType)[keyof typeof BucketType]; +export interface BucketInfo { + DataRedundancy?: DataRedundancy | undefined; + Type?: BucketType | undefined; +} +export declare const LocationType: { + readonly AvailabilityZone: "AvailabilityZone"; + readonly LocalZone: "LocalZone"; +}; +export type LocationType = (typeof LocationType)[keyof typeof LocationType]; +export interface LocationInfo { + Type?: LocationType | undefined; + Name?: string | undefined; +} +export declare const BucketLocationConstraint: { + readonly EU: "EU"; + readonly af_south_1: "af-south-1"; + readonly ap_east_1: "ap-east-1"; + readonly ap_northeast_1: "ap-northeast-1"; + 
readonly ap_northeast_2: "ap-northeast-2"; + readonly ap_northeast_3: "ap-northeast-3"; + readonly ap_south_1: "ap-south-1"; + readonly ap_south_2: "ap-south-2"; + readonly ap_southeast_1: "ap-southeast-1"; + readonly ap_southeast_2: "ap-southeast-2"; + readonly ap_southeast_3: "ap-southeast-3"; + readonly ap_southeast_4: "ap-southeast-4"; + readonly ap_southeast_5: "ap-southeast-5"; + readonly ca_central_1: "ca-central-1"; + readonly cn_north_1: "cn-north-1"; + readonly cn_northwest_1: "cn-northwest-1"; + readonly eu_central_1: "eu-central-1"; + readonly eu_central_2: "eu-central-2"; + readonly eu_north_1: "eu-north-1"; + readonly eu_south_1: "eu-south-1"; + readonly eu_south_2: "eu-south-2"; + readonly eu_west_1: "eu-west-1"; + readonly eu_west_2: "eu-west-2"; + readonly eu_west_3: "eu-west-3"; + readonly il_central_1: "il-central-1"; + readonly me_central_1: "me-central-1"; + readonly me_south_1: "me-south-1"; + readonly sa_east_1: "sa-east-1"; + readonly us_east_2: "us-east-2"; + readonly us_gov_east_1: "us-gov-east-1"; + readonly us_gov_west_1: "us-gov-west-1"; + readonly us_west_1: "us-west-1"; + readonly us_west_2: "us-west-2"; +}; +export type BucketLocationConstraint = + (typeof BucketLocationConstraint)[keyof typeof BucketLocationConstraint]; +export interface CreateBucketConfiguration { + LocationConstraint?: BucketLocationConstraint | undefined; + Location?: LocationInfo | undefined; + Bucket?: BucketInfo | undefined; +} +export declare const ObjectOwnership: { + readonly BucketOwnerEnforced: "BucketOwnerEnforced"; + readonly BucketOwnerPreferred: "BucketOwnerPreferred"; + readonly ObjectWriter: "ObjectWriter"; +}; +export type ObjectOwnership = + (typeof ObjectOwnership)[keyof typeof ObjectOwnership]; +export interface CreateBucketRequest { + ACL?: BucketCannedACL | undefined; + Bucket: string | undefined; + CreateBucketConfiguration?: CreateBucketConfiguration | undefined; + GrantFullControl?: string | undefined; + GrantRead?: string | undefined; + GrantReadACP?: string | undefined; + GrantWrite?: string | undefined; + GrantWriteACP?: string | undefined; + ObjectLockEnabledForBucket?: boolean | undefined; + ObjectOwnership?: ObjectOwnership | undefined; +} +export interface S3TablesDestination { + TableBucketArn: string | undefined; + TableName: string | undefined; +} +export interface MetadataTableConfiguration { + S3TablesDestination: S3TablesDestination | undefined; +} +export interface CreateBucketMetadataTableConfigurationRequest { + Bucket: string | undefined; + ContentMD5?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + MetadataTableConfiguration: MetadataTableConfiguration | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface CreateMultipartUploadOutput { + AbortDate?: Date | undefined; + AbortRuleId?: string | undefined; + Bucket?: string | undefined; + Key?: string | undefined; + UploadId?: string | undefined; + ServerSideEncryption?: ServerSideEncryption | undefined; + SSECustomerAlgorithm?: string | undefined; + SSECustomerKeyMD5?: string | undefined; + SSEKMSKeyId?: string | undefined; + SSEKMSEncryptionContext?: string | undefined; + BucketKeyEnabled?: boolean | undefined; + RequestCharged?: RequestCharged | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + ChecksumType?: ChecksumType | undefined; +} +export interface CreateMultipartUploadRequest { + ACL?: ObjectCannedACL | undefined; + Bucket: string | undefined; + CacheControl?: string | undefined; + ContentDisposition?: string | undefined; + 
ContentEncoding?: string | undefined; + ContentLanguage?: string | undefined; + ContentType?: string | undefined; + Expires?: Date | undefined; + GrantFullControl?: string | undefined; + GrantRead?: string | undefined; + GrantReadACP?: string | undefined; + GrantWriteACP?: string | undefined; + Key: string | undefined; + Metadata?: Record<string, string> | undefined; + ServerSideEncryption?: ServerSideEncryption | undefined; + StorageClass?: StorageClass | undefined; + WebsiteRedirectLocation?: string | undefined; + SSECustomerAlgorithm?: string | undefined; + SSECustomerKey?: string | undefined; + SSECustomerKeyMD5?: string | undefined; + SSEKMSKeyId?: string | undefined; + SSEKMSEncryptionContext?: string | undefined; + BucketKeyEnabled?: boolean | undefined; + RequestPayer?: RequestPayer | undefined; + Tagging?: string | undefined; + ObjectLockMode?: ObjectLockMode | undefined; + ObjectLockRetainUntilDate?: Date | undefined; + ObjectLockLegalHoldStatus?: ObjectLockLegalHoldStatus | undefined; + ExpectedBucketOwner?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + ChecksumType?: ChecksumType | undefined; +} +export interface SessionCredentials { + AccessKeyId: string | undefined; + SecretAccessKey: string | undefined; + SessionToken: string | undefined; + Expiration: Date | undefined; +} +export interface CreateSessionOutput { + ServerSideEncryption?: ServerSideEncryption | undefined; + SSEKMSKeyId?: string | undefined; + SSEKMSEncryptionContext?: string | undefined; + BucketKeyEnabled?: boolean | undefined; + Credentials: SessionCredentials | undefined; +} +export declare const SessionMode: { + readonly ReadOnly: "ReadOnly"; + readonly ReadWrite: "ReadWrite"; +}; +export type SessionMode = (typeof SessionMode)[keyof typeof SessionMode]; +export interface CreateSessionRequest { + SessionMode?: SessionMode | undefined; + Bucket: string | undefined; + ServerSideEncryption?: ServerSideEncryption | undefined; + SSEKMSKeyId?: string | undefined; + SSEKMSEncryptionContext?: string | undefined; + BucketKeyEnabled?: boolean | undefined; +} +export declare class NoSuchBucket extends __BaseException { + readonly name: "NoSuchBucket"; + readonly $fault: "client"; + constructor(opts: __ExceptionOptionType<NoSuchBucket, __BaseException>); +} +export interface DeleteBucketRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface DeleteBucketAnalyticsConfigurationRequest { + Bucket: string | undefined; + Id: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface DeleteBucketCorsRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface DeleteBucketEncryptionRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface DeleteBucketIntelligentTieringConfigurationRequest { + Bucket: string | undefined; + Id: string | undefined; +} +export interface DeleteBucketInventoryConfigurationRequest { + Bucket: string | undefined; + Id: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface DeleteBucketLifecycleRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface DeleteBucketMetadataTableConfigurationRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface DeleteBucketMetricsConfigurationRequest { + Bucket: string | undefined; + Id: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface 
DeleteBucketOwnershipControlsRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface DeleteBucketPolicyRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface DeleteBucketReplicationRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface DeleteBucketTaggingRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface DeleteBucketWebsiteRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface DeleteObjectOutput { + DeleteMarker?: boolean | undefined; + VersionId?: string | undefined; + RequestCharged?: RequestCharged | undefined; +} +export interface DeleteObjectRequest { + Bucket: string | undefined; + Key: string | undefined; + MFA?: string | undefined; + VersionId?: string | undefined; + RequestPayer?: RequestPayer | undefined; + BypassGovernanceRetention?: boolean | undefined; + ExpectedBucketOwner?: string | undefined; + IfMatch?: string | undefined; + IfMatchLastModifiedTime?: Date | undefined; + IfMatchSize?: number | undefined; +} +export interface DeletedObject { + Key?: string | undefined; + VersionId?: string | undefined; + DeleteMarker?: boolean | undefined; + DeleteMarkerVersionId?: string | undefined; +} +export interface _Error { + Key?: string | undefined; + VersionId?: string | undefined; + Code?: string | undefined; + Message?: string | undefined; +} +export interface DeleteObjectsOutput { + Deleted?: DeletedObject[] | undefined; + RequestCharged?: RequestCharged | undefined; + Errors?: _Error[] | undefined; +} +export interface ObjectIdentifier { + Key: string | undefined; + VersionId?: string | undefined; + ETag?: string | undefined; + LastModifiedTime?: Date | undefined; + Size?: number | undefined; +} +export interface Delete { + Objects: ObjectIdentifier[] | undefined; + Quiet?: boolean | undefined; +} +export interface DeleteObjectsRequest { + Bucket: string | undefined; + Delete: Delete | undefined; + MFA?: string | undefined; + RequestPayer?: RequestPayer | undefined; + BypassGovernanceRetention?: boolean | undefined; + ExpectedBucketOwner?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; +} +export interface DeleteObjectTaggingOutput { + VersionId?: string | undefined; +} +export interface DeleteObjectTaggingRequest { + Bucket: string | undefined; + Key: string | undefined; + VersionId?: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface DeletePublicAccessBlockRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface GetBucketAccelerateConfigurationOutput { + Status?: BucketAccelerateStatus | undefined; + RequestCharged?: RequestCharged | undefined; +} +export interface GetBucketAccelerateConfigurationRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; + RequestPayer?: RequestPayer | undefined; +} +export interface GetBucketAclOutput { + Owner?: Owner | undefined; + Grants?: Grant[] | undefined; +} +export interface GetBucketAclRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface Tag { + Key: string | undefined; + Value: string | undefined; +} +export interface AnalyticsAndOperator { + Prefix?: string | undefined; + Tags?: Tag[] | undefined; +} +export type AnalyticsFilter = + | AnalyticsFilter.AndMember + | AnalyticsFilter.PrefixMember + | 
AnalyticsFilter.TagMember + | AnalyticsFilter.$UnknownMember; +export declare namespace AnalyticsFilter { + interface PrefixMember { + Prefix: string; + Tag?: never; + And?: never; + $unknown?: never; + } + interface TagMember { + Prefix?: never; + Tag: Tag; + And?: never; + $unknown?: never; + } + interface AndMember { + Prefix?: never; + Tag?: never; + And: AnalyticsAndOperator; + $unknown?: never; + } + interface $UnknownMember { + Prefix?: never; + Tag?: never; + And?: never; + $unknown: [string, any]; + } + interface Visitor<T> { + Prefix: (value: string) => T; + Tag: (value: Tag) => T; + And: (value: AnalyticsAndOperator) => T; + _: (name: string, value: any) => T; + } + const visit: <T>(value: AnalyticsFilter, visitor: Visitor<T>) => T; +} +export declare const AnalyticsS3ExportFileFormat: { + readonly CSV: "CSV"; +}; +export type AnalyticsS3ExportFileFormat = + (typeof AnalyticsS3ExportFileFormat)[keyof typeof AnalyticsS3ExportFileFormat]; +export interface AnalyticsS3BucketDestination { + Format: AnalyticsS3ExportFileFormat | undefined; + BucketAccountId?: string | undefined; + Bucket: string | undefined; + Prefix?: string | undefined; +} +export interface AnalyticsExportDestination { + S3BucketDestination: AnalyticsS3BucketDestination | undefined; +} +export declare const StorageClassAnalysisSchemaVersion: { + readonly V_1: "V_1"; +}; +export type StorageClassAnalysisSchemaVersion = + (typeof StorageClassAnalysisSchemaVersion)[keyof typeof StorageClassAnalysisSchemaVersion]; +export interface StorageClassAnalysisDataExport { + OutputSchemaVersion: StorageClassAnalysisSchemaVersion | undefined; + Destination: AnalyticsExportDestination | undefined; +} +export interface StorageClassAnalysis { + DataExport?: StorageClassAnalysisDataExport | undefined; +} +export interface AnalyticsConfiguration { + Id: string | undefined; + Filter?: AnalyticsFilter | undefined; + StorageClassAnalysis: StorageClassAnalysis | undefined; +} +export interface GetBucketAnalyticsConfigurationOutput { + AnalyticsConfiguration?: AnalyticsConfiguration | undefined; +} +export interface GetBucketAnalyticsConfigurationRequest { + Bucket: string | undefined; + Id: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface CORSRule { + ID?: string | undefined; + AllowedHeaders?: string[] | undefined; + AllowedMethods: string[] | undefined; + AllowedOrigins: string[] | undefined; + ExposeHeaders?: string[] | undefined; + MaxAgeSeconds?: number | undefined; +} +export interface GetBucketCorsOutput { + CORSRules?: CORSRule[] | undefined; +} +export interface GetBucketCorsRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface ServerSideEncryptionByDefault { + SSEAlgorithm: ServerSideEncryption | undefined; + KMSMasterKeyID?: string | undefined; +} +export interface ServerSideEncryptionRule { + ApplyServerSideEncryptionByDefault?: + | ServerSideEncryptionByDefault + | undefined; + BucketKeyEnabled?: boolean | undefined; +} +export interface ServerSideEncryptionConfiguration { + Rules: ServerSideEncryptionRule[] | undefined; +} +export interface GetBucketEncryptionOutput { + ServerSideEncryptionConfiguration?: + | ServerSideEncryptionConfiguration + | undefined; +} +export interface GetBucketEncryptionRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface IntelligentTieringAndOperator { + Prefix?: string | undefined; + Tags?: Tag[] | undefined; +} +export interface IntelligentTieringFilter { + Prefix?: 
string | undefined; + Tag?: Tag | undefined; + And?: IntelligentTieringAndOperator | undefined; +} +export declare const IntelligentTieringStatus: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +export type IntelligentTieringStatus = + (typeof IntelligentTieringStatus)[keyof typeof IntelligentTieringStatus]; +export declare const IntelligentTieringAccessTier: { + readonly ARCHIVE_ACCESS: "ARCHIVE_ACCESS"; + readonly DEEP_ARCHIVE_ACCESS: "DEEP_ARCHIVE_ACCESS"; +}; +export type IntelligentTieringAccessTier = + (typeof IntelligentTieringAccessTier)[keyof typeof IntelligentTieringAccessTier]; +export interface Tiering { + Days: number | undefined; + AccessTier: IntelligentTieringAccessTier | undefined; +} +export interface IntelligentTieringConfiguration { + Id: string | undefined; + Filter?: IntelligentTieringFilter | undefined; + Status: IntelligentTieringStatus | undefined; + Tierings: Tiering[] | undefined; +} +export interface GetBucketIntelligentTieringConfigurationOutput { + IntelligentTieringConfiguration?: IntelligentTieringConfiguration | undefined; +} +export interface GetBucketIntelligentTieringConfigurationRequest { + Bucket: string | undefined; + Id: string | undefined; +} +export interface SSEKMS { + KeyId: string | undefined; +} +export interface SSES3 {} +export interface InventoryEncryption { + SSES3?: SSES3 | undefined; + SSEKMS?: SSEKMS | undefined; +} +export declare const InventoryFormat: { + readonly CSV: "CSV"; + readonly ORC: "ORC"; + readonly Parquet: "Parquet"; +}; +export type InventoryFormat = + (typeof InventoryFormat)[keyof typeof InventoryFormat]; +export interface InventoryS3BucketDestination { + AccountId?: string | undefined; + Bucket: string | undefined; + Format: InventoryFormat | undefined; + Prefix?: string | undefined; + Encryption?: InventoryEncryption | undefined; +} +export interface InventoryDestination { + S3BucketDestination: InventoryS3BucketDestination | undefined; +} +export interface InventoryFilter { + Prefix: string | undefined; +} +export declare const InventoryIncludedObjectVersions: { + readonly All: "All"; + readonly Current: "Current"; +}; +export type InventoryIncludedObjectVersions = + (typeof InventoryIncludedObjectVersions)[keyof typeof InventoryIncludedObjectVersions]; +export declare const InventoryOptionalField: { + readonly BucketKeyStatus: "BucketKeyStatus"; + readonly ChecksumAlgorithm: "ChecksumAlgorithm"; + readonly ETag: "ETag"; + readonly EncryptionStatus: "EncryptionStatus"; + readonly IntelligentTieringAccessTier: "IntelligentTieringAccessTier"; + readonly IsMultipartUploaded: "IsMultipartUploaded"; + readonly LastModifiedDate: "LastModifiedDate"; + readonly ObjectAccessControlList: "ObjectAccessControlList"; + readonly ObjectLockLegalHoldStatus: "ObjectLockLegalHoldStatus"; + readonly ObjectLockMode: "ObjectLockMode"; + readonly ObjectLockRetainUntilDate: "ObjectLockRetainUntilDate"; + readonly ObjectOwner: "ObjectOwner"; + readonly ReplicationStatus: "ReplicationStatus"; + readonly Size: "Size"; + readonly StorageClass: "StorageClass"; +}; +export type InventoryOptionalField = + (typeof InventoryOptionalField)[keyof typeof InventoryOptionalField]; +export declare const InventoryFrequency: { + readonly Daily: "Daily"; + readonly Weekly: "Weekly"; +}; +export type InventoryFrequency = + (typeof InventoryFrequency)[keyof typeof InventoryFrequency]; +export interface InventorySchedule { + Frequency: InventoryFrequency | undefined; +} +export interface InventoryConfiguration { + Destination: 
InventoryDestination | undefined; + IsEnabled: boolean | undefined; + Filter?: InventoryFilter | undefined; + Id: string | undefined; + IncludedObjectVersions: InventoryIncludedObjectVersions | undefined; + OptionalFields?: InventoryOptionalField[] | undefined; + Schedule: InventorySchedule | undefined; +} +export interface GetBucketInventoryConfigurationOutput { + InventoryConfiguration?: InventoryConfiguration | undefined; +} +export interface GetBucketInventoryConfigurationRequest { + Bucket: string | undefined; + Id: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface LifecycleExpiration { + Date?: Date | undefined; + Days?: number | undefined; + ExpiredObjectDeleteMarker?: boolean | undefined; +} +export interface LifecycleRuleAndOperator { + Prefix?: string | undefined; + Tags?: Tag[] | undefined; + ObjectSizeGreaterThan?: number | undefined; + ObjectSizeLessThan?: number | undefined; +} +export interface LifecycleRuleFilter { + Prefix?: string | undefined; + Tag?: Tag | undefined; + ObjectSizeGreaterThan?: number | undefined; + ObjectSizeLessThan?: number | undefined; + And?: LifecycleRuleAndOperator | undefined; +} +export interface NoncurrentVersionExpiration { + NoncurrentDays?: number | undefined; + NewerNoncurrentVersions?: number | undefined; +} +export declare const TransitionStorageClass: { + readonly DEEP_ARCHIVE: "DEEP_ARCHIVE"; + readonly GLACIER: "GLACIER"; + readonly GLACIER_IR: "GLACIER_IR"; + readonly INTELLIGENT_TIERING: "INTELLIGENT_TIERING"; + readonly ONEZONE_IA: "ONEZONE_IA"; + readonly STANDARD_IA: "STANDARD_IA"; +}; +export type TransitionStorageClass = + (typeof TransitionStorageClass)[keyof typeof TransitionStorageClass]; +export interface NoncurrentVersionTransition { + NoncurrentDays?: number | undefined; + StorageClass?: TransitionStorageClass | undefined; + NewerNoncurrentVersions?: number | undefined; +} +export declare const ExpirationStatus: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +export type ExpirationStatus = + (typeof ExpirationStatus)[keyof typeof ExpirationStatus]; +export interface Transition { + Date?: Date | undefined; + Days?: number | undefined; + StorageClass?: TransitionStorageClass | undefined; +} +export interface LifecycleRule { + Expiration?: LifecycleExpiration | undefined; + ID?: string | undefined; + Prefix?: string | undefined; + Filter?: LifecycleRuleFilter | undefined; + Status: ExpirationStatus | undefined; + Transitions?: Transition[] | undefined; + NoncurrentVersionTransitions?: NoncurrentVersionTransition[] | undefined; + NoncurrentVersionExpiration?: NoncurrentVersionExpiration | undefined; + AbortIncompleteMultipartUpload?: AbortIncompleteMultipartUpload | undefined; +} +export declare const TransitionDefaultMinimumObjectSize: { + readonly all_storage_classes_128K: "all_storage_classes_128K"; + readonly varies_by_storage_class: "varies_by_storage_class"; +}; +export type TransitionDefaultMinimumObjectSize = + (typeof TransitionDefaultMinimumObjectSize)[keyof typeof TransitionDefaultMinimumObjectSize]; +export interface GetBucketLifecycleConfigurationOutput { + Rules?: LifecycleRule[] | undefined; + TransitionDefaultMinimumObjectSize?: + | TransitionDefaultMinimumObjectSize + | undefined; +} +export interface GetBucketLifecycleConfigurationRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface GetBucketLocationOutput { + LocationConstraint?: BucketLocationConstraint | undefined; +} +export interface 
GetBucketLocationRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export declare const BucketLogsPermission: { + readonly FULL_CONTROL: "FULL_CONTROL"; + readonly READ: "READ"; + readonly WRITE: "WRITE"; +}; +export type BucketLogsPermission = + (typeof BucketLogsPermission)[keyof typeof BucketLogsPermission]; +export interface TargetGrant { + Grantee?: Grantee | undefined; + Permission?: BucketLogsPermission | undefined; +} +export declare const PartitionDateSource: { + readonly DeliveryTime: "DeliveryTime"; + readonly EventTime: "EventTime"; +}; +export type PartitionDateSource = + (typeof PartitionDateSource)[keyof typeof PartitionDateSource]; +export interface PartitionedPrefix { + PartitionDateSource?: PartitionDateSource | undefined; +} +export interface SimplePrefix {} +export interface TargetObjectKeyFormat { + SimplePrefix?: SimplePrefix | undefined; + PartitionedPrefix?: PartitionedPrefix | undefined; +} +export interface LoggingEnabled { + TargetBucket: string | undefined; + TargetGrants?: TargetGrant[] | undefined; + TargetPrefix: string | undefined; + TargetObjectKeyFormat?: TargetObjectKeyFormat | undefined; +} +export interface GetBucketLoggingOutput { + LoggingEnabled?: LoggingEnabled | undefined; +} +export interface GetBucketLoggingRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface ErrorDetails { + ErrorCode?: string | undefined; + ErrorMessage?: string | undefined; +} +export interface S3TablesDestinationResult { + TableBucketArn: string | undefined; + TableName: string | undefined; + TableArn: string | undefined; + TableNamespace: string | undefined; +} +export interface MetadataTableConfigurationResult { + S3TablesDestinationResult: S3TablesDestinationResult | undefined; +} +export interface GetBucketMetadataTableConfigurationResult { + MetadataTableConfigurationResult: + | MetadataTableConfigurationResult + | undefined; + Status: string | undefined; + Error?: ErrorDetails | undefined; +} +export interface GetBucketMetadataTableConfigurationOutput { + GetBucketMetadataTableConfigurationResult?: + | GetBucketMetadataTableConfigurationResult + | undefined; +} +export interface GetBucketMetadataTableConfigurationRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface MetricsAndOperator { + Prefix?: string | undefined; + Tags?: Tag[] | undefined; + AccessPointArn?: string | undefined; +} +export type MetricsFilter = + | MetricsFilter.AccessPointArnMember + | MetricsFilter.AndMember + | MetricsFilter.PrefixMember + | MetricsFilter.TagMember + | MetricsFilter.$UnknownMember; +export declare namespace MetricsFilter { + interface PrefixMember { + Prefix: string; + Tag?: never; + AccessPointArn?: never; + And?: never; + $unknown?: never; + } + interface TagMember { + Prefix?: never; + Tag: Tag; + AccessPointArn?: never; + And?: never; + $unknown?: never; + } + interface AccessPointArnMember { + Prefix?: never; + Tag?: never; + AccessPointArn: string; + And?: never; + $unknown?: never; + } + interface AndMember { + Prefix?: never; + Tag?: never; + AccessPointArn?: never; + And: MetricsAndOperator; + $unknown?: never; + } + interface $UnknownMember { + Prefix?: never; + Tag?: never; + AccessPointArn?: never; + And?: never; + $unknown: [string, any]; + } + interface Visitor<T> { + Prefix: (value: string) => T; + Tag: (value: Tag) => T; + AccessPointArn: (value: string) => T; + And: (value: MetricsAndOperator) => T; + _: (name: string, value: any) => T; + } + const visit: <T>(value: MetricsFilter, visitor: Visitor<T>) => T; +}
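`MetricsFilter` (like `AnalyticsFilter` earlier in this file) is a tagged union whose `visit` helper forces exhaustive handling of every member. A small sketch of collapsing a filter to a label; `describeFilter` is a hypothetical helper, not part of the SDK:

```ts
import { MetricsFilter } from "@aws-sdk/client-s3";

// Sketch: every union member must be handled, plus the unknown fallback.
export function describeFilter(filter: MetricsFilter): string {
  return MetricsFilter.visit(filter, {
    Prefix: (p) => `prefix=${p}`,
    Tag: (t) => `tag ${t.Key}=${t.Value}`,
    AccessPointArn: (arn) => `access point ${arn}`,
    And: (and) =>
      `and(prefix=${and.Prefix ?? ""}, tags=${and.Tags?.length ?? 0})`,
    _: (name) => `unknown member ${name}`,
  });
}

// e.g. describeFilter({ Prefix: "invoices/" }) === "prefix=invoices/"
```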
+export interface MetricsConfiguration { + Id: string | undefined; + Filter?: MetricsFilter | undefined; +} +export interface GetBucketMetricsConfigurationOutput { + MetricsConfiguration?: MetricsConfiguration | undefined; +} +export interface GetBucketMetricsConfigurationRequest { + Bucket: string | undefined; + Id: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface GetBucketNotificationConfigurationRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface EventBridgeConfiguration {} +export declare const Event: { + readonly s3_IntelligentTiering: "s3:IntelligentTiering"; + readonly s3_LifecycleExpiration_: "s3:LifecycleExpiration:*"; + readonly s3_LifecycleExpiration_Delete: "s3:LifecycleExpiration:Delete"; + readonly s3_LifecycleExpiration_DeleteMarkerCreated: "s3:LifecycleExpiration:DeleteMarkerCreated"; + readonly s3_LifecycleTransition: "s3:LifecycleTransition"; + readonly s3_ObjectAcl_Put: "s3:ObjectAcl:Put"; + readonly s3_ObjectCreated_: "s3:ObjectCreated:*"; + readonly s3_ObjectCreated_CompleteMultipartUpload: "s3:ObjectCreated:CompleteMultipartUpload"; + readonly s3_ObjectCreated_Copy: "s3:ObjectCreated:Copy"; + readonly s3_ObjectCreated_Post: "s3:ObjectCreated:Post"; + readonly s3_ObjectCreated_Put: "s3:ObjectCreated:Put"; + readonly s3_ObjectRemoved_: "s3:ObjectRemoved:*"; + readonly s3_ObjectRemoved_Delete: "s3:ObjectRemoved:Delete"; + readonly s3_ObjectRemoved_DeleteMarkerCreated: "s3:ObjectRemoved:DeleteMarkerCreated"; + readonly s3_ObjectRestore_: "s3:ObjectRestore:*"; + readonly s3_ObjectRestore_Completed: "s3:ObjectRestore:Completed"; + readonly s3_ObjectRestore_Delete: "s3:ObjectRestore:Delete"; + readonly s3_ObjectRestore_Post: "s3:ObjectRestore:Post"; + readonly s3_ObjectTagging_: "s3:ObjectTagging:*"; + readonly s3_ObjectTagging_Delete: "s3:ObjectTagging:Delete"; + readonly s3_ObjectTagging_Put: "s3:ObjectTagging:Put"; + readonly s3_ReducedRedundancyLostObject: "s3:ReducedRedundancyLostObject"; + readonly s3_Replication_: "s3:Replication:*"; + readonly s3_Replication_OperationFailedReplication: "s3:Replication:OperationFailedReplication"; + readonly s3_Replication_OperationMissedThreshold: "s3:Replication:OperationMissedThreshold"; + readonly s3_Replication_OperationNotTracked: "s3:Replication:OperationNotTracked"; + readonly s3_Replication_OperationReplicatedAfterThreshold: "s3:Replication:OperationReplicatedAfterThreshold"; +}; +export type Event = (typeof Event)[keyof typeof Event]; +export declare const FilterRuleName: { + readonly prefix: "prefix"; + readonly suffix: "suffix"; +}; +export type FilterRuleName = + (typeof FilterRuleName)[keyof typeof FilterRuleName]; +export interface FilterRule { + Name?: FilterRuleName | undefined; + Value?: string | undefined; +} +export interface S3KeyFilter { + FilterRules?: FilterRule[] | undefined; +} +export interface NotificationConfigurationFilter { + Key?: S3KeyFilter | undefined; +} +export interface LambdaFunctionConfiguration { + Id?: string | undefined; + LambdaFunctionArn: string | undefined; + Events: Event[] | undefined; + Filter?: NotificationConfigurationFilter | undefined; +} +export interface QueueConfiguration { + Id?: string | undefined; + QueueArn: string | undefined; + Events: Event[] | undefined; + Filter?: NotificationConfigurationFilter | undefined; +} +export interface TopicConfiguration { + Id?: string | undefined; + TopicArn: string | 
undefined; + Events: Event[] | undefined; + Filter?: NotificationConfigurationFilter | undefined; +} +export interface NotificationConfiguration { + TopicConfigurations?: TopicConfiguration[] | undefined; + QueueConfigurations?: QueueConfiguration[] | undefined; + LambdaFunctionConfigurations?: LambdaFunctionConfiguration[] | undefined; + EventBridgeConfiguration?: EventBridgeConfiguration | undefined; +} +export interface OwnershipControlsRule { + ObjectOwnership: ObjectOwnership | undefined; +} +export interface OwnershipControls { + Rules: OwnershipControlsRule[] | undefined; +} +export interface GetBucketOwnershipControlsOutput { + OwnershipControls?: OwnershipControls | undefined; +} +export interface GetBucketOwnershipControlsRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface GetBucketPolicyOutput { + Policy?: string | undefined; +} +export interface GetBucketPolicyRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface PolicyStatus { + IsPublic?: boolean | undefined; +} +export interface GetBucketPolicyStatusOutput { + PolicyStatus?: PolicyStatus | undefined; +} +export interface GetBucketPolicyStatusRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export declare const DeleteMarkerReplicationStatus: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +export type DeleteMarkerReplicationStatus = + (typeof DeleteMarkerReplicationStatus)[keyof typeof DeleteMarkerReplicationStatus]; +export interface DeleteMarkerReplication { + Status?: DeleteMarkerReplicationStatus | undefined; +} +export interface EncryptionConfiguration { + ReplicaKmsKeyID?: string | undefined; +} +export interface ReplicationTimeValue { + Minutes?: number | undefined; +} +export declare const MetricsStatus: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +export type MetricsStatus = (typeof MetricsStatus)[keyof typeof MetricsStatus]; +export interface Metrics { + Status: MetricsStatus | undefined; + EventThreshold?: ReplicationTimeValue | undefined; +} +export declare const ReplicationTimeStatus: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +export type ReplicationTimeStatus = + (typeof ReplicationTimeStatus)[keyof typeof ReplicationTimeStatus]; +export interface ReplicationTime { + Status: ReplicationTimeStatus | undefined; + Time: ReplicationTimeValue | undefined; +} +export interface Destination { + Bucket: string | undefined; + Account?: string | undefined; + StorageClass?: StorageClass | undefined; + AccessControlTranslation?: AccessControlTranslation | undefined; + EncryptionConfiguration?: EncryptionConfiguration | undefined; + ReplicationTime?: ReplicationTime | undefined; + Metrics?: Metrics | undefined; +} +export declare const ExistingObjectReplicationStatus: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +export type ExistingObjectReplicationStatus = + (typeof ExistingObjectReplicationStatus)[keyof typeof ExistingObjectReplicationStatus]; +export interface ExistingObjectReplication { + Status: ExistingObjectReplicationStatus | undefined; +} +export interface ReplicationRuleAndOperator { + Prefix?: string | undefined; + Tags?: Tag[] | undefined; +} +export interface ReplicationRuleFilter { + Prefix?: string | undefined; + Tag?: Tag | undefined; + And?: ReplicationRuleAndOperator | undefined; +} +export declare const ReplicaModificationsStatus: { + readonly Disabled: "Disabled"; + 
readonly Enabled: "Enabled"; +}; +export type ReplicaModificationsStatus = + (typeof ReplicaModificationsStatus)[keyof typeof ReplicaModificationsStatus]; +export interface ReplicaModifications { + Status: ReplicaModificationsStatus | undefined; +} +export declare const SseKmsEncryptedObjectsStatus: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +export type SseKmsEncryptedObjectsStatus = + (typeof SseKmsEncryptedObjectsStatus)[keyof typeof SseKmsEncryptedObjectsStatus]; +export interface SseKmsEncryptedObjects { + Status: SseKmsEncryptedObjectsStatus | undefined; +} +export interface SourceSelectionCriteria { + SseKmsEncryptedObjects?: SseKmsEncryptedObjects | undefined; + ReplicaModifications?: ReplicaModifications | undefined; +} +export declare const ReplicationRuleStatus: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +export type ReplicationRuleStatus = + (typeof ReplicationRuleStatus)[keyof typeof ReplicationRuleStatus]; +export interface ReplicationRule { + ID?: string | undefined; + Priority?: number | undefined; + Prefix?: string | undefined; + Filter?: ReplicationRuleFilter | undefined; + Status: ReplicationRuleStatus | undefined; + SourceSelectionCriteria?: SourceSelectionCriteria | undefined; + ExistingObjectReplication?: ExistingObjectReplication | undefined; + Destination: Destination | undefined; + DeleteMarkerReplication?: DeleteMarkerReplication | undefined; +} +export interface ReplicationConfiguration { + Role: string | undefined; + Rules: ReplicationRule[] | undefined; +} +export interface GetBucketReplicationOutput { + ReplicationConfiguration?: ReplicationConfiguration | undefined; +} +export interface GetBucketReplicationRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export declare const Payer: { + readonly BucketOwner: "BucketOwner"; + readonly Requester: "Requester"; +}; +export type Payer = (typeof Payer)[keyof typeof Payer]; +export interface GetBucketRequestPaymentOutput { + Payer?: Payer | undefined; +} +export interface GetBucketRequestPaymentRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface GetBucketTaggingOutput { + TagSet: Tag[] | undefined; +} +export interface GetBucketTaggingRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export declare const MFADeleteStatus: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +export type MFADeleteStatus = + (typeof MFADeleteStatus)[keyof typeof MFADeleteStatus]; +export declare const BucketVersioningStatus: { + readonly Enabled: "Enabled"; + readonly Suspended: "Suspended"; +}; +export type BucketVersioningStatus = + (typeof BucketVersioningStatus)[keyof typeof BucketVersioningStatus]; +export interface GetBucketVersioningOutput { + Status?: BucketVersioningStatus | undefined; + MFADelete?: MFADeleteStatus | undefined; +} +export interface GetBucketVersioningRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface ErrorDocument { + Key: string | undefined; +} +export interface IndexDocument { + Suffix: string | undefined; +} +export declare const Protocol: { + readonly http: "http"; + readonly https: "https"; +}; +export type Protocol = (typeof Protocol)[keyof typeof Protocol]; +export interface RedirectAllRequestsTo { + HostName: string | undefined; + Protocol?: Protocol | undefined; +} +export interface Condition { + HttpErrorCodeReturnedEquals?: string | undefined; + 
KeyPrefixEquals?: string | undefined; +} +export interface Redirect { + HostName?: string | undefined; + HttpRedirectCode?: string | undefined; + Protocol?: Protocol | undefined; + ReplaceKeyPrefixWith?: string | undefined; + ReplaceKeyWith?: string | undefined; +} +export interface RoutingRule { + Condition?: Condition | undefined; + Redirect: Redirect | undefined; +} +export interface GetBucketWebsiteOutput { + RedirectAllRequestsTo?: RedirectAllRequestsTo | undefined; + IndexDocument?: IndexDocument | undefined; + ErrorDocument?: ErrorDocument | undefined; + RoutingRules?: RoutingRule[] | undefined; +} +export interface GetBucketWebsiteRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export declare const ReplicationStatus: { + readonly COMPLETE: "COMPLETE"; + readonly COMPLETED: "COMPLETED"; + readonly FAILED: "FAILED"; + readonly PENDING: "PENDING"; + readonly REPLICA: "REPLICA"; +}; +export type ReplicationStatus = + (typeof ReplicationStatus)[keyof typeof ReplicationStatus]; +export interface GetObjectOutput { + Body?: StreamingBlobTypes | undefined; + DeleteMarker?: boolean | undefined; + AcceptRanges?: string | undefined; + Expiration?: string | undefined; + Restore?: string | undefined; + LastModified?: Date | undefined; + ContentLength?: number | undefined; + ETag?: string | undefined; + ChecksumCRC32?: string | undefined; + ChecksumCRC32C?: string | undefined; + ChecksumCRC64NVME?: string | undefined; + ChecksumSHA1?: string | undefined; + ChecksumSHA256?: string | undefined; + ChecksumType?: ChecksumType | undefined; + MissingMeta?: number | undefined; + VersionId?: string | undefined; + CacheControl?: string | undefined; + ContentDisposition?: string | undefined; + ContentEncoding?: string | undefined; + ContentLanguage?: string | undefined; + ContentRange?: string | undefined; + ContentType?: string | undefined; + Expires?: Date | undefined; + ExpiresString?: string | undefined; + WebsiteRedirectLocation?: string | undefined; + ServerSideEncryption?: ServerSideEncryption | undefined; + Metadata?: Record<string, string> | undefined; + SSECustomerAlgorithm?: string | undefined; + SSECustomerKeyMD5?: string | undefined; + SSEKMSKeyId?: string | undefined; + BucketKeyEnabled?: boolean | undefined; + StorageClass?: StorageClass | undefined; + RequestCharged?: RequestCharged | undefined; + ReplicationStatus?: ReplicationStatus | undefined; + PartsCount?: number | undefined; + TagCount?: number | undefined; + ObjectLockMode?: ObjectLockMode | undefined; + ObjectLockRetainUntilDate?: Date | undefined; + ObjectLockLegalHoldStatus?: ObjectLockLegalHoldStatus | undefined; +} +export declare const ChecksumMode: { + readonly ENABLED: "ENABLED"; +}; +export type ChecksumMode = (typeof ChecksumMode)[keyof typeof ChecksumMode]; +export interface GetObjectRequest { + Bucket: string | undefined; + IfMatch?: string | undefined; + IfModifiedSince?: Date | undefined; + IfNoneMatch?: string | undefined; + IfUnmodifiedSince?: Date | undefined; + Key: string | undefined; + Range?: string | undefined; + ResponseCacheControl?: string | undefined; + ResponseContentDisposition?: string | undefined; + ResponseContentEncoding?: string | undefined; + ResponseContentLanguage?: string | undefined; + ResponseContentType?: string | undefined; + ResponseExpires?: Date | undefined; + VersionId?: string | undefined; + SSECustomerAlgorithm?: string | undefined; + SSECustomerKey?: string | undefined; + SSECustomerKeyMD5?: string | undefined; + RequestPayer?: RequestPayer | undefined; + PartNumber?: number | undefined; + ExpectedBucketOwner?: string | undefined; + ChecksumMode?: ChecksumMode | undefined; +}
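// Editor's note (reviewer annotation, not part of the vendored file): a
// hypothetical GetObject call in the spirit of this PR's MinIO migration.
// Endpoint, credentials, bucket, and key are assumptions; `forcePathStyle`
// is typically required for MinIO's path-style URLs.
import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({
  endpoint: process.env.MINIO_ENDPOINT, // assumed MinIO server URL
  region: "us-east-1",
  forcePathStyle: true,
  credentials: {
    accessKeyId: process.env.MINIO_ACCESS_KEY ?? "",
    secretAccessKey: process.env.MINIO_SECRET_KEY ?? "",
  },
});

const { Body, ContentType } = await s3.send(
  new GetObjectCommand({ Bucket: "user-files", Key: "contacts/card.vcf" })
);
// Body is a streaming blob; transformToString() buffers it into text.
const content = await Body?.transformToString("utf-8");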
+export declare class InvalidObjectState extends __BaseException { + readonly name: "InvalidObjectState"; + readonly $fault: "client"; + StorageClass?: StorageClass | undefined; + AccessTier?: IntelligentTieringAccessTier | undefined; + constructor(opts: __ExceptionOptionType<InvalidObjectState, __BaseException>); +} +export declare class NoSuchKey extends __BaseException { + readonly name: "NoSuchKey"; + readonly $fault: "client"; + constructor(opts: __ExceptionOptionType<NoSuchKey, __BaseException>); +}
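// Editor's note (reviewer annotation, not part of the vendored file): these
// modeled exceptions are real classes thrown by S3Client.send, so a missing
// key can be told apart from other failures. A sketch, reusing the
// hypothetical `s3` client from the previous note:
import { GetObjectCommand, NoSuchKey } from "@aws-sdk/client-s3";

try {
  await s3.send(new GetObjectCommand({ Bucket: "user-files", Key: "gone.vcf" }));
} catch (err) {
  if (err instanceof NoSuchKey) {
    // surface a 404 to the caller instead of a generic 500
  } else {
    throw err;
  }
}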
+export interface GetObjectAclOutput { + Owner?: Owner | undefined; + Grants?: Grant[] | undefined; + RequestCharged?: RequestCharged | undefined; +} +export interface GetObjectAclRequest { + Bucket: string | undefined; + Key: string | undefined; + VersionId?: string | undefined; + RequestPayer?: RequestPayer | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface Checksum { + ChecksumCRC32?: string | undefined; + ChecksumCRC32C?: string | undefined; + ChecksumCRC64NVME?: string | undefined; + ChecksumSHA1?: string | undefined; + ChecksumSHA256?: string | undefined; + ChecksumType?: ChecksumType | undefined; +} +export interface ObjectPart { + PartNumber?: number | undefined; + Size?: number | undefined; + ChecksumCRC32?: string | undefined; + ChecksumCRC32C?: string | undefined; + ChecksumCRC64NVME?: string | undefined; + ChecksumSHA1?: string | undefined; + ChecksumSHA256?: string | undefined; +} +export interface GetObjectAttributesParts { + TotalPartsCount?: number | undefined; + PartNumberMarker?: string | undefined; + NextPartNumberMarker?: string | undefined; + MaxParts?: number | undefined; + IsTruncated?: boolean | undefined; + Parts?: ObjectPart[] | undefined; +} +export interface GetObjectAttributesOutput { + DeleteMarker?: boolean | undefined; + LastModified?: Date | undefined; + VersionId?: string | undefined; + RequestCharged?: RequestCharged | undefined; + ETag?: string | undefined; + Checksum?: Checksum | undefined; + ObjectParts?: GetObjectAttributesParts | undefined; + StorageClass?: StorageClass | undefined; + ObjectSize?: number | undefined; +} +export declare const ObjectAttributes: { + readonly CHECKSUM: "Checksum"; + readonly ETAG: "ETag"; + readonly OBJECT_PARTS: "ObjectParts"; + readonly OBJECT_SIZE: "ObjectSize"; + readonly STORAGE_CLASS: "StorageClass"; +}; +export type ObjectAttributes = + (typeof ObjectAttributes)[keyof typeof ObjectAttributes]; +export interface GetObjectAttributesRequest { + Bucket: string | undefined; + Key: string | undefined; + VersionId?: string | undefined; + MaxParts?: number | undefined; + PartNumberMarker?: string | undefined; + SSECustomerAlgorithm?: string | undefined; + SSECustomerKey?: string | undefined; + SSECustomerKeyMD5?: string | undefined; + RequestPayer?: RequestPayer | undefined; + ExpectedBucketOwner?: string | undefined; + ObjectAttributes: ObjectAttributes[] | undefined; +} +export interface ObjectLockLegalHold { + Status?: ObjectLockLegalHoldStatus | undefined; +} +export interface GetObjectLegalHoldOutput { + LegalHold?: ObjectLockLegalHold | undefined; +} +export interface GetObjectLegalHoldRequest { + Bucket: string | undefined; + Key: string | undefined; + VersionId?: string | undefined; + RequestPayer?: RequestPayer | undefined; + ExpectedBucketOwner?: string | undefined; +} +export declare const ObjectLockEnabled: { + readonly Enabled: "Enabled"; +}; +export type ObjectLockEnabled = + (typeof ObjectLockEnabled)[keyof typeof ObjectLockEnabled]; +export declare const ObjectLockRetentionMode: { + readonly COMPLIANCE: "COMPLIANCE"; + readonly GOVERNANCE: "GOVERNANCE"; +}; +export type ObjectLockRetentionMode = + (typeof ObjectLockRetentionMode)[keyof typeof ObjectLockRetentionMode]; +export interface DefaultRetention { + Mode?: ObjectLockRetentionMode | undefined; + Days?: number | undefined; + Years?: number | undefined; +} +export interface ObjectLockRule { + DefaultRetention?: DefaultRetention | undefined; +} +export interface ObjectLockConfiguration { + ObjectLockEnabled?: ObjectLockEnabled | undefined; + Rule?: ObjectLockRule | undefined; +} +export interface GetObjectLockConfigurationOutput { + ObjectLockConfiguration?: ObjectLockConfiguration | undefined; +} +export interface GetObjectLockConfigurationRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface ObjectLockRetention { + Mode?: ObjectLockRetentionMode | undefined; + RetainUntilDate?: Date | undefined; +} +export interface GetObjectRetentionOutput { + Retention?: ObjectLockRetention | undefined; +} +export interface GetObjectRetentionRequest { + Bucket: string | undefined; + Key: string | undefined; + VersionId?: string | undefined; + RequestPayer?: RequestPayer | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface GetObjectTaggingOutput { + VersionId?: string | undefined; + TagSet: Tag[] | undefined; +} +export interface GetObjectTaggingRequest { + Bucket: string | undefined; + Key: string | undefined; + VersionId?: string | undefined; + ExpectedBucketOwner?: string | undefined; + RequestPayer?: RequestPayer | undefined; +} +export interface GetObjectTorrentOutput { + Body?: StreamingBlobTypes | undefined; + RequestCharged?: RequestCharged | undefined; +} +export interface GetObjectTorrentRequest { + Bucket: string | undefined; + Key: string | undefined; + RequestPayer?: RequestPayer | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface PublicAccessBlockConfiguration { + BlockPublicAcls?: boolean | undefined; + IgnorePublicAcls?: boolean | undefined; + BlockPublicPolicy?: boolean | undefined; + RestrictPublicBuckets?: boolean | undefined; +} +export interface GetPublicAccessBlockOutput { + PublicAccessBlockConfiguration?: PublicAccessBlockConfiguration | undefined; +} +export interface GetPublicAccessBlockRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface HeadBucketOutput { + BucketLocationType?: LocationType | undefined; + BucketLocationName?: string | undefined; + BucketRegion?: string | undefined; + AccessPointAlias?: boolean | undefined; +} +export interface HeadBucketRequest { + Bucket: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export declare class NotFound extends __BaseException { + readonly name: "NotFound"; + readonly $fault: "client"; + constructor(opts: __ExceptionOptionType<NotFound, __BaseException>); +} +export declare const ArchiveStatus: { + readonly ARCHIVE_ACCESS: "ARCHIVE_ACCESS"; + readonly DEEP_ARCHIVE_ACCESS: "DEEP_ARCHIVE_ACCESS"; +}; +export type ArchiveStatus = (typeof ArchiveStatus)[keyof typeof ArchiveStatus];
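// Editor's note (reviewer annotation, not part of the vendored file):
// HeadObject fetches metadata without the body, so it is the cheap existence
// probe; because a HEAD response carries no error payload, a missing key
// surfaces as the generic NotFound class above rather than NoSuchKey. A
// sketch, again assuming the hypothetical `s3` client:
import { HeadObjectCommand, NotFound } from "@aws-sdk/client-s3";

async function objectExists(bucket: string, key: string): Promise<boolean> {
  try {
    await s3.send(new HeadObjectCommand({ Bucket: bucket, Key: key }));
    return true;
  } catch (err) {
    if (err instanceof NotFound) return false;
    throw err; // anything else is a genuine failure
  }
}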
+export interface HeadObjectOutput { + DeleteMarker?: boolean | undefined; + AcceptRanges?: string | undefined; + Expiration?: string | undefined; + Restore?: string | undefined; + ArchiveStatus?: ArchiveStatus | undefined; + LastModified?: Date | undefined; + ContentLength?: number | undefined; + ChecksumCRC32?: string | undefined; + ChecksumCRC32C?: string | undefined; + ChecksumCRC64NVME?: string | undefined; + ChecksumSHA1?: string | undefined; + ChecksumSHA256?: string | undefined; + ChecksumType?: ChecksumType | undefined; + ETag?: string | undefined; + MissingMeta?: number | undefined; + VersionId?: string | undefined; + CacheControl?: string | undefined; + ContentDisposition?: string | undefined; + ContentEncoding?: string | undefined; + ContentLanguage?: string | undefined; + ContentType?: string | undefined; + ContentRange?: string | undefined; + Expires?: Date | undefined; + ExpiresString?: string | undefined; + WebsiteRedirectLocation?: string | undefined; + ServerSideEncryption?: ServerSideEncryption | undefined; + Metadata?: Record<string, string> | undefined; + SSECustomerAlgorithm?: string | undefined; + SSECustomerKeyMD5?: string | undefined; + SSEKMSKeyId?: string | undefined; + BucketKeyEnabled?: boolean | undefined; + StorageClass?: StorageClass | undefined; + RequestCharged?: RequestCharged | undefined; + ReplicationStatus?: ReplicationStatus | undefined; + PartsCount?: number | undefined; + ObjectLockMode?: ObjectLockMode | undefined; + ObjectLockRetainUntilDate?: Date | undefined; + ObjectLockLegalHoldStatus?: ObjectLockLegalHoldStatus | undefined; +} +export interface HeadObjectRequest { + Bucket: string | undefined; + IfMatch?: string | undefined; + IfModifiedSince?: Date | undefined; + IfNoneMatch?: string | undefined; + IfUnmodifiedSince?: Date | undefined; + Key: string | undefined; + Range?: string | undefined; + ResponseCacheControl?: string | undefined; + ResponseContentDisposition?: string | undefined; + ResponseContentEncoding?: string | undefined; + ResponseContentLanguage?: string | undefined; + ResponseContentType?: string | undefined; + ResponseExpires?: Date | undefined; + VersionId?: string | undefined; + SSECustomerAlgorithm?: string | undefined; + SSECustomerKey?: string | undefined; + SSECustomerKeyMD5?: string | undefined; + RequestPayer?: RequestPayer | undefined; + PartNumber?: number | undefined; + ExpectedBucketOwner?: string | undefined; + ChecksumMode?: ChecksumMode | undefined; +} +export interface ListBucketAnalyticsConfigurationsOutput { + IsTruncated?: boolean | undefined; + ContinuationToken?: string | undefined; + NextContinuationToken?: string | undefined; + AnalyticsConfigurationList?: AnalyticsConfiguration[] | undefined; +} +export interface ListBucketAnalyticsConfigurationsRequest { + Bucket: string | undefined; + ContinuationToken?: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface ListBucketIntelligentTieringConfigurationsOutput { + IsTruncated?: boolean | undefined; + ContinuationToken?: string | undefined; + NextContinuationToken?: string | undefined; + IntelligentTieringConfigurationList?: + | IntelligentTieringConfiguration[] + | undefined; +} +export interface ListBucketIntelligentTieringConfigurationsRequest { + Bucket: string | undefined; + ContinuationToken?: string | undefined; +} +export interface ListBucketInventoryConfigurationsOutput { + ContinuationToken?: string | undefined; + InventoryConfigurationList?: InventoryConfiguration[] | undefined; + IsTruncated?: boolean | undefined; + NextContinuationToken?: string | undefined; +} +export interface ListBucketInventoryConfigurationsRequest { + Bucket: string | undefined; + ContinuationToken?: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface ListBucketMetricsConfigurationsOutput { + IsTruncated?: boolean | undefined; + ContinuationToken?: string | undefined; +
NextContinuationToken?: string | undefined; + MetricsConfigurationList?: MetricsConfiguration[] | undefined; +} +export interface ListBucketMetricsConfigurationsRequest { + Bucket: string | undefined; + ContinuationToken?: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface Bucket { + Name?: string | undefined; + CreationDate?: Date | undefined; + BucketRegion?: string | undefined; +} +export interface ListBucketsOutput { + Buckets?: Bucket[] | undefined; + Owner?: Owner | undefined; + ContinuationToken?: string | undefined; + Prefix?: string | undefined; +} +export interface ListBucketsRequest { + MaxBuckets?: number | undefined; + ContinuationToken?: string | undefined; + Prefix?: string | undefined; + BucketRegion?: string | undefined; +} +export interface ListDirectoryBucketsOutput { + Buckets?: Bucket[] | undefined; + ContinuationToken?: string | undefined; +} +export interface ListDirectoryBucketsRequest { + ContinuationToken?: string | undefined; + MaxDirectoryBuckets?: number | undefined; +} +export interface CommonPrefix { + Prefix?: string | undefined; +} +export declare const EncodingType: { + readonly url: "url"; +}; +export type EncodingType = (typeof EncodingType)[keyof typeof EncodingType]; +export interface Initiator { + ID?: string | undefined; + DisplayName?: string | undefined; +} +export interface MultipartUpload { + UploadId?: string | undefined; + Key?: string | undefined; + Initiated?: Date | undefined; + StorageClass?: StorageClass | undefined; + Owner?: Owner | undefined; + Initiator?: Initiator | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + ChecksumType?: ChecksumType | undefined; +} +export interface ListMultipartUploadsOutput { + Bucket?: string | undefined; + KeyMarker?: string | undefined; + UploadIdMarker?: string | undefined; + NextKeyMarker?: string | undefined; + Prefix?: string | undefined; + Delimiter?: string | undefined; + NextUploadIdMarker?: string | undefined; + MaxUploads?: number | undefined; + IsTruncated?: boolean | undefined; + Uploads?: MultipartUpload[] | undefined; + CommonPrefixes?: CommonPrefix[] | undefined; + EncodingType?: EncodingType | undefined; + RequestCharged?: RequestCharged | undefined; +} +export interface ListMultipartUploadsRequest { + Bucket: string | undefined; + Delimiter?: string | undefined; + EncodingType?: EncodingType | undefined; + KeyMarker?: string | undefined; + MaxUploads?: number | undefined; + Prefix?: string | undefined; + UploadIdMarker?: string | undefined; + ExpectedBucketOwner?: string | undefined; + RequestPayer?: RequestPayer | undefined; +} +export interface RestoreStatus { + IsRestoreInProgress?: boolean | undefined; + RestoreExpiryDate?: Date | undefined; +} +export declare const ObjectStorageClass: { + readonly DEEP_ARCHIVE: "DEEP_ARCHIVE"; + readonly EXPRESS_ONEZONE: "EXPRESS_ONEZONE"; + readonly GLACIER: "GLACIER"; + readonly GLACIER_IR: "GLACIER_IR"; + readonly INTELLIGENT_TIERING: "INTELLIGENT_TIERING"; + readonly ONEZONE_IA: "ONEZONE_IA"; + readonly OUTPOSTS: "OUTPOSTS"; + readonly REDUCED_REDUNDANCY: "REDUCED_REDUNDANCY"; + readonly SNOW: "SNOW"; + readonly STANDARD: "STANDARD"; + readonly STANDARD_IA: "STANDARD_IA"; +}; +export type ObjectStorageClass = + (typeof ObjectStorageClass)[keyof typeof ObjectStorageClass]; +export interface _Object { + Key?: string | undefined; + LastModified?: Date | undefined; + ETag?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm[] | undefined; + ChecksumType?: ChecksumType | undefined; + Size?: 
number | undefined; + StorageClass?: ObjectStorageClass | undefined; + Owner?: Owner | undefined; + RestoreStatus?: RestoreStatus | undefined; +} +export interface ListObjectsOutput { + IsTruncated?: boolean | undefined; + Marker?: string | undefined; + NextMarker?: string | undefined; + Contents?: _Object[] | undefined; + Name?: string | undefined; + Prefix?: string | undefined; + Delimiter?: string | undefined; + MaxKeys?: number | undefined; + CommonPrefixes?: CommonPrefix[] | undefined; + EncodingType?: EncodingType | undefined; + RequestCharged?: RequestCharged | undefined; +} +export declare const OptionalObjectAttributes: { + readonly RESTORE_STATUS: "RestoreStatus"; +}; +export type OptionalObjectAttributes = + (typeof OptionalObjectAttributes)[keyof typeof OptionalObjectAttributes]; +export interface ListObjectsRequest { + Bucket: string | undefined; + Delimiter?: string | undefined; + EncodingType?: EncodingType | undefined; + Marker?: string | undefined; + MaxKeys?: number | undefined; + Prefix?: string | undefined; + RequestPayer?: RequestPayer | undefined; + ExpectedBucketOwner?: string | undefined; + OptionalObjectAttributes?: OptionalObjectAttributes[] | undefined; +} +export interface ListObjectsV2Output { + IsTruncated?: boolean | undefined; + Contents?: _Object[] | undefined; + Name?: string | undefined; + Prefix?: string | undefined; + Delimiter?: string | undefined; + MaxKeys?: number | undefined; + CommonPrefixes?: CommonPrefix[] | undefined; + EncodingType?: EncodingType | undefined; + KeyCount?: number | undefined; + ContinuationToken?: string | undefined; + NextContinuationToken?: string | undefined; + StartAfter?: string | undefined; + RequestCharged?: RequestCharged | undefined; +} +export interface ListObjectsV2Request { + Bucket: string | undefined; + Delimiter?: string | undefined; + EncodingType?: EncodingType | undefined; + MaxKeys?: number | undefined; + Prefix?: string | undefined; + ContinuationToken?: string | undefined; + FetchOwner?: boolean | undefined; + StartAfter?: string | undefined; + RequestPayer?: RequestPayer | undefined; + ExpectedBucketOwner?: string | undefined; + OptionalObjectAttributes?: OptionalObjectAttributes[] | undefined; +} +export interface DeleteMarkerEntry { + Owner?: Owner | undefined; + Key?: string | undefined; + VersionId?: string | undefined; + IsLatest?: boolean | undefined; + LastModified?: Date | undefined; +} +export declare const ObjectVersionStorageClass: { + readonly STANDARD: "STANDARD"; +}; +export type ObjectVersionStorageClass = + (typeof ObjectVersionStorageClass)[keyof typeof ObjectVersionStorageClass]; +export interface ObjectVersion { + ETag?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm[] | undefined; + ChecksumType?: ChecksumType | undefined; + Size?: number | undefined; + StorageClass?: ObjectVersionStorageClass | undefined; + Key?: string | undefined; + VersionId?: string | undefined; + IsLatest?: boolean | undefined; + LastModified?: Date | undefined; + Owner?: Owner | undefined; + RestoreStatus?: RestoreStatus | undefined; +} +export interface ListObjectVersionsOutput { + IsTruncated?: boolean | undefined; + KeyMarker?: string | undefined; + VersionIdMarker?: string | undefined; + NextKeyMarker?: string | undefined; + NextVersionIdMarker?: string | undefined; + Versions?: ObjectVersion[] | undefined; + DeleteMarkers?: DeleteMarkerEntry[] | undefined; + Name?: string | undefined; + Prefix?: string | undefined; + Delimiter?: string | undefined; + MaxKeys?: number | undefined; + 
CommonPrefixes?: CommonPrefix[] | undefined; + EncodingType?: EncodingType | undefined; + RequestCharged?: RequestCharged | undefined; +} +export interface ListObjectVersionsRequest { + Bucket: string | undefined; + Delimiter?: string | undefined; + EncodingType?: EncodingType | undefined; + KeyMarker?: string | undefined; + MaxKeys?: number | undefined; + Prefix?: string | undefined; + VersionIdMarker?: string | undefined; + ExpectedBucketOwner?: string | undefined; + RequestPayer?: RequestPayer | undefined; + OptionalObjectAttributes?: OptionalObjectAttributes[] | undefined; +} +export interface Part { + PartNumber?: number | undefined; + LastModified?: Date | undefined; + ETag?: string | undefined; + Size?: number | undefined; + ChecksumCRC32?: string | undefined; + ChecksumCRC32C?: string | undefined; + ChecksumCRC64NVME?: string | undefined; + ChecksumSHA1?: string | undefined; + ChecksumSHA256?: string | undefined; +} +export interface ListPartsOutput { + AbortDate?: Date | undefined; + AbortRuleId?: string | undefined; + Bucket?: string | undefined; + Key?: string | undefined; + UploadId?: string | undefined; + PartNumberMarker?: string | undefined; + NextPartNumberMarker?: string | undefined; + MaxParts?: number | undefined; + IsTruncated?: boolean | undefined; + Parts?: Part[] | undefined; + Initiator?: Initiator | undefined; + Owner?: Owner | undefined; + StorageClass?: StorageClass | undefined; + RequestCharged?: RequestCharged | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + ChecksumType?: ChecksumType | undefined; +} +export interface ListPartsRequest { + Bucket: string | undefined; + Key: string | undefined; + MaxParts?: number | undefined; + PartNumberMarker?: string | undefined; + UploadId: string | undefined; + RequestPayer?: RequestPayer | undefined; + ExpectedBucketOwner?: string | undefined; + SSECustomerAlgorithm?: string | undefined; + SSECustomerKey?: string | undefined; + SSECustomerKeyMD5?: string | undefined; +} +export interface PutBucketAccelerateConfigurationRequest { + Bucket: string | undefined; + AccelerateConfiguration: AccelerateConfiguration | undefined; + ExpectedBucketOwner?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; +} +export interface PutBucketAclRequest { + ACL?: BucketCannedACL | undefined; + AccessControlPolicy?: AccessControlPolicy | undefined; + Bucket: string | undefined; + ContentMD5?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + GrantFullControl?: string | undefined; + GrantRead?: string | undefined; + GrantReadACP?: string | undefined; + GrantWrite?: string | undefined; + GrantWriteACP?: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface PutBucketAnalyticsConfigurationRequest { + Bucket: string | undefined; + Id: string | undefined; + AnalyticsConfiguration: AnalyticsConfiguration | undefined; + ExpectedBucketOwner?: string | undefined; +} +export declare const CompleteMultipartUploadOutputFilterSensitiveLog: ( + obj: CompleteMultipartUploadOutput +) => any; +export declare const CompleteMultipartUploadRequestFilterSensitiveLog: ( + obj: CompleteMultipartUploadRequest +) => any; +export declare const CopyObjectOutputFilterSensitiveLog: ( + obj: CopyObjectOutput +) => any; +export declare const CopyObjectRequestFilterSensitiveLog: ( + obj: CopyObjectRequest +) => any; +export declare const CreateMultipartUploadOutputFilterSensitiveLog: ( + obj: CreateMultipartUploadOutput +) => any; +export declare const 
CreateMultipartUploadRequestFilterSensitiveLog: ( + obj: CreateMultipartUploadRequest +) => any; +export declare const SessionCredentialsFilterSensitiveLog: ( + obj: SessionCredentials +) => any; +export declare const CreateSessionOutputFilterSensitiveLog: ( + obj: CreateSessionOutput +) => any; +export declare const CreateSessionRequestFilterSensitiveLog: ( + obj: CreateSessionRequest +) => any; +export declare const ServerSideEncryptionByDefaultFilterSensitiveLog: ( + obj: ServerSideEncryptionByDefault +) => any; +export declare const ServerSideEncryptionRuleFilterSensitiveLog: ( + obj: ServerSideEncryptionRule +) => any; +export declare const ServerSideEncryptionConfigurationFilterSensitiveLog: ( + obj: ServerSideEncryptionConfiguration +) => any; +export declare const GetBucketEncryptionOutputFilterSensitiveLog: ( + obj: GetBucketEncryptionOutput +) => any; +export declare const SSEKMSFilterSensitiveLog: (obj: SSEKMS) => any; +export declare const InventoryEncryptionFilterSensitiveLog: ( + obj: InventoryEncryption +) => any; +export declare const InventoryS3BucketDestinationFilterSensitiveLog: ( + obj: InventoryS3BucketDestination +) => any; +export declare const InventoryDestinationFilterSensitiveLog: ( + obj: InventoryDestination +) => any; +export declare const InventoryConfigurationFilterSensitiveLog: ( + obj: InventoryConfiguration +) => any; +export declare const GetBucketInventoryConfigurationOutputFilterSensitiveLog: ( + obj: GetBucketInventoryConfigurationOutput +) => any; +export declare const GetObjectOutputFilterSensitiveLog: ( + obj: GetObjectOutput +) => any; +export declare const GetObjectRequestFilterSensitiveLog: ( + obj: GetObjectRequest +) => any; +export declare const GetObjectAttributesRequestFilterSensitiveLog: ( + obj: GetObjectAttributesRequest +) => any; +export declare const GetObjectTorrentOutputFilterSensitiveLog: ( + obj: GetObjectTorrentOutput +) => any; +export declare const HeadObjectOutputFilterSensitiveLog: ( + obj: HeadObjectOutput +) => any; +export declare const HeadObjectRequestFilterSensitiveLog: ( + obj: HeadObjectRequest +) => any; +export declare const ListBucketInventoryConfigurationsOutputFilterSensitiveLog: ( + obj: ListBucketInventoryConfigurationsOutput +) => any; +export declare const ListPartsRequestFilterSensitiveLog: ( + obj: ListPartsRequest +) => any; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/models/models_1.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/models/models_1.d.ts new file mode 100644 index 00000000..737f72a0 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/models/models_1.d.ts @@ -0,0 +1,753 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { StreamingBlobTypes } from "@smithy/types"; +import { + AccessControlPolicy, + BucketVersioningStatus, + ChecksumAlgorithm, + ChecksumType, + CORSRule, + ErrorDocument, + Grant, + IndexDocument, + IntelligentTieringConfiguration, + InventoryConfiguration, + LifecycleRule, + LoggingEnabled, + MetricsConfiguration, + NotificationConfiguration, + ObjectCannedACL, + ObjectLockConfiguration, + ObjectLockLegalHold, + ObjectLockLegalHoldStatus, + ObjectLockMode, + ObjectLockRetention, + OwnershipControls, + Payer, + PublicAccessBlockConfiguration, + RedirectAllRequestsTo, + ReplicationConfiguration, + ReplicationStatus, + RequestCharged, + RequestPayer, + RoutingRule, + ServerSideEncryption, + ServerSideEncryptionConfiguration, + StorageClass, + Tag, + TransitionDefaultMinimumObjectSize, +} from 
"./models_0"; +import { S3ServiceException as __BaseException } from "./S3ServiceException"; +export interface CORSConfiguration { + CORSRules: CORSRule[] | undefined; +} +export interface PutBucketCorsRequest { + Bucket: string | undefined; + CORSConfiguration: CORSConfiguration | undefined; + ContentMD5?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface PutBucketEncryptionRequest { + Bucket: string | undefined; + ContentMD5?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + ServerSideEncryptionConfiguration: + | ServerSideEncryptionConfiguration + | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface PutBucketIntelligentTieringConfigurationRequest { + Bucket: string | undefined; + Id: string | undefined; + IntelligentTieringConfiguration: IntelligentTieringConfiguration | undefined; +} +export interface PutBucketInventoryConfigurationRequest { + Bucket: string | undefined; + Id: string | undefined; + InventoryConfiguration: InventoryConfiguration | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface PutBucketLifecycleConfigurationOutput { + TransitionDefaultMinimumObjectSize?: + | TransitionDefaultMinimumObjectSize + | undefined; +} +export interface BucketLifecycleConfiguration { + Rules: LifecycleRule[] | undefined; +} +export interface PutBucketLifecycleConfigurationRequest { + Bucket: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + LifecycleConfiguration?: BucketLifecycleConfiguration | undefined; + ExpectedBucketOwner?: string | undefined; + TransitionDefaultMinimumObjectSize?: + | TransitionDefaultMinimumObjectSize + | undefined; +} +export interface BucketLoggingStatus { + LoggingEnabled?: LoggingEnabled | undefined; +} +export interface PutBucketLoggingRequest { + Bucket: string | undefined; + BucketLoggingStatus: BucketLoggingStatus | undefined; + ContentMD5?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface PutBucketMetricsConfigurationRequest { + Bucket: string | undefined; + Id: string | undefined; + MetricsConfiguration: MetricsConfiguration | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface PutBucketNotificationConfigurationRequest { + Bucket: string | undefined; + NotificationConfiguration: NotificationConfiguration | undefined; + ExpectedBucketOwner?: string | undefined; + SkipDestinationValidation?: boolean | undefined; +} +export interface PutBucketOwnershipControlsRequest { + Bucket: string | undefined; + ContentMD5?: string | undefined; + ExpectedBucketOwner?: string | undefined; + OwnershipControls: OwnershipControls | undefined; +} +export interface PutBucketPolicyRequest { + Bucket: string | undefined; + ContentMD5?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + ConfirmRemoveSelfBucketAccess?: boolean | undefined; + Policy: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface PutBucketReplicationRequest { + Bucket: string | undefined; + ContentMD5?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + ReplicationConfiguration: ReplicationConfiguration | undefined; + Token?: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface RequestPaymentConfiguration { + Payer: Payer | undefined; +} +export interface PutBucketRequestPaymentRequest { + Bucket: string 
| undefined; + ContentMD5?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + RequestPaymentConfiguration: RequestPaymentConfiguration | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface Tagging { + TagSet: Tag[] | undefined; +} +export interface PutBucketTaggingRequest { + Bucket: string | undefined; + ContentMD5?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + Tagging: Tagging | undefined; + ExpectedBucketOwner?: string | undefined; +} +export declare const MFADelete: { + readonly Disabled: "Disabled"; + readonly Enabled: "Enabled"; +}; +export type MFADelete = (typeof MFADelete)[keyof typeof MFADelete]; +export interface VersioningConfiguration { + MFADelete?: MFADelete | undefined; + Status?: BucketVersioningStatus | undefined; +} +export interface PutBucketVersioningRequest { + Bucket: string | undefined; + ContentMD5?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + MFA?: string | undefined; + VersioningConfiguration: VersioningConfiguration | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface WebsiteConfiguration { + ErrorDocument?: ErrorDocument | undefined; + IndexDocument?: IndexDocument | undefined; + RedirectAllRequestsTo?: RedirectAllRequestsTo | undefined; + RoutingRules?: RoutingRule[] | undefined; +} +export interface PutBucketWebsiteRequest { + Bucket: string | undefined; + ContentMD5?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + WebsiteConfiguration: WebsiteConfiguration | undefined; + ExpectedBucketOwner?: string | undefined; +} +export declare class EncryptionTypeMismatch extends __BaseException { + readonly name: "EncryptionTypeMismatch"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType<EncryptionTypeMismatch, __BaseException> + ); +} +export declare class InvalidRequest extends __BaseException { + readonly name: "InvalidRequest"; + readonly $fault: "client"; + constructor(opts: __ExceptionOptionType<InvalidRequest, __BaseException>); +} +export declare class InvalidWriteOffset extends __BaseException { + readonly name: "InvalidWriteOffset"; + readonly $fault: "client"; + constructor(opts: __ExceptionOptionType<InvalidWriteOffset, __BaseException>); +} +export interface PutObjectOutput { + Expiration?: string | undefined; + ETag?: string | undefined; + ChecksumCRC32?: string | undefined; + ChecksumCRC32C?: string | undefined; + ChecksumCRC64NVME?: string | undefined; + ChecksumSHA1?: string | undefined; + ChecksumSHA256?: string | undefined; + ChecksumType?: ChecksumType | undefined; + ServerSideEncryption?: ServerSideEncryption | undefined; + VersionId?: string | undefined; + SSECustomerAlgorithm?: string | undefined; + SSECustomerKeyMD5?: string | undefined; + SSEKMSKeyId?: string | undefined; + SSEKMSEncryptionContext?: string | undefined; + BucketKeyEnabled?: boolean | undefined; + Size?: number | undefined; + RequestCharged?: RequestCharged | undefined; +} +export interface PutObjectRequest { + ACL?: ObjectCannedACL | undefined; + Body?: StreamingBlobTypes | undefined; + Bucket: string | undefined; + CacheControl?: string | undefined; + ContentDisposition?: string | undefined; + ContentEncoding?: string | undefined; + ContentLanguage?: string | undefined; + ContentLength?: number | undefined; + ContentMD5?: string | undefined; + ContentType?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + ChecksumCRC32?: string | undefined; + ChecksumCRC32C?: string | undefined; + ChecksumCRC64NVME?: string | undefined; + ChecksumSHA1?: string | undefined; + ChecksumSHA256?: string | undefined; + Expires?: Date | undefined; + IfMatch?: string | undefined; + IfNoneMatch?: string | undefined; + GrantFullControl?: string | undefined; + GrantRead?: string | undefined; + GrantReadACP?: string | undefined; + GrantWriteACP?: string | undefined; + Key: string | undefined; + WriteOffsetBytes?: number | undefined; + Metadata?: Record<string, string> | undefined; + ServerSideEncryption?: ServerSideEncryption | undefined; + StorageClass?: StorageClass | undefined; + WebsiteRedirectLocation?: string | undefined; + SSECustomerAlgorithm?: string | undefined; + SSECustomerKey?: string | undefined; + SSECustomerKeyMD5?: string | undefined; + SSEKMSKeyId?: string | undefined; + SSEKMSEncryptionContext?: string | undefined; + BucketKeyEnabled?: boolean | undefined; + RequestPayer?: RequestPayer | undefined; + Tagging?: string | undefined; + ObjectLockMode?: ObjectLockMode | undefined; + ObjectLockRetainUntilDate?: Date | undefined; + ObjectLockLegalHoldStatus?: ObjectLockLegalHoldStatus | undefined; + ExpectedBucketOwner?: string | undefined; +}
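// Editor's note (reviewer annotation, not part of the vendored file): a
// hypothetical upload matching PutObjectRequest above; Body accepts strings,
// byte arrays, or streams, and ChecksumAlgorithm asks the SDK to compute an
// integrity checksum client-side. Assumes the `s3` client sketched earlier.
import { PutObjectCommand } from "@aws-sdk/client-s3";

const putResult = await s3.send(
  new PutObjectCommand({
    Bucket: "user-files",
    Key: "contacts/card.vcf",
    Body: "BEGIN:VCARD\nVERSION:3.0\nEND:VCARD\n",
    ContentType: "text/vcard",
    ChecksumAlgorithm: "CRC32",
  })
);
// putResult.ETag and putResult.ChecksumCRC32 can be stored for later checks.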
+export declare class TooManyParts extends __BaseException { + readonly name: "TooManyParts"; + readonly $fault: "client"; + constructor(opts: __ExceptionOptionType<TooManyParts, __BaseException>); +} +export interface PutObjectAclOutput { + RequestCharged?: RequestCharged | undefined; +} +export interface PutObjectAclRequest { + ACL?: ObjectCannedACL | undefined; + AccessControlPolicy?: AccessControlPolicy | undefined; + Bucket: string | undefined; + ContentMD5?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + GrantFullControl?: string | undefined; + GrantRead?: string | undefined; + GrantReadACP?: string | undefined; + GrantWrite?: string | undefined; + GrantWriteACP?: string | undefined; + Key: string | undefined; + RequestPayer?: RequestPayer | undefined; + VersionId?: string | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface PutObjectLegalHoldOutput { + RequestCharged?: RequestCharged | undefined; +} +export interface PutObjectLegalHoldRequest { + Bucket: string | undefined; + Key: string | undefined; + LegalHold?: ObjectLockLegalHold | undefined; + RequestPayer?: RequestPayer | undefined; + VersionId?: string | undefined; + ContentMD5?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface PutObjectLockConfigurationOutput { + RequestCharged?: RequestCharged | undefined; +} +export interface PutObjectLockConfigurationRequest { + Bucket: string | undefined; + ObjectLockConfiguration?: ObjectLockConfiguration | undefined; + RequestPayer?: RequestPayer | undefined; + Token?: string | undefined; + ContentMD5?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface PutObjectRetentionOutput { + RequestCharged?: RequestCharged | undefined; +} +export interface PutObjectRetentionRequest { + Bucket: string | undefined; + Key: string | undefined; + Retention?: ObjectLockRetention | undefined; + RequestPayer?: RequestPayer | undefined; + VersionId?: string | undefined; + BypassGovernanceRetention?: boolean | undefined; + ContentMD5?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface PutObjectTaggingOutput { + VersionId?: string | undefined; +} +export interface PutObjectTaggingRequest { + Bucket: string | undefined; + Key: string | undefined; + VersionId?: string | undefined; + ContentMD5?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + Tagging: Tagging | undefined; + ExpectedBucketOwner?: string | undefined; + RequestPayer?: RequestPayer | undefined; +}
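// Editor's note (reviewer annotation, not part of the vendored file): object
// tags are replaced as a whole TagSet rather than merged, so include every
// tag you want to keep. A sketch, assuming the hypothetical `s3` client:
import { PutObjectTaggingCommand } from "@aws-sdk/client-s3";

await s3.send(
  new PutObjectTaggingCommand({
    Bucket: "user-files",
    Key: "contacts/card.vcf",
    Tagging: { TagSet: [{ Key: "source", Value: "nextcloud-migration" }] },
  })
);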
+export interface PutPublicAccessBlockRequest { + Bucket: string | undefined; + ContentMD5?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + PublicAccessBlockConfiguration: PublicAccessBlockConfiguration | undefined; + ExpectedBucketOwner?: string | undefined; +} +export declare class ObjectAlreadyInActiveTierError extends __BaseException { + readonly name: "ObjectAlreadyInActiveTierError"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType<ObjectAlreadyInActiveTierError, __BaseException> + ); +} +export interface RestoreObjectOutput { + RequestCharged?: RequestCharged | undefined; + RestoreOutputPath?: string | undefined; +} +export declare const Tier: { + readonly Bulk: "Bulk"; + readonly Expedited: "Expedited"; + readonly Standard: "Standard"; +}; +export type Tier = (typeof Tier)[keyof typeof Tier]; +export interface GlacierJobParameters { + Tier: Tier | undefined; +} +export interface Encryption { + EncryptionType: ServerSideEncryption | undefined; + KMSKeyId?: string | undefined; + KMSContext?: string | undefined; +} +export interface MetadataEntry { + Name?: string | undefined; + Value?: string | undefined; +} +export interface S3Location { + BucketName: string | undefined; + Prefix: string | undefined; + Encryption?: Encryption | undefined; + CannedACL?: ObjectCannedACL | undefined; + AccessControlList?: Grant[] | undefined; + Tagging?: Tagging | undefined; + UserMetadata?: MetadataEntry[] | undefined; + StorageClass?: StorageClass | undefined; +} +export interface OutputLocation { + S3?: S3Location | undefined; +} +export declare const ExpressionType: { + readonly SQL: "SQL"; +}; +export type ExpressionType = + (typeof ExpressionType)[keyof typeof ExpressionType]; +export declare const CompressionType: { + readonly BZIP2: "BZIP2"; + readonly GZIP: "GZIP"; + readonly NONE: "NONE"; +}; +export type CompressionType = + (typeof CompressionType)[keyof typeof CompressionType]; +export declare const FileHeaderInfo: { + readonly IGNORE: "IGNORE"; + readonly NONE: "NONE"; + readonly USE: "USE"; +}; +export type FileHeaderInfo = + (typeof FileHeaderInfo)[keyof typeof FileHeaderInfo]; +export interface CSVInput { + FileHeaderInfo?: FileHeaderInfo | undefined; + Comments?: string | undefined; + QuoteEscapeCharacter?: string | undefined; + RecordDelimiter?: string | undefined; + FieldDelimiter?: string | undefined; + QuoteCharacter?: string | undefined; + AllowQuotedRecordDelimiter?: boolean | undefined; +} +export declare const JSONType: { + readonly DOCUMENT: "DOCUMENT"; + readonly LINES: "LINES"; +}; +export type JSONType = (typeof JSONType)[keyof typeof JSONType]; +export interface JSONInput { + Type?: JSONType | undefined; +} +export interface ParquetInput {} +export interface InputSerialization { + CSV?: CSVInput | undefined; + CompressionType?: CompressionType | undefined; + JSON?: JSONInput | undefined; + Parquet?: ParquetInput | undefined; +} +export declare const QuoteFields: { + readonly ALWAYS: "ALWAYS"; + readonly ASNEEDED: "ASNEEDED"; +}; +export type QuoteFields = (typeof QuoteFields)[keyof typeof QuoteFields]; +export interface CSVOutput { + QuoteFields?: QuoteFields | undefined; + QuoteEscapeCharacter?: string | undefined; + RecordDelimiter?: string | undefined; + FieldDelimiter?: string | undefined; + QuoteCharacter?: string | undefined; +}
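// Editor's note (reviewer annotation, not part of the vendored file): the
// CSV/JSON serialization shapes above parameterize SelectObjectContent, which
// streams query results through the event stream declared next. A sketch,
// assuming the hypothetical `s3` client; S3 Select support varies across
// S3-compatible backends, so treat this as illustrative only.
import { SelectObjectContentCommand } from "@aws-sdk/client-s3";

const select = await s3.send(
  new SelectObjectContentCommand({
    Bucket: "user-files",
    Key: "report.csv",
    Expression: "SELECT s.name FROM S3Object s",
    ExpressionType: "SQL",
    InputSerialization: { CSV: { FileHeaderInfo: "USE" }, CompressionType: "NONE" },
    OutputSerialization: { JSON: { RecordDelimiter: "\n" } },
  })
);
const decoder = new TextDecoder();
for await (const event of select.Payload ?? []) {
  // Records events carry query output; Stats/Progress/Cont/End are control events.
  if (event.Records?.Payload) process.stdout.write(decoder.decode(event.Records.Payload));
}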
+export interface JSONOutput { + RecordDelimiter?: string | undefined; +} +export interface OutputSerialization { + CSV?: CSVOutput | undefined; + JSON?: JSONOutput | undefined; +} +export interface SelectParameters { + InputSerialization: InputSerialization | undefined; + ExpressionType: ExpressionType | undefined; + Expression: string | undefined; + OutputSerialization: OutputSerialization | undefined; +} +export declare const RestoreRequestType: { + readonly SELECT: "SELECT"; +}; +export type RestoreRequestType = + (typeof RestoreRequestType)[keyof typeof RestoreRequestType]; +export interface RestoreRequest { + Days?: number | undefined; + GlacierJobParameters?: GlacierJobParameters | undefined; + Type?: RestoreRequestType | undefined; + Tier?: Tier | undefined; + Description?: string | undefined; + SelectParameters?: SelectParameters | undefined; + OutputLocation?: OutputLocation | undefined; +} +export interface RestoreObjectRequest { + Bucket: string | undefined; + Key: string | undefined; + VersionId?: string | undefined; + RestoreRequest?: RestoreRequest | undefined; + RequestPayer?: RequestPayer | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface ContinuationEvent {} +export interface EndEvent {} +export interface Progress { + BytesScanned?: number | undefined; + BytesProcessed?: number | undefined; + BytesReturned?: number | undefined; +} +export interface ProgressEvent { + Details?: Progress | undefined; +} +export interface RecordsEvent { + Payload?: Uint8Array | undefined; +} +export interface Stats { + BytesScanned?: number | undefined; + BytesProcessed?: number | undefined; + BytesReturned?: number | undefined; +} +export interface StatsEvent { + Details?: Stats | undefined; +} +export type SelectObjectContentEventStream = + | SelectObjectContentEventStream.ContMember + | SelectObjectContentEventStream.EndMember + | SelectObjectContentEventStream.ProgressMember + | SelectObjectContentEventStream.RecordsMember + | SelectObjectContentEventStream.StatsMember + | SelectObjectContentEventStream.$UnknownMember; +export declare namespace SelectObjectContentEventStream { + interface RecordsMember { + Records: RecordsEvent; + Stats?: never; + Progress?: never; + Cont?: never; + End?: never; + $unknown?: never; + } + interface StatsMember { + Records?: never; + Stats: StatsEvent; + Progress?: never; + Cont?: never; + End?: never; + $unknown?: never; + } + interface ProgressMember { + Records?: never; + Stats?: never; + Progress: ProgressEvent; + Cont?: never; + End?: never; + $unknown?: never; + } + interface ContMember { + Records?: never; + Stats?: never; + Progress?: never; + Cont: ContinuationEvent; + End?: never; + $unknown?: never; + } + interface EndMember { + Records?: never; + Stats?: never; + Progress?: never; + Cont?: never; + End: EndEvent; + $unknown?: never; + } + interface $UnknownMember { + Records?: never; + Stats?: never; + Progress?: never; + Cont?: never; + End?: never; + $unknown: [string, any]; + } + interface Visitor<T> { + Records: (value: RecordsEvent) => T; + Stats: (value: StatsEvent) => T; + Progress: (value: ProgressEvent) => T; + Cont: (value: ContinuationEvent) => T; + End: (value: EndEvent) => T; + _: (name: string, value: any) => T; + } + const visit: <T>( + value: SelectObjectContentEventStream, + visitor: Visitor<T> + ) => T; +} +export interface SelectObjectContentOutput { + Payload?: AsyncIterable<SelectObjectContentEventStream> | undefined; +} +export interface RequestProgress { + Enabled?: boolean | undefined; +} +export interface
ScanRange { + Start?: number | undefined; + End?: number | undefined; +} +export interface SelectObjectContentRequest { + Bucket: string | undefined; + Key: string | undefined; + SSECustomerAlgorithm?: string | undefined; + SSECustomerKey?: string | undefined; + SSECustomerKeyMD5?: string | undefined; + Expression: string | undefined; + ExpressionType: ExpressionType | undefined; + RequestProgress?: RequestProgress | undefined; + InputSerialization: InputSerialization | undefined; + OutputSerialization: OutputSerialization | undefined; + ScanRange?: ScanRange | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface UploadPartOutput { + ServerSideEncryption?: ServerSideEncryption | undefined; + ETag?: string | undefined; + ChecksumCRC32?: string | undefined; + ChecksumCRC32C?: string | undefined; + ChecksumCRC64NVME?: string | undefined; + ChecksumSHA1?: string | undefined; + ChecksumSHA256?: string | undefined; + SSECustomerAlgorithm?: string | undefined; + SSECustomerKeyMD5?: string | undefined; + SSEKMSKeyId?: string | undefined; + BucketKeyEnabled?: boolean | undefined; + RequestCharged?: RequestCharged | undefined; +} +export interface UploadPartRequest { + Body?: StreamingBlobTypes | undefined; + Bucket: string | undefined; + ContentLength?: number | undefined; + ContentMD5?: string | undefined; + ChecksumAlgorithm?: ChecksumAlgorithm | undefined; + ChecksumCRC32?: string | undefined; + ChecksumCRC32C?: string | undefined; + ChecksumCRC64NVME?: string | undefined; + ChecksumSHA1?: string | undefined; + ChecksumSHA256?: string | undefined; + Key: string | undefined; + PartNumber: number | undefined; + UploadId: string | undefined; + SSECustomerAlgorithm?: string | undefined; + SSECustomerKey?: string | undefined; + SSECustomerKeyMD5?: string | undefined; + RequestPayer?: RequestPayer | undefined; + ExpectedBucketOwner?: string | undefined; +} +export interface CopyPartResult { + ETag?: string | undefined; + LastModified?: Date | undefined; + ChecksumCRC32?: string | undefined; + ChecksumCRC32C?: string | undefined; + ChecksumCRC64NVME?: string | undefined; + ChecksumSHA1?: string | undefined; + ChecksumSHA256?: string | undefined; +} +export interface UploadPartCopyOutput { + CopySourceVersionId?: string | undefined; + CopyPartResult?: CopyPartResult | undefined; + ServerSideEncryption?: ServerSideEncryption | undefined; + SSECustomerAlgorithm?: string | undefined; + SSECustomerKeyMD5?: string | undefined; + SSEKMSKeyId?: string | undefined; + BucketKeyEnabled?: boolean | undefined; + RequestCharged?: RequestCharged | undefined; +} +export interface UploadPartCopyRequest { + Bucket: string | undefined; + CopySource: string | undefined; + CopySourceIfMatch?: string | undefined; + CopySourceIfModifiedSince?: Date | undefined; + CopySourceIfNoneMatch?: string | undefined; + CopySourceIfUnmodifiedSince?: Date | undefined; + CopySourceRange?: string | undefined; + Key: string | undefined; + PartNumber: number | undefined; + UploadId: string | undefined; + SSECustomerAlgorithm?: string | undefined; + SSECustomerKey?: string | undefined; + SSECustomerKeyMD5?: string | undefined; + CopySourceSSECustomerAlgorithm?: string | undefined; + CopySourceSSECustomerKey?: string | undefined; + CopySourceSSECustomerKeyMD5?: string | undefined; + RequestPayer?: RequestPayer | undefined; + ExpectedBucketOwner?: string | undefined; + ExpectedSourceBucketOwner?: string | undefined; +} +export interface WriteGetObjectResponseRequest { + RequestRoute: string | undefined; + RequestToken: 
string | undefined; + Body?: StreamingBlobTypes | undefined; + StatusCode?: number | undefined; + ErrorCode?: string | undefined; + ErrorMessage?: string | undefined; + AcceptRanges?: string | undefined; + CacheControl?: string | undefined; + ContentDisposition?: string | undefined; + ContentEncoding?: string | undefined; + ContentLanguage?: string | undefined; + ContentLength?: number | undefined; + ContentRange?: string | undefined; + ContentType?: string | undefined; + ChecksumCRC32?: string | undefined; + ChecksumCRC32C?: string | undefined; + ChecksumCRC64NVME?: string | undefined; + ChecksumSHA1?: string | undefined; + ChecksumSHA256?: string | undefined; + DeleteMarker?: boolean | undefined; + ETag?: string | undefined; + Expires?: Date | undefined; + Expiration?: string | undefined; + LastModified?: Date | undefined; + MissingMeta?: number | undefined; + Metadata?: Record<string, string> | undefined; + ObjectLockMode?: ObjectLockMode | undefined; + ObjectLockLegalHoldStatus?: ObjectLockLegalHoldStatus | undefined; + ObjectLockRetainUntilDate?: Date | undefined; + PartsCount?: number | undefined; + ReplicationStatus?: ReplicationStatus | undefined; + RequestCharged?: RequestCharged | undefined; + Restore?: string | undefined; + ServerSideEncryption?: ServerSideEncryption | undefined; + SSECustomerAlgorithm?: string | undefined; + SSEKMSKeyId?: string | undefined; + SSECustomerKeyMD5?: string | undefined; + StorageClass?: StorageClass | undefined; + TagCount?: number | undefined; + VersionId?: string | undefined; + BucketKeyEnabled?: boolean | undefined; +} +export declare const PutBucketEncryptionRequestFilterSensitiveLog: ( + obj: PutBucketEncryptionRequest +) => any; +export declare const PutBucketInventoryConfigurationRequestFilterSensitiveLog: ( + obj: PutBucketInventoryConfigurationRequest +) => any; +export declare const PutObjectOutputFilterSensitiveLog: ( + obj: PutObjectOutput +) => any; +export declare const PutObjectRequestFilterSensitiveLog: ( + obj: PutObjectRequest +) => any; +export declare const EncryptionFilterSensitiveLog: (obj: Encryption) => any; +export declare const S3LocationFilterSensitiveLog: (obj: S3Location) => any; +export declare const OutputLocationFilterSensitiveLog: ( + obj: OutputLocation +) => any; +export declare const RestoreRequestFilterSensitiveLog: ( + obj: RestoreRequest +) => any; +export declare const RestoreObjectRequestFilterSensitiveLog: ( + obj: RestoreObjectRequest +) => any; +export declare const SelectObjectContentEventStreamFilterSensitiveLog: ( + obj: SelectObjectContentEventStream +) => any; +export declare const SelectObjectContentOutputFilterSensitiveLog: ( + obj: SelectObjectContentOutput +) => any; +export declare const SelectObjectContentRequestFilterSensitiveLog: ( + obj: SelectObjectContentRequest +) => any; +export declare const UploadPartOutputFilterSensitiveLog: ( + obj: UploadPartOutput +) => any; +export declare const UploadPartRequestFilterSensitiveLog: ( + obj: UploadPartRequest +) => any; +export declare const UploadPartCopyOutputFilterSensitiveLog: ( + obj: UploadPartCopyOutput +) => any; +export declare const UploadPartCopyRequestFilterSensitiveLog: ( + obj: UploadPartCopyRequest +) => any; +export declare const WriteGetObjectResponseRequestFilterSensitiveLog: ( + obj: WriteGetObjectResponseRequest +) => any;
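// Editor's note (reviewer annotation, not part of the vendored file): the
// paginator modules added below wrap the ContinuationToken loop. A sketch of
// listing every key under a prefix, assuming the hypothetical `s3` client:
import { paginateListObjectsV2 } from "@aws-sdk/client-s3";

const keys: string[] = [];
for await (const page of paginateListObjectsV2(
  { client: s3, pageSize: 1000 },
  { Bucket: "user-files", Prefix: "contacts/" }
)) {
  for (const obj of page.Contents ?? []) {
    if (obj.Key) keys.push(obj.Key);
  }
}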
/dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/pagination/Interfaces.d.ts @@ -0,0 +1,5 @@ +import { PaginationConfiguration } from "@smithy/types"; +import { S3Client } from "../S3Client"; +export interface S3PaginationConfiguration extends PaginationConfiguration { + client: S3Client; +} diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/pagination/ListBucketsPaginator.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/pagination/ListBucketsPaginator.d.ts new file mode 100644 index 00000000..55b27e67 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/pagination/ListBucketsPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListBucketsCommandInput, + ListBucketsCommandOutput, +} from "../commands/ListBucketsCommand"; +import { S3PaginationConfiguration } from "./Interfaces"; +export declare const paginateListBuckets: ( + config: S3PaginationConfiguration, + input: ListBucketsCommandInput, + ...rest: any[] +) => Paginator; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/pagination/ListDirectoryBucketsPaginator.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/pagination/ListDirectoryBucketsPaginator.d.ts new file mode 100644 index 00000000..30dc9d90 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/pagination/ListDirectoryBucketsPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListDirectoryBucketsCommandInput, + ListDirectoryBucketsCommandOutput, +} from "../commands/ListDirectoryBucketsCommand"; +import { S3PaginationConfiguration } from "./Interfaces"; +export declare const paginateListDirectoryBuckets: ( + config: S3PaginationConfiguration, + input: ListDirectoryBucketsCommandInput, + ...rest: any[] +) => Paginator; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/pagination/ListObjectsV2Paginator.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/pagination/ListObjectsV2Paginator.d.ts new file mode 100644 index 00000000..84168dda --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/pagination/ListObjectsV2Paginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListObjectsV2CommandInput, + ListObjectsV2CommandOutput, +} from "../commands/ListObjectsV2Command"; +import { S3PaginationConfiguration } from "./Interfaces"; +export declare const paginateListObjectsV2: ( + config: S3PaginationConfiguration, + input: ListObjectsV2CommandInput, + ...rest: any[] +) => Paginator; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/pagination/ListPartsPaginator.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/pagination/ListPartsPaginator.d.ts new file mode 100644 index 00000000..b3585dbd --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/pagination/ListPartsPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListPartsCommandInput, + ListPartsCommandOutput, +} from "../commands/ListPartsCommand"; +import { S3PaginationConfiguration } from "./Interfaces"; +export declare const paginateListParts: ( + config: S3PaginationConfiguration, + input: ListPartsCommandInput, + ...rest: any[] +) => Paginator; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/pagination/index.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/pagination/index.d.ts new file mode 100644 index 00000000..9438ebe6 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/pagination/index.d.ts @@ -0,0 +1,5 @@ 
+export * from "./Interfaces"; +export * from "./ListBucketsPaginator"; +export * from "./ListDirectoryBucketsPaginator"; +export * from "./ListObjectsV2Paginator"; +export * from "./ListPartsPaginator"; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/protocols/Aws_restXml.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/protocols/Aws_restXml.d.ts new file mode 100644 index 00000000..f7ba61e9 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/protocols/Aws_restXml.d.ts @@ -0,0 +1,1185 @@ +import { + HttpRequest as __HttpRequest, + HttpResponse as __HttpResponse, +} from "@smithy/protocol-http"; +import { + EventStreamSerdeContext as __EventStreamSerdeContext, + SdkStreamSerdeContext as __SdkStreamSerdeContext, + SerdeContext as __SerdeContext, +} from "@smithy/types"; +import { + AbortMultipartUploadCommandInput, + AbortMultipartUploadCommandOutput, +} from "../commands/AbortMultipartUploadCommand"; +import { + CompleteMultipartUploadCommandInput, + CompleteMultipartUploadCommandOutput, +} from "../commands/CompleteMultipartUploadCommand"; +import { + CopyObjectCommandInput, + CopyObjectCommandOutput, +} from "../commands/CopyObjectCommand"; +import { + CreateBucketCommandInput, + CreateBucketCommandOutput, +} from "../commands/CreateBucketCommand"; +import { + CreateBucketMetadataTableConfigurationCommandInput, + CreateBucketMetadataTableConfigurationCommandOutput, +} from "../commands/CreateBucketMetadataTableConfigurationCommand"; +import { + CreateMultipartUploadCommandInput, + CreateMultipartUploadCommandOutput, +} from "../commands/CreateMultipartUploadCommand"; +import { + CreateSessionCommandInput, + CreateSessionCommandOutput, +} from "../commands/CreateSessionCommand"; +import { + DeleteBucketAnalyticsConfigurationCommandInput, + DeleteBucketAnalyticsConfigurationCommandOutput, +} from "../commands/DeleteBucketAnalyticsConfigurationCommand"; +import { + DeleteBucketCommandInput, + DeleteBucketCommandOutput, +} from "../commands/DeleteBucketCommand"; +import { + DeleteBucketCorsCommandInput, + DeleteBucketCorsCommandOutput, +} from "../commands/DeleteBucketCorsCommand"; +import { + DeleteBucketEncryptionCommandInput, + DeleteBucketEncryptionCommandOutput, +} from "../commands/DeleteBucketEncryptionCommand"; +import { + DeleteBucketIntelligentTieringConfigurationCommandInput, + DeleteBucketIntelligentTieringConfigurationCommandOutput, +} from "../commands/DeleteBucketIntelligentTieringConfigurationCommand"; +import { + DeleteBucketInventoryConfigurationCommandInput, + DeleteBucketInventoryConfigurationCommandOutput, +} from "../commands/DeleteBucketInventoryConfigurationCommand"; +import { + DeleteBucketLifecycleCommandInput, + DeleteBucketLifecycleCommandOutput, +} from "../commands/DeleteBucketLifecycleCommand"; +import { + DeleteBucketMetadataTableConfigurationCommandInput, + DeleteBucketMetadataTableConfigurationCommandOutput, +} from "../commands/DeleteBucketMetadataTableConfigurationCommand"; +import { + DeleteBucketMetricsConfigurationCommandInput, + DeleteBucketMetricsConfigurationCommandOutput, +} from "../commands/DeleteBucketMetricsConfigurationCommand"; +import { + DeleteBucketOwnershipControlsCommandInput, + DeleteBucketOwnershipControlsCommandOutput, +} from "../commands/DeleteBucketOwnershipControlsCommand"; +import { + DeleteBucketPolicyCommandInput, + DeleteBucketPolicyCommandOutput, +} from "../commands/DeleteBucketPolicyCommand"; +import { + DeleteBucketReplicationCommandInput, + DeleteBucketReplicationCommandOutput, +} from 
"../commands/DeleteBucketReplicationCommand"; +import { + DeleteBucketTaggingCommandInput, + DeleteBucketTaggingCommandOutput, +} from "../commands/DeleteBucketTaggingCommand"; +import { + DeleteBucketWebsiteCommandInput, + DeleteBucketWebsiteCommandOutput, +} from "../commands/DeleteBucketWebsiteCommand"; +import { + DeleteObjectCommandInput, + DeleteObjectCommandOutput, +} from "../commands/DeleteObjectCommand"; +import { + DeleteObjectsCommandInput, + DeleteObjectsCommandOutput, +} from "../commands/DeleteObjectsCommand"; +import { + DeleteObjectTaggingCommandInput, + DeleteObjectTaggingCommandOutput, +} from "../commands/DeleteObjectTaggingCommand"; +import { + DeletePublicAccessBlockCommandInput, + DeletePublicAccessBlockCommandOutput, +} from "../commands/DeletePublicAccessBlockCommand"; +import { + GetBucketAccelerateConfigurationCommandInput, + GetBucketAccelerateConfigurationCommandOutput, +} from "../commands/GetBucketAccelerateConfigurationCommand"; +import { + GetBucketAclCommandInput, + GetBucketAclCommandOutput, +} from "../commands/GetBucketAclCommand"; +import { + GetBucketAnalyticsConfigurationCommandInput, + GetBucketAnalyticsConfigurationCommandOutput, +} from "../commands/GetBucketAnalyticsConfigurationCommand"; +import { + GetBucketCorsCommandInput, + GetBucketCorsCommandOutput, +} from "../commands/GetBucketCorsCommand"; +import { + GetBucketEncryptionCommandInput, + GetBucketEncryptionCommandOutput, +} from "../commands/GetBucketEncryptionCommand"; +import { + GetBucketIntelligentTieringConfigurationCommandInput, + GetBucketIntelligentTieringConfigurationCommandOutput, +} from "../commands/GetBucketIntelligentTieringConfigurationCommand"; +import { + GetBucketInventoryConfigurationCommandInput, + GetBucketInventoryConfigurationCommandOutput, +} from "../commands/GetBucketInventoryConfigurationCommand"; +import { + GetBucketLifecycleConfigurationCommandInput, + GetBucketLifecycleConfigurationCommandOutput, +} from "../commands/GetBucketLifecycleConfigurationCommand"; +import { + GetBucketLocationCommandInput, + GetBucketLocationCommandOutput, +} from "../commands/GetBucketLocationCommand"; +import { + GetBucketLoggingCommandInput, + GetBucketLoggingCommandOutput, +} from "../commands/GetBucketLoggingCommand"; +import { + GetBucketMetadataTableConfigurationCommandInput, + GetBucketMetadataTableConfigurationCommandOutput, +} from "../commands/GetBucketMetadataTableConfigurationCommand"; +import { + GetBucketMetricsConfigurationCommandInput, + GetBucketMetricsConfigurationCommandOutput, +} from "../commands/GetBucketMetricsConfigurationCommand"; +import { + GetBucketNotificationConfigurationCommandInput, + GetBucketNotificationConfigurationCommandOutput, +} from "../commands/GetBucketNotificationConfigurationCommand"; +import { + GetBucketOwnershipControlsCommandInput, + GetBucketOwnershipControlsCommandOutput, +} from "../commands/GetBucketOwnershipControlsCommand"; +import { + GetBucketPolicyCommandInput, + GetBucketPolicyCommandOutput, +} from "../commands/GetBucketPolicyCommand"; +import { + GetBucketPolicyStatusCommandInput, + GetBucketPolicyStatusCommandOutput, +} from "../commands/GetBucketPolicyStatusCommand"; +import { + GetBucketReplicationCommandInput, + GetBucketReplicationCommandOutput, +} from "../commands/GetBucketReplicationCommand"; +import { + GetBucketRequestPaymentCommandInput, + GetBucketRequestPaymentCommandOutput, +} from "../commands/GetBucketRequestPaymentCommand"; +import { + GetBucketTaggingCommandInput, + GetBucketTaggingCommandOutput, +} from 
"../commands/GetBucketTaggingCommand"; +import { + GetBucketVersioningCommandInput, + GetBucketVersioningCommandOutput, +} from "../commands/GetBucketVersioningCommand"; +import { + GetBucketWebsiteCommandInput, + GetBucketWebsiteCommandOutput, +} from "../commands/GetBucketWebsiteCommand"; +import { + GetObjectAclCommandInput, + GetObjectAclCommandOutput, +} from "../commands/GetObjectAclCommand"; +import { + GetObjectAttributesCommandInput, + GetObjectAttributesCommandOutput, +} from "../commands/GetObjectAttributesCommand"; +import { + GetObjectCommandInput, + GetObjectCommandOutput, +} from "../commands/GetObjectCommand"; +import { + GetObjectLegalHoldCommandInput, + GetObjectLegalHoldCommandOutput, +} from "../commands/GetObjectLegalHoldCommand"; +import { + GetObjectLockConfigurationCommandInput, + GetObjectLockConfigurationCommandOutput, +} from "../commands/GetObjectLockConfigurationCommand"; +import { + GetObjectRetentionCommandInput, + GetObjectRetentionCommandOutput, +} from "../commands/GetObjectRetentionCommand"; +import { + GetObjectTaggingCommandInput, + GetObjectTaggingCommandOutput, +} from "../commands/GetObjectTaggingCommand"; +import { + GetObjectTorrentCommandInput, + GetObjectTorrentCommandOutput, +} from "../commands/GetObjectTorrentCommand"; +import { + GetPublicAccessBlockCommandInput, + GetPublicAccessBlockCommandOutput, +} from "../commands/GetPublicAccessBlockCommand"; +import { + HeadBucketCommandInput, + HeadBucketCommandOutput, +} from "../commands/HeadBucketCommand"; +import { + HeadObjectCommandInput, + HeadObjectCommandOutput, +} from "../commands/HeadObjectCommand"; +import { + ListBucketAnalyticsConfigurationsCommandInput, + ListBucketAnalyticsConfigurationsCommandOutput, +} from "../commands/ListBucketAnalyticsConfigurationsCommand"; +import { + ListBucketIntelligentTieringConfigurationsCommandInput, + ListBucketIntelligentTieringConfigurationsCommandOutput, +} from "../commands/ListBucketIntelligentTieringConfigurationsCommand"; +import { + ListBucketInventoryConfigurationsCommandInput, + ListBucketInventoryConfigurationsCommandOutput, +} from "../commands/ListBucketInventoryConfigurationsCommand"; +import { + ListBucketMetricsConfigurationsCommandInput, + ListBucketMetricsConfigurationsCommandOutput, +} from "../commands/ListBucketMetricsConfigurationsCommand"; +import { + ListBucketsCommandInput, + ListBucketsCommandOutput, +} from "../commands/ListBucketsCommand"; +import { + ListDirectoryBucketsCommandInput, + ListDirectoryBucketsCommandOutput, +} from "../commands/ListDirectoryBucketsCommand"; +import { + ListMultipartUploadsCommandInput, + ListMultipartUploadsCommandOutput, +} from "../commands/ListMultipartUploadsCommand"; +import { + ListObjectsCommandInput, + ListObjectsCommandOutput, +} from "../commands/ListObjectsCommand"; +import { + ListObjectsV2CommandInput, + ListObjectsV2CommandOutput, +} from "../commands/ListObjectsV2Command"; +import { + ListObjectVersionsCommandInput, + ListObjectVersionsCommandOutput, +} from "../commands/ListObjectVersionsCommand"; +import { + ListPartsCommandInput, + ListPartsCommandOutput, +} from "../commands/ListPartsCommand"; +import { + PutBucketAccelerateConfigurationCommandInput, + PutBucketAccelerateConfigurationCommandOutput, +} from "../commands/PutBucketAccelerateConfigurationCommand"; +import { + PutBucketAclCommandInput, + PutBucketAclCommandOutput, +} from "../commands/PutBucketAclCommand"; +import { + PutBucketAnalyticsConfigurationCommandInput, + PutBucketAnalyticsConfigurationCommandOutput, +} 
from "../commands/PutBucketAnalyticsConfigurationCommand"; +import { + PutBucketCorsCommandInput, + PutBucketCorsCommandOutput, +} from "../commands/PutBucketCorsCommand"; +import { + PutBucketEncryptionCommandInput, + PutBucketEncryptionCommandOutput, +} from "../commands/PutBucketEncryptionCommand"; +import { + PutBucketIntelligentTieringConfigurationCommandInput, + PutBucketIntelligentTieringConfigurationCommandOutput, +} from "../commands/PutBucketIntelligentTieringConfigurationCommand"; +import { + PutBucketInventoryConfigurationCommandInput, + PutBucketInventoryConfigurationCommandOutput, +} from "../commands/PutBucketInventoryConfigurationCommand"; +import { + PutBucketLifecycleConfigurationCommandInput, + PutBucketLifecycleConfigurationCommandOutput, +} from "../commands/PutBucketLifecycleConfigurationCommand"; +import { + PutBucketLoggingCommandInput, + PutBucketLoggingCommandOutput, +} from "../commands/PutBucketLoggingCommand"; +import { + PutBucketMetricsConfigurationCommandInput, + PutBucketMetricsConfigurationCommandOutput, +} from "../commands/PutBucketMetricsConfigurationCommand"; +import { + PutBucketNotificationConfigurationCommandInput, + PutBucketNotificationConfigurationCommandOutput, +} from "../commands/PutBucketNotificationConfigurationCommand"; +import { + PutBucketOwnershipControlsCommandInput, + PutBucketOwnershipControlsCommandOutput, +} from "../commands/PutBucketOwnershipControlsCommand"; +import { + PutBucketPolicyCommandInput, + PutBucketPolicyCommandOutput, +} from "../commands/PutBucketPolicyCommand"; +import { + PutBucketReplicationCommandInput, + PutBucketReplicationCommandOutput, +} from "../commands/PutBucketReplicationCommand"; +import { + PutBucketRequestPaymentCommandInput, + PutBucketRequestPaymentCommandOutput, +} from "../commands/PutBucketRequestPaymentCommand"; +import { + PutBucketTaggingCommandInput, + PutBucketTaggingCommandOutput, +} from "../commands/PutBucketTaggingCommand"; +import { + PutBucketVersioningCommandInput, + PutBucketVersioningCommandOutput, +} from "../commands/PutBucketVersioningCommand"; +import { + PutBucketWebsiteCommandInput, + PutBucketWebsiteCommandOutput, +} from "../commands/PutBucketWebsiteCommand"; +import { + PutObjectAclCommandInput, + PutObjectAclCommandOutput, +} from "../commands/PutObjectAclCommand"; +import { + PutObjectCommandInput, + PutObjectCommandOutput, +} from "../commands/PutObjectCommand"; +import { + PutObjectLegalHoldCommandInput, + PutObjectLegalHoldCommandOutput, +} from "../commands/PutObjectLegalHoldCommand"; +import { + PutObjectLockConfigurationCommandInput, + PutObjectLockConfigurationCommandOutput, +} from "../commands/PutObjectLockConfigurationCommand"; +import { + PutObjectRetentionCommandInput, + PutObjectRetentionCommandOutput, +} from "../commands/PutObjectRetentionCommand"; +import { + PutObjectTaggingCommandInput, + PutObjectTaggingCommandOutput, +} from "../commands/PutObjectTaggingCommand"; +import { + PutPublicAccessBlockCommandInput, + PutPublicAccessBlockCommandOutput, +} from "../commands/PutPublicAccessBlockCommand"; +import { + RestoreObjectCommandInput, + RestoreObjectCommandOutput, +} from "../commands/RestoreObjectCommand"; +import { + SelectObjectContentCommandInput, + SelectObjectContentCommandOutput, +} from "../commands/SelectObjectContentCommand"; +import { + UploadPartCommandInput, + UploadPartCommandOutput, +} from "../commands/UploadPartCommand"; +import { + UploadPartCopyCommandInput, + UploadPartCopyCommandOutput, +} from "../commands/UploadPartCopyCommand"; 
+import { + WriteGetObjectResponseCommandInput, + WriteGetObjectResponseCommandOutput, +} from "../commands/WriteGetObjectResponseCommand"; +export declare const se_AbortMultipartUploadCommand: ( + input: AbortMultipartUploadCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_CompleteMultipartUploadCommand: ( + input: CompleteMultipartUploadCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_CopyObjectCommand: ( + input: CopyObjectCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_CreateBucketCommand: ( + input: CreateBucketCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_CreateBucketMetadataTableConfigurationCommand: ( + input: CreateBucketMetadataTableConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_CreateMultipartUploadCommand: ( + input: CreateMultipartUploadCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_CreateSessionCommand: ( + input: CreateSessionCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteBucketCommand: ( + input: DeleteBucketCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteBucketAnalyticsConfigurationCommand: ( + input: DeleteBucketAnalyticsConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteBucketCorsCommand: ( + input: DeleteBucketCorsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteBucketEncryptionCommand: ( + input: DeleteBucketEncryptionCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteBucketIntelligentTieringConfigurationCommand: ( + input: DeleteBucketIntelligentTieringConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteBucketInventoryConfigurationCommand: ( + input: DeleteBucketInventoryConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteBucketLifecycleCommand: ( + input: DeleteBucketLifecycleCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteBucketMetadataTableConfigurationCommand: ( + input: DeleteBucketMetadataTableConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteBucketMetricsConfigurationCommand: ( + input: DeleteBucketMetricsConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteBucketOwnershipControlsCommand: ( + input: DeleteBucketOwnershipControlsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteBucketPolicyCommand: ( + input: DeleteBucketPolicyCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteBucketReplicationCommand: ( + input: DeleteBucketReplicationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteBucketTaggingCommand: ( + input: DeleteBucketTaggingCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteBucketWebsiteCommand: ( + input: DeleteBucketWebsiteCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; 
+export declare const se_DeleteObjectCommand: ( + input: DeleteObjectCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteObjectsCommand: ( + input: DeleteObjectsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteObjectTaggingCommand: ( + input: DeleteObjectTaggingCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeletePublicAccessBlockCommand: ( + input: DeletePublicAccessBlockCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetBucketAccelerateConfigurationCommand: ( + input: GetBucketAccelerateConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetBucketAclCommand: ( + input: GetBucketAclCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetBucketAnalyticsConfigurationCommand: ( + input: GetBucketAnalyticsConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetBucketCorsCommand: ( + input: GetBucketCorsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetBucketEncryptionCommand: ( + input: GetBucketEncryptionCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetBucketIntelligentTieringConfigurationCommand: ( + input: GetBucketIntelligentTieringConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetBucketInventoryConfigurationCommand: ( + input: GetBucketInventoryConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetBucketLifecycleConfigurationCommand: ( + input: GetBucketLifecycleConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetBucketLocationCommand: ( + input: GetBucketLocationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetBucketLoggingCommand: ( + input: GetBucketLoggingCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetBucketMetadataTableConfigurationCommand: ( + input: GetBucketMetadataTableConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetBucketMetricsConfigurationCommand: ( + input: GetBucketMetricsConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetBucketNotificationConfigurationCommand: ( + input: GetBucketNotificationConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetBucketOwnershipControlsCommand: ( + input: GetBucketOwnershipControlsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetBucketPolicyCommand: ( + input: GetBucketPolicyCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetBucketPolicyStatusCommand: ( + input: GetBucketPolicyStatusCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetBucketReplicationCommand: ( + input: GetBucketReplicationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetBucketRequestPaymentCommand: ( + input: GetBucketRequestPaymentCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; 
+export declare const se_GetBucketTaggingCommand: ( + input: GetBucketTaggingCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetBucketVersioningCommand: ( + input: GetBucketVersioningCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetBucketWebsiteCommand: ( + input: GetBucketWebsiteCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetObjectCommand: ( + input: GetObjectCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetObjectAclCommand: ( + input: GetObjectAclCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetObjectAttributesCommand: ( + input: GetObjectAttributesCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetObjectLegalHoldCommand: ( + input: GetObjectLegalHoldCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetObjectLockConfigurationCommand: ( + input: GetObjectLockConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetObjectRetentionCommand: ( + input: GetObjectRetentionCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetObjectTaggingCommand: ( + input: GetObjectTaggingCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetObjectTorrentCommand: ( + input: GetObjectTorrentCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetPublicAccessBlockCommand: ( + input: GetPublicAccessBlockCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_HeadBucketCommand: ( + input: HeadBucketCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_HeadObjectCommand: ( + input: HeadObjectCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListBucketAnalyticsConfigurationsCommand: ( + input: ListBucketAnalyticsConfigurationsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListBucketIntelligentTieringConfigurationsCommand: ( + input: ListBucketIntelligentTieringConfigurationsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListBucketInventoryConfigurationsCommand: ( + input: ListBucketInventoryConfigurationsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListBucketMetricsConfigurationsCommand: ( + input: ListBucketMetricsConfigurationsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListBucketsCommand: ( + input: ListBucketsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListDirectoryBucketsCommand: ( + input: ListDirectoryBucketsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListMultipartUploadsCommand: ( + input: ListMultipartUploadsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListObjectsCommand: ( + input: ListObjectsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListObjectsV2Command: ( + input: ListObjectsV2CommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const 
se_ListObjectVersionsCommand: ( + input: ListObjectVersionsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListPartsCommand: ( + input: ListPartsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutBucketAccelerateConfigurationCommand: ( + input: PutBucketAccelerateConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutBucketAclCommand: ( + input: PutBucketAclCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutBucketAnalyticsConfigurationCommand: ( + input: PutBucketAnalyticsConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutBucketCorsCommand: ( + input: PutBucketCorsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutBucketEncryptionCommand: ( + input: PutBucketEncryptionCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutBucketIntelligentTieringConfigurationCommand: ( + input: PutBucketIntelligentTieringConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutBucketInventoryConfigurationCommand: ( + input: PutBucketInventoryConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutBucketLifecycleConfigurationCommand: ( + input: PutBucketLifecycleConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutBucketLoggingCommand: ( + input: PutBucketLoggingCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutBucketMetricsConfigurationCommand: ( + input: PutBucketMetricsConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutBucketNotificationConfigurationCommand: ( + input: PutBucketNotificationConfigurationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutBucketOwnershipControlsCommand: ( + input: PutBucketOwnershipControlsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutBucketPolicyCommand: ( + input: PutBucketPolicyCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutBucketReplicationCommand: ( + input: PutBucketReplicationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutBucketRequestPaymentCommand: ( + input: PutBucketRequestPaymentCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutBucketTaggingCommand: ( + input: PutBucketTaggingCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutBucketVersioningCommand: ( + input: PutBucketVersioningCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutBucketWebsiteCommand: ( + input: PutBucketWebsiteCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutObjectCommand: ( + input: PutObjectCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutObjectAclCommand: ( + input: PutObjectAclCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutObjectLegalHoldCommand: ( + input: PutObjectLegalHoldCommandInput, + 
context: __SerdeContext
+) => Promise<__HttpRequest>;
+export declare const se_PutObjectLockConfigurationCommand: (
+  input: PutObjectLockConfigurationCommandInput,
+  context: __SerdeContext
+) => Promise<__HttpRequest>;
+export declare const se_PutObjectRetentionCommand: (
+  input: PutObjectRetentionCommandInput,
+  context: __SerdeContext
+) => Promise<__HttpRequest>;
+export declare const se_PutObjectTaggingCommand: (
+  input: PutObjectTaggingCommandInput,
+  context: __SerdeContext
+) => Promise<__HttpRequest>;
+export declare const se_PutPublicAccessBlockCommand: (
+  input: PutPublicAccessBlockCommandInput,
+  context: __SerdeContext
+) => Promise<__HttpRequest>;
+export declare const se_RestoreObjectCommand: (
+  input: RestoreObjectCommandInput,
+  context: __SerdeContext
+) => Promise<__HttpRequest>;
+export declare const se_SelectObjectContentCommand: (
+  input: SelectObjectContentCommandInput,
+  context: __SerdeContext
+) => Promise<__HttpRequest>;
+export declare const se_UploadPartCommand: (
+  input: UploadPartCommandInput,
+  context: __SerdeContext
+) => Promise<__HttpRequest>;
+export declare const se_UploadPartCopyCommand: (
+  input: UploadPartCopyCommandInput,
+  context: __SerdeContext
+) => Promise<__HttpRequest>;
+export declare const se_WriteGetObjectResponseCommand: (
+  input: WriteGetObjectResponseCommandInput,
+  context: __SerdeContext
+) => Promise<__HttpRequest>;
+export declare const de_AbortMultipartUploadCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<AbortMultipartUploadCommandOutput>;
+export declare const de_CompleteMultipartUploadCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<CompleteMultipartUploadCommandOutput>;
+export declare const de_CopyObjectCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<CopyObjectCommandOutput>;
+export declare const de_CreateBucketCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<CreateBucketCommandOutput>;
+export declare const de_CreateBucketMetadataTableConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<CreateBucketMetadataTableConfigurationCommandOutput>;
+export declare const de_CreateMultipartUploadCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<CreateMultipartUploadCommandOutput>;
+export declare const de_CreateSessionCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<CreateSessionCommandOutput>;
+export declare const de_DeleteBucketCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<DeleteBucketCommandOutput>;
+export declare const de_DeleteBucketAnalyticsConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<DeleteBucketAnalyticsConfigurationCommandOutput>;
+export declare const de_DeleteBucketCorsCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<DeleteBucketCorsCommandOutput>;
+export declare const de_DeleteBucketEncryptionCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<DeleteBucketEncryptionCommandOutput>;
+export declare const de_DeleteBucketIntelligentTieringConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<DeleteBucketIntelligentTieringConfigurationCommandOutput>;
+export declare const de_DeleteBucketInventoryConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<DeleteBucketInventoryConfigurationCommandOutput>;
+export declare const de_DeleteBucketLifecycleCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<DeleteBucketLifecycleCommandOutput>;
+export declare const de_DeleteBucketMetadataTableConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<DeleteBucketMetadataTableConfigurationCommandOutput>;
+export declare const de_DeleteBucketMetricsConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<DeleteBucketMetricsConfigurationCommandOutput>;
+export declare const de_DeleteBucketOwnershipControlsCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<DeleteBucketOwnershipControlsCommandOutput>;
+export declare const de_DeleteBucketPolicyCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<DeleteBucketPolicyCommandOutput>;
+export declare const de_DeleteBucketReplicationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<DeleteBucketReplicationCommandOutput>;
+export declare const de_DeleteBucketTaggingCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<DeleteBucketTaggingCommandOutput>;
+export declare const de_DeleteBucketWebsiteCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<DeleteBucketWebsiteCommandOutput>;
+export declare const de_DeleteObjectCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<DeleteObjectCommandOutput>;
+export declare const de_DeleteObjectsCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<DeleteObjectsCommandOutput>;
+export declare const de_DeleteObjectTaggingCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<DeleteObjectTaggingCommandOutput>;
+export declare const de_DeletePublicAccessBlockCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<DeletePublicAccessBlockCommandOutput>;
+export declare const de_GetBucketAccelerateConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketAccelerateConfigurationCommandOutput>;
+export declare const de_GetBucketAclCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketAclCommandOutput>;
+export declare const de_GetBucketAnalyticsConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketAnalyticsConfigurationCommandOutput>;
+export declare const de_GetBucketCorsCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketCorsCommandOutput>;
+export declare const de_GetBucketEncryptionCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketEncryptionCommandOutput>;
+export declare const de_GetBucketIntelligentTieringConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketIntelligentTieringConfigurationCommandOutput>;
+export declare const de_GetBucketInventoryConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketInventoryConfigurationCommandOutput>;
+export declare const de_GetBucketLifecycleConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketLifecycleConfigurationCommandOutput>;
+export declare const de_GetBucketLocationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketLocationCommandOutput>;
+export declare const de_GetBucketLoggingCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketLoggingCommandOutput>;
+export declare const de_GetBucketMetadataTableConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketMetadataTableConfigurationCommandOutput>;
+export declare const de_GetBucketMetricsConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketMetricsConfigurationCommandOutput>;
+export declare const de_GetBucketNotificationConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketNotificationConfigurationCommandOutput>;
+export declare const de_GetBucketOwnershipControlsCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketOwnershipControlsCommandOutput>;
+export declare const de_GetBucketPolicyCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketPolicyCommandOutput>;
+export declare const de_GetBucketPolicyStatusCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketPolicyStatusCommandOutput>;
+export declare const de_GetBucketReplicationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketReplicationCommandOutput>;
+export declare const de_GetBucketRequestPaymentCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketRequestPaymentCommandOutput>;
+export declare const de_GetBucketTaggingCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketTaggingCommandOutput>;
+export declare const de_GetBucketVersioningCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketVersioningCommandOutput>;
+export declare const de_GetBucketWebsiteCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetBucketWebsiteCommandOutput>;
+export declare const de_GetObjectCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext & __SdkStreamSerdeContext
+) => Promise<GetObjectCommandOutput>;
+export declare const de_GetObjectAclCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetObjectAclCommandOutput>;
+export declare const de_GetObjectAttributesCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetObjectAttributesCommandOutput>;
+export declare const de_GetObjectLegalHoldCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetObjectLegalHoldCommandOutput>;
+export declare const de_GetObjectLockConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetObjectLockConfigurationCommandOutput>;
+export declare const de_GetObjectRetentionCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetObjectRetentionCommandOutput>;
+export declare const de_GetObjectTaggingCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetObjectTaggingCommandOutput>;
+export declare const de_GetObjectTorrentCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext & __SdkStreamSerdeContext
+) => Promise<GetObjectTorrentCommandOutput>;
+export declare const de_GetPublicAccessBlockCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<GetPublicAccessBlockCommandOutput>;
+export declare const de_HeadBucketCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<HeadBucketCommandOutput>;
+export declare const de_HeadObjectCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<HeadObjectCommandOutput>;
+export declare const de_ListBucketAnalyticsConfigurationsCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<ListBucketAnalyticsConfigurationsCommandOutput>;
+export declare const de_ListBucketIntelligentTieringConfigurationsCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<ListBucketIntelligentTieringConfigurationsCommandOutput>;
+export declare const de_ListBucketInventoryConfigurationsCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<ListBucketInventoryConfigurationsCommandOutput>;
+export declare const de_ListBucketMetricsConfigurationsCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<ListBucketMetricsConfigurationsCommandOutput>;
+export declare const de_ListBucketsCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<ListBucketsCommandOutput>;
+export declare const de_ListDirectoryBucketsCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<ListDirectoryBucketsCommandOutput>;
+export declare const de_ListMultipartUploadsCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<ListMultipartUploadsCommandOutput>;
+export declare const de_ListObjectsCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<ListObjectsCommandOutput>;
+export declare const de_ListObjectsV2Command: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<ListObjectsV2CommandOutput>;
+export declare const de_ListObjectVersionsCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<ListObjectVersionsCommandOutput>;
+export declare const de_ListPartsCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<ListPartsCommandOutput>;
+export declare const de_PutBucketAccelerateConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutBucketAccelerateConfigurationCommandOutput>;
+export declare const de_PutBucketAclCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutBucketAclCommandOutput>;
+export declare const de_PutBucketAnalyticsConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutBucketAnalyticsConfigurationCommandOutput>;
+export declare const de_PutBucketCorsCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutBucketCorsCommandOutput>;
+export declare const de_PutBucketEncryptionCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutBucketEncryptionCommandOutput>;
+export declare const de_PutBucketIntelligentTieringConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutBucketIntelligentTieringConfigurationCommandOutput>;
+export declare const de_PutBucketInventoryConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutBucketInventoryConfigurationCommandOutput>;
+export declare const de_PutBucketLifecycleConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutBucketLifecycleConfigurationCommandOutput>;
+export declare const de_PutBucketLoggingCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutBucketLoggingCommandOutput>;
+export declare const de_PutBucketMetricsConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutBucketMetricsConfigurationCommandOutput>;
+export declare const de_PutBucketNotificationConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutBucketNotificationConfigurationCommandOutput>;
+export declare const de_PutBucketOwnershipControlsCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutBucketOwnershipControlsCommandOutput>;
+export declare const de_PutBucketPolicyCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutBucketPolicyCommandOutput>;
+export declare const de_PutBucketReplicationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutBucketReplicationCommandOutput>;
+export declare const de_PutBucketRequestPaymentCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutBucketRequestPaymentCommandOutput>;
+export declare const de_PutBucketTaggingCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutBucketTaggingCommandOutput>;
+export declare const de_PutBucketVersioningCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutBucketVersioningCommandOutput>;
+export declare const de_PutBucketWebsiteCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutBucketWebsiteCommandOutput>;
+export declare const de_PutObjectCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutObjectCommandOutput>;
+export declare const de_PutObjectAclCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutObjectAclCommandOutput>;
+export declare const de_PutObjectLegalHoldCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutObjectLegalHoldCommandOutput>;
+export declare const de_PutObjectLockConfigurationCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutObjectLockConfigurationCommandOutput>;
+export declare const de_PutObjectRetentionCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutObjectRetentionCommandOutput>;
+export declare const de_PutObjectTaggingCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutObjectTaggingCommandOutput>;
+export declare const de_PutPublicAccessBlockCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<PutPublicAccessBlockCommandOutput>;
+export declare const de_RestoreObjectCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<RestoreObjectCommandOutput>;
+export declare const de_SelectObjectContentCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext & __EventStreamSerdeContext
+) => Promise<SelectObjectContentCommandOutput>;
+export declare const de_UploadPartCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<UploadPartCommandOutput>;
+export declare const de_UploadPartCopyCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<UploadPartCopyCommandOutput>;
+export declare const de_WriteGetObjectResponseCommand: (
+  output: __HttpResponse,
+  context: __SerdeContext
+) => Promise<WriteGetObjectResponseCommandOutput>;
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/runtimeConfig.browser.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/runtimeConfig.browser.d.ts
new file mode 100644
index 00000000..198f12a1
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/runtimeConfig.browser.d.ts
@@ -0,0 +1,152 @@
+import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler";
+import { S3ClientConfig } from "./S3Client";
+export declare const getRuntimeConfig: (config: S3ClientConfig) => {
+  runtime: string;
+  defaultsMode: import("@smithy/types").Provider<
+    import("@smithy/smithy-client").ResolvedDefaultsMode
+  >;
+  bodyLengthChecker: import("@smithy/types").BodyLengthCalculator;
+  credentialDefaultProvider:
+    | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider)
+    | ((
+        _: unknown
+      ) => () => Promise<never>);
+  defaultUserAgentProvider: (
+    config?:
+      | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved
+      | undefined
+  ) => Promise<import("@smithy/types").UserAgent>;
+  eventStreamSerdeProvider: import("@smithy/types").EventStreamSerdeProvider;
+  maxAttempts: number | import("@smithy/types").Provider<number>;
+  md5: import("@smithy/types").HashConstructor;
+  region: string | import("@smithy/types").Provider<string>;
+  requestHandler:
+    | import("@smithy/protocol-http").HttpHandler
+    | RequestHandler;
+  retryMode: string | import("@smithy/types").Provider<string>;
+  sha1: import("@smithy/types").HashConstructor;
+  sha256: import("@smithy/types").HashConstructor;
+  streamCollector: import("@smithy/types").StreamCollector;
+  streamHasher:
+    | import("@smithy/types").StreamHasher<Blob>
+    | import("@smithy/types").StreamHasher<import("stream").Readable>;
+  useDualstackEndpoint: boolean | import("@smithy/types").Provider<boolean>;
+  useFipsEndpoint: boolean | import("@smithy/types").Provider<boolean>;
+  apiVersion: string;
+  cacheMiddleware?: boolean | undefined;
+  urlParser: import("@smithy/types").UrlParser;
+  base64Decoder: import("@smithy/types").Decoder;
+  base64Encoder: (_input: string | Uint8Array) => string;
+  utf8Decoder: import("@smithy/types").Decoder;
+  utf8Encoder: (input: string | Uint8Array) => string;
+  disableHostPrefix: boolean;
+  serviceId: string;
+  profile?: string | undefined;
+  getAwsChunkedEncodingStream:
+    | import("@smithy/types").GetAwsChunkedEncodingStream
+    | import("@smithy/types").GetAwsChunkedEncodingStream<
+        import("stream").Readable
+      >;
+  logger: import("@smithy/types").Logger;
+  extensions: import("./runtimeExtensions").RuntimeExtension[];
+  signingEscapePath: boolean;
+  useArnRegion: boolean | import("@smithy/types").Provider<boolean>;
+  sdkStreamMixin: import("@smithy/types").SdkStreamMixinInjector;
+  customUserAgent?: string | import("@smithy/types").UserAgent | undefined;
+  userAgentAppId?:
+    | string
+    | import("@smithy/types").Provider<string | undefined>
+    | undefined;
+  requestChecksumCalculation?:
+    | import("@aws-sdk/middleware-flexible-checksums").RequestChecksumCalculation
+    | import("@smithy/types").Provider<
+        import("@aws-sdk/middleware-flexible-checksums").RequestChecksumCalculation
+      >
+    | undefined;
+  responseChecksumValidation?:
+    | import("@aws-sdk/middleware-flexible-checksums").ResponseChecksumValidation
+    | import("@smithy/types").Provider<
+        import("@aws-sdk/middleware-flexible-checksums").ResponseChecksumValidation
+      >
+    | undefined;
+  requestStreamBufferSize?: number | false | undefined;
+  retryStrategy?:
+    | import("@smithy/types").RetryStrategy
+    | import("@smithy/types").RetryStrategyV2
+    | undefined;
+  endpoint?:
+    | ((
+        | string
+        | import("@smithy/types").Endpoint
+        | import("@smithy/types").Provider<import("@smithy/types").Endpoint>
+        | import("@smithy/types").EndpointV2
+        | import("@smithy/types").Provider<import("@smithy/types").EndpointV2>
+      ) &
+        (
+          | string
+          | import("@smithy/types").Provider<string>
+          | import("@smithy/types").Endpoint
+          | import("@smithy/types").Provider<import("@smithy/types").Endpoint>
+          | import("@smithy/types").EndpointV2
+          | import("@smithy/types").Provider<import("@smithy/types").EndpointV2>
+        ))
+    | undefined;
+  endpointProvider: (
+    endpointParams: import("./endpoint/EndpointParameters").EndpointParameters,
+    context?: {
+      logger?: import("@smithy/types").Logger | undefined;
+    }
+  ) => import("@smithy/types").EndpointV2;
+  tls?: boolean | undefined;
+  serviceConfiguredEndpoint?: undefined;
+  authSchemePreference?:
+    | string[]
+    | import("@smithy/types").Provider<string[]>
+    | undefined;
+  httpAuthSchemes: import("@smithy/types").HttpAuthScheme[];
+  httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").S3HttpAuthSchemeProvider;
+  credentials?:
+    | import("@smithy/types").AwsCredentialIdentity
+    | import("@smithy/types").AwsCredentialIdentityProvider
+    | undefined;
+  signer?:
+    | import("@smithy/types").RequestSigner
+    | ((
+        authScheme?: import("@smithy/types").AuthScheme | undefined
+      ) => Promise<import("@smithy/types").RequestSigner>)
+    | undefined;
+  systemClockOffset?: number | undefined;
+  signingRegion?: string | undefined;
+  signerConstructor:
+    | (new (
+        options: import("@smithy/signature-v4").SignatureV4Init &
+          import("@smithy/signature-v4").SignatureV4CryptoInit
+      ) => import("@smithy/types").RequestSigner)
+    | typeof import("@aws-sdk/signature-v4-multi-region").SignatureV4MultiRegion;
+  sigv4aSigningRegionSet?:
+    | string[]
+    | import("@smithy/types").Provider<string[]>
+    | undefined;
+  forcePathStyle?:
+    | (boolean & (boolean | import("@smithy/types").Provider<boolean>))
+    | undefined;
+  useAccelerateEndpoint?:
+    | (boolean & (boolean | import("@smithy/types").Provider<boolean>))
+    | undefined;
+  disableMultiregionAccessPoints?:
+    | (boolean & (boolean | import("@smithy/types").Provider<boolean>))
+    | undefined;
+  followRegionRedirects?: boolean | undefined;
+  s3ExpressIdentityProvider?:
+    | import("@aws-sdk/middleware-sdk-s3").S3ExpressIdentityProvider
+    | undefined;
+  bucketEndpoint?: boolean | undefined;
+  useGlobalEndpoint?:
+    | boolean
+    | import("@smithy/types").Provider<boolean>
+    | undefined;
+  disableS3ExpressSessionAuth?:
+    | boolean
+    | import("@smithy/types").Provider<boolean>
+    | undefined;
+};
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/runtimeConfig.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/runtimeConfig.d.ts
new file mode 100644
index 00000000..4c496929
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/runtimeConfig.d.ts
@@ -0,0 +1,151 @@
+import { HashConstructor as __HashConstructor } from "@aws-sdk/types";
+import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler";
+import { S3ClientConfig } from "./S3Client";
+export declare const getRuntimeConfig: (config: S3ClientConfig) => {
+  runtime: string;
+  defaultsMode: import("@aws-sdk/types").Provider<
+    import("@smithy/smithy-client").ResolvedDefaultsMode
+  >;
+  authSchemePreference: string[] | import("@aws-sdk/types").Provider<string[]>;
+  bodyLengthChecker: import("@aws-sdk/types").BodyLengthCalculator;
+  credentialDefaultProvider:
+    | ((input: any) => import("@aws-sdk/types").AwsCredentialIdentityProvider)
+    | ((
+        init?:
+          | import("@aws-sdk/credential-provider-node").DefaultProviderInit
+          | undefined
+      ) => import("@aws-sdk/types").MemoizedProvider<
+        import("@aws-sdk/types").AwsCredentialIdentity
+      >);
+  defaultUserAgentProvider: (
+    config?:
+      | import("@aws-sdk/util-user-agent-node").PreviouslyResolved
+      | undefined
+  ) => Promise<import("@aws-sdk/types").UserAgent>;
+  disableS3ExpressSessionAuth:
+    | boolean
+    | import("@aws-sdk/types").Provider<boolean>;
+  eventStreamSerdeProvider: import("@aws-sdk/types").EventStreamSerdeProvider;
+  maxAttempts: number | import("@aws-sdk/types").Provider<number>;
+  md5: __HashConstructor;
+  region: string | import("@aws-sdk/types").Provider<string>;
+  requestChecksumCalculation:
+    | import("@aws-sdk/middleware-flexible-checksums").RequestChecksumCalculation
+    | import("@aws-sdk/types").Provider<
+        import("@aws-sdk/middleware-flexible-checksums").RequestChecksumCalculation
+      >;
+  requestHandler:
+    | RequestHandler
+    | import("@smithy/protocol-http").HttpHandler;
+  responseChecksumValidation:
+    | import("@aws-sdk/middleware-flexible-checksums").ResponseChecksumValidation
+    | import("@aws-sdk/types").Provider<
+        import("@aws-sdk/middleware-flexible-checksums").ResponseChecksumValidation
+      >;
+  retryMode: string | import("@aws-sdk/types").Provider<string>;
+  sha1: __HashConstructor;
+  sha256: __HashConstructor;
+  sigv4aSigningRegionSet:
+    | string[]
+    | import("@aws-sdk/types").Provider<string[]>;
+  streamCollector: import("@aws-sdk/types").StreamCollector;
+  streamHasher:
+    | import("@aws-sdk/types").StreamHasher<import("stream").Readable>
+    | import("@aws-sdk/types").StreamHasher<Blob>;
+  useArnRegion: boolean | import("@aws-sdk/types").Provider<boolean>;
+  useDualstackEndpoint: boolean | import("@aws-sdk/types").Provider<boolean>;
+  useFipsEndpoint: boolean | import("@aws-sdk/types").Provider<boolean>;
+  userAgentAppId:
+    | string
+    | import("@aws-sdk/types").Provider<string | undefined>;
+  apiVersion: string;
+  cacheMiddleware?: boolean | undefined;
+  urlParser: import("@aws-sdk/types").UrlParser;
+  base64Decoder: import("@aws-sdk/types").Decoder;
+  base64Encoder: (_input: string | Uint8Array) => string;
+  utf8Decoder: import("@aws-sdk/types").Decoder;
+  utf8Encoder: (input: string | Uint8Array) => string;
+  disableHostPrefix: boolean;
+  serviceId: string;
+  profile?: string | undefined;
+  getAwsChunkedEncodingStream:
+    | import("@aws-sdk/types").GetAwsChunkedEncodingStream
+    | import("@aws-sdk/types").GetAwsChunkedEncodingStream<
+        import("stream").Readable
+      >;
+  logger: import("@aws-sdk/types").Logger;
+  extensions: import("./runtimeExtensions").RuntimeExtension[];
+  signingEscapePath: boolean;
+  sdkStreamMixin: import("@aws-sdk/types").SdkStreamMixinInjector;
+  customUserAgent?: string | import("@aws-sdk/types").UserAgent | undefined;
+  requestStreamBufferSize?: number | false | undefined;
+  retryStrategy?:
+    | import("@aws-sdk/types").RetryStrategy
+    | import("@aws-sdk/types").RetryStrategyV2
+    | undefined;
+  endpoint?:
+    | ((
+        | string
+        | import("@aws-sdk/types").Endpoint
+        | import("@aws-sdk/types").Provider<import("@aws-sdk/types").Endpoint>
+        | import("@aws-sdk/types").EndpointV2
+        | import("@aws-sdk/types").Provider<
+            import("@aws-sdk/types").EndpointV2
+          >
+      ) &
+        (
+          | string
+          | import("@aws-sdk/types").Provider<string>
+          | import("@aws-sdk/types").Endpoint
+          | import("@aws-sdk/types").Provider<import("@aws-sdk/types").Endpoint>
+          | import("@aws-sdk/types").EndpointV2
+          | import("@aws-sdk/types").Provider<
+              import("@aws-sdk/types").EndpointV2
+            >
+        ))
+    | undefined;
+  endpointProvider: (
+    endpointParams: import("./endpoint/EndpointParameters").EndpointParameters,
+    context?: {
+      logger?: import("@aws-sdk/types").Logger | undefined;
+    }
+  ) => import("@aws-sdk/types").EndpointV2;
+  tls?: boolean | undefined;
+  serviceConfiguredEndpoint?: undefined;
+  httpAuthSchemes: import("@smithy/types").HttpAuthScheme[];
+  httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").S3HttpAuthSchemeProvider;
+  credentials?:
+    | import("@aws-sdk/types").AwsCredentialIdentity
+    | import("@aws-sdk/types").AwsCredentialIdentityProvider
+    | undefined;
+  signer?:
+    | import("@aws-sdk/types").RequestSigner
+    | ((
+        authScheme?: import("@aws-sdk/types").AuthScheme | undefined
+      ) => Promise<import("@aws-sdk/types").RequestSigner>)
+    | undefined;
+  systemClockOffset?: number | undefined;
+  signingRegion?: string | undefined;
+  signerConstructor:
+    | (new (
+        options: import("@smithy/signature-v4").SignatureV4Init &
+          import("@smithy/signature-v4").SignatureV4CryptoInit
+      ) => import("@aws-sdk/types").RequestSigner)
+    | typeof import("@aws-sdk/signature-v4-multi-region").SignatureV4MultiRegion;
+  forcePathStyle?:
+    | (boolean & (boolean | import("@aws-sdk/types").Provider<boolean>))
+    | undefined;
+  useAccelerateEndpoint?:
+    | (boolean & (boolean | import("@aws-sdk/types").Provider<boolean>))
+    | undefined;
+  disableMultiregionAccessPoints?:
+    | (boolean & (boolean | import("@aws-sdk/types").Provider<boolean>))
+    | undefined;
+  followRegionRedirects?: boolean | undefined;
+  s3ExpressIdentityProvider?:
+    | import("@aws-sdk/middleware-sdk-s3").S3ExpressIdentityProvider
+    | undefined;
+  bucketEndpoint?: boolean | undefined;
+  useGlobalEndpoint?:
+    | boolean
+    | import("@aws-sdk/types").Provider<boolean>
+    | undefined;
+};
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/runtimeConfig.native.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/runtimeConfig.native.d.ts
new file mode 100644
index 00000000..9ceac98f
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/runtimeConfig.native.d.ts
@@ -0,0 +1,156 @@
+import { S3ClientConfig } from "./S3Client";
+export declare const getRuntimeConfig: (config: S3ClientConfig) => {
+  runtime: string;
+  sha256: import("@smithy/types").HashConstructor;
+  requestHandler:
+    | import("@smithy/types").NodeHttpHandlerOptions
+    | import("@smithy/types").FetchHttpHandlerOptions
+    | Record<string, unknown>
+    | import("@smithy/protocol-http").HttpHandler
+    | import("@smithy/fetch-http-handler").FetchHttpHandler;
+  apiVersion: string;
+  cacheMiddleware?: boolean | undefined;
+  urlParser: import("@smithy/types").UrlParser;
+  bodyLengthChecker: import("@smithy/types").BodyLengthCalculator;
+  streamCollector: import("@smithy/types").StreamCollector;
+  base64Decoder: import("@smithy/types").Decoder;
+  base64Encoder: (_input: string | Uint8Array) => string;
+  utf8Decoder: import("@smithy/types").Decoder;
+  utf8Encoder: (input: string | Uint8Array) => string;
+  disableHostPrefix: boolean;
+  serviceId: string;
+  useDualstackEndpoint: boolean | import("@smithy/types").Provider<boolean>;
+  useFipsEndpoint: boolean | import("@smithy/types").Provider<boolean>;
+  region: string | import("@smithy/types").Provider<string>;
+  profile?: string | undefined;
+  defaultUserAgentProvider: (
+    config?:
+      | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved
+      | undefined
+  ) => Promise<import("@smithy/types").UserAgent>;
+  streamHasher:
+    | import("@smithy/types").StreamHasher<Blob>
+    | import("@smithy/types").StreamHasher<import("stream").Readable>;
+  md5: import("@smithy/types").HashConstructor;
+  sha1: import("@smithy/types").HashConstructor;
+  getAwsChunkedEncodingStream:
+    | import("@smithy/types").GetAwsChunkedEncodingStream
+    | import("@smithy/types").GetAwsChunkedEncodingStream<
+        import("stream").Readable
+      >;
+  credentialDefaultProvider:
+    | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider)
+    | ((
+        _: unknown
+      ) => () => Promise<never>);
+  maxAttempts: number | import("@smithy/types").Provider<number>;
+  retryMode: string | import("@smithy/types").Provider<string>;
+  logger: import("@smithy/types").Logger;
+  extensions: import("./runtimeExtensions").RuntimeExtension[];
+  eventStreamSerdeProvider: import("@smithy/types").EventStreamSerdeProvider;
+  defaultsMode:
+    | import("@smithy/smithy-client").DefaultsMode
+    | import("@smithy/types").Provider<
+        import("@smithy/smithy-client").DefaultsMode
+      >;
+  signingEscapePath: boolean;
+  useArnRegion: boolean | import("@smithy/types").Provider<boolean>;
+  sdkStreamMixin: import("@smithy/types").SdkStreamMixinInjector;
+  customUserAgent?: string | import("@smithy/types").UserAgent | undefined;
+  userAgentAppId?:
+    | string
+    | import("@smithy/types").Provider<string | undefined>
+    | undefined;
+  requestChecksumCalculation?:
+    | import("@aws-sdk/middleware-flexible-checksums").RequestChecksumCalculation
+    | import("@smithy/types").Provider<
+        import("@aws-sdk/middleware-flexible-checksums").RequestChecksumCalculation
+      >
+    | undefined;
+  responseChecksumValidation?:
+    | import("@aws-sdk/middleware-flexible-checksums").ResponseChecksumValidation
+    | import("@smithy/types").Provider<
+        import("@aws-sdk/middleware-flexible-checksums").ResponseChecksumValidation
+      >
+    | undefined;
+  requestStreamBufferSize?: number | false | undefined;
+  retryStrategy?:
+    | import("@smithy/types").RetryStrategy
+    | import("@smithy/types").RetryStrategyV2
+    | undefined;
+  endpoint?:
+    | ((
+        | string
+        | import("@smithy/types").Endpoint
+        | import("@smithy/types").Provider<import("@smithy/types").Endpoint>
+        | import("@smithy/types").EndpointV2
+        | import("@smithy/types").Provider<import("@smithy/types").EndpointV2>
+      ) &
+        (
+          | string
+          | import("@smithy/types").Provider<string>
+          | import("@smithy/types").Endpoint
+          | import("@smithy/types").Provider<import("@smithy/types").Endpoint>
+          | import("@smithy/types").EndpointV2
+          | import("@smithy/types").Provider<import("@smithy/types").EndpointV2>
+        ))
+    | undefined;
+  endpointProvider: (
+    endpointParams: import("./endpoint/EndpointParameters").EndpointParameters,
+    context?: {
+      logger?: import("@smithy/types").Logger | undefined;
+    }
+  ) => import("@smithy/types").EndpointV2;
+  tls?: boolean | undefined;
+  serviceConfiguredEndpoint?: undefined;
+  authSchemePreference?:
+    | string[]
+    | import("@smithy/types").Provider<string[]>
+    | undefined;
+  httpAuthSchemes: import("@smithy/types").HttpAuthScheme[];
+  httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").S3HttpAuthSchemeProvider;
+  credentials?:
+    | import("@smithy/types").AwsCredentialIdentity
+    | import("@smithy/types").AwsCredentialIdentityProvider
+    | undefined;
+  signer?:
+    | import("@smithy/types").RequestSigner
+    | ((
+        authScheme?: import("@smithy/types").AuthScheme | undefined
+      ) => Promise<import("@smithy/types").RequestSigner>)
+    | undefined;
+  systemClockOffset?: number | undefined;
+  signingRegion?: string | undefined;
+  signerConstructor:
+    | (new (
+        options: import("@smithy/signature-v4").SignatureV4Init &
+          import("@smithy/signature-v4").SignatureV4CryptoInit
+      ) => import("@smithy/types").RequestSigner)
+    | typeof import("@aws-sdk/signature-v4-multi-region").SignatureV4MultiRegion;
+  sigv4aSigningRegionSet?:
+    | string[]
+    | import("@smithy/types").Provider<string[]>
+    | undefined;
+  forcePathStyle?:
+    | (boolean & (boolean | import("@smithy/types").Provider<boolean>))
+    | undefined;
+  useAccelerateEndpoint?:
+    | (boolean & (boolean | import("@smithy/types").Provider<boolean>))
+    | undefined;
+  disableMultiregionAccessPoints?:
+    | (boolean & (boolean | import("@smithy/types").Provider<boolean>))
+    | undefined;
+  followRegionRedirects?: boolean | undefined;
+  s3ExpressIdentityProvider?:
+    | import("@aws-sdk/middleware-sdk-s3").S3ExpressIdentityProvider
+    | undefined;
+  bucketEndpoint?: boolean | undefined;
+  useGlobalEndpoint?:
+    | boolean
+    | import("@smithy/types").Provider<boolean>
+    | undefined;
+  disableS3ExpressSessionAuth?:
+    | boolean
+    | import("@smithy/types").Provider<boolean>
+    | undefined;
+};
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/runtimeConfig.shared.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/runtimeConfig.shared.d.ts
new file mode 100644
index 00000000..523ad8a0
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/runtimeConfig.shared.d.ts
@@ -0,0 +1,36 @@
+import { SignatureV4MultiRegion } from "@aws-sdk/signature-v4-multi-region";
+import { S3ClientConfig } from "./S3Client";
+export declare const getRuntimeConfig: (config: S3ClientConfig) => {
+  apiVersion: string;
base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + getAwsChunkedEncodingStream: + | import("@smithy/types").GetAwsChunkedEncodingStream + | import("@smithy/types").GetAwsChunkedEncodingStream< + import("stream").Readable + >; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").S3HttpAuthSchemeProvider; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + logger: import("@smithy/types").Logger; + sdkStreamMixin: import("@smithy/types").SdkStreamMixinInjector; + serviceId: string; + signerConstructor: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | typeof SignatureV4MultiRegion; + signingEscapePath: boolean; + urlParser: import("@smithy/types").UrlParser; + useArnRegion: boolean | import("@smithy/types").Provider; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/runtimeExtensions.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/runtimeExtensions.d.ts new file mode 100644 index 00000000..90793a48 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/runtimeExtensions.d.ts @@ -0,0 +1,11 @@ +import { S3ExtensionConfiguration } from "./extensionConfiguration"; +export interface RuntimeExtension { + configure(extensionConfiguration: S3ExtensionConfiguration): void; +} +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +export declare const resolveRuntimeExtensions: ( + runtimeConfig: any, + extensions: RuntimeExtension[] +) => any; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/waiters/index.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/waiters/index.d.ts new file mode 100644 index 00000000..a139674b --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/waiters/index.d.ts @@ -0,0 +1,4 @@ +export * from "./waitForBucketExists"; +export * from "./waitForBucketNotExists"; +export * from "./waitForObjectExists"; +export * from "./waitForObjectNotExists"; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/waiters/waitForBucketExists.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/waiters/waitForBucketExists.d.ts new file mode 100644 index 00000000..e9976fc7 --- /dev/null +++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/waiters/waitForBucketExists.d.ts @@ -0,0 +1,11 @@ +import { WaiterConfiguration, WaiterResult } from "@smithy/util-waiter"; +import { HeadBucketCommandInput } from "../commands/HeadBucketCommand"; +import { S3Client } from "../S3Client"; +export declare const waitForBucketExists: ( + params: WaiterConfiguration, + input: HeadBucketCommandInput +) => Promise; +export declare const waitUntilBucketExists: ( + params: WaiterConfiguration, + input: HeadBucketCommandInput +) => Promise; diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/waiters/waitForBucketNotExists.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/waiters/waitForBucketNotExists.d.ts new file mode 100644 index 00000000..3da8b19b --- /dev/null +++ 
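A minimal sketch of how the `waitUntilBucketExists` waiter declared above is typically driven. The endpoint, credentials, and bucket name are placeholder values, and `forcePathStyle: true` reflects the path-style addressing that MinIO-style S3 deployments (like the one this repo is migrating to) generally require:

```ts
import {
  CreateBucketCommand,
  S3Client,
  waitUntilBucketExists,
} from "@aws-sdk/client-s3";

// Placeholder configuration for an S3-compatible (e.g. MinIO-style) endpoint.
const client = new S3Client({
  region: "us-east-1",
  endpoint: "http://localhost:9000", // hypothetical MinIO endpoint
  forcePathStyle: true, // path-style addressing, typically required by MinIO
  credentials: { accessKeyId: "minioadmin", secretAccessKey: "minioadmin" },
});

export async function ensureBucket(bucket: string): Promise<void> {
  await client.send(new CreateBucketCommand({ Bucket: bucket }));
  // Polls HeadBucket until the bucket is visible; throws if 30 s elapse first.
  const { state } = await waitUntilBucketExists(
    { client, maxWaitTime: 30 }, // WaiterConfiguration<S3Client>
    { Bucket: bucket } // HeadBucketCommandInput
  );
  console.log(`waiter finished with state: ${state}`);
}
```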
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/waiters/waitForBucketNotExists.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/waiters/waitForBucketNotExists.d.ts
new file mode 100644
index 00000000..3da8b19b
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/waiters/waitForBucketNotExists.d.ts
@@ -0,0 +1,11 @@
+import { WaiterConfiguration, WaiterResult } from "@smithy/util-waiter";
+import { HeadBucketCommandInput } from "../commands/HeadBucketCommand";
+import { S3Client } from "../S3Client";
+export declare const waitForBucketNotExists: (
+  params: WaiterConfiguration<S3Client>,
+  input: HeadBucketCommandInput
+) => Promise<WaiterResult>;
+export declare const waitUntilBucketNotExists: (
+  params: WaiterConfiguration<S3Client>,
+  input: HeadBucketCommandInput
+) => Promise<WaiterResult>;
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/waiters/waitForObjectExists.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/waiters/waitForObjectExists.d.ts
new file mode 100644
index 00000000..54d815a4
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/waiters/waitForObjectExists.d.ts
@@ -0,0 +1,11 @@
+import { WaiterConfiguration, WaiterResult } from "@smithy/util-waiter";
+import { HeadObjectCommandInput } from "../commands/HeadObjectCommand";
+import { S3Client } from "../S3Client";
+export declare const waitForObjectExists: (
+  params: WaiterConfiguration<S3Client>,
+  input: HeadObjectCommandInput
+) => Promise<WaiterResult>;
+export declare const waitUntilObjectExists: (
+  params: WaiterConfiguration<S3Client>,
+  input: HeadObjectCommandInput
+) => Promise<WaiterResult>;
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/waiters/waitForObjectNotExists.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/waiters/waitForObjectNotExists.d.ts
new file mode 100644
index 00000000..fbea261e
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/ts3.4/waiters/waitForObjectNotExists.d.ts
@@ -0,0 +1,11 @@
+import { WaiterConfiguration, WaiterResult } from "@smithy/util-waiter";
+import { HeadObjectCommandInput } from "../commands/HeadObjectCommand";
+import { S3Client } from "../S3Client";
+export declare const waitForObjectNotExists: (
+  params: WaiterConfiguration<S3Client>,
+  input: HeadObjectCommandInput
+) => Promise<WaiterResult>;
+export declare const waitUntilObjectNotExists: (
+  params: WaiterConfiguration<S3Client>,
+  input: HeadObjectCommandInput
+) => Promise<WaiterResult>;
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/waiters/index.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/waiters/index.d.ts
new file mode 100644
index 00000000..a139674b
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/waiters/index.d.ts
@@ -0,0 +1,4 @@
+export * from "./waitForBucketExists";
+export * from "./waitForBucketNotExists";
+export * from "./waitForObjectExists";
+export * from "./waitForObjectNotExists";
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/waiters/waitForBucketExists.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/waiters/waitForBucketExists.d.ts
new file mode 100644
index 00000000..4f9ce873
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/waiters/waitForBucketExists.d.ts
@@ -0,0 +1,14 @@
+import { WaiterConfiguration, WaiterResult } from "@smithy/util-waiter";
+import { HeadBucketCommandInput } from "../commands/HeadBucketCommand";
+import { S3Client } from "../S3Client";
+/**
+ *
+ * @deprecated Use waitUntilBucketExists instead. waitForBucketExists does not throw error in non-success cases.
+ */
+export declare const waitForBucketExists: (params: WaiterConfiguration<S3Client>, input: HeadBucketCommandInput) => Promise<WaiterResult>;
+/**
+ *
+ * @param params - Waiter configuration options.
+ * @param input - The input to HeadBucketCommand for polling.
+ */
+export declare const waitUntilBucketExists: (params: WaiterConfiguration<S3Client>, input: HeadBucketCommandInput) => Promise<WaiterResult>;
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/waiters/waitForBucketNotExists.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/waiters/waitForBucketNotExists.d.ts
new file mode 100644
index 00000000..0f856bc9
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/waiters/waitForBucketNotExists.d.ts
@@ -0,0 +1,14 @@
+import { WaiterConfiguration, WaiterResult } from "@smithy/util-waiter";
+import { HeadBucketCommandInput } from "../commands/HeadBucketCommand";
+import { S3Client } from "../S3Client";
+/**
+ *
+ * @deprecated Use waitUntilBucketNotExists instead. waitForBucketNotExists does not throw error in non-success cases.
+ */
+export declare const waitForBucketNotExists: (params: WaiterConfiguration<S3Client>, input: HeadBucketCommandInput) => Promise<WaiterResult>;
+/**
+ *
+ * @param params - Waiter configuration options.
+ * @param input - The input to HeadBucketCommand for polling.
+ */
+export declare const waitUntilBucketNotExists: (params: WaiterConfiguration<S3Client>, input: HeadBucketCommandInput) => Promise<WaiterResult>;
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/waiters/waitForObjectExists.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/waiters/waitForObjectExists.d.ts
new file mode 100644
index 00000000..921ac682
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/waiters/waitForObjectExists.d.ts
@@ -0,0 +1,14 @@
+import { WaiterConfiguration, WaiterResult } from "@smithy/util-waiter";
+import { HeadObjectCommandInput } from "../commands/HeadObjectCommand";
+import { S3Client } from "../S3Client";
+/**
+ *
+ * @deprecated Use waitUntilObjectExists instead. waitForObjectExists does not throw error in non-success cases.
+ */
+export declare const waitForObjectExists: (params: WaiterConfiguration<S3Client>, input: HeadObjectCommandInput) => Promise<WaiterResult>;
+/**
+ *
+ * @param params - Waiter configuration options.
+ * @param input - The input to HeadObjectCommand for polling.
+ */
+export declare const waitUntilObjectExists: (params: WaiterConfiguration<S3Client>, input: HeadObjectCommandInput) => Promise<WaiterResult>;
diff --git a/node_modules/@aws-sdk/client-s3/dist-types/waiters/waitForObjectNotExists.d.ts b/node_modules/@aws-sdk/client-s3/dist-types/waiters/waitForObjectNotExists.d.ts
new file mode 100644
index 00000000..e7561fce
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/dist-types/waiters/waitForObjectNotExists.d.ts
@@ -0,0 +1,14 @@
+import { WaiterConfiguration, WaiterResult } from "@smithy/util-waiter";
+import { HeadObjectCommandInput } from "../commands/HeadObjectCommand";
+import { S3Client } from "../S3Client";
+/**
+ *
+ * @deprecated Use waitUntilObjectNotExists instead. waitForObjectNotExists does not throw error in non-success cases.
+ */
+export declare const waitForObjectNotExists: (params: WaiterConfiguration<S3Client>, input: HeadObjectCommandInput) => Promise<WaiterResult>;
+/**
+ *
+ * @param params - Waiter configuration options.
+ * @param input - The input to HeadObjectCommand for polling.
+ */
+export declare const waitUntilObjectNotExists: (params: WaiterConfiguration<S3Client>, input: HeadObjectCommandInput) => Promise<WaiterResult>;
diff --git a/node_modules/@aws-sdk/client-s3/package.json b/node_modules/@aws-sdk/client-s3/package.json
new file mode 100644
index 00000000..30d6e3a8
--- /dev/null
+++ b/node_modules/@aws-sdk/client-s3/package.json
@@ -0,0 +1,123 @@
+{
+  "name": "@aws-sdk/client-s3",
+  "description": "AWS SDK for JavaScript S3 Client for Node.js, Browser and React Native",
+  "version": "3.802.0",
+  "scripts": {
+    "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'",
+    "build:cjs": "node ../../scripts/compilation/inline client-s3",
+    "build:es": "tsc -p tsconfig.es.json",
+    "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build",
+    "build:types": "tsc -p tsconfig.types.json",
+    "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4",
+    "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo",
+    "extract:docs": "api-extractor run --local",
+    "generate:client": "node ../../scripts/generate-clients/single-service --solo s3",
+    "test": "yarn g:vitest run",
+    "test:browser": "node ./test/browser-build/esbuild && yarn g:vitest run -c vitest.config.browser.ts",
+    "test:browser:watch": "node ./test/browser-build/esbuild && yarn g:vitest watch -c vitest.config.browser.ts",
+    "test:e2e": "yarn g:vitest run -c vitest.config.e2e.ts && yarn test:browser",
+    "test:e2e:watch": "yarn g:vitest watch -c vitest.config.e2e.ts",
+    "test:watch": "yarn g:vitest watch"
+  },
+  "main": "./dist-cjs/index.js",
+  "types": "./dist-types/index.d.ts",
+  "module": "./dist-es/index.js",
+  "sideEffects": false,
+  "dependencies": {
+    "@aws-crypto/sha1-browser": "5.2.0",
+    "@aws-crypto/sha256-browser": "5.2.0",
+    "@aws-crypto/sha256-js": "5.2.0",
+    "@aws-sdk/core": "3.799.0",
+    "@aws-sdk/credential-provider-node": "3.799.0",
+    "@aws-sdk/middleware-bucket-endpoint": "3.775.0",
+    "@aws-sdk/middleware-expect-continue": "3.775.0",
+    "@aws-sdk/middleware-flexible-checksums": "3.799.0",
+    "@aws-sdk/middleware-host-header": "3.775.0",
+    "@aws-sdk/middleware-location-constraint": "3.775.0",
+    "@aws-sdk/middleware-logger": "3.775.0",
+    "@aws-sdk/middleware-recursion-detection": "3.775.0",
+    "@aws-sdk/middleware-sdk-s3": "3.799.0",
+    "@aws-sdk/middleware-ssec": "3.775.0",
+    "@aws-sdk/middleware-user-agent": "3.799.0",
+    "@aws-sdk/region-config-resolver": "3.775.0",
+    "@aws-sdk/signature-v4-multi-region": "3.800.0",
+    "@aws-sdk/types": "3.775.0",
+    "@aws-sdk/util-endpoints": "3.787.0",
+    "@aws-sdk/util-user-agent-browser": "3.775.0",
+    "@aws-sdk/util-user-agent-node": "3.799.0",
+    "@aws-sdk/xml-builder": "3.775.0",
+    "@smithy/config-resolver": "^4.1.0",
+    "@smithy/core": "^3.3.0",
+    "@smithy/eventstream-serde-browser": "^4.0.2",
+    "@smithy/eventstream-serde-config-resolver": "^4.1.0",
+    "@smithy/eventstream-serde-node": "^4.0.2",
+    "@smithy/fetch-http-handler": "^5.0.2",
+    "@smithy/hash-blob-browser": "^4.0.2",
+    "@smithy/hash-node": "^4.0.2",
+    "@smithy/hash-stream-node": "^4.0.2",
+    "@smithy/invalid-dependency": "^4.0.2",
+    "@smithy/md5-js": "^4.0.2",
+    "@smithy/middleware-content-length": "^4.0.2",
+    "@smithy/middleware-endpoint": "^4.1.1",
+    "@smithy/middleware-retry": "^4.1.1",
+    "@smithy/middleware-serde": "^4.0.3",
+    "@smithy/middleware-stack": "^4.0.2",
+    "@smithy/node-config-provider": "^4.0.2",
+    "@smithy/node-http-handler": "^4.0.4",
+    "@smithy/protocol-http": "^5.1.0",
+    "@smithy/smithy-client": "^4.2.1",
+    "@smithy/types": "^4.2.0",
"@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "@smithy/util-waiter": "^4.0.3", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@aws-sdk/signature-v4-crt": "3.800.0", + "@tsconfig/node18": "18.2.4", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "browser": { + "./dist-es/runtimeConfig": "./dist-es/runtimeConfig.browser" + }, + "react-native": { + "./dist-es/runtimeConfig": "./dist-es/runtimeConfig.native" + }, + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/clients/client-s3", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "clients/client-s3" + } +} diff --git a/node_modules/@aws-sdk/client-sso/LICENSE b/node_modules/@aws-sdk/client-sso/LICENSE new file mode 100644 index 00000000..dd65ae06 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@aws-sdk/client-sso/README.md b/node_modules/@aws-sdk/client-sso/README.md new file mode 100644 index 00000000..09d5fe35 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/README.md @@ -0,0 +1,252 @@ + + +# @aws-sdk/client-sso + +## Description + +AWS SDK for JavaScript SSO Client for Node.js, Browser and React Native. + +

+AWS IAM Identity Center (successor to AWS Single Sign-On) Portal is a web service that makes it easy for you to assign user access to
+IAM Identity Center resources such as the AWS access portal. Users can get AWS account applications and roles
+assigned to them and get federated into the application.
+
+Although AWS Single Sign-On was renamed, the `sso` and
+`identitystore` API namespaces will continue to retain their original names for
+backward compatibility purposes. For more information, see IAM Identity Center rename.
+
+This reference guide describes the IAM Identity Center Portal operations that you can call
+programmatically and includes detailed information on data types and errors.
+
+AWS provides SDKs that consist of libraries and sample code for various programming
+languages and platforms, such as Java, Ruby, .NET, iOS, or Android. The SDKs provide a
+convenient way to create programmatic access to IAM Identity Center and other AWS services. For more
+information about the AWS SDKs, including how to download and install them, see Tools for Amazon Web Services.
+
+## Installing
+
+To install this package, add or install `@aws-sdk/client-sso`
+using your favorite package manager:
+
+- `npm install @aws-sdk/client-sso`
+- `yarn add @aws-sdk/client-sso`
+- `pnpm add @aws-sdk/client-sso`
+
+## Getting Started
+
+### Import
+
+The AWS SDK is modularized by clients and commands.
+To send a request, you only need to import the `SSOClient` and
+the commands you need, for example `ListAccountsCommand`:
+
+```js
+// ES5 example
+const { SSOClient, ListAccountsCommand } = require("@aws-sdk/client-sso");
+```
+
+```ts
+// ES6+ example
+import { SSOClient, ListAccountsCommand } from "@aws-sdk/client-sso";
+```
+
+### Usage
+
+To send a request, you:
+
+- Initialize the client with configuration (e.g. credentials, region).
+- Initialize the command with input parameters.
+- Call the `send` operation on the client, passing the command object as input.
+- If you are using a custom HTTP handler, you may call `destroy()` to close open connections.
+
+```js
+// a client can be shared by different commands.
+const client = new SSOClient({ region: "REGION" });
+
+const params = {
+  /** input parameters */
+};
+const command = new ListAccountsCommand(params);
+```
+
+#### Async/await
+
+We recommend using the [await](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/await)
+operator to wait for the promise returned by the send operation, as follows:
+
+```js
+// async/await.
+try {
+  const data = await client.send(command);
+  // process data.
+} catch (error) {
+  // error handling.
+} finally {
+  // finally.
+}
+```
+
+Async/await is clean, concise, intuitive, easy to debug, and has better error handling
+than Promise chains or callbacks.
+
+#### Promises
+
+You can also use [Promise chaining](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Using_promises#chaining)
+to execute the send operation.
+
+```js
+client.send(command).then(
+  (data) => {
+    // process data.
+  },
+  (error) => {
+    // error handling.
+  }
+);
+```
+
+Promises can also be handled with `.catch()` and `.finally()` as follows:
+
+```js
+client
+  .send(command)
+  .then((data) => {
+    // process data.
+  })
+  .catch((error) => {
+    // error handling.
+  })
+  .finally(() => {
+    // finally.
+  });
+```
+
+#### Callbacks
+
+We do not recommend using callbacks because of [callback hell](http://callbackhell.com/),
+but they are supported by the send operation.
+
+```js
+// callbacks.
+client.send(command, (err, data) => {
+  // process err and data.
+});
+```
+
+#### v2 compatible style
+
+The client can also send requests using the v2-compatible style.
+However, it results in a bigger bundle size and may be dropped in the next major version. More details are in the blog post
+on [modular packages in AWS SDK for JavaScript](https://aws.amazon.com/blogs/developer/modular-packages-in-aws-sdk-for-javascript/).
+
+```ts
+import * as AWS from "@aws-sdk/client-sso";
+const client = new AWS.SSO({ region: "REGION" });
+
+// async/await.
+try {
+  const data = await client.listAccounts(params);
+  // process data.
+} catch (error) {
+  // error handling.
+}
+
+// Promises.
+client
+  .listAccounts(params)
+  .then((data) => {
+    // process data.
+  })
+  .catch((error) => {
+    // error handling.
+  });
+
+// callbacks.
+client.listAccounts(params, (err, data) => {
+  // process err and data.
+});
+```
+
+### Troubleshooting
+
+When the service returns an exception, the error will include the exception information,
+as well as response metadata (e.g. request id).
+
+```js
+try {
+  const data = await client.send(command);
+  // process data.
+} catch (error) {
+  const { requestId, cfId, extendedRequestId } = error.$metadata;
+  console.log({ requestId, cfId, extendedRequestId });
+  /**
+   * The keys within exceptions are also parsed.
+   * You can access them by specifying exception names:
+   * if (error.name === 'SomeServiceException') {
+   *   const value = error.specialKeyInException;
+   * }
+   */
+}
+```
+
+## Getting Help
+
+Please use these community resources for getting help.
+We use GitHub issues for tracking bugs and feature requests, but have limited bandwidth to address them.
+
+- Visit [Developer Guide](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/welcome.html)
+  or [API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/index.html).
+- Check out the blog posts tagged with [`aws-sdk-js`](https://aws.amazon.com/blogs/developer/tag/aws-sdk-js/)
+  on the AWS Developer Blog.
+- Ask a question on [StackOverflow](https://stackoverflow.com/questions/tagged/aws-sdk-js) and tag it with `aws-sdk-js`.
+- Join the AWS JavaScript community on [gitter](https://gitter.im/aws/aws-sdk-js-v3).
+- If it turns out that you have found a bug, please [open an issue](https://github.com/aws/aws-sdk-js-v3/issues/new/choose).
+
+To test your universal JavaScript code in Node.js, browser and react-native environments,
+visit our [code samples repo](https://github.com/aws-samples/aws-sdk-js-tests).
+
+## Contributing
+
+This client code is generated automatically. Any modifications will be overwritten the next time the `@aws-sdk/client-sso` package is updated.
+To contribute to the client, you can check our [generate clients scripts](https://github.com/aws/aws-sdk-js-v3/tree/main/scripts/generate-clients).
+
+## License
+
+This SDK is distributed under the
+[Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0),
+see LICENSE for more information.
+
+## Client Commands (Operations List)
+
+<details>
+<summary>
+GetRoleCredentials
+</summary>
+
+[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/sso/command/GetRoleCredentialsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/GetRoleCredentialsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/GetRoleCredentialsCommandOutput/)
+
+</details>
+<details>
+<summary>
+ListAccountRoles
+</summary>
+
+[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/sso/command/ListAccountRolesCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/ListAccountRolesCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/ListAccountRolesCommandOutput/)
+
+</details>
+<details>
+<summary>
+ListAccounts
+</summary>
+
+[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/sso/command/ListAccountsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/ListAccountsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/ListAccountsCommandOutput/)
+
+</details>
+<details>
+<summary>
+Logout
+</summary>
+
+[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/sso/command/LogoutCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/LogoutCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/LogoutCommandOutput/)
+
+</details>
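A minimal end-to-end sketch for the commands listed above; the `accessToken`, `accountId`, and `roleName` values are placeholders (a real access token is issued by the SSO OIDC device-authorization flow, not hard-coded):

```ts
import { SSOClient, GetRoleCredentialsCommand } from "@aws-sdk/client-sso";

const client = new SSOClient({ region: "us-east-1" });

export async function fetchRoleCredentials() {
  // Placeholder inputs; a real accessToken comes from AWS SSO OIDC.
  const command = new GetRoleCredentialsCommand({
    accessToken: "<sso-access-token>",
    accountId: "123456789012",
    roleName: "ReadOnly",
  });
  const { roleCredentials } = await client.send(command);
  // roleCredentials carries accessKeyId, secretAccessKey, sessionToken,
  // and expiration; the SDK's sensitive-log filters redact the secrets.
  return roleCredentials;
}
```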
diff --git a/node_modules/@aws-sdk/client-sso/dist-cjs/auth/httpAuthSchemeProvider.js b/node_modules/@aws-sdk/client-sso/dist-cjs/auth/httpAuthSchemeProvider.js new file mode 100644 index 00000000..2c256eea --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-cjs/auth/httpAuthSchemeProvider.js @@ -0,0 +1,68 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveHttpAuthSchemeConfig = exports.defaultSSOHttpAuthSchemeProvider = exports.defaultSSOHttpAuthSchemeParametersProvider = void 0; +const core_1 = require("@aws-sdk/core"); +const util_middleware_1 = require("@smithy/util-middleware"); +const defaultSSOHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: (0, util_middleware_1.getSmithyContext)(context).operation, + region: (await (0, util_middleware_1.normalizeProvider)(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +exports.defaultSSOHttpAuthSchemeParametersProvider = defaultSSOHttpAuthSchemeParametersProvider; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "awsssoportal", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +const defaultSSOHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "GetRoleCredentials": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "ListAccountRoles": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "ListAccounts": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "Logout": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +exports.defaultSSOHttpAuthSchemeProvider = defaultSSOHttpAuthSchemeProvider; +const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = (0, core_1.resolveAwsSdkSigV4Config)(config); + return Object.assign(config_0, { + authSchemePreference: (0, util_middleware_1.normalizeProvider)(config.authSchemePreference ?? 
[]), + }); +}; +exports.resolveHttpAuthSchemeConfig = resolveHttpAuthSchemeConfig; diff --git a/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/endpointResolver.js b/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/endpointResolver.js new file mode 100644 index 00000000..7258a356 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/endpointResolver.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = require("@aws-sdk/util-endpoints"); +const util_endpoints_2 = require("@smithy/util-endpoints"); +const ruleset_1 = require("./ruleset"); +const cache = new util_endpoints_2.EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS"], +}); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => (0, util_endpoints_2.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; +util_endpoints_2.customEndpointFunctions.aws = util_endpoints_1.awsEndpointFunctions; diff --git a/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/ruleset.js b/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/ruleset.js new file mode 100644 index 00000000..4321ed97 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/ruleset.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ruleSet = void 0; +const u = "required", v = "fn", w = "argv", x = "ref"; +const a = true, b = "isSet", c = "booleanEquals", d = "error", e = "endpoint", f = "tree", g = "PartitionResult", h = "getAttr", i = { [u]: false, "type": "String" }, j = { [u]: true, "default": false, "type": "Boolean" }, k = { [x]: "Endpoint" }, l = { [v]: c, [w]: [{ [x]: "UseFIPS" }, true] }, m = { [v]: c, [w]: [{ [x]: "UseDualStack" }, true] }, n = {}, o = { [v]: h, [w]: [{ [x]: g }, "supportsFIPS"] }, p = { [x]: g }, q = { [v]: c, [w]: [true, { [v]: h, [w]: [p, "supportsDualStack"] }] }, r = [l], s = [m], t = [{ [x]: "Region" }]; +const _data = { version: "1.0", parameters: { Region: i, UseDualStack: j, UseFIPS: j, Endpoint: i }, rules: [{ conditions: [{ [v]: b, [w]: [k] }], rules: [{ conditions: r, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: d }, { conditions: s, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: d }, { endpoint: { url: k, properties: n, headers: n }, type: e }], type: f }, { conditions: [{ [v]: b, [w]: t }], rules: [{ conditions: [{ [v]: "aws.partition", [w]: t, assign: g }], rules: [{ conditions: [l, m], rules: [{ conditions: [{ [v]: c, [w]: [a, o] }, q], rules: [{ endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: d }], type: f }, { conditions: r, rules: [{ conditions: [{ [v]: c, [w]: [o, a] }], rules: [{ conditions: [{ [v]: "stringEquals", [w]: [{ [v]: h, [w]: [p, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://portal.sso.{Region}.amazonaws.com", properties: n, headers: n }, type: e }, { endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS is enabled but this partition does not support FIPS", 
type: d }], type: f }, { conditions: s, rules: [{ conditions: [q], rules: [{ endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "DualStack is enabled but this partition does not support DualStack", type: d }], type: f }, { endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }], type: f }, { error: "Invalid Configuration: Missing Region", type: d }] }; +exports.ruleSet = _data; diff --git a/node_modules/@aws-sdk/client-sso/dist-cjs/index.js b/node_modules/@aws-sdk/client-sso/dist-cjs/index.js new file mode 100644 index 00000000..8383b07f --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-cjs/index.js @@ -0,0 +1,625 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + GetRoleCredentialsCommand: () => GetRoleCredentialsCommand, + GetRoleCredentialsRequestFilterSensitiveLog: () => GetRoleCredentialsRequestFilterSensitiveLog, + GetRoleCredentialsResponseFilterSensitiveLog: () => GetRoleCredentialsResponseFilterSensitiveLog, + InvalidRequestException: () => InvalidRequestException, + ListAccountRolesCommand: () => ListAccountRolesCommand, + ListAccountRolesRequestFilterSensitiveLog: () => ListAccountRolesRequestFilterSensitiveLog, + ListAccountsCommand: () => ListAccountsCommand, + ListAccountsRequestFilterSensitiveLog: () => ListAccountsRequestFilterSensitiveLog, + LogoutCommand: () => LogoutCommand, + LogoutRequestFilterSensitiveLog: () => LogoutRequestFilterSensitiveLog, + ResourceNotFoundException: () => ResourceNotFoundException, + RoleCredentialsFilterSensitiveLog: () => RoleCredentialsFilterSensitiveLog, + SSO: () => SSO, + SSOClient: () => SSOClient, + SSOServiceException: () => SSOServiceException, + TooManyRequestsException: () => TooManyRequestsException, + UnauthorizedException: () => UnauthorizedException, + __Client: () => import_smithy_client.Client, + paginateListAccountRoles: () => paginateListAccountRoles, + paginateListAccounts: () => paginateListAccounts +}); +module.exports = __toCommonJS(index_exports); + +// src/SSOClient.ts +var import_middleware_host_header = require("@aws-sdk/middleware-host-header"); +var import_middleware_logger = require("@aws-sdk/middleware-logger"); +var import_middleware_recursion_detection = require("@aws-sdk/middleware-recursion-detection"); +var import_middleware_user_agent = require("@aws-sdk/middleware-user-agent"); +var import_config_resolver = require("@smithy/config-resolver"); +var import_core = require("@smithy/core"); +var import_middleware_content_length = 
require("@smithy/middleware-content-length"); +var import_middleware_endpoint = require("@smithy/middleware-endpoint"); +var import_middleware_retry = require("@smithy/middleware-retry"); + +var import_httpAuthSchemeProvider = require("./auth/httpAuthSchemeProvider"); + +// src/endpoint/EndpointParameters.ts +var resolveClientEndpointParameters = /* @__PURE__ */ __name((options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + defaultSigningName: "awsssoportal" + }); +}, "resolveClientEndpointParameters"); +var commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" } +}; + +// src/SSOClient.ts +var import_runtimeConfig = require("././runtimeConfig"); + +// src/runtimeExtensions.ts +var import_region_config_resolver = require("@aws-sdk/region-config-resolver"); +var import_protocol_http = require("@smithy/protocol-http"); +var import_smithy_client = require("@smithy/smithy-client"); + +// src/auth/httpAuthExtensionConfiguration.ts +var getHttpAuthExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + } + }; +}, "getHttpAuthExtensionConfiguration"); +var resolveHttpAuthRuntimeConfig = /* @__PURE__ */ __name((config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials() + }; +}, "resolveHttpAuthRuntimeConfig"); + +// src/runtimeExtensions.ts +var resolveRuntimeExtensions = /* @__PURE__ */ __name((runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign( + (0, import_region_config_resolver.getAwsRegionExtensionConfiguration)(runtimeConfig), + (0, import_smithy_client.getDefaultExtensionConfiguration)(runtimeConfig), + (0, import_protocol_http.getHttpHandlerExtensionConfiguration)(runtimeConfig), + getHttpAuthExtensionConfiguration(runtimeConfig) + ); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign( + runtimeConfig, + (0, import_region_config_resolver.resolveAwsRegionExtensionConfiguration)(extensionConfiguration), + (0, import_smithy_client.resolveDefaultRuntimeConfig)(extensionConfiguration), + (0, import_protocol_http.resolveHttpHandlerRuntimeConfig)(extensionConfiguration), + resolveHttpAuthRuntimeConfig(extensionConfiguration) + ); +}, "resolveRuntimeExtensions"); + +// src/SSOClient.ts +var SSOClient = class extends import_smithy_client.Client { + static { + __name(this, "SSOClient"); + } + /** + * 
The resolved configuration of SSOClient class. This is resolved and normalized from the {@link SSOClientConfig | constructor configuration interface}. + */ + config; + constructor(...[configuration]) { + const _config_0 = (0, import_runtimeConfig.getRuntimeConfig)(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = (0, import_middleware_user_agent.resolveUserAgentConfig)(_config_1); + const _config_3 = (0, import_middleware_retry.resolveRetryConfig)(_config_2); + const _config_4 = (0, import_config_resolver.resolveRegionConfig)(_config_3); + const _config_5 = (0, import_middleware_host_header.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, import_middleware_endpoint.resolveEndpointConfig)(_config_5); + const _config_7 = (0, import_httpAuthSchemeProvider.resolveHttpAuthSchemeConfig)(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use((0, import_middleware_user_agent.getUserAgentPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_retry.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_content_length.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_host_header.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_logger.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_recursion_detection.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use( + (0, import_core.getHttpAuthSchemeEndpointRuleSetPlugin)(this.config, { + httpAuthSchemeParametersProvider: import_httpAuthSchemeProvider.defaultSSOHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: /* @__PURE__ */ __name(async (config) => new import_core.DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials + }), "identityProviderConfigProvider") + }) + ); + this.middlewareStack.use((0, import_core.getHttpSigningPlugin)(this.config)); + } + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. 
+ */ + destroy() { + super.destroy(); + } +}; + +// src/SSO.ts + + +// src/commands/GetRoleCredentialsCommand.ts + +var import_middleware_serde = require("@smithy/middleware-serde"); + + +// src/models/models_0.ts + + +// src/models/SSOServiceException.ts + +var SSOServiceException = class _SSOServiceException extends import_smithy_client.ServiceException { + static { + __name(this, "SSOServiceException"); + } + /** + * @internal + */ + constructor(options) { + super(options); + Object.setPrototypeOf(this, _SSOServiceException.prototype); + } +}; + +// src/models/models_0.ts +var InvalidRequestException = class _InvalidRequestException extends SSOServiceException { + static { + __name(this, "InvalidRequestException"); + } + name = "InvalidRequestException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidRequestException.prototype); + } +}; +var ResourceNotFoundException = class _ResourceNotFoundException extends SSOServiceException { + static { + __name(this, "ResourceNotFoundException"); + } + name = "ResourceNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ResourceNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ResourceNotFoundException.prototype); + } +}; +var TooManyRequestsException = class _TooManyRequestsException extends SSOServiceException { + static { + __name(this, "TooManyRequestsException"); + } + name = "TooManyRequestsException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TooManyRequestsException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TooManyRequestsException.prototype); + } +}; +var UnauthorizedException = class _UnauthorizedException extends SSOServiceException { + static { + __name(this, "UnauthorizedException"); + } + name = "UnauthorizedException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "UnauthorizedException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _UnauthorizedException.prototype); + } +}; +var GetRoleCredentialsRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING } +}), "GetRoleCredentialsRequestFilterSensitiveLog"); +var RoleCredentialsFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.secretAccessKey && { secretAccessKey: import_smithy_client.SENSITIVE_STRING }, + ...obj.sessionToken && { sessionToken: import_smithy_client.SENSITIVE_STRING } +}), "RoleCredentialsFilterSensitiveLog"); +var GetRoleCredentialsResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.roleCredentials && { roleCredentials: RoleCredentialsFilterSensitiveLog(obj.roleCredentials) } +}), "GetRoleCredentialsResponseFilterSensitiveLog"); +var ListAccountRolesRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING } +}), "ListAccountRolesRequestFilterSensitiveLog"); +var ListAccountsRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING } +}), "ListAccountsRequestFilterSensitiveLog"); +var LogoutRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken 
&& { accessToken: import_smithy_client.SENSITIVE_STRING } +}), "LogoutRequestFilterSensitiveLog"); + +// src/protocols/Aws_restJson1.ts +var import_core2 = require("@aws-sdk/core"); + + +var se_GetRoleCredentialsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xasbt]: input[_aT] + }); + b.bp("/federation/credentials"); + const query = (0, import_smithy_client.map)({ + [_rn]: [, (0, import_smithy_client.expectNonNull)(input[_rN], `roleName`)], + [_ai]: [, (0, import_smithy_client.expectNonNull)(input[_aI], `accountId`)] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_GetRoleCredentialsCommand"); +var se_ListAccountRolesCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xasbt]: input[_aT] + }); + b.bp("/assignment/roles"); + const query = (0, import_smithy_client.map)({ + [_nt]: [, input[_nT]], + [_mr]: [() => input.maxResults !== void 0, () => input[_mR].toString()], + [_ai]: [, (0, import_smithy_client.expectNonNull)(input[_aI], `accountId`)] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_ListAccountRolesCommand"); +var se_ListAccountsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xasbt]: input[_aT] + }); + b.bp("/assignment/accounts"); + const query = (0, import_smithy_client.map)({ + [_nt]: [, input[_nT]], + [_mr]: [() => input.maxResults !== void 0, () => input[_mR].toString()] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_ListAccountsCommand"); +var se_LogoutCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xasbt]: input[_aT] + }); + b.bp("/logout"); + let body; + b.m("POST").h(headers).b(body); + return b.build(); +}, "se_LogoutCommand"); +var de_GetRoleCredentialsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client.take)(data, { + roleCredentials: import_smithy_client._json + }); + Object.assign(contents, doc); + return contents; +}, "de_GetRoleCredentialsCommand"); +var de_ListAccountRolesCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, 
import_smithy_client.take)(data, { + nextToken: import_smithy_client.expectString, + roleList: import_smithy_client._json + }); + Object.assign(contents, doc); + return contents; +}, "de_ListAccountRolesCommand"); +var de_ListAccountsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client.take)(data, { + accountList: import_smithy_client._json, + nextToken: import_smithy_client.expectString + }); + Object.assign(contents, doc); + return contents; +}, "de_ListAccountsCommand"); +var de_LogoutCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + await (0, import_smithy_client.collectBody)(output.body, context); + return contents; +}, "de_LogoutCommand"); +var de_CommandError = /* @__PURE__ */ __name(async (output, context) => { + const parsedOutput = { + ...output, + body: await (0, import_core2.parseJsonErrorBody)(output.body, context) + }; + const errorCode = (0, import_core2.loadRestJsonErrorCode)(output, parsedOutput.body); + switch (errorCode) { + case "InvalidRequestException": + case "com.amazonaws.sso#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.sso#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "TooManyRequestsException": + case "com.amazonaws.sso#TooManyRequestsException": + throw await de_TooManyRequestsExceptionRes(parsedOutput, context); + case "UnauthorizedException": + case "com.amazonaws.sso#UnauthorizedException": + throw await de_UnauthorizedExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode + }); + } +}, "de_CommandError"); +var throwDefaultError = (0, import_smithy_client.withBaseException)(SSOServiceException); +var de_InvalidRequestExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + message: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidRequestExceptionRes"); +var de_ResourceNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + message: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new ResourceNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, 
parsedOutput.body); +}, "de_ResourceNotFoundExceptionRes"); +var de_TooManyRequestsExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + message: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new TooManyRequestsException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_TooManyRequestsExceptionRes"); +var de_UnauthorizedExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + message: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new UnauthorizedException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_UnauthorizedExceptionRes"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); +var _aI = "accountId"; +var _aT = "accessToken"; +var _ai = "account_id"; +var _mR = "maxResults"; +var _mr = "max_result"; +var _nT = "nextToken"; +var _nt = "next_token"; +var _rN = "roleName"; +var _rn = "role_name"; +var _xasbt = "x-amz-sso_bearer_token"; + +// src/commands/GetRoleCredentialsCommand.ts +var GetRoleCredentialsCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("SWBPortalService", "GetRoleCredentials", {}).n("SSOClient", "GetRoleCredentialsCommand").f(GetRoleCredentialsRequestFilterSensitiveLog, GetRoleCredentialsResponseFilterSensitiveLog).ser(se_GetRoleCredentialsCommand).de(de_GetRoleCredentialsCommand).build() { + static { + __name(this, "GetRoleCredentialsCommand"); + } +}; + +// src/commands/ListAccountRolesCommand.ts + + + +var ListAccountRolesCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("SWBPortalService", "ListAccountRoles", {}).n("SSOClient", "ListAccountRolesCommand").f(ListAccountRolesRequestFilterSensitiveLog, void 0).ser(se_ListAccountRolesCommand).de(de_ListAccountRolesCommand).build() { + static { + __name(this, "ListAccountRolesCommand"); + } +}; + +// src/commands/ListAccountsCommand.ts + + + +var ListAccountsCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, 
import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("SWBPortalService", "ListAccounts", {}).n("SSOClient", "ListAccountsCommand").f(ListAccountsRequestFilterSensitiveLog, void 0).ser(se_ListAccountsCommand).de(de_ListAccountsCommand).build() { + static { + __name(this, "ListAccountsCommand"); + } +}; + +// src/commands/LogoutCommand.ts + + + +var LogoutCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("SWBPortalService", "Logout", {}).n("SSOClient", "LogoutCommand").f(LogoutRequestFilterSensitiveLog, void 0).ser(se_LogoutCommand).de(de_LogoutCommand).build() { + static { + __name(this, "LogoutCommand"); + } +}; + +// src/SSO.ts +var commands = { + GetRoleCredentialsCommand, + ListAccountRolesCommand, + ListAccountsCommand, + LogoutCommand +}; +var SSO = class extends SSOClient { + static { + __name(this, "SSO"); + } +}; +(0, import_smithy_client.createAggregatedClient)(commands, SSO); + +// src/pagination/ListAccountRolesPaginator.ts + +var paginateListAccountRoles = (0, import_core.createPaginator)(SSOClient, ListAccountRolesCommand, "nextToken", "nextToken", "maxResults"); + +// src/pagination/ListAccountsPaginator.ts + +var paginateListAccounts = (0, import_core.createPaginator)(SSOClient, ListAccountsCommand, "nextToken", "nextToken", "maxResults"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + SSOServiceException, + __Client, + SSOClient, + SSO, + $Command, + GetRoleCredentialsCommand, + ListAccountRolesCommand, + ListAccountsCommand, + LogoutCommand, + paginateListAccountRoles, + paginateListAccounts, + InvalidRequestException, + ResourceNotFoundException, + TooManyRequestsException, + UnauthorizedException, + GetRoleCredentialsRequestFilterSensitiveLog, + RoleCredentialsFilterSensitiveLog, + GetRoleCredentialsResponseFilterSensitiveLog, + ListAccountRolesRequestFilterSensitiveLog, + ListAccountsRequestFilterSensitiveLog, + LogoutRequestFilterSensitiveLog +}); + diff --git a/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.browser.js b/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.browser.js new file mode 100644 index 00000000..3b40936a --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.browser.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../package.json")); +const sha256_browser_1 = require("@aws-crypto/sha256-browser"); +const util_user_agent_browser_1 = require("@aws-sdk/util-user-agent-browser"); +const config_resolver_1 = require("@smithy/config-resolver"); +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const invalid_dependency_1 = require("@smithy/invalid-dependency"); +const util_body_length_browser_1 = require("@smithy/util-body-length-browser"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_browser_1 = require("@smithy/util-defaults-mode-browser"); +const getRuntimeConfig = (config) => { + 
const defaultsMode = (0, util_defaults_mode_browser_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_browser_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_browser_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? util_retry_1.DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? (0, invalid_dependency_1.invalidProvider)("Region is missing"), + requestHandler: fetch_http_handler_1.FetchHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? sha256_browser_1.Sha256, + streamCollector: config?.streamCollector ?? fetch_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.js b/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.js new file mode 100644 index 00000000..befc7393 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.js @@ -0,0 +1,51 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../package.json")); +const core_1 = require("@aws-sdk/core"); +const util_user_agent_node_1 = require("@aws-sdk/util-user-agent-node"); +const config_resolver_1 = require("@smithy/config-resolver"); +const hash_node_1 = require("@smithy/hash-node"); +const middleware_retry_1 = require("@smithy/middleware-retry"); +const node_config_provider_1 = require("@smithy/node-config-provider"); +const node_http_handler_1 = require("@smithy/node-http-handler"); +const util_body_length_node_1 = require("@smithy/util-body-length-node"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_node_1 = require("@smithy/util-defaults-mode-node"); +const smithy_client_2 = require("@smithy/smithy-client"); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + (0, core_1.emitWarningIfUnsupportedVersion)(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? 
(0, node_config_provider_1.loadConfig)(core_1.NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, { ...config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: node_http_handler_1.NodeHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? (0, node_config_provider_1.loadConfig)(util_user_agent_node_1.NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.native.js b/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.native.js new file mode 100644 index 00000000..34c5f8ec --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.native.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const sha256_js_1 = require("@aws-crypto/sha256-js"); +const runtimeConfig_browser_1 = require("./runtimeConfig.browser"); +const getRuntimeConfig = (config) => { + const browserDefaults = (0, runtimeConfig_browser_1.getRuntimeConfig)(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
sha256_js_1.Sha256, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.shared.js b/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.shared.js new file mode 100644 index 00000000..24a378c1 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.shared.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const core_1 = require("@aws-sdk/core"); +const core_2 = require("@smithy/core"); +const smithy_client_1 = require("@smithy/smithy-client"); +const url_parser_1 = require("@smithy/url-parser"); +const util_base64_1 = require("@smithy/util-base64"); +const util_utf8_1 = require("@smithy/util-utf8"); +const httpAuthSchemeProvider_1 = require("./auth/httpAuthSchemeProvider"); +const endpointResolver_1 = require("./endpoint/endpointResolver"); +const getRuntimeConfig = (config) => { + return { + apiVersion: "2019-06-10", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? httpAuthSchemeProvider_1.defaultSSOHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new core_2.NoAuthSigner(), + }, + ], + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "SSO", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
util_utf8_1.toUtf8, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/node_modules/@aws-sdk/client-sso/dist-es/SSO.js b/node_modules/@aws-sdk/client-sso/dist-es/SSO.js new file mode 100644 index 00000000..04d31690 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/SSO.js @@ -0,0 +1,15 @@ +import { createAggregatedClient } from "@smithy/smithy-client"; +import { GetRoleCredentialsCommand, } from "./commands/GetRoleCredentialsCommand"; +import { ListAccountRolesCommand, } from "./commands/ListAccountRolesCommand"; +import { ListAccountsCommand, } from "./commands/ListAccountsCommand"; +import { LogoutCommand } from "./commands/LogoutCommand"; +import { SSOClient } from "./SSOClient"; +const commands = { + GetRoleCredentialsCommand, + ListAccountRolesCommand, + ListAccountsCommand, + LogoutCommand, +}; +export class SSO extends SSOClient { +} +createAggregatedClient(commands, SSO); diff --git a/node_modules/@aws-sdk/client-sso/dist-es/SSOClient.js b/node_modules/@aws-sdk/client-sso/dist-es/SSOClient.js new file mode 100644 index 00000000..890a848a --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/SSOClient.js @@ -0,0 +1,48 @@ +import { getHostHeaderPlugin, resolveHostHeaderConfig, } from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { getRecursionDetectionPlugin } from "@aws-sdk/middleware-recursion-detection"; +import { getUserAgentPlugin, resolveUserAgentConfig, } from "@aws-sdk/middleware-user-agent"; +import { resolveRegionConfig } from "@smithy/config-resolver"; +import { DefaultIdentityProviderConfig, getHttpAuthSchemeEndpointRuleSetPlugin, getHttpSigningPlugin, } from "@smithy/core"; +import { getContentLengthPlugin } from "@smithy/middleware-content-length"; +import { resolveEndpointConfig } from "@smithy/middleware-endpoint"; +import { getRetryPlugin, resolveRetryConfig } from "@smithy/middleware-retry"; +import { Client as __Client, } from "@smithy/smithy-client"; +import { defaultSSOHttpAuthSchemeParametersProvider, resolveHttpAuthSchemeConfig, } from "./auth/httpAuthSchemeProvider"; +import { resolveClientEndpointParameters, } from "./endpoint/EndpointParameters"; +import { getRuntimeConfig as __getRuntimeConfig } from "./runtimeConfig"; +import { resolveRuntimeExtensions } from "./runtimeExtensions"; +export { __Client }; +export class SSOClient extends __Client { + config; + constructor(...[configuration]) { + const _config_0 = __getRuntimeConfig(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = resolveUserAgentConfig(_config_1); + const _config_3 = resolveRetryConfig(_config_2); + const _config_4 = resolveRegionConfig(_config_3); + const _config_5 = resolveHostHeaderConfig(_config_4); + const _config_6 = resolveEndpointConfig(_config_5); + const _config_7 = resolveHttpAuthSchemeConfig(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + this.middlewareStack.use(getRecursionDetectionPlugin(this.config)); + this.middlewareStack.use(getHttpAuthSchemeEndpointRuleSetPlugin(this.config, { + 
httpAuthSchemeParametersProvider: defaultSSOHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + }), + })); + this.middlewareStack.use(getHttpSigningPlugin(this.config)); + } + destroy() { + super.destroy(); + } +} diff --git a/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthExtensionConfiguration.js b/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthExtensionConfiguration.js new file mode 100644 index 00000000..2ba1d48c --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthExtensionConfiguration.js @@ -0,0 +1,38 @@ +export const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + }, + }; +}; +export const resolveHttpAuthRuntimeConfig = (config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; diff --git a/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthSchemeProvider.js b/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthSchemeProvider.js new file mode 100644 index 00000000..f7ff90fa --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthSchemeProvider.js @@ -0,0 +1,62 @@ +import { resolveAwsSdkSigV4Config, } from "@aws-sdk/core"; +import { getSmithyContext, normalizeProvider } from "@smithy/util-middleware"; +export const defaultSSOHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: getSmithyContext(context).operation, + region: (await normalizeProvider(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "awsssoportal", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +export const defaultSSOHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "GetRoleCredentials": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "ListAccountRoles": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "ListAccounts": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "Logout": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + 
options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +export const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = resolveAwsSdkSigV4Config(config); + return Object.assign(config_0, { + authSchemePreference: normalizeProvider(config.authSchemePreference ?? []), + }); +}; diff --git a/node_modules/@aws-sdk/client-sso/dist-es/commands/GetRoleCredentialsCommand.js b/node_modules/@aws-sdk/client-sso/dist-es/commands/GetRoleCredentialsCommand.js new file mode 100644 index 00000000..aa4c2e32 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/commands/GetRoleCredentialsCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { GetRoleCredentialsRequestFilterSensitiveLog, GetRoleCredentialsResponseFilterSensitiveLog, } from "../models/models_0"; +import { de_GetRoleCredentialsCommand, se_GetRoleCredentialsCommand } from "../protocols/Aws_restJson1"; +export { $Command }; +export class GetRoleCredentialsCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("SWBPortalService", "GetRoleCredentials", {}) + .n("SSOClient", "GetRoleCredentialsCommand") + .f(GetRoleCredentialsRequestFilterSensitiveLog, GetRoleCredentialsResponseFilterSensitiveLog) + .ser(se_GetRoleCredentialsCommand) + .de(de_GetRoleCredentialsCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountRolesCommand.js b/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountRolesCommand.js new file mode 100644 index 00000000..d5bcc146 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountRolesCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { ListAccountRolesRequestFilterSensitiveLog, } from "../models/models_0"; +import { de_ListAccountRolesCommand, se_ListAccountRolesCommand } from "../protocols/Aws_restJson1"; +export { $Command }; +export class ListAccountRolesCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("SWBPortalService", "ListAccountRoles", {}) + .n("SSOClient", "ListAccountRolesCommand") + .f(ListAccountRolesRequestFilterSensitiveLog, void 0) + .ser(se_ListAccountRolesCommand) + .de(de_ListAccountRolesCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountsCommand.js b/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountsCommand.js new file mode 100644 index 00000000..d4ab8bae --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountsCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from 
"@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { ListAccountsRequestFilterSensitiveLog } from "../models/models_0"; +import { de_ListAccountsCommand, se_ListAccountsCommand } from "../protocols/Aws_restJson1"; +export { $Command }; +export class ListAccountsCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("SWBPortalService", "ListAccounts", {}) + .n("SSOClient", "ListAccountsCommand") + .f(ListAccountsRequestFilterSensitiveLog, void 0) + .ser(se_ListAccountsCommand) + .de(de_ListAccountsCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-sso/dist-es/commands/LogoutCommand.js b/node_modules/@aws-sdk/client-sso/dist-es/commands/LogoutCommand.js new file mode 100644 index 00000000..29a37ed8 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/commands/LogoutCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { LogoutRequestFilterSensitiveLog } from "../models/models_0"; +import { de_LogoutCommand, se_LogoutCommand } from "../protocols/Aws_restJson1"; +export { $Command }; +export class LogoutCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("SWBPortalService", "Logout", {}) + .n("SSOClient", "LogoutCommand") + .f(LogoutRequestFilterSensitiveLog, void 0) + .ser(se_LogoutCommand) + .de(de_LogoutCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/client-sso/dist-es/commands/index.js b/node_modules/@aws-sdk/client-sso/dist-es/commands/index.js new file mode 100644 index 00000000..0ab890d3 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/commands/index.js @@ -0,0 +1,4 @@ +export * from "./GetRoleCredentialsCommand"; +export * from "./ListAccountRolesCommand"; +export * from "./ListAccountsCommand"; +export * from "./LogoutCommand"; diff --git a/node_modules/@aws-sdk/client-sso/dist-es/endpoint/EndpointParameters.js b/node_modules/@aws-sdk/client-sso/dist-es/endpoint/EndpointParameters.js new file mode 100644 index 00000000..77e34f85 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/endpoint/EndpointParameters.js @@ -0,0 +1,13 @@ +export const resolveClientEndpointParameters = (options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? 
false, + defaultSigningName: "awsssoportal", + }); +}; +export const commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, +}; diff --git a/node_modules/@aws-sdk/client-sso/dist-es/endpoint/endpointResolver.js b/node_modules/@aws-sdk/client-sso/dist-es/endpoint/endpointResolver.js new file mode 100644 index 00000000..0ac15bcd --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/endpoint/endpointResolver.js @@ -0,0 +1,14 @@ +import { awsEndpointFunctions } from "@aws-sdk/util-endpoints"; +import { customEndpointFunctions, EndpointCache, resolveEndpoint } from "@smithy/util-endpoints"; +import { ruleSet } from "./ruleset"; +const cache = new EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS"], +}); +export const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => resolveEndpoint(ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +customEndpointFunctions.aws = awsEndpointFunctions; diff --git a/node_modules/@aws-sdk/client-sso/dist-es/endpoint/ruleset.js b/node_modules/@aws-sdk/client-sso/dist-es/endpoint/ruleset.js new file mode 100644 index 00000000..c48673dd --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/endpoint/ruleset.js @@ -0,0 +1,4 @@ +const u = "required", v = "fn", w = "argv", x = "ref"; +const a = true, b = "isSet", c = "booleanEquals", d = "error", e = "endpoint", f = "tree", g = "PartitionResult", h = "getAttr", i = { [u]: false, "type": "String" }, j = { [u]: true, "default": false, "type": "Boolean" }, k = { [x]: "Endpoint" }, l = { [v]: c, [w]: [{ [x]: "UseFIPS" }, true] }, m = { [v]: c, [w]: [{ [x]: "UseDualStack" }, true] }, n = {}, o = { [v]: h, [w]: [{ [x]: g }, "supportsFIPS"] }, p = { [x]: g }, q = { [v]: c, [w]: [true, { [v]: h, [w]: [p, "supportsDualStack"] }] }, r = [l], s = [m], t = [{ [x]: "Region" }]; +const _data = { version: "1.0", parameters: { Region: i, UseDualStack: j, UseFIPS: j, Endpoint: i }, rules: [{ conditions: [{ [v]: b, [w]: [k] }], rules: [{ conditions: r, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: d }, { conditions: s, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: d }, { endpoint: { url: k, properties: n, headers: n }, type: e }], type: f }, { conditions: [{ [v]: b, [w]: t }], rules: [{ conditions: [{ [v]: "aws.partition", [w]: t, assign: g }], rules: [{ conditions: [l, m], rules: [{ conditions: [{ [v]: c, [w]: [a, o] }, q], rules: [{ endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: d }], type: f }, { conditions: r, rules: [{ conditions: [{ [v]: c, [w]: [o, a] }], rules: [{ conditions: [{ [v]: "stringEquals", [w]: [{ [v]: h, [w]: [p, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://portal.sso.{Region}.amazonaws.com", properties: n, headers: n }, type: e }, { endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS is enabled but this partition does not support FIPS", type: d }], type: f }, { conditions: s, rules: [{ conditions: [q], rules: [{ 
endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "DualStack is enabled but this partition does not support DualStack", type: d }], type: f }, { endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }], type: f }, { error: "Invalid Configuration: Missing Region", type: d }] }; +export const ruleSet = _data; diff --git a/node_modules/@aws-sdk/client-sso/dist-es/extensionConfiguration.js b/node_modules/@aws-sdk/client-sso/dist-es/extensionConfiguration.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/extensionConfiguration.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/client-sso/dist-es/index.js b/node_modules/@aws-sdk/client-sso/dist-es/index.js new file mode 100644 index 00000000..b2975563 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/index.js @@ -0,0 +1,6 @@ +export * from "./SSOClient"; +export * from "./SSO"; +export * from "./commands"; +export * from "./pagination"; +export * from "./models"; +export { SSOServiceException } from "./models/SSOServiceException"; diff --git a/node_modules/@aws-sdk/client-sso/dist-es/models/SSOServiceException.js b/node_modules/@aws-sdk/client-sso/dist-es/models/SSOServiceException.js new file mode 100644 index 00000000..fa5d8fb9 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/models/SSOServiceException.js @@ -0,0 +1,8 @@ +import { ServiceException as __ServiceException, } from "@smithy/smithy-client"; +export { __ServiceException }; +export class SSOServiceException extends __ServiceException { + constructor(options) { + super(options); + Object.setPrototypeOf(this, SSOServiceException.prototype); + } +} diff --git a/node_modules/@aws-sdk/client-sso/dist-es/models/index.js b/node_modules/@aws-sdk/client-sso/dist-es/models/index.js new file mode 100644 index 00000000..09c5d6e0 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/models/index.js @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/node_modules/@aws-sdk/client-sso/dist-es/models/models_0.js b/node_modules/@aws-sdk/client-sso/dist-es/models/models_0.js new file mode 100644 index 00000000..56ec16dc --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/models/models_0.js @@ -0,0 +1,75 @@ +import { SENSITIVE_STRING } from "@smithy/smithy-client"; +import { SSOServiceException as __BaseException } from "./SSOServiceException"; +export class InvalidRequestException extends __BaseException { + name = "InvalidRequestException"; + $fault = "client"; + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidRequestException.prototype); + } +} +export class ResourceNotFoundException extends __BaseException { + name = "ResourceNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ResourceNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ResourceNotFoundException.prototype); + } +} +export class TooManyRequestsException extends __BaseException { + name = "TooManyRequestsException"; + $fault = "client"; + constructor(opts) { + super({ + name: "TooManyRequestsException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TooManyRequestsException.prototype); + } +} +export class UnauthorizedException extends __BaseException { + name = "UnauthorizedException"; 
+ $fault = "client"; + constructor(opts) { + super({ + name: "UnauthorizedException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, UnauthorizedException.prototype); + } +} +export const GetRoleCredentialsRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: SENSITIVE_STRING }), +}); +export const RoleCredentialsFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.secretAccessKey && { secretAccessKey: SENSITIVE_STRING }), + ...(obj.sessionToken && { sessionToken: SENSITIVE_STRING }), +}); +export const GetRoleCredentialsResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.roleCredentials && { roleCredentials: RoleCredentialsFilterSensitiveLog(obj.roleCredentials) }), +}); +export const ListAccountRolesRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: SENSITIVE_STRING }), +}); +export const ListAccountsRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: SENSITIVE_STRING }), +}); +export const LogoutRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: SENSITIVE_STRING }), +}); diff --git a/node_modules/@aws-sdk/client-sso/dist-es/pagination/Interfaces.js b/node_modules/@aws-sdk/client-sso/dist-es/pagination/Interfaces.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/pagination/Interfaces.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountRolesPaginator.js b/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountRolesPaginator.js new file mode 100644 index 00000000..b18c3a82 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountRolesPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListAccountRolesCommand, } from "../commands/ListAccountRolesCommand"; +import { SSOClient } from "../SSOClient"; +export const paginateListAccountRoles = createPaginator(SSOClient, ListAccountRolesCommand, "nextToken", "nextToken", "maxResults"); diff --git a/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountsPaginator.js b/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountsPaginator.js new file mode 100644 index 00000000..342c6638 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountsPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListAccountsCommand, } from "../commands/ListAccountsCommand"; +import { SSOClient } from "../SSOClient"; +export const paginateListAccounts = createPaginator(SSOClient, ListAccountsCommand, "nextToken", "nextToken", "maxResults"); diff --git a/node_modules/@aws-sdk/client-sso/dist-es/pagination/index.js b/node_modules/@aws-sdk/client-sso/dist-es/pagination/index.js new file mode 100644 index 00000000..1e7866f7 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/pagination/index.js @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./ListAccountRolesPaginator"; +export * from "./ListAccountsPaginator"; diff --git a/node_modules/@aws-sdk/client-sso/dist-es/protocols/Aws_restJson1.js b/node_modules/@aws-sdk/client-sso/dist-es/protocols/Aws_restJson1.js new file mode 100644 index 00000000..11b18925 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/protocols/Aws_restJson1.js @@ -0,0 +1,210 @@ +import { loadRestJsonErrorCode, parseJsonBody as parseBody, parseJsonErrorBody as parseErrorBody } from 
"@aws-sdk/core"; +import { requestBuilder as rb } from "@smithy/core"; +import { _json, collectBody, decorateServiceException as __decorateServiceException, expectNonNull as __expectNonNull, expectObject as __expectObject, expectString as __expectString, isSerializableHeaderValue, map, take, withBaseException, } from "@smithy/smithy-client"; +import { InvalidRequestException, ResourceNotFoundException, TooManyRequestsException, UnauthorizedException, } from "../models/models_0"; +import { SSOServiceException as __BaseException } from "../models/SSOServiceException"; +export const se_GetRoleCredentialsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xasbt]: input[_aT], + }); + b.bp("/federation/credentials"); + const query = map({ + [_rn]: [, __expectNonNull(input[_rN], `roleName`)], + [_ai]: [, __expectNonNull(input[_aI], `accountId`)], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_ListAccountRolesCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xasbt]: input[_aT], + }); + b.bp("/assignment/roles"); + const query = map({ + [_nt]: [, input[_nT]], + [_mr]: [() => input.maxResults !== void 0, () => input[_mR].toString()], + [_ai]: [, __expectNonNull(input[_aI], `accountId`)], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_ListAccountsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xasbt]: input[_aT], + }); + b.bp("/assignment/accounts"); + const query = map({ + [_nt]: [, input[_nT]], + [_mr]: [() => input.maxResults !== void 0, () => input[_mR].toString()], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_LogoutCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xasbt]: input[_aT], + }); + b.bp("/logout"); + let body; + b.m("POST").h(headers).b(body); + return b.build(); +}; +export const de_GetRoleCredentialsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + const doc = take(data, { + roleCredentials: _json, + }); + Object.assign(contents, doc); + return contents; +}; +export const de_ListAccountRolesCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + const doc = take(data, { + nextToken: __expectString, + roleList: _json, + }); + Object.assign(contents, doc); + return contents; +}; +export const de_ListAccountsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + const doc = take(data, { + accountList: _json, + nextToken: 
__expectString, + }); + Object.assign(contents, doc); + return contents; +}; +export const de_LogoutCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +const de_CommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InvalidRequestException": + case "com.amazonaws.sso#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.sso#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "TooManyRequestsException": + case "com.amazonaws.sso#TooManyRequestsException": + throw await de_TooManyRequestsExceptionRes(parsedOutput, context); + case "UnauthorizedException": + case "com.amazonaws.sso#UnauthorizedException": + throw await de_UnauthorizedExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode, + }); + } +}; +const throwDefaultError = withBaseException(__BaseException); +const de_InvalidRequestExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + message: __expectString, + }); + Object.assign(contents, doc); + const exception = new InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_ResourceNotFoundExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + message: __expectString, + }); + Object.assign(contents, doc); + const exception = new ResourceNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_TooManyRequestsExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + message: __expectString, + }); + Object.assign(contents, doc); + const exception = new TooManyRequestsException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_UnauthorizedExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + message: __expectString, + }); + Object.assign(contents, doc); + const exception = new UnauthorizedException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); +const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); +const _aI = "accountId"; +const _aT = "accessToken"; +const _ai = "account_id"; +const _mR = "maxResults"; +const _mr = "max_result"; +const _nT = "nextToken"; +const _nt = "next_token"; +const _rN = "roleName"; +const _rn = "role_name"; +const _xasbt = "x-amz-sso_bearer_token"; diff --git a/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.browser.js b/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.browser.js new file mode 100644 index 00000000..7c8fe850 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.browser.js @@ -0,0 +1,33 @@ +import packageInfo from "../package.json"; +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-browser"; +import { DEFAULT_USE_DUALSTACK_ENDPOINT, DEFAULT_USE_FIPS_ENDPOINT } from "@smithy/config-resolver"; +import { FetchHttpHandler as RequestHandler, streamCollector } from "@smithy/fetch-http-handler"; +import { invalidProvider } from "@smithy/invalid-dependency"; +import { calculateBodyLength } from "@smithy/util-body-length-browser"; +import { DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-browser"; +export const getRuntimeConfig = (config) => { + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + maxAttempts: config?.maxAttempts ?? DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? invalidProvider("Region is missing"), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? Sha256, + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? 
(() => Promise.resolve(DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; diff --git a/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.js b/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.js new file mode 100644 index 00000000..d8440b73 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.js @@ -0,0 +1,46 @@ +import packageInfo from "../package.json"; +import { NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, emitWarningIfUnsupportedVersion as awsCheckVersion } from "@aws-sdk/core"; +import { NODE_APP_ID_CONFIG_OPTIONS, createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-node"; +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS, NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, } from "@smithy/config-resolver"; +import { Hash } from "@smithy/hash-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS, NODE_RETRY_MODE_CONFIG_OPTIONS } from "@smithy/middleware-retry"; +import { loadConfig as loadNodeConfig } from "@smithy/node-config-provider"; +import { NodeHttpHandler as RequestHandler, streamCollector } from "@smithy/node-http-handler"; +import { calculateBodyLength } from "@smithy/util-body-length-node"; +import { DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-node"; +import { emitWarningIfUnsupportedVersion } from "@smithy/smithy-client"; +export const getRuntimeConfig = (config) => { + emitWarningIfUnsupportedVersion(process.version); + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + awsCheckVersion(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? loadNodeConfig(NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + maxAttempts: config?.maxAttempts ?? loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, { ...NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + loadNodeConfig({ + ...NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? loadNodeConfig(NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? loadNodeConfig(NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? 
loadNodeConfig(NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; diff --git a/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.native.js b/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.native.js new file mode 100644 index 00000000..0b546952 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.native.js @@ -0,0 +1,11 @@ +import { Sha256 } from "@aws-crypto/sha256-js"; +import { getRuntimeConfig as getBrowserRuntimeConfig } from "./runtimeConfig.browser"; +export const getRuntimeConfig = (config) => { + const browserDefaults = getBrowserRuntimeConfig(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? Sha256, + }; +}; diff --git a/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.shared.js b/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.shared.js new file mode 100644 index 00000000..3dfac586 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.shared.js @@ -0,0 +1,36 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { NoOpLogger } from "@smithy/smithy-client"; +import { parseUrl } from "@smithy/url-parser"; +import { fromBase64, toBase64 } from "@smithy/util-base64"; +import { fromUtf8, toUtf8 } from "@smithy/util-utf8"; +import { defaultSSOHttpAuthSchemeProvider } from "./auth/httpAuthSchemeProvider"; +import { defaultEndpointResolver } from "./endpoint/endpointResolver"; +export const getRuntimeConfig = (config) => { + return { + apiVersion: "2019-06-10", + base64Decoder: config?.base64Decoder ?? fromBase64, + base64Encoder: config?.base64Encoder ?? toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? defaultSSOHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new NoAuthSigner(), + }, + ], + logger: config?.logger ?? new NoOpLogger(), + serviceId: config?.serviceId ?? "SSO", + urlParser: config?.urlParser ?? parseUrl, + utf8Decoder: config?.utf8Decoder ?? fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
toUtf8, + }; +}; diff --git a/node_modules/@aws-sdk/client-sso/dist-es/runtimeExtensions.js b/node_modules/@aws-sdk/client-sso/dist-es/runtimeExtensions.js new file mode 100644 index 00000000..5b296950 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-es/runtimeExtensions.js @@ -0,0 +1,9 @@ +import { getAwsRegionExtensionConfiguration, resolveAwsRegionExtensionConfiguration, } from "@aws-sdk/region-config-resolver"; +import { getHttpHandlerExtensionConfiguration, resolveHttpHandlerRuntimeConfig } from "@smithy/protocol-http"; +import { getDefaultExtensionConfiguration, resolveDefaultRuntimeConfig } from "@smithy/smithy-client"; +import { getHttpAuthExtensionConfiguration, resolveHttpAuthRuntimeConfig } from "./auth/httpAuthExtensionConfiguration"; +export const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign(getAwsRegionExtensionConfiguration(runtimeConfig), getDefaultExtensionConfiguration(runtimeConfig), getHttpHandlerExtensionConfiguration(runtimeConfig), getHttpAuthExtensionConfiguration(runtimeConfig)); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign(runtimeConfig, resolveAwsRegionExtensionConfiguration(extensionConfiguration), resolveDefaultRuntimeConfig(extensionConfiguration), resolveHttpHandlerRuntimeConfig(extensionConfiguration), resolveHttpAuthRuntimeConfig(extensionConfiguration)); +}; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/SSO.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/SSO.d.ts new file mode 100644 index 00000000..8500e0cf --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/SSO.d.ts @@ -0,0 +1,53 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { GetRoleCredentialsCommandInput, GetRoleCredentialsCommandOutput } from "./commands/GetRoleCredentialsCommand"; +import { ListAccountRolesCommandInput, ListAccountRolesCommandOutput } from "./commands/ListAccountRolesCommand"; +import { ListAccountsCommandInput, ListAccountsCommandOutput } from "./commands/ListAccountsCommand"; +import { LogoutCommandInput, LogoutCommandOutput } from "./commands/LogoutCommand"; +import { SSOClient } from "./SSOClient"; +export interface SSO { + /** + * @see {@link GetRoleCredentialsCommand} + */ + getRoleCredentials(args: GetRoleCredentialsCommandInput, options?: __HttpHandlerOptions): Promise; + getRoleCredentials(args: GetRoleCredentialsCommandInput, cb: (err: any, data?: GetRoleCredentialsCommandOutput) => void): void; + getRoleCredentials(args: GetRoleCredentialsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetRoleCredentialsCommandOutput) => void): void; + /** + * @see {@link ListAccountRolesCommand} + */ + listAccountRoles(args: ListAccountRolesCommandInput, options?: __HttpHandlerOptions): Promise; + listAccountRoles(args: ListAccountRolesCommandInput, cb: (err: any, data?: ListAccountRolesCommandOutput) => void): void; + listAccountRoles(args: ListAccountRolesCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListAccountRolesCommandOutput) => void): void; + /** + * @see {@link ListAccountsCommand} + */ + listAccounts(args: ListAccountsCommandInput, options?: __HttpHandlerOptions): Promise; + listAccounts(args: ListAccountsCommandInput, cb: (err: any, data?: ListAccountsCommandOutput) => void): void; + listAccounts(args: ListAccountsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListAccountsCommandOutput) => void): void; + /** + * @see 
{@link LogoutCommand} + */ + logout(args: LogoutCommandInput, options?: __HttpHandlerOptions): Promise; + logout(args: LogoutCommandInput, cb: (err: any, data?: LogoutCommandOutput) => void): void; + logout(args: LogoutCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: LogoutCommandOutput) => void): void; +} +/** + *
AWS IAM Identity Center (successor to AWS Single Sign-On) Portal is a web service that makes it easy for you to assign user access to + * IAM Identity Center resources such as the AWS access portal. Users can get AWS account applications and roles + * assigned to them and get federated into the application. + * + * Although AWS Single Sign-On was renamed, the sso and + * identitystore API namespaces will continue to retain their original name for + * backward compatibility purposes. For more information, see IAM Identity Center rename. + * + * This reference guide describes the IAM Identity Center Portal operations that you can call + * programmatically and includes detailed information on data types and errors. + * + * AWS provides SDKs that consist of libraries and sample code for various programming + * languages and platforms, such as Java, Ruby, .Net, iOS, or Android. The SDKs provide a + * convenient way to create programmatic access to IAM Identity Center and other AWS services. For more + * information about the AWS SDKs, including how to download and install them, see Tools for Amazon Web Services. + *
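The SSO class being declared here is the aggregated convenience client: one method per operation instead of explicit command objects, with both promise and callback overloads. A minimal usage sketch, assuming an access token already obtained from the SSO OIDC sign-in flow (the region value is a placeholder):

```ts
import { SSO } from "@aws-sdk/client-sso";

// Aggregated client: one method per operation (promise overload shown).
const sso = new SSO({ region: "us-east-1" }); // placeholder region

async function listMyAccounts(accessToken: string) {
  const { accountList } = await sso.listAccounts({ accessToken });
  return accountList ?? [];
}
```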
+ * @public + */ +export declare class SSO extends SSOClient implements SSO { +} diff --git a/node_modules/@aws-sdk/client-sso/dist-types/SSOClient.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/SSOClient.d.ts new file mode 100644 index 00000000..acfb2fdf --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/SSOClient.d.ts @@ -0,0 +1,200 @@ +import { HostHeaderInputConfig, HostHeaderResolvedConfig } from "@aws-sdk/middleware-host-header"; +import { UserAgentInputConfig, UserAgentResolvedConfig } from "@aws-sdk/middleware-user-agent"; +import { RegionInputConfig, RegionResolvedConfig } from "@smithy/config-resolver"; +import { EndpointInputConfig, EndpointResolvedConfig } from "@smithy/middleware-endpoint"; +import { RetryInputConfig, RetryResolvedConfig } from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { Client as __Client, DefaultsMode as __DefaultsMode, SmithyConfiguration as __SmithyConfiguration, SmithyResolvedConfiguration as __SmithyResolvedConfiguration } from "@smithy/smithy-client"; +import { BodyLengthCalculator as __BodyLengthCalculator, CheckOptionalClientConfig as __CheckOptionalClientConfig, ChecksumConstructor as __ChecksumConstructor, Decoder as __Decoder, Encoder as __Encoder, HashConstructor as __HashConstructor, HttpHandlerOptions as __HttpHandlerOptions, Logger as __Logger, Provider as __Provider, Provider, StreamCollector as __StreamCollector, UrlParser as __UrlParser, UserAgent as __UserAgent } from "@smithy/types"; +import { HttpAuthSchemeInputConfig, HttpAuthSchemeResolvedConfig } from "./auth/httpAuthSchemeProvider"; +import { GetRoleCredentialsCommandInput, GetRoleCredentialsCommandOutput } from "./commands/GetRoleCredentialsCommand"; +import { ListAccountRolesCommandInput, ListAccountRolesCommandOutput } from "./commands/ListAccountRolesCommand"; +import { ListAccountsCommandInput, ListAccountsCommandOutput } from "./commands/ListAccountsCommand"; +import { LogoutCommandInput, LogoutCommandOutput } from "./commands/LogoutCommand"; +import { ClientInputEndpointParameters, ClientResolvedEndpointParameters, EndpointParameters } from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +/** + * @public + */ +export type ServiceInputTypes = GetRoleCredentialsCommandInput | ListAccountRolesCommandInput | ListAccountsCommandInput | LogoutCommandInput; +/** + * @public + */ +export type ServiceOutputTypes = GetRoleCredentialsCommandOutput | ListAccountRolesCommandOutput | ListAccountsCommandOutput | LogoutCommandOutput; +/** + * @public + */ +export interface ClientDefaults extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use or its constructor options. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandlerUserInput; + /** + * A constructor for a class implementing the {@link @smithy/types#ChecksumConstructor} interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + * @internal + */ + sha256?: __ChecksumConstructor | __HashConstructor; + /** + * The function that will be used to convert strings into HTTP endpoints. + * @internal + */ + urlParser?: __UrlParser; + /** + * A function that can calculate the length of a request body. + * @internal + */ + bodyLengthChecker?: __BodyLengthCalculator; + /** + * A function that converts a stream into an array of bytes. 
+ * @internal + */ + streamCollector?: __StreamCollector; + /** + * The function that will be used to convert a base64-encoded string to a byte array. + * @internal + */ + base64Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a base64-encoded string. + * @internal + */ + base64Encoder?: __Encoder; + /** + * The function that will be used to convert a UTF8-encoded string to a byte array. + * @internal + */ + utf8Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a UTF-8 encoded string. + * @internal + */ + utf8Encoder?: __Encoder; + /** + * The runtime environment. + * @internal + */ + runtime?: string; + /** + * Disable dynamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | __Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | __Provider; + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider; + /** + * Setting a client profile is similar to setting a value for the + * AWS_PROFILE environment variable. Setting a profile on a client + * in code only affects the single client instance, unlike AWS_PROFILE. + * + * When set, and only for environments where an AWS configuration + * file exists, fields configurable by this file will be retrieved + * from the specified profile within that file. + * Conflicting code configuration and environment variables will + * still have higher priority. + * + * For client credential resolution that involves checking the AWS + * configuration file, the client's profile (this value) will be + * used unless a different profile is set in the credential + * provider options. + * + */ + profile?: string; + /** + * The provider populating default tracking information to be sent with `user-agent`, `x-amz-user-agent` header + * @internal + */ + defaultUserAgentProvider?: Provider<__UserAgent>; + /** + * Value for how many times a request will be made at most in case of retry. + */ + maxAttempts?: number | __Provider; + /** + * Specifies which retry algorithm to use. + * @see https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-smithy-util-retry/Enum/RETRY_MODES/ + * + */ + retryMode?: string | __Provider; + /** + * Optional logger for logging debug/info/warn/error. + */ + logger?: __Logger; + /** + * Optional extensions + */ + extensions?: RuntimeExtension[]; + /** + * The {@link @smithy/smithy-client#DefaultsMode} that will be used to determine how certain default configuration options are resolved in the SDK. + */ + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +/** + * @public + */ +export type SSOClientConfigType = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & ClientDefaults & UserAgentInputConfig & RetryInputConfig & RegionInputConfig & HostHeaderInputConfig & EndpointInputConfig & HttpAuthSchemeInputConfig & ClientInputEndpointParameters; +/** + * @public + * + * The configuration interface of SSOClient class constructor that set the region, credentials and other options. 
+ */ +export interface SSOClientConfig extends SSOClientConfigType { +} +/** + * @public + */ +export type SSOClientResolvedConfigType = __SmithyResolvedConfiguration<__HttpHandlerOptions> & Required & RuntimeExtensionsConfig & UserAgentResolvedConfig & RetryResolvedConfig & RegionResolvedConfig & HostHeaderResolvedConfig & EndpointResolvedConfig & HttpAuthSchemeResolvedConfig & ClientResolvedEndpointParameters; +/** + * @public + * + * The resolved configuration interface of SSOClient class. This is resolved and normalized from the {@link SSOClientConfig | constructor configuration interface}. + */ +export interface SSOClientResolvedConfig extends SSOClientResolvedConfigType { +} +/** + *
AWS IAM Identity Center (successor to AWS Single Sign-On) Portal is a web service that makes it easy for you to assign user access to + * IAM Identity Center resources such as the AWS access portal. Users can get AWS account applications and roles + * assigned to them and get federated into the application. + * + * Although AWS Single Sign-On was renamed, the sso and + * identitystore API namespaces will continue to retain their original name for + * backward compatibility purposes. For more information, see IAM Identity Center rename. + * + * This reference guide describes the IAM Identity Center Portal operations that you can call + * programmatically and includes detailed information on data types and errors. + * + * AWS provides SDKs that consist of libraries and sample code for various programming + * languages and platforms, such as Java, Ruby, .Net, iOS, or Android. The SDKs provide a + * convenient way to create programmatic access to IAM Identity Center and other AWS services. For more + * information about the AWS SDKs, including how to download and install them, see Tools for Amazon Web Services. + *
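The bare-bones SSOClient declared below resolves every constructor option through the runtimeConfig chain seen earlier (config?.field ?? runtime default), so explicitly passed values always win. A minimal sketch with placeholder region and token; maxAttempts appears only to illustrate an override:

```ts
import { SSOClient, ListAccountsCommand } from "@aws-sdk/client-sso";

// Explicit options override the defaults resolved in runtimeConfig.*.
const client = new SSOClient({ region: "us-east-1", maxAttempts: 3 });

async function firstAccountsPage(accessToken: string) {
  const response = await client.send(
    new ListAccountsCommand({ accessToken, maxResults: 10 })
  );
  return response.accountList ?? [];
}

// In Node.js, release sockets once the client is no longer needed:
// client.destroy();
```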
+ * @public + */ +export declare class SSOClient extends __Client<__HttpHandlerOptions, ServiceInputTypes, ServiceOutputTypes, SSOClientResolvedConfig> { + /** + * The resolved configuration of SSOClient class. This is resolved and normalized from the {@link SSOClientConfig | constructor configuration interface}. + */ + readonly config: SSOClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy(): void; +} diff --git a/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthExtensionConfiguration.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 00000000..7e7ff4c0 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,29 @@ +import { AwsCredentialIdentity, AwsCredentialIdentityProvider, HttpAuthScheme } from "@smithy/types"; +import { SSOHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +/** + * @internal + */ +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider(httpAuthSchemeProvider: SSOHttpAuthSchemeProvider): void; + httpAuthSchemeProvider(): SSOHttpAuthSchemeProvider; + setCredentials(credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider): void; + credentials(): AwsCredentialIdentity | AwsCredentialIdentityProvider | undefined; +} +/** + * @internal + */ +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: SSOHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +/** + * @internal + */ +export declare const getHttpAuthExtensionConfiguration: (runtimeConfig: HttpAuthRuntimeConfig) => HttpAuthExtensionConfiguration; +/** + * @internal + */ +export declare const resolveHttpAuthRuntimeConfig: (config: HttpAuthExtensionConfiguration) => HttpAuthRuntimeConfig; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthSchemeProvider.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 00000000..bf3aad6a --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,75 @@ +import { AwsSdkSigV4AuthInputConfig, AwsSdkSigV4AuthResolvedConfig, AwsSdkSigV4PreviouslyResolved } from "@aws-sdk/core"; +import { HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, Provider } from "@smithy/types"; +import { SSOClientResolvedConfig } from "../SSOClient"; +/** + * @internal + */ +export interface SSOHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +/** + * @internal + */ +export interface SSOHttpAuthSchemeParametersProvider extends HttpAuthSchemeParametersProvider { +} +/** + * @internal + */ +export declare const defaultSSOHttpAuthSchemeParametersProvider: (config: SSOClientResolvedConfig, context: HandlerExecutionContext, input: object) => Promise; +/** + * @internal + */ +export interface SSOHttpAuthSchemeProvider extends HttpAuthSchemeProvider { +} +/** + * @internal + */ +export 
declare const defaultSSOHttpAuthSchemeProvider: SSOHttpAuthSchemeProvider; +/** + * @internal + */ +export interface HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + authSchemePreference?: string[] | Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + httpAuthSchemes?: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. + * @internal + */ + httpAuthSchemeProvider?: SSOHttpAuthSchemeProvider; +} +/** + * @internal + */ +export interface HttpAuthSchemeResolvedConfig extends AwsSdkSigV4AuthResolvedConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + readonly authSchemePreference: Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + readonly httpAuthSchemes: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. + * @internal + */ + readonly httpAuthSchemeProvider: SSOHttpAuthSchemeProvider; +} +/** + * @internal + */ +export declare const resolveHttpAuthSchemeConfig: (config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved) => T & HttpAuthSchemeResolvedConfig; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/commands/GetRoleCredentialsCommand.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/commands/GetRoleCredentialsCommand.d.ts new file mode 100644 index 00000000..f306bd51 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/commands/GetRoleCredentialsCommand.d.ts @@ -0,0 +1,95 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetRoleCredentialsRequest, GetRoleCredentialsResponse } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, SSOClientResolvedConfig } from "../SSOClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetRoleCredentialsCommand}. + */ +export interface GetRoleCredentialsCommandInput extends GetRoleCredentialsRequest { +} +/** + * @public + * + * The output of {@link GetRoleCredentialsCommand}. + */ +export interface GetRoleCredentialsCommandOutput extends GetRoleCredentialsResponse, __MetadataBearer { +} +declare const GetRoleCredentialsCommand_base: { + new (input: GetRoleCredentialsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetRoleCredentialsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *
Returns the STS short-term credentials for a given role name that is assigned to the + * user.
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SSOClient, GetRoleCredentialsCommand } from "@aws-sdk/client-sso"; // ES Modules import + * // const { SSOClient, GetRoleCredentialsCommand } = require("@aws-sdk/client-sso"); // CommonJS import + * const client = new SSOClient(config); + * const input = { // GetRoleCredentialsRequest + * roleName: "STRING_VALUE", // required + * accountId: "STRING_VALUE", // required + * accessToken: "STRING_VALUE", // required + * }; + * const command = new GetRoleCredentialsCommand(input); + * const response = await client.send(command); + * // { // GetRoleCredentialsResponse + * // roleCredentials: { // RoleCredentials + * // accessKeyId: "STRING_VALUE", + * // secretAccessKey: "STRING_VALUE", + * // sessionToken: "STRING_VALUE", + * // expiration: Number("long"), + * // }, + * // }; + * + * ``` + * + * @param GetRoleCredentialsCommandInput - {@link GetRoleCredentialsCommandInput} + * @returns {@link GetRoleCredentialsCommandOutput} + * @see {@link GetRoleCredentialsCommandInput} for command's `input` shape. + * @see {@link GetRoleCredentialsCommandOutput} for command's `response` shape. + * @see {@link SSOClientResolvedConfig | config} for SSOClient's `config` shape. + * + * @throws {@link InvalidRequestException} (client fault) + *
Indicates that a problem occurred with the input to the request. For example, a required + * parameter might be missing or out of range. + * + * @throws {@link ResourceNotFoundException} (client fault) + * The specified resource doesn't exist. + * + * @throws {@link TooManyRequestsException} (client fault) + * Indicates that the request is being made too frequently and is more than what the server + * can handle. + * + * @throws {@link UnauthorizedException} (client fault) + * Indicates that the request is not authorized. This can happen due to an invalid access + * token in the request. + * + * @throws {@link SSOServiceException} + * Base exception class for all service exceptions from SSO service.
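The @throws tags above correspond to exported exception classes, so callers can branch on failures with instanceof. A hedged sketch (role name, account id, and token are placeholders):

```ts
import {
  SSOClient,
  GetRoleCredentialsCommand,
  TooManyRequestsException,
  UnauthorizedException,
} from "@aws-sdk/client-sso";

async function fetchRoleCredentials(client: SSOClient, accessToken: string) {
  try {
    const { roleCredentials } = await client.send(
      new GetRoleCredentialsCommand({
        roleName: "ReadOnlyAccess", // placeholder
        accountId: "111122223333",  // placeholder
        accessToken,
      })
    );
    return roleCredentials;
  } catch (err) {
    if (err instanceof UnauthorizedException) {
      // Access token rejected: typically expired; rerun the sign-in flow.
    } else if (err instanceof TooManyRequestsException) {
      // Throttled beyond the SDK's built-in retries; back off and retry later.
    }
    throw err;
  }
}
```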
+ * + * + * @public + */ +export declare class GetRoleCredentialsCommand extends GetRoleCredentialsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetRoleCredentialsRequest; + output: GetRoleCredentialsResponse; + }; + sdk: { + input: GetRoleCredentialsCommandInput; + output: GetRoleCredentialsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountRolesCommand.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountRolesCommand.d.ts new file mode 100644 index 00000000..8ce6a044 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountRolesCommand.d.ts @@ -0,0 +1,96 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListAccountRolesRequest, ListAccountRolesResponse } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, SSOClientResolvedConfig } from "../SSOClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListAccountRolesCommand}. + */ +export interface ListAccountRolesCommandInput extends ListAccountRolesRequest { +} +/** + * @public + * + * The output of {@link ListAccountRolesCommand}. + */ +export interface ListAccountRolesCommandOutput extends ListAccountRolesResponse, __MetadataBearer { +} +declare const ListAccountRolesCommand_base: { + new (input: ListAccountRolesCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ListAccountRolesCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *
Lists all roles that are assigned to the user for a given AWS account.
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SSOClient, ListAccountRolesCommand } from "@aws-sdk/client-sso"; // ES Modules import + * // const { SSOClient, ListAccountRolesCommand } = require("@aws-sdk/client-sso"); // CommonJS import + * const client = new SSOClient(config); + * const input = { // ListAccountRolesRequest + * nextToken: "STRING_VALUE", + * maxResults: Number("int"), + * accessToken: "STRING_VALUE", // required + * accountId: "STRING_VALUE", // required + * }; + * const command = new ListAccountRolesCommand(input); + * const response = await client.send(command); + * // { // ListAccountRolesResponse + * // nextToken: "STRING_VALUE", + * // roleList: [ // RoleListType + * // { // RoleInfo + * // roleName: "STRING_VALUE", + * // accountId: "STRING_VALUE", + * // }, + * // ], + * // }; + * + * ``` + * + * @param ListAccountRolesCommandInput - {@link ListAccountRolesCommandInput} + * @returns {@link ListAccountRolesCommandOutput} + * @see {@link ListAccountRolesCommandInput} for command's `input` shape. + * @see {@link ListAccountRolesCommandOutput} for command's `response` shape. + * @see {@link SSOClientResolvedConfig | config} for SSOClient's `config` shape. + * + * @throws {@link InvalidRequestException} (client fault) + *
Indicates that a problem occurred with the input to the request. For example, a required + * parameter might be missing or out of range. + * + * @throws {@link ResourceNotFoundException} (client fault) + * The specified resource doesn't exist. + * + * @throws {@link TooManyRequestsException} (client fault) + * Indicates that the request is being made too frequently and is more than what the server + * can handle. + * + * @throws {@link UnauthorizedException} (client fault) + * Indicates that the request is not authorized. This can happen due to an invalid access + * token in the request. + * + * @throws {@link SSOServiceException} + * Base exception class for all service exceptions from SSO service.
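Because roleList is returned a page at a time, a caller either loops on nextToken by hand or uses the paginateListAccountRoles helper from ./pagination. A sketch of the manual loop (all values are placeholders):

```ts
import { SSOClient, ListAccountRolesCommand } from "@aws-sdk/client-sso";
import type { RoleInfo } from "@aws-sdk/client-sso";

async function allRoles(client: SSOClient, accessToken: string, accountId: string) {
  const roles: RoleInfo[] = [];
  let nextToken: string | undefined;
  do {
    const page = await client.send(
      new ListAccountRolesCommand({ accessToken, accountId, maxResults: 50, nextToken })
    );
    roles.push(...(page.roleList ?? []));
    nextToken = page.nextToken; // undefined on the last page ends the loop
  } while (nextToken);
  return roles;
}
```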
+ * + * + * @public + */ +export declare class ListAccountRolesCommand extends ListAccountRolesCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListAccountRolesRequest; + output: ListAccountRolesResponse; + }; + sdk: { + input: ListAccountRolesCommandInput; + output: ListAccountRolesCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountsCommand.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountsCommand.d.ts new file mode 100644 index 00000000..cffc47e6 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountsCommand.d.ts @@ -0,0 +1,98 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListAccountsRequest, ListAccountsResponse } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, SSOClientResolvedConfig } from "../SSOClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListAccountsCommand}. + */ +export interface ListAccountsCommandInput extends ListAccountsRequest { +} +/** + * @public + * + * The output of {@link ListAccountsCommand}. + */ +export interface ListAccountsCommandOutput extends ListAccountsResponse, __MetadataBearer { +} +declare const ListAccountsCommand_base: { + new (input: ListAccountsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ListAccountsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *
Lists all AWS accounts assigned to the user. These AWS accounts are assigned by the + * administrator of the account. For more information, see Assign User Access in the IAM Identity Center User Guide. This operation + * returns a paginated response.
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SSOClient, ListAccountsCommand } from "@aws-sdk/client-sso"; // ES Modules import + * // const { SSOClient, ListAccountsCommand } = require("@aws-sdk/client-sso"); // CommonJS import + * const client = new SSOClient(config); + * const input = { // ListAccountsRequest + * nextToken: "STRING_VALUE", + * maxResults: Number("int"), + * accessToken: "STRING_VALUE", // required + * }; + * const command = new ListAccountsCommand(input); + * const response = await client.send(command); + * // { // ListAccountsResponse + * // nextToken: "STRING_VALUE", + * // accountList: [ // AccountListType + * // { // AccountInfo + * // accountId: "STRING_VALUE", + * // accountName: "STRING_VALUE", + * // emailAddress: "STRING_VALUE", + * // }, + * // ], + * // }; + * + * ``` + * + * @param ListAccountsCommandInput - {@link ListAccountsCommandInput} + * @returns {@link ListAccountsCommandOutput} + * @see {@link ListAccountsCommandInput} for command's `input` shape. + * @see {@link ListAccountsCommandOutput} for command's `response` shape. + * @see {@link SSOClientResolvedConfig | config} for SSOClient's `config` shape. + * + * @throws {@link InvalidRequestException} (client fault) + *
Indicates that a problem occurred with the input to the request. For example, a required + * parameter might be missing or out of range. + * + * @throws {@link ResourceNotFoundException} (client fault) + * The specified resource doesn't exist. + * + * @throws {@link TooManyRequestsException} (client fault) + * Indicates that the request is being made too frequently and is more than what the server + * can handle. + * + * @throws {@link UnauthorizedException} (client fault) + * Indicates that the request is not authorized. This can happen due to an invalid access + * token in the request. + * + * @throws {@link SSOServiceException} + * Base exception class for all service exceptions from SSO service.
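This is the operation that the paginateListAccounts helper declared in ./pagination/ListAccountsPaginator wraps; the paginator hides the nextToken bookkeeping behind an async iterable. A sketch with placeholder values:

```ts
import { SSOClient, paginateListAccounts } from "@aws-sdk/client-sso";

async function printAccounts(accessToken: string) {
  const client = new SSOClient({ region: "us-east-1" }); // placeholder region
  const paginator = paginateListAccounts({ client }, { accessToken, maxResults: 20 });
  for await (const page of paginator) {
    for (const account of page.accountList ?? []) {
      console.log(account.accountId, account.accountName);
    }
  }
}
```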
+ * + * + * @public + */ +export declare class ListAccountsCommand extends ListAccountsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListAccountsRequest; + output: ListAccountsResponse; + }; + sdk: { + input: ListAccountsCommandInput; + output: ListAccountsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-sso/dist-types/commands/LogoutCommand.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/commands/LogoutCommand.d.ts new file mode 100644 index 00000000..e85fe339 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/commands/LogoutCommand.d.ts @@ -0,0 +1,95 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { LogoutRequest } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, SSOClientResolvedConfig } from "../SSOClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link LogoutCommand}. + */ +export interface LogoutCommandInput extends LogoutRequest { +} +/** + * @public + * + * The output of {@link LogoutCommand}. + */ +export interface LogoutCommandOutput extends __MetadataBearer { +} +declare const LogoutCommand_base: { + new (input: LogoutCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: LogoutCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *
Removes the locally stored SSO tokens from the client-side cache and sends an API call to + * the IAM Identity Center service to invalidate the corresponding server-side IAM Identity Center sign in + * session. + * + * If a user uses IAM Identity Center to access the AWS CLI, the user’s IAM Identity Center sign in session is + * used to obtain an IAM session, as specified in the corresponding IAM Identity Center permission set. + * More specifically, IAM Identity Center assumes an IAM role in the target account on behalf of the user, + * and the corresponding temporary AWS credentials are returned to the client. + * After user logout, any existing IAM role sessions that were created by using IAM Identity Center + * permission sets continue based on the duration configured in the permission set. + * For more information, see User + * authentications in the IAM Identity Center User + * Guide. + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SSOClient, LogoutCommand } from "@aws-sdk/client-sso"; // ES Modules import + * // const { SSOClient, LogoutCommand } = require("@aws-sdk/client-sso"); // CommonJS import + * const client = new SSOClient(config); + * const input = { // LogoutRequest + * accessToken: "STRING_VALUE", // required + * }; + * const command = new LogoutCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param LogoutCommandInput - {@link LogoutCommandInput} + * @returns {@link LogoutCommandOutput} + * @see {@link LogoutCommandInput} for command's `input` shape. + * @see {@link LogoutCommandOutput} for command's `response` shape. + * @see {@link SSOClientResolvedConfig | config} for SSOClient's `config` shape. + * + * @throws {@link InvalidRequestException} (client fault) + *
Indicates that a problem occurred with the input to the request. For example, a required + * parameter might be missing or out of range. + * + * @throws {@link TooManyRequestsException} (client fault) + * Indicates that the request is being made too frequently and is more than what the server + * can handle. + * + * @throws {@link UnauthorizedException} (client fault) + * Indicates that the request is not authorized. This can happen due to an invalid access + * token in the request. + * + * @throws {@link SSOServiceException} + * Base exception class for all service exceptions from SSO service.
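Putting the caveats above into code: LogoutCommand invalidates the server-side session, but IAM role sessions already minted from permission sets keep running until their configured duration elapses. A sketch; the local cache cleanup is hypothetical:

```ts
import { SSOClient, LogoutCommand } from "@aws-sdk/client-sso";

async function signOut(client: SSOClient, accessToken: string) {
  // Invalidate the server-side IAM Identity Center session ({} on success).
  await client.send(new LogoutCommand({ accessToken }));
  // Existing IAM role sessions created from permission sets are NOT revoked;
  // they expire according to the permission set's session duration.
  // clearCachedToken(); // hypothetical: also drop the locally cached token
}
```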
+ * + * + * @public + */ +export declare class LogoutCommand extends LogoutCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: LogoutRequest; + output: {}; + }; + sdk: { + input: LogoutCommandInput; + output: LogoutCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-sso/dist-types/commands/index.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/commands/index.d.ts new file mode 100644 index 00000000..0ab890d3 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/commands/index.d.ts @@ -0,0 +1,4 @@ +export * from "./GetRoleCredentialsCommand"; +export * from "./ListAccountRolesCommand"; +export * from "./ListAccountsCommand"; +export * from "./LogoutCommand"; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/endpoint/EndpointParameters.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/endpoint/EndpointParameters.d.ts new file mode 100644 index 00000000..23f42e36 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/endpoint/EndpointParameters.d.ts @@ -0,0 +1,40 @@ +import { Endpoint, EndpointParameters as __EndpointParameters, EndpointV2, Provider } from "@smithy/types"; +/** + * @public + */ +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: string | Provider | Endpoint | Provider | EndpointV2 | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: (options: T & ClientInputEndpointParameters) => T & ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const commonParams: { + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; +} diff --git a/node_modules/@aws-sdk/client-sso/dist-types/endpoint/endpointResolver.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/endpoint/endpointResolver.d.ts new file mode 100644 index 00000000..70a8eaec --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/endpoint/endpointResolver.d.ts @@ -0,0 +1,5 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: (endpointParams: EndpointParameters, context?: { + logger?: Logger; +}) => EndpointV2; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/endpoint/ruleset.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/endpoint/ruleset.d.ts new file mode 100644 index 00000000..4b238994 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/extensionConfiguration.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/extensionConfiguration.d.ts new file mode 100644 index 00000000..0f76dd37 --- /dev/null +++ 
b/node_modules/@aws-sdk/client-sso/dist-types/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +/** + * @internal + */ +export interface SSOExtensionConfiguration extends HttpHandlerExtensionConfiguration, DefaultExtensionConfiguration, AwsRegionExtensionConfiguration, HttpAuthExtensionConfiguration { +} diff --git a/node_modules/@aws-sdk/client-sso/dist-types/index.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/index.d.ts new file mode 100644 index 00000000..3b3bceaa --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/index.d.ts @@ -0,0 +1,29 @@ +/** + *
AWS IAM Identity Center (successor to AWS Single Sign-On) Portal is a web service that makes it easy for you to assign user access to + * IAM Identity Center resources such as the AWS access portal. Users can get AWS account applications and roles + * assigned to them and get federated into the application. + * + * Although AWS Single Sign-On was renamed, the sso and + * identitystore API namespaces will continue to retain their original name for + * backward compatibility purposes. For more information, see IAM Identity Center rename. + * + * This reference guide describes the IAM Identity Center Portal operations that you can call + * programmatically and includes detailed information on data types and errors. + * + * AWS provides SDKs that consist of libraries and sample code for various programming + * languages and platforms, such as Java, Ruby, .Net, iOS, or Android. The SDKs provide a + * convenient way to create programmatic access to IAM Identity Center and other AWS services. For more + * information about the AWS SDKs, including how to download and install them, see Tools for Amazon Web Services. + *
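Because this root barrel file re-exports the clients, commands, paginators, models, and the base exception, consumers need only one import path. A short illustration:

```ts
// All of these come from the root barrel file shown here.
import {
  SSO,
  SSOClient,
  GetRoleCredentialsCommand,
  paginateListAccountRoles,
  SSOServiceException,
} from "@aws-sdk/client-sso";
```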
+ * + * @packageDocumentation + */ +export * from "./SSOClient"; +export * from "./SSO"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export type { RuntimeExtension } from "./runtimeExtensions"; +export type { SSOExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./pagination"; +export * from "./models"; +export { SSOServiceException } from "./models/SSOServiceException"; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/models/SSOServiceException.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/models/SSOServiceException.d.ts new file mode 100644 index 00000000..9172f1a5 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/models/SSOServiceException.d.ts @@ -0,0 +1,14 @@ +import { ServiceException as __ServiceException, ServiceExceptionOptions as __ServiceExceptionOptions } from "@smithy/smithy-client"; +export type { __ServiceExceptionOptions }; +export { __ServiceException }; +/** + * @public + * + * Base exception class for all service exceptions from SSO service. + */ +export declare class SSOServiceException extends __ServiceException { + /** + * @internal + */ + constructor(options: __ServiceExceptionOptions); +} diff --git a/node_modules/@aws-sdk/client-sso/dist-types/models/index.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/models/index.d.ts new file mode 100644 index 00000000..09c5d6e0 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/models/models_0.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/models/models_0.d.ts new file mode 100644 index 00000000..0d40fa7f --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/models/models_0.d.ts @@ -0,0 +1,266 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { SSOServiceException as __BaseException } from "./SSOServiceException"; +/** + *
Provides information about your AWS account.
+ * @public + */ +export interface AccountInfo { + /** + *
The identifier of the AWS account that is assigned to the user.
+ * @public + */ + accountId?: string | undefined; + /** + *
The display name of the AWS account that is assigned to the user.
+ * @public + */ + accountName?: string | undefined; + /** + *
The email address of the AWS account that is assigned to the user.
+ * @public + */ + emailAddress?: string | undefined; +} +/** + * @public + */ +export interface GetRoleCredentialsRequest { + /** + *
The friendly name of the role that is assigned to the user.
+ * @public + */ + roleName: string | undefined; + /** + *
The identifier for the AWS account that is assigned to the user.
+ * @public + */ + accountId: string | undefined; + /** + *
The token issued by the CreateToken API call. For more information, see + * CreateToken in the IAM Identity Center OIDC API Reference Guide.
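Every request model in this file carries the accessToken issued by the CreateToken API, which lives in the separate @aws-sdk/client-sso-oidc package. A heavily abridged sketch of the device-authorization flow that produces it; a real implementation must poll CreateToken at the advertised interval until the user approves the device in a browser, and all values are placeholders:

```ts
import {
  SSOOIDCClient,
  RegisterClientCommand,
  StartDeviceAuthorizationCommand,
  CreateTokenCommand,
} from "@aws-sdk/client-sso-oidc";

async function deviceFlowAccessToken(startUrl: string, region: string) {
  const oidc = new SSOOIDCClient({ region });
  const reg = await oidc.send(
    new RegisterClientCommand({ clientName: "example-client", clientType: "public" })
  );
  const auth = await oidc.send(
    new StartDeviceAuthorizationCommand({
      clientId: reg.clientId,
      clientSecret: reg.clientSecret,
      startUrl, // e.g. the organization's AWS access portal URL
    })
  );
  console.log(`Approve this device at: ${auth.verificationUriComplete}`);
  // ...poll here until the user approves (respect auth.interval seconds)...
  const token = await oidc.send(
    new CreateTokenCommand({
      clientId: reg.clientId,
      clientSecret: reg.clientSecret,
      grantType: "urn:ietf:params:oauth:grant-type:device_code",
      deviceCode: auth.deviceCode,
    })
  );
  return token.accessToken;
}
```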
+ * @public + */ + accessToken: string | undefined; +} +/** + *
Provides information about the role credentials that are assigned to the user.
+ * @public + */ +export interface RoleCredentials { + /** + *
The identifier used for the temporary security credentials. For more information, see + * Using Temporary Security Credentials to Request Access to AWS Resources in the + * AWS IAM User Guide.
+ * @public + */ + accessKeyId?: string | undefined; + /** + *
The key that is used to sign the request. For more information, see Using Temporary Security Credentials to Request Access to AWS Resources in the + * AWS IAM User Guide.
+ * @public + */ + secretAccessKey?: string | undefined; + /** + *
The token used for temporary credentials. For more information, see Using Temporary Security Credentials to Request Access to AWS Resources in the + * AWS IAM User Guide.
+ * @public + */ + sessionToken?: string | undefined; + /** + *
The date on which temporary security credentials expire.
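The expiration field described here arrives as a bare number (an epoch timestamp; milliseconds is assumed below, matching how the SDK's SSO credential provider treats it). A sketch of adapting RoleCredentials into the credentials shape other AWS SDK v3 clients accept:

```ts
import type { RoleCredentials } from "@aws-sdk/client-sso";

function toSdkCredentials(rc: RoleCredentials) {
  if (!rc.accessKeyId || !rc.secretAccessKey || !rc.sessionToken) {
    throw new Error("Incomplete role credentials");
  }
  return {
    accessKeyId: rc.accessKeyId,
    secretAccessKey: rc.secretAccessKey,
    sessionToken: rc.sessionToken,
    // Assumption: epoch milliseconds, as the SSO credential provider treats it.
    expiration: rc.expiration !== undefined ? new Date(rc.expiration) : undefined,
  };
}
```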
+ * @public + */ + expiration?: number | undefined; +} +/** + * @public + */ +export interface GetRoleCredentialsResponse { + /** + *
The credentials for the role that is assigned to the user.
+ * @public + */ + roleCredentials?: RoleCredentials | undefined; +} +/** + *
Indicates that a problem occurred with the input to the request. For example, a required + * parameter might be missing or out of range.
+ * @public + */ +export declare class InvalidRequestException extends __BaseException { + readonly name: "InvalidRequestException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *
The specified resource doesn't exist.
+ * @public + */ +export declare class ResourceNotFoundException extends __BaseException { + readonly name: "ResourceNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *
Indicates that the request is being made too frequently and is more than what the server + * can handle.
+ * @public + */ +export declare class TooManyRequestsException extends __BaseException { + readonly name: "TooManyRequestsException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *
Indicates that the request is not authorized. This can happen due to an invalid access + * token in the request.
+ * @public + */ +export declare class UnauthorizedException extends __BaseException { + readonly name: "UnauthorizedException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface ListAccountRolesRequest { + /** + *
The page token from the previous response output when you request subsequent pages.
+ * @public + */ + nextToken?: string | undefined; + /** + *
The number of items that clients can request per page.
+ * @public + */ + maxResults?: number | undefined; + /** + *
The token issued by the CreateToken API call. For more information, see + * CreateToken in the IAM Identity Center OIDC API Reference Guide.
+ * @public + */ + accessToken: string | undefined; + /** + *
The identifier for the AWS account that is assigned to the user.
+ * @public + */ + accountId: string | undefined; +} +/** + *
Provides information about the role that is assigned to the user.
+ * @public + */ +export interface RoleInfo { + /** + *
The friendly name of the role that is assigned to the user.
+ * @public + */ + roleName?: string | undefined; + /** + *
The identifier of the AWS account assigned to the user.
+ * @public + */ + accountId?: string | undefined; +} +/** + * @public + */ +export interface ListAccountRolesResponse { + /** + *
The page token that the client uses to retrieve the next page of results.
+ * @public + */ + nextToken?: string | undefined; + /** + *
A paginated response with the list of roles and the next token if more results are + * available.
+ * @public + */ + roleList?: RoleInfo[] | undefined; +} +/** + * @public + */ +export interface ListAccountsRequest { + /** + *
(Optional) When requesting subsequent pages, this is the page token from the previous + * response output.
+ * @public + */ + nextToken?: string | undefined; + /** + *
This is the number of items clients can request per page.
+ * @public + */ + maxResults?: number | undefined; + /** + *
The token issued by the CreateToken API call. For more information, see + * CreateToken in the IAM Identity Center OIDC API Reference Guide.
+ * @public + */ + accessToken: string | undefined; +} +/** + * @public + */ +export interface ListAccountsResponse { + /** + *
The page token that the client uses to retrieve the next page of account results.
+ * @public + */ + nextToken?: string | undefined; + /** + *
A paginated response with the list of account information and the next token if more + * results are available.
+ * @public + */ + accountList?: AccountInfo[] | undefined; +} +/** + * @public + */ +export interface LogoutRequest { + /** + *
The token issued by the CreateToken API call. For more information, see + * CreateToken in the IAM Identity Center OIDC API Reference Guide.
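The *FilterSensitiveLog helpers declared just after this point are what the SDK uses to redact sensitive members, such as this access token, before request objects are logged. A small sketch; the exact redaction placeholder string is an assumption based on smithy's SENSITIVE_STRING:

```ts
import { GetRoleCredentialsRequestFilterSensitiveLog } from "@aws-sdk/client-sso";

const safeToLog = GetRoleCredentialsRequestFilterSensitiveLog({
  roleName: "ReadOnlyAccess", // placeholder
  accountId: "111122223333",  // placeholder
  accessToken: "do-not-log-me",
});
// accessToken is replaced with smithy's sensitive-information placeholder,
// so the object can be logged without leaking the token.
console.log(safeToLog);
```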
+ * @public + */ + accessToken: string | undefined; +} +/** + * @internal + */ +export declare const GetRoleCredentialsRequestFilterSensitiveLog: (obj: GetRoleCredentialsRequest) => any; +/** + * @internal + */ +export declare const RoleCredentialsFilterSensitiveLog: (obj: RoleCredentials) => any; +/** + * @internal + */ +export declare const GetRoleCredentialsResponseFilterSensitiveLog: (obj: GetRoleCredentialsResponse) => any; +/** + * @internal + */ +export declare const ListAccountRolesRequestFilterSensitiveLog: (obj: ListAccountRolesRequest) => any; +/** + * @internal + */ +export declare const ListAccountsRequestFilterSensitiveLog: (obj: ListAccountsRequest) => any; +/** + * @internal + */ +export declare const LogoutRequestFilterSensitiveLog: (obj: LogoutRequest) => any; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/pagination/Interfaces.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/pagination/Interfaces.d.ts new file mode 100644 index 00000000..81addcaa --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/pagination/Interfaces.d.ts @@ -0,0 +1,8 @@ +import { PaginationConfiguration } from "@smithy/types"; +import { SSOClient } from "../SSOClient"; +/** + * @public + */ +export interface SSOPaginationConfiguration extends PaginationConfiguration { + client: SSOClient; +} diff --git a/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountRolesPaginator.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountRolesPaginator.d.ts new file mode 100644 index 00000000..fa309d48 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountRolesPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ListAccountRolesCommandInput, ListAccountRolesCommandOutput } from "../commands/ListAccountRolesCommand"; +import { SSOPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateListAccountRoles: (config: SSOPaginationConfiguration, input: ListAccountRolesCommandInput, ...rest: any[]) => Paginator; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountsPaginator.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountsPaginator.d.ts new file mode 100644 index 00000000..21c25592 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountsPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ListAccountsCommandInput, ListAccountsCommandOutput } from "../commands/ListAccountsCommand"; +import { SSOPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateListAccounts: (config: SSOPaginationConfiguration, input: ListAccountsCommandInput, ...rest: any[]) => Paginator; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/pagination/index.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/pagination/index.d.ts new file mode 100644 index 00000000..1e7866f7 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/pagination/index.d.ts @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./ListAccountRolesPaginator"; +export * from "./ListAccountsPaginator"; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/protocols/Aws_restJson1.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/protocols/Aws_restJson1.d.ts new file mode 100644 index 00000000..02d97aa5 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/protocols/Aws_restJson1.d.ts @@ -0,0 +1,38 @@ +import { HttpRequest as 
__HttpRequest, HttpResponse as __HttpResponse } from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { GetRoleCredentialsCommandInput, GetRoleCredentialsCommandOutput } from "../commands/GetRoleCredentialsCommand"; +import { ListAccountRolesCommandInput, ListAccountRolesCommandOutput } from "../commands/ListAccountRolesCommand"; +import { ListAccountsCommandInput, ListAccountsCommandOutput } from "../commands/ListAccountsCommand"; +import { LogoutCommandInput, LogoutCommandOutput } from "../commands/LogoutCommand"; +/** + * serializeAws_restJson1GetRoleCredentialsCommand + */ +export declare const se_GetRoleCredentialsCommand: (input: GetRoleCredentialsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_restJson1ListAccountRolesCommand + */ +export declare const se_ListAccountRolesCommand: (input: ListAccountRolesCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_restJson1ListAccountsCommand + */ +export declare const se_ListAccountsCommand: (input: ListAccountsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_restJson1LogoutCommand + */ +export declare const se_LogoutCommand: (input: LogoutCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * deserializeAws_restJson1GetRoleCredentialsCommand + */ +export declare const de_GetRoleCredentialsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_restJson1ListAccountRolesCommand + */ +export declare const de_ListAccountRolesCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_restJson1ListAccountsCommand + */ +export declare const de_ListAccountsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_restJson1LogoutCommand + */ +export declare const de_LogoutCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.browser.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.browser.d.ts new file mode 100644 index 00000000..c593515e --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.browser.d.ts @@ -0,0 +1,57 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { SSOClientConfig } from "./SSOClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: import("@smithy/protocol-http").HttpHandler | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: 
import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.d.ts new file mode 100644 index 00000000..4194fd51 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.d.ts @@ -0,0 +1,57 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { SSOClientConfig } from "./SSOClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-node").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: RequestHandler | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: 
import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.native.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.native.d.ts new file mode 100644 index 00000000..38c1d336 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.native.d.ts @@ -0,0 +1,56 @@ +import { SSOClientConfig } from "./SSOClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: import("@smithy/types").NodeHttpHandlerOptions | import("@smithy/types").FetchHttpHandlerOptions | Record | import("@smithy/protocol-http").HttpHandler | 
import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: import("@smithy/smithy-client").DefaultsMode | import("@smithy/types").Provider; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.shared.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.shared.d.ts new file mode 100644 
index 00000000..20ab682e --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.shared.d.ts @@ -0,0 +1,32 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { SSOClientConfig } from "./SSOClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider<import("@smithy/types").Identity> | undefined; + signer: AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider<import("@smithy/types").Identity> | (() => Promise<{}>); + signer: NoAuthSigner; + })[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/runtimeExtensions.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/runtimeExtensions.d.ts new file mode 100644 index 00000000..a0f078c8 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/runtimeExtensions.d.ts @@ -0,0 +1,17 @@ +import { SSOExtensionConfiguration } from "./extensionConfiguration"; +/** + * @public + */ +export interface RuntimeExtension { + configure(extensionConfiguration: SSOExtensionConfiguration): void; +} +/** + * @public + */ +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +/** + * @internal + */ +export declare const resolveRuntimeExtensions: (runtimeConfig: any, extensions: RuntimeExtension[]) => any; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSO.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSO.d.ts new file mode 100644 index 00000000..9a242fcc --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSO.d.ts @@ -0,0 +1,73 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + GetRoleCredentialsCommandInput, + GetRoleCredentialsCommandOutput, +} from "./commands/GetRoleCredentialsCommand"; +import { + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, +} from "./commands/ListAccountRolesCommand"; +import { + ListAccountsCommandInput, + ListAccountsCommandOutput, +} from "./commands/ListAccountsCommand"; +import { + LogoutCommandInput, + LogoutCommandOutput, +} from "./commands/LogoutCommand"; +import { SSOClient } from "./SSOClient"; +export interface SSO { + getRoleCredentials( + args: GetRoleCredentialsCommandInput, + options?: __HttpHandlerOptions + ): Promise<GetRoleCredentialsCommandOutput>; + getRoleCredentials( + args: GetRoleCredentialsCommandInput, + cb: (err: any, data?: GetRoleCredentialsCommandOutput) => void + ): void; + getRoleCredentials( + args: GetRoleCredentialsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: 
GetRoleCredentialsCommandOutput) => void + ): void; + listAccountRoles( + args: ListAccountRolesCommandInput, + options?: __HttpHandlerOptions + ): Promise<ListAccountRolesCommandOutput>; + listAccountRoles( + args: ListAccountRolesCommandInput, + cb: (err: any, data?: ListAccountRolesCommandOutput) => void + ): void; + listAccountRoles( + args: ListAccountRolesCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListAccountRolesCommandOutput) => void + ): void; + listAccounts( + args: ListAccountsCommandInput, + options?: __HttpHandlerOptions + ): Promise<ListAccountsCommandOutput>; + listAccounts( + args: ListAccountsCommandInput, + cb: (err: any, data?: ListAccountsCommandOutput) => void + ): void; + listAccounts( + args: ListAccountsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListAccountsCommandOutput) => void + ): void; + logout( + args: LogoutCommandInput, + options?: __HttpHandlerOptions + ): Promise<LogoutCommandOutput>; + logout( + args: LogoutCommandInput, + cb: (err: any, data?: LogoutCommandOutput) => void + ): void; + logout( + args: LogoutCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: LogoutCommandOutput) => void + ): void; +} +export declare class SSO extends SSOClient implements SSO {} diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSOClient.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSOClient.d.ts new file mode 100644 index 00000000..efd5a5f9 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSOClient.d.ts @@ -0,0 +1,138 @@ +import { + HostHeaderInputConfig, + HostHeaderResolvedConfig, +} from "@aws-sdk/middleware-host-header"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, +} from "@aws-sdk/middleware-user-agent"; +import { + RegionInputConfig, + RegionResolvedConfig, +} from "@smithy/config-resolver"; +import { + EndpointInputConfig, + EndpointResolvedConfig, +} from "@smithy/middleware-endpoint"; +import { + RetryInputConfig, + RetryResolvedConfig, +} from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { + Client as __Client, + DefaultsMode as __DefaultsMode, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@smithy/smithy-client"; +import { + BodyLengthCalculator as __BodyLengthCalculator, + CheckOptionalClientConfig as __CheckOptionalClientConfig, + ChecksumConstructor as __ChecksumConstructor, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, + UserAgent as __UserAgent, +} from "@smithy/types"; +import { + HttpAuthSchemeInputConfig, + HttpAuthSchemeResolvedConfig, +} from "./auth/httpAuthSchemeProvider"; +import { + GetRoleCredentialsCommandInput, + GetRoleCredentialsCommandOutput, +} from "./commands/GetRoleCredentialsCommand"; +import { + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, +} from "./commands/ListAccountRolesCommand"; +import { + ListAccountsCommandInput, + ListAccountsCommandOutput, +} from "./commands/ListAccountsCommand"; +import { + LogoutCommandInput, + LogoutCommandOutput, +} from "./commands/LogoutCommand"; +import { + ClientInputEndpointParameters, + ClientResolvedEndpointParameters, + EndpointParameters, +} from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from 
"./runtimeExtensions"; +export { __Client }; +export type ServiceInputTypes = + | GetRoleCredentialsCommandInput + | ListAccountRolesCommandInput + | ListAccountsCommandInput + | LogoutCommandInput; +export type ServiceOutputTypes = + | GetRoleCredentialsCommandOutput + | ListAccountRolesCommandOutput + | ListAccountsCommandOutput + | LogoutCommandOutput; +export interface ClientDefaults + extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + requestHandler?: __HttpHandlerUserInput; + sha256?: __ChecksumConstructor | __HashConstructor; + urlParser?: __UrlParser; + bodyLengthChecker?: __BodyLengthCalculator; + streamCollector?: __StreamCollector; + base64Decoder?: __Decoder; + base64Encoder?: __Encoder; + utf8Decoder?: __Decoder; + utf8Encoder?: __Encoder; + runtime?: string; + disableHostPrefix?: boolean; + serviceId?: string; + useDualstackEndpoint?: boolean | __Provider; + useFipsEndpoint?: boolean | __Provider; + region?: string | __Provider; + profile?: string; + defaultUserAgentProvider?: Provider<__UserAgent>; + maxAttempts?: number | __Provider; + retryMode?: string | __Provider; + logger?: __Logger; + extensions?: RuntimeExtension[]; + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +export type SSOClientConfigType = Partial< + __SmithyConfiguration<__HttpHandlerOptions> +> & + ClientDefaults & + UserAgentInputConfig & + RetryInputConfig & + RegionInputConfig & + HostHeaderInputConfig & + EndpointInputConfig & + HttpAuthSchemeInputConfig & + ClientInputEndpointParameters; +export interface SSOClientConfig extends SSOClientConfigType {} +export type SSOClientResolvedConfigType = + __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required & + RuntimeExtensionsConfig & + UserAgentResolvedConfig & + RetryResolvedConfig & + RegionResolvedConfig & + HostHeaderResolvedConfig & + EndpointResolvedConfig & + HttpAuthSchemeResolvedConfig & + ClientResolvedEndpointParameters; +export interface SSOClientResolvedConfig extends SSOClientResolvedConfigType {} +export declare class SSOClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + SSOClientResolvedConfig +> { + readonly config: SSOClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + destroy(): void; +} diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 00000000..29f38b32 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,32 @@ +import { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + HttpAuthScheme, +} from "@smithy/types"; +import { SSOHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider( + httpAuthSchemeProvider: SSOHttpAuthSchemeProvider + ): void; + httpAuthSchemeProvider(): SSOHttpAuthSchemeProvider; + setCredentials( + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider + ): void; + credentials(): + | AwsCredentialIdentity + | AwsCredentialIdentityProvider + | undefined; +} +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: SSOHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; 
diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 00000000..29f38b32 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,32 @@ +import { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + HttpAuthScheme, +} from "@smithy/types"; +import { SSOHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider( + httpAuthSchemeProvider: SSOHttpAuthSchemeProvider + ): void; + httpAuthSchemeProvider(): SSOHttpAuthSchemeProvider; + setCredentials( + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider + ): void; + credentials(): + | AwsCredentialIdentity + | AwsCredentialIdentityProvider + | undefined; +} +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: SSOHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +export declare const getHttpAuthExtensionConfiguration: ( + runtimeConfig: HttpAuthRuntimeConfig +) => HttpAuthExtensionConfiguration; +export declare const resolveHttpAuthRuntimeConfig: ( + config: HttpAuthExtensionConfiguration +) => HttpAuthRuntimeConfig; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 00000000..864f755c --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,46 @@ +import { + AwsSdkSigV4AuthInputConfig, + AwsSdkSigV4AuthResolvedConfig, + AwsSdkSigV4PreviouslyResolved, +} from "@aws-sdk/core"; +import { + HandlerExecutionContext, + HttpAuthScheme, + HttpAuthSchemeParameters, + HttpAuthSchemeParametersProvider, + HttpAuthSchemeProvider, + Provider, +} from "@smithy/types"; +import { SSOClientResolvedConfig } from "../SSOClient"; +export interface SSOHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +export interface SSOHttpAuthSchemeParametersProvider + extends HttpAuthSchemeParametersProvider< + SSOClientResolvedConfig, + HandlerExecutionContext, + SSOHttpAuthSchemeParameters, + object + > {} +export declare const defaultSSOHttpAuthSchemeParametersProvider: ( + config: SSOClientResolvedConfig, + context: HandlerExecutionContext, + input: object +) => Promise<SSOHttpAuthSchemeParameters>; +export interface SSOHttpAuthSchemeProvider + extends HttpAuthSchemeProvider<SSOHttpAuthSchemeParameters> {} +export declare const defaultSSOHttpAuthSchemeProvider: SSOHttpAuthSchemeProvider; +export interface HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + authSchemePreference?: string[] | Provider<string[]>; + httpAuthSchemes?: HttpAuthScheme[]; + httpAuthSchemeProvider?: SSOHttpAuthSchemeProvider; +} +export interface HttpAuthSchemeResolvedConfig + extends AwsSdkSigV4AuthResolvedConfig { + readonly authSchemePreference: Provider<string[]>; + readonly httpAuthSchemes: HttpAuthScheme[]; + readonly httpAuthSchemeProvider: SSOHttpAuthSchemeProvider; +} +export declare const resolveHttpAuthSchemeConfig: <T>( + config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & HttpAuthSchemeResolvedConfig; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/GetRoleCredentialsCommand.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/GetRoleCredentialsCommand.d.ts new file mode 100644 index 00000000..7c1b3580 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/GetRoleCredentialsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetRoleCredentialsRequest, + GetRoleCredentialsResponse, +} from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + SSOClientResolvedConfig, +} from "../SSOClient"; +export { __MetadataBearer }; +export { $Command }; +export interface GetRoleCredentialsCommandInput + extends GetRoleCredentialsRequest {} +export interface GetRoleCredentialsCommandOutput + extends GetRoleCredentialsResponse, + __MetadataBearer {} +declare const GetRoleCredentialsCommand_base: { + new ( + input: GetRoleCredentialsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetRoleCredentialsCommandInput, + GetRoleCredentialsCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetRoleCredentialsCommandInput + ): 
import("@smithy/smithy-client").CommandImpl< + GetRoleCredentialsCommandInput, + GetRoleCredentialsCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetRoleCredentialsCommand extends GetRoleCredentialsCommand_base { + protected static __types: { + api: { + input: GetRoleCredentialsRequest; + output: GetRoleCredentialsResponse; + }; + sdk: { + input: GetRoleCredentialsCommandInput; + output: GetRoleCredentialsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountRolesCommand.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountRolesCommand.d.ts new file mode 100644 index 00000000..3b898a95 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountRolesCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + ListAccountRolesRequest, + ListAccountRolesResponse, +} from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + SSOClientResolvedConfig, +} from "../SSOClient"; +export { __MetadataBearer }; +export { $Command }; +export interface ListAccountRolesCommandInput extends ListAccountRolesRequest {} +export interface ListAccountRolesCommandOutput + extends ListAccountRolesResponse, + __MetadataBearer {} +declare const ListAccountRolesCommand_base: { + new ( + input: ListAccountRolesCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ListAccountRolesCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListAccountRolesCommand extends ListAccountRolesCommand_base { + protected static __types: { + api: { + input: ListAccountRolesRequest; + output: ListAccountRolesResponse; + }; + sdk: { + input: ListAccountRolesCommandInput; + output: ListAccountRolesCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountsCommand.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountsCommand.d.ts new file mode 100644 index 00000000..3a00cc11 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListAccountsRequest, ListAccountsResponse } from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + SSOClientResolvedConfig, +} from "../SSOClient"; +export { __MetadataBearer }; +export { $Command }; +export interface ListAccountsCommandInput extends ListAccountsRequest {} +export interface ListAccountsCommandOutput + extends ListAccountsResponse, + __MetadataBearer {} +declare const ListAccountsCommand_base: { + new ( + input: ListAccountsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListAccountsCommandInput, + ListAccountsCommandOutput, 
+ SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ListAccountsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListAccountsCommandInput, + ListAccountsCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListAccountsCommand extends ListAccountsCommand_base { + protected static __types: { + api: { + input: ListAccountsRequest; + output: ListAccountsResponse; + }; + sdk: { + input: ListAccountsCommandInput; + output: ListAccountsCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/LogoutCommand.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/LogoutCommand.d.ts new file mode 100644 index 00000000..25992506 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/LogoutCommand.d.ts @@ -0,0 +1,41 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { LogoutRequest } from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + SSOClientResolvedConfig, +} from "../SSOClient"; +export { __MetadataBearer }; +export { $Command }; +export interface LogoutCommandInput extends LogoutRequest {} +export interface LogoutCommandOutput extends __MetadataBearer {} +declare const LogoutCommand_base: { + new (input: LogoutCommandInput): import("@smithy/smithy-client").CommandImpl< + LogoutCommandInput, + LogoutCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new (__0_0: LogoutCommandInput): import("@smithy/smithy-client").CommandImpl< + LogoutCommandInput, + LogoutCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class LogoutCommand extends LogoutCommand_base { + protected static __types: { + api: { + input: LogoutRequest; + output: {}; + }; + sdk: { + input: LogoutCommandInput; + output: LogoutCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/index.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/index.d.ts new file mode 100644 index 00000000..0ab890d3 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/index.d.ts @@ -0,0 +1,4 @@ +export * from "./GetRoleCredentialsCommand"; +export * from "./ListAccountRolesCommand"; +export * from "./ListAccountsCommand"; +export * from "./LogoutCommand"; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/EndpointParameters.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/EndpointParameters.d.ts new file mode 100644 index 00000000..7f245406 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/EndpointParameters.d.ts @@ -0,0 +1,51 @@ +import { + Endpoint, + EndpointParameters as __EndpointParameters, + EndpointV2, + Provider, +} from "@smithy/types"; +export interface ClientInputEndpointParameters { + region?: string | Provider<string>; + useDualstackEndpoint?: boolean | Provider<boolean>; + useFipsEndpoint?: boolean | Provider<boolean>; + endpoint?: + | string + | Provider<string> + | Endpoint + | Provider<Endpoint> + | EndpointV2 + | Provider<EndpointV2>; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { 
defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: <T>( + options: T & ClientInputEndpointParameters +) => T & + ClientInputEndpointParameters & { + defaultSigningName: string; + }; +export declare const commonParams: { + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; +} diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/endpointResolver.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/endpointResolver.d.ts new file mode 100644 index 00000000..59099254 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/endpointResolver.d.ts @@ -0,0 +1,8 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: ( + endpointParams: EndpointParameters, + context?: { + logger?: Logger; + } +) => EndpointV2; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/ruleset.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/ruleset.d.ts new file mode 100644 index 00000000..4b238994 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/extensionConfiguration.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/extensionConfiguration.d.ts new file mode 100644 index 00000000..c1b43ff4 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +export interface SSOExtensionConfiguration + extends HttpHandlerExtensionConfiguration, + DefaultExtensionConfiguration, + AwsRegionExtensionConfiguration, + HttpAuthExtensionConfiguration {} diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..891aed3a --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/index.d.ts @@ -0,0 +1,9 @@ +export * from "./SSOClient"; +export * from "./SSO"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export { RuntimeExtension } from "./runtimeExtensions"; +export { SSOExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./pagination"; +export * from "./models"; +export { SSOServiceException } from "./models/SSOServiceException"; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/SSOServiceException.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/SSOServiceException.d.ts new file mode 100644 index 00000000..1ad045dc --- 
/dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/SSOServiceException.d.ts @@ -0,0 +1,9 @@ +import { + ServiceException as __ServiceException, + ServiceExceptionOptions as __ServiceExceptionOptions, +} from "@smithy/smithy-client"; +export { __ServiceExceptionOptions }; +export { __ServiceException }; +export declare class SSOServiceException extends __ServiceException { + constructor(options: __ServiceExceptionOptions); +} diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/index.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/index.d.ts new file mode 100644 index 00000000..09c5d6e0 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/models_0.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/models_0.d.ts new file mode 100644 index 00000000..4bbe08cd --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/models_0.d.ts @@ -0,0 +1,93 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { SSOServiceException as __BaseException } from "./SSOServiceException"; +export interface AccountInfo { + accountId?: string | undefined; + accountName?: string | undefined; + emailAddress?: string | undefined; +} +export interface GetRoleCredentialsRequest { + roleName: string | undefined; + accountId: string | undefined; + accessToken: string | undefined; +} +export interface RoleCredentials { + accessKeyId?: string | undefined; + secretAccessKey?: string | undefined; + sessionToken?: string | undefined; + expiration?: number | undefined; +} +export interface GetRoleCredentialsResponse { + roleCredentials?: RoleCredentials | undefined; +} +export declare class InvalidRequestException extends __BaseException { + readonly name: "InvalidRequestException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType<InvalidRequestException, __BaseException> + ); +} +export declare class ResourceNotFoundException extends __BaseException { + readonly name: "ResourceNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType<ResourceNotFoundException, __BaseException> + ); +} +export declare class TooManyRequestsException extends __BaseException { + readonly name: "TooManyRequestsException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType<TooManyRequestsException, __BaseException> + ); +} +export declare class UnauthorizedException extends __BaseException { + readonly name: "UnauthorizedException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType<UnauthorizedException, __BaseException> + ); +} +export interface ListAccountRolesRequest { + nextToken?: string | undefined; + maxResults?: number | undefined; + accessToken: string | undefined; + accountId: string | undefined; +} +export interface RoleInfo { + roleName?: string | undefined; + accountId?: string | undefined; +} +export interface ListAccountRolesResponse { + nextToken?: string | undefined; + roleList?: RoleInfo[] | undefined; +} +export interface ListAccountsRequest { + nextToken?: string | undefined; + maxResults?: number | undefined; + accessToken: string | undefined; +} +export interface ListAccountsResponse { + nextToken?: string | undefined; + accountList?: AccountInfo[] | undefined; +} +export interface LogoutRequest { + accessToken: string | undefined; +} +export declare const GetRoleCredentialsRequestFilterSensitiveLog: ( + obj: GetRoleCredentialsRequest +) => any; +export declare const RoleCredentialsFilterSensitiveLog: ( + obj: RoleCredentials +) => any; +export declare const GetRoleCredentialsResponseFilterSensitiveLog: ( + obj: GetRoleCredentialsResponse +) => any; +export declare const ListAccountRolesRequestFilterSensitiveLog: ( + obj: ListAccountRolesRequest +) => any; +export declare const ListAccountsRequestFilterSensitiveLog: ( + obj: ListAccountsRequest +) => any; +export declare const LogoutRequestFilterSensitiveLog: ( + obj: LogoutRequest +) => any;
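Because the modeled errors above are concrete classes, callers can branch on them with instanceof. A brief sketch (the recovery behavior is an assumption for illustration, not part of this package):

import { SSOClient, ListAccountsCommand, UnauthorizedException, TooManyRequestsException } from "@aws-sdk/client-sso";

async function listAccountsOnce(client: SSOClient, accessToken: string) {
  try {
    return await client.send(new ListAccountsCommand({ accessToken }));
  } catch (err) {
    if (err instanceof UnauthorizedException) {
      // Token rejected: surface a re-authentication requirement to the caller.
      throw new Error("SSO access token expired or invalid; re-run the SSO login flow");
    }
    if (err instanceof TooManyRequestsException) {
      // Throttled: callers may back off and retry; rethrown unchanged here.
    }
    throw err;
  }
}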
diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/Interfaces.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/Interfaces.d.ts new file mode 100644 index 00000000..29708980 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/Interfaces.d.ts @@ -0,0 +1,5 @@ +import { PaginationConfiguration } from "@smithy/types"; +import { SSOClient } from "../SSOClient"; +export interface SSOPaginationConfiguration extends PaginationConfiguration { + client: SSOClient; +} diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountRolesPaginator.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountRolesPaginator.d.ts new file mode 100644 index 00000000..174f32bd --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountRolesPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, +} from "../commands/ListAccountRolesCommand"; +import { SSOPaginationConfiguration } from "./Interfaces"; +export declare const paginateListAccountRoles: ( + config: SSOPaginationConfiguration, + input: ListAccountRolesCommandInput, + ...rest: any[] +) => Paginator<ListAccountRolesCommandOutput>; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountsPaginator.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountsPaginator.d.ts new file mode 100644 index 00000000..bb5e66de --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountsPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListAccountsCommandInput, + ListAccountsCommandOutput, +} from "../commands/ListAccountsCommand"; +import { SSOPaginationConfiguration } from "./Interfaces"; +export declare const paginateListAccounts: ( + config: SSOPaginationConfiguration, + input: ListAccountsCommandInput, + ...rest: any[] +) => Paginator<ListAccountsCommandOutput>; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/index.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/index.d.ts new file mode 100644 index 00000000..1e7866f7 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/index.d.ts @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./ListAccountRolesPaginator"; +export * from "./ListAccountsPaginator"; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/protocols/Aws_restJson1.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/protocols/Aws_restJson1.d.ts new file mode 100644 index 00000000..74eebdc7 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/protocols/Aws_restJson1.d.ts @@ -0,0 +1,53 @@ +import { + HttpRequest as __HttpRequest, + HttpResponse as __HttpResponse, +} from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { + GetRoleCredentialsCommandInput, + GetRoleCredentialsCommandOutput, +} from "../commands/GetRoleCredentialsCommand"; +import { 
ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, +} from "../commands/ListAccountRolesCommand"; +import { + ListAccountsCommandInput, + ListAccountsCommandOutput, +} from "../commands/ListAccountsCommand"; +import { + LogoutCommandInput, + LogoutCommandOutput, +} from "../commands/LogoutCommand"; +export declare const se_GetRoleCredentialsCommand: ( + input: GetRoleCredentialsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListAccountRolesCommand: ( + input: ListAccountRolesCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListAccountsCommand: ( + input: ListAccountsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_LogoutCommand: ( + input: LogoutCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const de_GetRoleCredentialsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListAccountRolesCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListAccountsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_LogoutCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.browser.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.browser.d.ts new file mode 100644 index 00000000..4042bcf1 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.browser.d.ts @@ -0,0 +1,120 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { SSOClientConfig } from "./SSOClient"; +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | import("@smithy/protocol-http").HttpHandler + | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint 
+ | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.d.ts new file mode 100644 index 00000000..71524456 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.d.ts @@ -0,0 +1,114 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { SSOClientConfig } from "./SSOClient"; +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-node").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | RequestHandler + | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: 
boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.native.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.native.d.ts new file mode 100644 index 00000000..3dc6c95b --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.native.d.ts @@ -0,0 +1,124 @@ +import { SSOClientConfig } from "./SSOClient"; +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: + | import("@smithy/types").NodeHttpHandlerOptions + | import("@smithy/types").FetchHttpHandlerOptions + | Record + | import("@smithy/protocol-http").HttpHandler + | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: 
import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: + | import("@smithy/smithy-client").DefaultsMode + | import("@smithy/types").Provider< + import("@smithy/smithy-client").DefaultsMode + >; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff 
--git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.shared.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.shared.d.ts new file mode 100644 index 00000000..00b29420 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.shared.d.ts @@ -0,0 +1,49 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { SSOClientConfig } from "./SSOClient"; +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: NoAuthSigner; + } + )[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeExtensions.d.ts b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeExtensions.d.ts new file mode 100644 index 00000000..fbec1e52 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeExtensions.d.ts @@ -0,0 +1,11 @@ +import { SSOExtensionConfiguration } from "./extensionConfiguration"; +export interface RuntimeExtension { + configure(extensionConfiguration: SSOExtensionConfiguration): void; +} +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +export declare const resolveRuntimeExtensions: ( + runtimeConfig: any, + extensions: RuntimeExtension[] +) => any; diff --git a/node_modules/@aws-sdk/client-sso/package.json b/node_modules/@aws-sdk/client-sso/package.json new file mode 100644 index 00000000..0b46e499 --- /dev/null +++ b/node_modules/@aws-sdk/client-sso/package.json @@ -0,0 +1,98 @@ +{ + "name": "@aws-sdk/client-sso", + "description": "AWS SDK for JavaScript Sso Client for Node.js, Browser and React Native", + "version": "3.799.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline client-sso", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "generate:client": "node ../../scripts/generate-clients/single-service --solo sso" + }, + 
"main": "./dist-cjs/index.js", + "types": "./dist-types/index.d.ts", + "module": "./dist-es/index.js", + "sideEffects": false, + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.1", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.2", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/node18": "18.2.4", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "browser": { + "./dist-es/runtimeConfig": "./dist-es/runtimeConfig.browser" + }, + "react-native": { + "./dist-es/runtimeConfig": "./dist-es/runtimeConfig.native" + }, + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/clients/client-sso", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "clients/client-sso" + } +} diff --git a/node_modules/@aws-sdk/core/README.md b/node_modules/@aws-sdk/core/README.md new file mode 100644 index 00000000..6056468b --- /dev/null +++ b/node_modules/@aws-sdk/core/README.md @@ -0,0 +1,39 @@ +# `@aws-sdk/core` + +This package provides common or core functionality to the AWS SDK for JavaScript (v3). + +You do not need to explicitly install this package, since it will be transitively installed by AWS SDK clients. + +## `@aws-sdk/core` submodules + +Core submodules are organized for distribution via the `package.json` `exports` field. + +`exports` is supported by default by the latest Node.js, webpack, and esbuild. For react-native, it can be +enabled via instructions found at [reactnative.dev/blog](https://reactnative.dev/blog/2023/06/21/package-exports-support). + +Think of `@aws-sdk/core` as a mono-package within the monorepo. 
+It preserves the benefits of modularization, for example to optimize Node.js initialization speed, +while making it easier to have a consistent version of core dependencies, reducing package sprawl when +installing an SDK client. + +### Guide for submodules + +- Each `index.ts` file corresponding to the pattern `./src/submodules/<submodule>/index.ts` will be + published as a separate `dist-cjs` bundled submodule index using the `Inliner.js` build script. +- create a folder as `./src/submodules/<submodule>` including an `index.ts` file and a `README.md` file. + - The linter will throw an error on missing submodule metadata in `package.json` and the various `tsconfig.json` files, but it will automatically fix them if possible. +- a submodule is equivalent to a standalone `@aws-sdk/<package>` package in that importing it in Node.js will resolve a separate bundle. +- submodules may not relatively import files from other submodules. Instead, directly use the `@scope/pkg/submodule` name as the import. + - The linter will check for this and throw an error. +- To the extent possible, correctly declaring submodule metadata is validated by the linter in `@aws-sdk/core`. + The linter runs during `yarn build` and also as `yarn lint`. + +### When should I create an `@aws-sdk/core/submodule` vs. `@aws-sdk/new-package`? + +Keep in mind that the core package is installed by all AWS SDK clients. + +If the component functionality is upstream of multiple clients, it is +a good candidate for a core submodule. For example, XML serialization. + +If the component's functionality is downstream of a client, for example S3 pre-signing, +it should be a standalone package with potentially a peer or runtime dependency on an AWS SDK client. diff --git a/node_modules/@aws-sdk/core/account-id-endpoint.d.ts b/node_modules/@aws-sdk/core/account-id-endpoint.d.ts new file mode 100644 index 00000000..60f14d19 --- /dev/null +++ b/node_modules/@aws-sdk/core/account-id-endpoint.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@aws-sdk/core/account-id-endpoint" { + export * from "@aws-sdk/core/dist-types/submodules/account-id-endpoint/index.d"; +} diff --git a/node_modules/@aws-sdk/core/account-id-endpoint.js b/node_modules/@aws-sdk/core/account-id-endpoint.js new file mode 100644 index 00000000..b2550f7c --- /dev/null +++ b/node_modules/@aws-sdk/core/account-id-endpoint.js @@ -0,0 +1,6 @@ + +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/account-id-endpoint/index.js"); diff --git a/node_modules/@aws-sdk/core/client.d.ts b/node_modules/@aws-sdk/core/client.d.ts new file mode 100644 index 00000000..ce995aea --- /dev/null +++ b/node_modules/@aws-sdk/core/client.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@aws-sdk/core/client" { + export * from "@aws-sdk/core/dist-types/submodules/client/index.d"; +} diff --git a/node_modules/@aws-sdk/core/client.js b/node_modules/@aws-sdk/core/client.js new file mode 100644 index 00000000..e3a644bd --- /dev/null +++ b/node_modules/@aws-sdk/core/client.js @@ -0,0 +1,5 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
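+ * [Editor's note, not part of the vendored file] To illustrate the submodule guide above: a
+ * consumer can import from the root package or directly from a bundled submodule. A minimal
+ * sketch, grounded in this diff (setFeature is exported by the client submodule, and the root
+ * dist-cjs index re-exports the client, httpAuthSchemes, and protocols submodules):
+ *   const { setFeature } = require("@aws-sdk/core");        // root index, loads all three submodules
+ *   const { setFeature } = require("@aws-sdk/core/client"); // submodule redirect, smaller module graph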
+ */ +module.exports = require("./dist-cjs/submodules/client/index.js"); diff --git a/node_modules/@aws-sdk/core/dist-cjs/index.js b/node_modules/@aws-sdk/core/dist-cjs/index.js new file mode 100644 index 00000000..cddde6a8 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-cjs/index.js @@ -0,0 +1,6 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const tslib_1 = require("tslib"); +tslib_1.__exportStar(require("./submodules/client/index"), exports); +tslib_1.__exportStar(require("./submodules/httpAuthSchemes/index"), exports); +tslib_1.__exportStar(require("./submodules/protocols/index"), exports); diff --git a/node_modules/@aws-sdk/core/dist-cjs/submodules/account-id-endpoint/index.js b/node_modules/@aws-sdk/core/dist-cjs/submodules/account-id-endpoint/index.js new file mode 100644 index 00000000..c277b79b --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-cjs/submodules/account-id-endpoint/index.js @@ -0,0 +1,95 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/account-id-endpoint/index.ts +var index_exports = {}; +__export(index_exports, { + ACCOUNT_ID_ENDPOINT_MODE_VALUES: () => ACCOUNT_ID_ENDPOINT_MODE_VALUES, + CONFIG_ACCOUNT_ID_ENDPOINT_MODE: () => CONFIG_ACCOUNT_ID_ENDPOINT_MODE, + DEFAULT_ACCOUNT_ID_ENDPOINT_MODE: () => DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, + ENV_ACCOUNT_ID_ENDPOINT_MODE: () => ENV_ACCOUNT_ID_ENDPOINT_MODE, + NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS: () => NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS, + resolveAccountIdEndpointModeConfig: () => resolveAccountIdEndpointModeConfig, + validateAccountIdEndpointMode: () => validateAccountIdEndpointMode +}); +module.exports = __toCommonJS(index_exports); + +// src/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.ts +var import_util_middleware = require("@smithy/util-middleware"); + +// src/submodules/account-id-endpoint/AccountIdEndpointModeConstants.ts +var DEFAULT_ACCOUNT_ID_ENDPOINT_MODE = "preferred"; +var ACCOUNT_ID_ENDPOINT_MODE_VALUES = ["disabled", "preferred", "required"]; +function validateAccountIdEndpointMode(value) { + return ACCOUNT_ID_ENDPOINT_MODE_VALUES.includes(value); +} +__name(validateAccountIdEndpointMode, "validateAccountIdEndpointMode"); + +// src/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.ts +var resolveAccountIdEndpointModeConfig = /* @__PURE__ */ __name((input) => { + const { accountIdEndpointMode } = input; + const accountIdEndpointModeProvider = (0, import_util_middleware.normalizeProvider)(accountIdEndpointMode ?? 
DEFAULT_ACCOUNT_ID_ENDPOINT_MODE); + return Object.assign(input, { + accountIdEndpointMode: /* @__PURE__ */ __name(async () => { + const accIdMode = await accountIdEndpointModeProvider(); + if (!validateAccountIdEndpointMode(accIdMode)) { + throw new Error( + `Invalid value for accountIdEndpointMode: ${accIdMode}. Valid values are: "required", "preferred", "disabled".` + ); + } + return accIdMode; + }, "accountIdEndpointMode") + }); +}, "resolveAccountIdEndpointModeConfig"); + +// src/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.ts +var err = "Invalid AccountIdEndpointMode value"; +var _throw = /* @__PURE__ */ __name((message) => { + throw new Error(message); +}, "_throw"); +var ENV_ACCOUNT_ID_ENDPOINT_MODE = "AWS_ACCOUNT_ID_ENDPOINT_MODE"; +var CONFIG_ACCOUNT_ID_ENDPOINT_MODE = "account_id_endpoint_mode"; +var NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: /* @__PURE__ */ __name((env) => { + const value = env[ENV_ACCOUNT_ID_ENDPOINT_MODE]; + if (value && !validateAccountIdEndpointMode(value)) { + _throw(err); + } + return value; + }, "environmentVariableSelector"), + configFileSelector: /* @__PURE__ */ __name((profile) => { + const value = profile[CONFIG_ACCOUNT_ID_ENDPOINT_MODE]; + if (value && !validateAccountIdEndpointMode(value)) { + _throw(err); + } + return value; + }, "configFileSelector"), + default: DEFAULT_ACCOUNT_ID_ENDPOINT_MODE +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + ACCOUNT_ID_ENDPOINT_MODE_VALUES, + CONFIG_ACCOUNT_ID_ENDPOINT_MODE, + DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, + ENV_ACCOUNT_ID_ENDPOINT_MODE, + NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS, + resolveAccountIdEndpointModeConfig, + validateAccountIdEndpointMode +}); diff --git a/node_modules/@aws-sdk/core/dist-cjs/submodules/client/index.js b/node_modules/@aws-sdk/core/dist-cjs/submodules/client/index.js new file mode 100644 index 00000000..ebd6c616 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-cjs/submodules/client/index.js @@ -0,0 +1,78 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/client/index.ts +var index_exports = {}; +__export(index_exports, { + emitWarningIfUnsupportedVersion: () => emitWarningIfUnsupportedVersion, + setCredentialFeature: () => setCredentialFeature, + setFeature: () => setFeature, + state: () => state +}); +module.exports = __toCommonJS(index_exports); + +// src/submodules/client/emitWarningIfUnsupportedVersion.ts +var state = { + warningEmitted: false +}; +var emitWarningIfUnsupportedVersion = /* @__PURE__ */ __name((version) => { + if (version && !state.warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 18) { 
+ state.warningEmitted = true; + process.emitWarning( + `NodeDeprecationWarning: The AWS SDK for JavaScript (v3) will +no longer support Node.js 16.x on January 6, 2025. + +To continue receiving updates to AWS services, bug fixes, and security +updates please upgrade to a supported Node.js LTS version. + +More information can be found at: https://a.co/74kJMmI` + ); + } +}, "emitWarningIfUnsupportedVersion"); + +// src/submodules/client/setCredentialFeature.ts +function setCredentialFeature(credentials, feature, value) { + if (!credentials.$source) { + credentials.$source = {}; + } + credentials.$source[feature] = value; + return credentials; +} +__name(setCredentialFeature, "setCredentialFeature"); + +// src/submodules/client/setFeature.ts +function setFeature(context, feature, value) { + if (!context.__aws_sdk_context) { + context.__aws_sdk_context = { + features: {} + }; + } else if (!context.__aws_sdk_context.features) { + context.__aws_sdk_context.features = {}; + } + context.__aws_sdk_context.features[feature] = value; +} +__name(setFeature, "setFeature"); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + emitWarningIfUnsupportedVersion, + setCredentialFeature, + setFeature, + state +}); diff --git a/node_modules/@aws-sdk/core/dist-cjs/submodules/httpAuthSchemes/index.js b/node_modules/@aws-sdk/core/dist-cjs/submodules/httpAuthSchemes/index.js new file mode 100644 index 00000000..82db91e5 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-cjs/submodules/httpAuthSchemes/index.js @@ -0,0 +1,382 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/httpAuthSchemes/index.ts +var index_exports = {}; +__export(index_exports, { + AWSSDKSigV4Signer: () => AWSSDKSigV4Signer, + AwsSdkSigV4ASigner: () => AwsSdkSigV4ASigner, + AwsSdkSigV4Signer: () => AwsSdkSigV4Signer, + NODE_AUTH_SCHEME_PREFERENCE_OPTIONS: () => NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, + NODE_SIGV4A_CONFIG_OPTIONS: () => NODE_SIGV4A_CONFIG_OPTIONS, + resolveAWSSDKSigV4Config: () => resolveAWSSDKSigV4Config, + resolveAwsSdkSigV4AConfig: () => resolveAwsSdkSigV4AConfig, + resolveAwsSdkSigV4Config: () => resolveAwsSdkSigV4Config, + validateSigningProperties: () => validateSigningProperties +}); +module.exports = __toCommonJS(index_exports); + +// src/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.ts +var import_protocol_http2 = require("@smithy/protocol-http"); + +// src/submodules/httpAuthSchemes/utils/getDateHeader.ts +var import_protocol_http = require("@smithy/protocol-http"); +var getDateHeader = /* @__PURE__ */ __name((response) => import_protocol_http.HttpResponse.isInstance(response) ? response.headers?.date ?? 
response.headers?.Date : void 0, "getDateHeader"); + +// src/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.ts +var getSkewCorrectedDate = /* @__PURE__ */ __name((systemClockOffset) => new Date(Date.now() + systemClockOffset), "getSkewCorrectedDate"); + +// src/submodules/httpAuthSchemes/utils/isClockSkewed.ts +var isClockSkewed = /* @__PURE__ */ __name((clockTime, systemClockOffset) => Math.abs(getSkewCorrectedDate(systemClockOffset).getTime() - clockTime) >= 3e5, "isClockSkewed"); + +// src/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.ts +var getUpdatedSystemClockOffset = /* @__PURE__ */ __name((clockTime, currentSystemClockOffset) => { + const clockTimeInMs = Date.parse(clockTime); + if (isClockSkewed(clockTimeInMs, currentSystemClockOffset)) { + return clockTimeInMs - Date.now(); + } + return currentSystemClockOffset; +}, "getUpdatedSystemClockOffset"); + +// src/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.ts +var throwSigningPropertyError = /* @__PURE__ */ __name((name, property) => { + if (!property) { + throw new Error(`Property \`${name}\` is not resolved for AWS SDK SigV4Auth`); + } + return property; +}, "throwSigningPropertyError"); +var validateSigningProperties = /* @__PURE__ */ __name(async (signingProperties) => { + const context = throwSigningPropertyError( + "context", + signingProperties.context + ); + const config = throwSigningPropertyError("config", signingProperties.config); + const authScheme = context.endpointV2?.properties?.authSchemes?.[0]; + const signerFunction = throwSigningPropertyError( + "signer", + config.signer + ); + const signer = await signerFunction(authScheme); + const signingRegion = signingProperties?.signingRegion; + const signingRegionSet = signingProperties?.signingRegionSet; + const signingName = signingProperties?.signingName; + return { + config, + signer, + signingRegion, + signingRegionSet, + signingName + }; +}, "validateSigningProperties"); +var AwsSdkSigV4Signer = class { + static { + __name(this, "AwsSdkSigV4Signer"); + } + async sign(httpRequest, identity, signingProperties) { + if (!import_protocol_http2.HttpRequest.isInstance(httpRequest)) { + throw new Error("The request is not an instance of `HttpRequest` and cannot be signed"); + } + const validatedProps = await validateSigningProperties(signingProperties); + const { config, signer } = validatedProps; + let { signingRegion, signingName } = validatedProps; + const handlerExecutionContext = signingProperties.context; + if (handlerExecutionContext?.authSchemes?.length ?? 0 > 1) { + const [first, second] = handlerExecutionContext.authSchemes; + if (first?.name === "sigv4a" && second?.name === "sigv4") { + signingRegion = second?.signingRegion ?? signingRegion; + signingName = second?.signingName ?? signingName; + } + } + const signedRequest = await signer.sign(httpRequest, { + signingDate: getSkewCorrectedDate(config.systemClockOffset), + signingRegion, + signingService: signingName + }); + return signedRequest; + } + errorHandler(signingProperties) { + return (error) => { + const serverTime = error.ServerTime ?? 
getDateHeader(error.$response); + if (serverTime) { + const config = throwSigningPropertyError("config", signingProperties.config); + const initialSystemClockOffset = config.systemClockOffset; + config.systemClockOffset = getUpdatedSystemClockOffset(serverTime, config.systemClockOffset); + const clockSkewCorrected = config.systemClockOffset !== initialSystemClockOffset; + if (clockSkewCorrected && error.$metadata) { + error.$metadata.clockSkewCorrected = true; + } + } + throw error; + }; + } + successHandler(httpResponse, signingProperties) { + const dateHeader = getDateHeader(httpResponse); + if (dateHeader) { + const config = throwSigningPropertyError("config", signingProperties.config); + config.systemClockOffset = getUpdatedSystemClockOffset(dateHeader, config.systemClockOffset); + } + } +}; +var AWSSDKSigV4Signer = AwsSdkSigV4Signer; + +// src/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.ts +var import_protocol_http3 = require("@smithy/protocol-http"); +var AwsSdkSigV4ASigner = class extends AwsSdkSigV4Signer { + static { + __name(this, "AwsSdkSigV4ASigner"); + } + async sign(httpRequest, identity, signingProperties) { + if (!import_protocol_http3.HttpRequest.isInstance(httpRequest)) { + throw new Error("The request is not an instance of `HttpRequest` and cannot be signed"); + } + const { config, signer, signingRegion, signingRegionSet, signingName } = await validateSigningProperties( + signingProperties + ); + const configResolvedSigningRegionSet = await config.sigv4aSigningRegionSet?.(); + const multiRegionOverride = (configResolvedSigningRegionSet ?? signingRegionSet ?? [signingRegion]).join(","); + const signedRequest = await signer.sign(httpRequest, { + signingDate: getSkewCorrectedDate(config.systemClockOffset), + signingRegion: multiRegionOverride, + signingService: signingName + }); + return signedRequest; + } +}; + +// src/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.ts +var getArrayForCommaSeparatedString = /* @__PURE__ */ __name((str) => typeof str === "string" && str.length > 0 ? 
str.split(",").map((item) => item.trim()) : [], "getArrayForCommaSeparatedString"); + +// src/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.ts +var NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY = "AWS_AUTH_SCHEME_PREFERENCE"; +var NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY = "auth_scheme_preference"; +var NODE_AUTH_SCHEME_PREFERENCE_OPTIONS = { + /** + * Retrieves auth scheme preference from environment variables + * @param env - Node process environment object + * @returns Array of auth scheme strings if preference is set, undefined otherwise + */ + environmentVariableSelector: /* @__PURE__ */ __name((env) => { + if (!(NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY in env)) return void 0; + return getArrayForCommaSeparatedString(env[NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY]); + }, "environmentVariableSelector"), + /** + * Retrieves auth scheme preference from config file + * @param profile - Config profile object + * @returns Array of auth scheme strings if preference is set, undefined otherwise + */ + configFileSelector: /* @__PURE__ */ __name((profile) => { + if (!(NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY in profile)) return void 0; + return getArrayForCommaSeparatedString(profile[NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY]); + }, "configFileSelector"), + /** + * Default auth scheme preference if not specified in environment or config + */ + default: [] +}; + +// src/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.ts +var import_core = require("@smithy/core"); +var import_property_provider = require("@smithy/property-provider"); +var resolveAwsSdkSigV4AConfig = /* @__PURE__ */ __name((config) => { + config.sigv4aSigningRegionSet = (0, import_core.normalizeProvider)(config.sigv4aSigningRegionSet); + return config; +}, "resolveAwsSdkSigV4AConfig"); +var NODE_SIGV4A_CONFIG_OPTIONS = { + environmentVariableSelector(env) { + if (env.AWS_SIGV4A_SIGNING_REGION_SET) { + return env.AWS_SIGV4A_SIGNING_REGION_SET.split(",").map((_) => _.trim()); + } + throw new import_property_provider.ProviderError("AWS_SIGV4A_SIGNING_REGION_SET not set in env.", { + tryNextLink: true + }); + }, + configFileSelector(profile) { + if (profile.sigv4a_signing_region_set) { + return (profile.sigv4a_signing_region_set ?? 
"").split(",").map((_) => _.trim()); + } + throw new import_property_provider.ProviderError("sigv4a_signing_region_set not set in profile.", { + tryNextLink: true + }); + }, + default: void 0 +}; + +// src/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.ts +var import_client = require("@aws-sdk/core/client"); +var import_core2 = require("@smithy/core"); +var import_signature_v4 = require("@smithy/signature-v4"); +var resolveAwsSdkSigV4Config = /* @__PURE__ */ __name((config) => { + let inputCredentials = config.credentials; + let isUserSupplied = !!config.credentials; + let resolvedCredentials = void 0; + Object.defineProperty(config, "credentials", { + set(credentials) { + if (credentials && credentials !== inputCredentials && credentials !== resolvedCredentials) { + isUserSupplied = true; + } + inputCredentials = credentials; + const memoizedProvider = normalizeCredentialProvider(config, { + credentials: inputCredentials, + credentialDefaultProvider: config.credentialDefaultProvider + }); + const boundProvider = bindCallerConfig(config, memoizedProvider); + if (isUserSupplied && !boundProvider.attributed) { + resolvedCredentials = /* @__PURE__ */ __name(async (options) => boundProvider(options).then( + (creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_CODE", "e") + ), "resolvedCredentials"); + resolvedCredentials.memoized = boundProvider.memoized; + resolvedCredentials.configBound = boundProvider.configBound; + resolvedCredentials.attributed = true; + } else { + resolvedCredentials = boundProvider; + } + }, + get() { + return resolvedCredentials; + }, + enumerable: true, + configurable: true + }); + config.credentials = inputCredentials; + const { + // Default for signingEscapePath + signingEscapePath = true, + // Default for systemClockOffset + systemClockOffset = config.systemClockOffset || 0, + // No default for sha256 since it is platform dependent + sha256 + } = config; + let signer; + if (config.signer) { + signer = (0, import_core2.normalizeProvider)(config.signer); + } else if (config.regionInfoProvider) { + signer = /* @__PURE__ */ __name(() => (0, import_core2.normalizeProvider)(config.region)().then( + async (region) => [ + await config.regionInfoProvider(region, { + useFipsEndpoint: await config.useFipsEndpoint(), + useDualstackEndpoint: await config.useDualstackEndpoint() + }) || {}, + region + ] + ).then(([regionInfo, region]) => { + const { signingRegion, signingService } = regionInfo; + config.signingRegion = config.signingRegion || signingRegion || region; + config.signingName = config.signingName || signingService || config.serviceId; + const params = { + ...config, + credentials: config.credentials, + region: config.signingRegion, + service: config.signingName, + sha256, + uriEscapePath: signingEscapePath + }; + const SignerCtor = config.signerConstructor || import_signature_v4.SignatureV4; + return new SignerCtor(params); + }), "signer"); + } else { + signer = /* @__PURE__ */ __name(async (authScheme) => { + authScheme = Object.assign( + {}, + { + name: "sigv4", + signingName: config.signingName || config.defaultSigningName, + signingRegion: await (0, import_core2.normalizeProvider)(config.region)(), + properties: {} + }, + authScheme + ); + const signingRegion = authScheme.signingRegion; + const signingService = authScheme.signingName; + config.signingRegion = config.signingRegion || signingRegion; + config.signingName = config.signingName || signingService || config.serviceId; + const params = { + ...config, + credentials: 
config.credentials, + region: config.signingRegion, + service: config.signingName, + sha256, + uriEscapePath: signingEscapePath + }; + const SignerCtor = config.signerConstructor || import_signature_v4.SignatureV4; + return new SignerCtor(params); + }, "signer"); + } + const resolvedConfig = Object.assign(config, { + systemClockOffset, + signingEscapePath, + signer + }); + return resolvedConfig; +}, "resolveAwsSdkSigV4Config"); +var resolveAWSSDKSigV4Config = resolveAwsSdkSigV4Config; +function normalizeCredentialProvider(config, { + credentials, + credentialDefaultProvider +}) { + let credentialsProvider; + if (credentials) { + if (!credentials?.memoized) { + credentialsProvider = (0, import_core2.memoizeIdentityProvider)(credentials, import_core2.isIdentityExpired, import_core2.doesIdentityRequireRefresh); + } else { + credentialsProvider = credentials; + } + } else { + if (credentialDefaultProvider) { + credentialsProvider = (0, import_core2.normalizeProvider)( + credentialDefaultProvider( + Object.assign({}, config, { + parentClientConfig: config + }) + ) + ); + } else { + credentialsProvider = /* @__PURE__ */ __name(async () => { + throw new Error( + "@aws-sdk/core::resolveAwsSdkSigV4Config - `credentials` not provided and no credentialDefaultProvider was configured." + ); + }, "credentialsProvider"); + } + } + credentialsProvider.memoized = true; + return credentialsProvider; +} +__name(normalizeCredentialProvider, "normalizeCredentialProvider"); +function bindCallerConfig(config, credentialsProvider) { + if (credentialsProvider.configBound) { + return credentialsProvider; + } + const fn = /* @__PURE__ */ __name(async (options) => credentialsProvider({ ...options, callerClientConfig: config }), "fn"); + fn.memoized = credentialsProvider.memoized; + fn.configBound = true; + return fn; +} +__name(bindCallerConfig, "bindCallerConfig"); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + AWSSDKSigV4Signer, + AwsSdkSigV4ASigner, + AwsSdkSigV4Signer, + NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, + NODE_SIGV4A_CONFIG_OPTIONS, + resolveAWSSDKSigV4Config, + resolveAwsSdkSigV4AConfig, + resolveAwsSdkSigV4Config, + validateSigningProperties +}); diff --git a/node_modules/@aws-sdk/core/dist-cjs/submodules/protocols/index.js b/node_modules/@aws-sdk/core/dist-cjs/submodules/protocols/index.js new file mode 100644 index 00000000..d84c65b6 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-cjs/submodules/protocols/index.js @@ -0,0 +1,227 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/protocols/index.ts +var index_exports = {}; +__export(index_exports, { + _toBool: () => _toBool, + _toNum: () => _toNum, + _toStr: () => 
_toStr, + awsExpectUnion: () => awsExpectUnion, + loadRestJsonErrorCode: () => loadRestJsonErrorCode, + loadRestXmlErrorCode: () => loadRestXmlErrorCode, + parseJsonBody: () => parseJsonBody, + parseJsonErrorBody: () => parseJsonErrorBody, + parseXmlBody: () => parseXmlBody, + parseXmlErrorBody: () => parseXmlErrorBody +}); +module.exports = __toCommonJS(index_exports); + +// src/submodules/protocols/coercing-serializers.ts +var _toStr = /* @__PURE__ */ __name((val) => { + if (val == null) { + return val; + } + if (typeof val === "number" || typeof val === "bigint") { + const warning = new Error(`Received number ${val} where a string was expected.`); + warning.name = "Warning"; + console.warn(warning); + return String(val); + } + if (typeof val === "boolean") { + const warning = new Error(`Received boolean ${val} where a string was expected.`); + warning.name = "Warning"; + console.warn(warning); + return String(val); + } + return val; +}, "_toStr"); +var _toBool = /* @__PURE__ */ __name((val) => { + if (val == null) { + return val; + } + if (typeof val === "number") { + } + if (typeof val === "string") { + const lowercase = val.toLowerCase(); + if (val !== "" && lowercase !== "false" && lowercase !== "true") { + const warning = new Error(`Received string "${val}" where a boolean was expected.`); + warning.name = "Warning"; + console.warn(warning); + } + return val !== "" && lowercase !== "false"; + } + return val; +}, "_toBool"); +var _toNum = /* @__PURE__ */ __name((val) => { + if (val == null) { + return val; + } + if (typeof val === "boolean") { + } + if (typeof val === "string") { + const num = Number(val); + if (num.toString() !== val) { + const warning = new Error(`Received string "${val}" where a number was expected.`); + warning.name = "Warning"; + console.warn(warning); + return val; + } + return num; + } + return val; +}, "_toNum"); + +// src/submodules/protocols/json/awsExpectUnion.ts +var import_smithy_client = require("@smithy/smithy-client"); +var awsExpectUnion = /* @__PURE__ */ __name((value) => { + if (value == null) { + return void 0; + } + if (typeof value === "object" && "__type" in value) { + delete value.__type; + } + return (0, import_smithy_client.expectUnion)(value); +}, "awsExpectUnion"); + +// src/submodules/protocols/common.ts +var import_smithy_client2 = require("@smithy/smithy-client"); +var collectBodyString = /* @__PURE__ */ __name((streamBody, context) => (0, import_smithy_client2.collectBody)(streamBody, context).then((body) => context.utf8Encoder(body)), "collectBodyString"); + +// src/submodules/protocols/json/parseJsonBody.ts +var parseJsonBody = /* @__PURE__ */ __name((streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + try { + return JSON.parse(encoded); + } catch (e) { + if (e?.name === "SyntaxError") { + Object.defineProperty(e, "$responseBodyText", { + value: encoded + }); + } + throw e; + } + } + return {}; +}), "parseJsonBody"); +var parseJsonErrorBody = /* @__PURE__ */ __name(async (errorBody, context) => { + const value = await parseJsonBody(errorBody, context); + value.message = value.message ?? 
value.Message; + return value; +}, "parseJsonErrorBody"); +var loadRestJsonErrorCode = /* @__PURE__ */ __name((output, data) => { + const findKey = /* @__PURE__ */ __name((object, key) => Object.keys(object).find((k) => k.toLowerCase() === key.toLowerCase()), "findKey"); + const sanitizeErrorCode = /* @__PURE__ */ __name((rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }, "sanitizeErrorCode"); + const headerKey = findKey(output.headers, "x-amzn-errortype"); + if (headerKey !== void 0) { + return sanitizeErrorCode(output.headers[headerKey]); + } + if (data.code !== void 0) { + return sanitizeErrorCode(data.code); + } + if (data["__type"] !== void 0) { + return sanitizeErrorCode(data["__type"]); + } +}, "loadRestJsonErrorCode"); + +// src/submodules/protocols/xml/parseXmlBody.ts +var import_smithy_client3 = require("@smithy/smithy-client"); +var import_fast_xml_parser = require("fast-xml-parser"); +var parseXmlBody = /* @__PURE__ */ __name((streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + const parser = new import_fast_xml_parser.XMLParser({ + attributeNamePrefix: "", + htmlEntities: true, + ignoreAttributes: false, + ignoreDeclaration: true, + parseTagValue: false, + trimValues: false, + tagValueProcessor: /* @__PURE__ */ __name((_, val) => val.trim() === "" && val.includes("\n") ? "" : void 0, "tagValueProcessor") + }); + parser.addEntity("#xD", "\r"); + parser.addEntity("#10", "\n"); + let parsedObj; + try { + parsedObj = parser.parse(encoded, true); + } catch (e) { + if (e && typeof e === "object") { + Object.defineProperty(e, "$responseBodyText", { + value: encoded + }); + } + throw e; + } + const textNodeName = "#text"; + const key = Object.keys(parsedObj)[0]; + const parsedObjToReturn = parsedObj[key]; + if (parsedObjToReturn[textNodeName]) { + parsedObjToReturn[key] = parsedObjToReturn[textNodeName]; + delete parsedObjToReturn[textNodeName]; + } + return (0, import_smithy_client3.getValueFromTextNode)(parsedObjToReturn); + } + return {}; +}), "parseXmlBody"); +var parseXmlErrorBody = /* @__PURE__ */ __name(async (errorBody, context) => { + const value = await parseXmlBody(errorBody, context); + if (value.Error) { + value.Error.message = value.Error.message ?? 
value.Error.Message; + } + return value; +}, "parseXmlErrorBody"); +var loadRestXmlErrorCode = /* @__PURE__ */ __name((output, data) => { + if (data?.Error?.Code !== void 0) { + return data.Error.Code; + } + if (data?.Code !== void 0) { + return data.Code; + } + if (output.statusCode == 404) { + return "NotFound"; + } +}, "loadRestXmlErrorCode"); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + _toBool, + _toNum, + _toStr, + awsExpectUnion, + loadRestJsonErrorCode, + loadRestXmlErrorCode, + parseJsonBody, + parseJsonErrorBody, + parseXmlBody, + parseXmlErrorBody +}); diff --git a/node_modules/@aws-sdk/core/dist-es/index.js b/node_modules/@aws-sdk/core/dist-es/index.js new file mode 100644 index 00000000..239de7a6 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./submodules/client/index"; +export * from "./submodules/httpAuthSchemes/index"; +export * from "./submodules/protocols/index"; diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.js b/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.js new file mode 100644 index 00000000..cc0c55a9 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.js @@ -0,0 +1,15 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +import { DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, validateAccountIdEndpointMode, } from "./AccountIdEndpointModeConstants"; +export const resolveAccountIdEndpointModeConfig = (input) => { + const { accountIdEndpointMode } = input; + const accountIdEndpointModeProvider = normalizeProvider(accountIdEndpointMode ?? DEFAULT_ACCOUNT_ID_ENDPOINT_MODE); + return Object.assign(input, { + accountIdEndpointMode: async () => { + const accIdMode = await accountIdEndpointModeProvider(); + if (!validateAccountIdEndpointMode(accIdMode)) { + throw new Error(`Invalid value for accountIdEndpointMode: ${accIdMode}. 
Valid values are: "required", "preferred", "disabled".`); + } + return accIdMode; + }, + }); +}; diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConstants.js b/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConstants.js new file mode 100644 index 00000000..e7a2ca00 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConstants.js @@ -0,0 +1,5 @@ +export const DEFAULT_ACCOUNT_ID_ENDPOINT_MODE = "preferred"; +export const ACCOUNT_ID_ENDPOINT_MODE_VALUES = ["disabled", "preferred", "required"]; +export function validateAccountIdEndpointMode(value) { + return ACCOUNT_ID_ENDPOINT_MODE_VALUES.includes(value); +} diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.js b/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.js new file mode 100644 index 00000000..54832d53 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.js @@ -0,0 +1,24 @@ +import { DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, validateAccountIdEndpointMode, } from "./AccountIdEndpointModeConstants"; +const err = "Invalid AccountIdEndpointMode value"; +const _throw = (message) => { + throw new Error(message); +}; +export const ENV_ACCOUNT_ID_ENDPOINT_MODE = "AWS_ACCOUNT_ID_ENDPOINT_MODE"; +export const CONFIG_ACCOUNT_ID_ENDPOINT_MODE = "account_id_endpoint_mode"; +export const NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + const value = env[ENV_ACCOUNT_ID_ENDPOINT_MODE]; + if (value && !validateAccountIdEndpointMode(value)) { + _throw(err); + } + return value; + }, + configFileSelector: (profile) => { + const value = profile[CONFIG_ACCOUNT_ID_ENDPOINT_MODE]; + if (value && !validateAccountIdEndpointMode(value)) { + _throw(err); + } + return value; + }, + default: DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, +}; diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/index.js b/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/index.js new file mode 100644 index 00000000..52af11df --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/index.js @@ -0,0 +1,3 @@ +export * from "./AccountIdEndpointModeConfigResolver"; +export * from "./AccountIdEndpointModeConstants"; +export * from "./NodeAccountIdEndpointModeConfigOptions"; diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/client/emitWarningIfUnsupportedVersion.js b/node_modules/@aws-sdk/core/dist-es/submodules/client/emitWarningIfUnsupportedVersion.js new file mode 100644 index 00000000..d1dab1da --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/client/emitWarningIfUnsupportedVersion.js @@ -0,0 +1,15 @@ +export const state = { + warningEmitted: false, +}; +export const emitWarningIfUnsupportedVersion = (version) => { + if (version && !state.warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 18) { + state.warningEmitted = true; + process.emitWarning(`NodeDeprecationWarning: The AWS SDK for JavaScript (v3) will +no longer support Node.js 16.x on January 6, 2025. + +To continue receiving updates to AWS services, bug fixes, and security +updates please upgrade to a supported Node.js LTS version. 
+ +More information can be found at: https://a.co/74kJMmI`); + } +}; diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/client/index.js b/node_modules/@aws-sdk/core/dist-es/submodules/client/index.js new file mode 100644 index 00000000..1a2cc9d1 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/client/index.js @@ -0,0 +1,3 @@ +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./setCredentialFeature"; +export * from "./setFeature"; diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/client/setCredentialFeature.js b/node_modules/@aws-sdk/core/dist-es/submodules/client/setCredentialFeature.js new file mode 100644 index 00000000..a489c407 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/client/setCredentialFeature.js @@ -0,0 +1,7 @@ +export function setCredentialFeature(credentials, feature, value) { + if (!credentials.$source) { + credentials.$source = {}; + } + credentials.$source[feature] = value; + return credentials; +} diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/client/setFeature.js b/node_modules/@aws-sdk/core/dist-es/submodules/client/setFeature.js new file mode 100644 index 00000000..2d8804bb --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/client/setFeature.js @@ -0,0 +1,11 @@ +export function setFeature(context, feature, value) { + if (!context.__aws_sdk_context) { + context.__aws_sdk_context = { + features: {}, + }; + } + else if (!context.__aws_sdk_context.features) { + context.__aws_sdk_context.features = {}; + } + context.__aws_sdk_context.features[feature] = value; +} diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.js b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.js new file mode 100644 index 00000000..548fefb3 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.js @@ -0,0 +1,20 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { getSkewCorrectedDate } from "../utils"; +import { AwsSdkSigV4Signer, validateSigningProperties } from "./AwsSdkSigV4Signer"; +export class AwsSdkSigV4ASigner extends AwsSdkSigV4Signer { + async sign(httpRequest, identity, signingProperties) { + if (!HttpRequest.isInstance(httpRequest)) { + throw new Error("The request is not an instance of `HttpRequest` and cannot be signed"); + } + const { config, signer, signingRegion, signingRegionSet, signingName } = await validateSigningProperties(signingProperties); + const configResolvedSigningRegionSet = await config.sigv4aSigningRegionSet?.(); + const multiRegionOverride = (configResolvedSigningRegionSet ?? + signingRegionSet ?? 
[signingRegion]).join(","); + const signedRequest = await signer.sign(httpRequest, { + signingDate: getSkewCorrectedDate(config.systemClockOffset), + signingRegion: multiRegionOverride, + signingService: signingName, + }); + return signedRequest; + } +} diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.js b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.js new file mode 100644 index 00000000..ee236cd7 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.js @@ -0,0 +1,72 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { getDateHeader, getSkewCorrectedDate, getUpdatedSystemClockOffset } from "../utils"; +const throwSigningPropertyError = (name, property) => { + if (!property) { + throw new Error(`Property \`${name}\` is not resolved for AWS SDK SigV4Auth`); + } + return property; +}; +export const validateSigningProperties = async (signingProperties) => { + const context = throwSigningPropertyError("context", signingProperties.context); + const config = throwSigningPropertyError("config", signingProperties.config); + const authScheme = context.endpointV2?.properties?.authSchemes?.[0]; + const signerFunction = throwSigningPropertyError("signer", config.signer); + const signer = await signerFunction(authScheme); + const signingRegion = signingProperties?.signingRegion; + const signingRegionSet = signingProperties?.signingRegionSet; + const signingName = signingProperties?.signingName; + return { + config, + signer, + signingRegion, + signingRegionSet, + signingName, + }; +}; +export class AwsSdkSigV4Signer { + async sign(httpRequest, identity, signingProperties) { + if (!HttpRequest.isInstance(httpRequest)) { + throw new Error("The request is not an instance of `HttpRequest` and cannot be signed"); + } + const validatedProps = await validateSigningProperties(signingProperties); + const { config, signer } = validatedProps; + let { signingRegion, signingName } = validatedProps; + const handlerExecutionContext = signingProperties.context; + if (handlerExecutionContext?.authSchemes?.length ?? 0 > 1) { + const [first, second] = handlerExecutionContext.authSchemes; + if (first?.name === "sigv4a" && second?.name === "sigv4") { + signingRegion = second?.signingRegion ?? signingRegion; + signingName = second?.signingName ?? signingName; + } + } + const signedRequest = await signer.sign(httpRequest, { + signingDate: getSkewCorrectedDate(config.systemClockOffset), + signingRegion: signingRegion, + signingService: signingName, + }); + return signedRequest; + } + errorHandler(signingProperties) { + return (error) => { + const serverTime = error.ServerTime ?? 
getDateHeader(error.$response); + if (serverTime) { + const config = throwSigningPropertyError("config", signingProperties.config); + const initialSystemClockOffset = config.systemClockOffset; + config.systemClockOffset = getUpdatedSystemClockOffset(serverTime, config.systemClockOffset); + const clockSkewCorrected = config.systemClockOffset !== initialSystemClockOffset; + if (clockSkewCorrected && error.$metadata) { + error.$metadata.clockSkewCorrected = true; + } + } + throw error; + }; + } + successHandler(httpResponse, signingProperties) { + const dateHeader = getDateHeader(httpResponse); + if (dateHeader) { + const config = throwSigningPropertyError("config", signingProperties.config); + config.systemClockOffset = getUpdatedSystemClockOffset(dateHeader, config.systemClockOffset); + } + } +} +export const AWSSDKSigV4Signer = AwsSdkSigV4Signer; diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.js b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.js new file mode 100644 index 00000000..17e3d2e5 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.js @@ -0,0 +1,16 @@ +import { getArrayForCommaSeparatedString } from "../utils/getArrayForCommaSeparatedString"; +const NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY = "AWS_AUTH_SCHEME_PREFERENCE"; +const NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY = "auth_scheme_preference"; +export const NODE_AUTH_SCHEME_PREFERENCE_OPTIONS = { + environmentVariableSelector: (env) => { + if (!(NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY in env)) + return undefined; + return getArrayForCommaSeparatedString(env[NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY]); + }, + configFileSelector: (profile) => { + if (!(NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY in profile)) + return undefined; + return getArrayForCommaSeparatedString(profile[NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY]); + }, + default: [], +}; diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/index.js b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/index.js new file mode 100644 index 00000000..40712255 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/index.js @@ -0,0 +1,5 @@ +export { AwsSdkSigV4Signer, AWSSDKSigV4Signer, validateSigningProperties } from "./AwsSdkSigV4Signer"; +export { AwsSdkSigV4ASigner } from "./AwsSdkSigV4ASigner"; +export * from "./NODE_AUTH_SCHEME_PREFERENCE_OPTIONS"; +export * from "./resolveAwsSdkSigV4AConfig"; +export * from "./resolveAwsSdkSigV4Config"; diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.js b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.js new file mode 100644 index 00000000..0e62ef05 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.js @@ -0,0 +1,25 @@ +import { normalizeProvider } from "@smithy/core"; +import { ProviderError } from "@smithy/property-provider"; +export const resolveAwsSdkSigV4AConfig = (config) => { + config.sigv4aSigningRegionSet = normalizeProvider(config.sigv4aSigningRegionSet); + return config; +}; +export const NODE_SIGV4A_CONFIG_OPTIONS = { + environmentVariableSelector(env) { + if (env.AWS_SIGV4A_SIGNING_REGION_SET) { + return env.AWS_SIGV4A_SIGNING_REGION_SET.split(",").map((_) => _.trim()); + } + throw new 
ProviderError("AWS_SIGV4A_SIGNING_REGION_SET not set in env.", { + tryNextLink: true, + }); + }, + configFileSelector(profile) { + if (profile.sigv4a_signing_region_set) { + return (profile.sigv4a_signing_region_set ?? "").split(",").map((_) => _.trim()); + } + throw new ProviderError("sigv4a_signing_region_set not set in profile.", { + tryNextLink: true, + }); + }, + default: undefined, +}; diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.js b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.js new file mode 100644 index 00000000..6da968db --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.js @@ -0,0 +1,131 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { doesIdentityRequireRefresh, isIdentityExpired, memoizeIdentityProvider, normalizeProvider, } from "@smithy/core"; +import { SignatureV4 } from "@smithy/signature-v4"; +export const resolveAwsSdkSigV4Config = (config) => { + let inputCredentials = config.credentials; + let isUserSupplied = !!config.credentials; + let resolvedCredentials = undefined; + Object.defineProperty(config, "credentials", { + set(credentials) { + if (credentials && credentials !== inputCredentials && credentials !== resolvedCredentials) { + isUserSupplied = true; + } + inputCredentials = credentials; + const memoizedProvider = normalizeCredentialProvider(config, { + credentials: inputCredentials, + credentialDefaultProvider: config.credentialDefaultProvider, + }); + const boundProvider = bindCallerConfig(config, memoizedProvider); + if (isUserSupplied && !boundProvider.attributed) { + resolvedCredentials = async (options) => boundProvider(options).then((creds) => setCredentialFeature(creds, "CREDENTIALS_CODE", "e")); + resolvedCredentials.memoized = boundProvider.memoized; + resolvedCredentials.configBound = boundProvider.configBound; + resolvedCredentials.attributed = true; + } + else { + resolvedCredentials = boundProvider; + } + }, + get() { + return resolvedCredentials; + }, + enumerable: true, + configurable: true, + }); + config.credentials = inputCredentials; + const { signingEscapePath = true, systemClockOffset = config.systemClockOffset || 0, sha256, } = config; + let signer; + if (config.signer) { + signer = normalizeProvider(config.signer); + } + else if (config.regionInfoProvider) { + signer = () => normalizeProvider(config.region)() + .then(async (region) => [ + (await config.regionInfoProvider(region, { + useFipsEndpoint: await config.useFipsEndpoint(), + useDualstackEndpoint: await config.useDualstackEndpoint(), + })) || {}, + region, + ]) + .then(([regionInfo, region]) => { + const { signingRegion, signingService } = regionInfo; + config.signingRegion = config.signingRegion || signingRegion || region; + config.signingName = config.signingName || signingService || config.serviceId; + const params = { + ...config, + credentials: config.credentials, + region: config.signingRegion, + service: config.signingName, + sha256, + uriEscapePath: signingEscapePath, + }; + const SignerCtor = config.signerConstructor || SignatureV4; + return new SignerCtor(params); + }); + } + else { + signer = async (authScheme) => { + authScheme = Object.assign({}, { + name: "sigv4", + signingName: config.signingName || config.defaultSigningName, + signingRegion: await normalizeProvider(config.region)(), + properties: {}, + }, authScheme); + const signingRegion = 
authScheme.signingRegion; + const signingService = authScheme.signingName; + config.signingRegion = config.signingRegion || signingRegion; + config.signingName = config.signingName || signingService || config.serviceId; + const params = { + ...config, + credentials: config.credentials, + region: config.signingRegion, + service: config.signingName, + sha256, + uriEscapePath: signingEscapePath, + }; + const SignerCtor = config.signerConstructor || SignatureV4; + return new SignerCtor(params); + }; + } + const resolvedConfig = Object.assign(config, { + systemClockOffset, + signingEscapePath, + signer, + }); + return resolvedConfig; +}; +export const resolveAWSSDKSigV4Config = resolveAwsSdkSigV4Config; +function normalizeCredentialProvider(config, { credentials, credentialDefaultProvider, }) { + let credentialsProvider; + if (credentials) { + if (!credentials?.memoized) { + credentialsProvider = memoizeIdentityProvider(credentials, isIdentityExpired, doesIdentityRequireRefresh); + } + else { + credentialsProvider = credentials; + } + } + else { + if (credentialDefaultProvider) { + credentialsProvider = normalizeProvider(credentialDefaultProvider(Object.assign({}, config, { + parentClientConfig: config, + }))); + } + else { + credentialsProvider = async () => { + throw new Error("@aws-sdk/core::resolveAwsSdkSigV4Config - `credentials` not provided and no credentialDefaultProvider was configured."); + }; + } + } + credentialsProvider.memoized = true; + return credentialsProvider; +} +function bindCallerConfig(config, credentialsProvider) { + if (credentialsProvider.configBound) { + return credentialsProvider; + } + const fn = async (options) => credentialsProvider({ ...options, callerClientConfig: config }); + fn.memoized = credentialsProvider.memoized; + fn.configBound = true; + return fn; +} diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/index.js b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/index.js new file mode 100644 index 00000000..29d0c3bd --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/index.js @@ -0,0 +1 @@ +export * from "./aws_sdk"; diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.js b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.js new file mode 100644 index 00000000..aa60799c --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.js @@ -0,0 +1 @@ +export const getArrayForCommaSeparatedString = (str) => typeof str === "string" && str.length > 0 ? str.split(",").map((item) => item.trim()) : []; diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getDateHeader.js b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getDateHeader.js new file mode 100644 index 00000000..449c182b --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getDateHeader.js @@ -0,0 +1,2 @@ +import { HttpResponse } from "@smithy/protocol-http"; +export const getDateHeader = (response) => HttpResponse.isInstance(response) ? response.headers?.date ?? 
response.headers?.Date : undefined; diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.js b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.js new file mode 100644 index 00000000..6ee80363 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.js @@ -0,0 +1 @@ +export const getSkewCorrectedDate = (systemClockOffset) => new Date(Date.now() + systemClockOffset); diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.js b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.js new file mode 100644 index 00000000..859c41a2 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.js @@ -0,0 +1,8 @@ +import { isClockSkewed } from "./isClockSkewed"; +export const getUpdatedSystemClockOffset = (clockTime, currentSystemClockOffset) => { + const clockTimeInMs = Date.parse(clockTime); + if (isClockSkewed(clockTimeInMs, currentSystemClockOffset)) { + return clockTimeInMs - Date.now(); + } + return currentSystemClockOffset; +}; diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/index.js b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/index.js new file mode 100644 index 00000000..07c21953 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/index.js @@ -0,0 +1,3 @@ +export * from "./getDateHeader"; +export * from "./getSkewCorrectedDate"; +export * from "./getUpdatedSystemClockOffset"; diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/isClockSkewed.js b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/isClockSkewed.js new file mode 100644 index 00000000..086d7a87 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/isClockSkewed.js @@ -0,0 +1,2 @@ +import { getSkewCorrectedDate } from "./getSkewCorrectedDate"; +export const isClockSkewed = (clockTime, systemClockOffset) => Math.abs(getSkewCorrectedDate(systemClockOffset).getTime() - clockTime) >= 300000; diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/protocols/coercing-serializers.js b/node_modules/@aws-sdk/core/dist-es/submodules/protocols/coercing-serializers.js new file mode 100644 index 00000000..fce893b4 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/protocols/coercing-serializers.js @@ -0,0 +1,53 @@ +export const _toStr = (val) => { + if (val == null) { + return val; + } + if (typeof val === "number" || typeof val === "bigint") { + const warning = new Error(`Received number ${val} where a string was expected.`); + warning.name = "Warning"; + console.warn(warning); + return String(val); + } + if (typeof val === "boolean") { + const warning = new Error(`Received boolean ${val} where a string was expected.`); + warning.name = "Warning"; + console.warn(warning); + return String(val); + } + return val; +}; +export const _toBool = (val) => { + if (val == null) { + return val; + } + if (typeof val === "number") { + } + if (typeof val === "string") { + const lowercase = val.toLowerCase(); + if (val !== "" && lowercase !== "false" && lowercase !== "true") { + const warning = new Error(`Received string "${val}" where a boolean was expected.`); + warning.name = "Warning"; + console.warn(warning); + } + return val !== "" && lowercase !== "false"; + } + return val; 
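+  // Illustrative note (not part of the upstream file): under the rules above,
+  // _toStr(7) warns and returns "7"; _toBool("False") returns false without a
+  // warning; _toBool("no") warns and returns true (non-empty and not "false");
+  // inputs of any other type fall through unchanged via this final return.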
+}; +export const _toNum = (val) => { + if (val == null) { + return val; + } + if (typeof val === "boolean") { + } + if (typeof val === "string") { + const num = Number(val); + if (num.toString() !== val) { + const warning = new Error(`Received string "${val}" where a number was expected.`); + warning.name = "Warning"; + console.warn(warning); + return val; + } + return num; + } + return val; +}; diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/protocols/common.js b/node_modules/@aws-sdk/core/dist-es/submodules/protocols/common.js new file mode 100644 index 00000000..4348b08d --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/protocols/common.js @@ -0,0 +1,2 @@ +import { collectBody } from "@smithy/smithy-client"; +export const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/protocols/index.js b/node_modules/@aws-sdk/core/dist-es/submodules/protocols/index.js new file mode 100644 index 00000000..09a6ac21 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/protocols/index.js @@ -0,0 +1,4 @@ +export * from "./coercing-serializers"; +export * from "./json/awsExpectUnion"; +export * from "./json/parseJsonBody"; +export * from "./xml/parseXmlBody"; diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/awsExpectUnion.js b/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/awsExpectUnion.js new file mode 100644 index 00000000..1c6cc322 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/awsExpectUnion.js @@ -0,0 +1,10 @@ +import { expectUnion } from "@smithy/smithy-client"; +export const awsExpectUnion = (value) => { + if (value == null) { + return undefined; + } + if (typeof value === "object" && "__type" in value) { + delete value.__type; + } + return expectUnion(value); +}; diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/parseJsonBody.js b/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/parseJsonBody.js new file mode 100644 index 00000000..d9c15641 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/parseJsonBody.js @@ -0,0 +1,51 @@ +import { collectBodyString } from "../common"; +export const parseJsonBody = (streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + try { + return JSON.parse(encoded); + } + catch (e) { + if (e?.name === "SyntaxError") { + Object.defineProperty(e, "$responseBodyText", { + value: encoded, + }); + } + throw e; + } + } + return {}; +}); +export const parseJsonErrorBody = async (errorBody, context) => { + const value = await parseJsonBody(errorBody, context); + value.message = value.message ?? 
value.Message; + return value; +}; +export const loadRestJsonErrorCode = (output, data) => { + const findKey = (object, key) => Object.keys(object).find((k) => k.toLowerCase() === key.toLowerCase()); + const sanitizeErrorCode = (rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + const headerKey = findKey(output.headers, "x-amzn-errortype"); + if (headerKey !== undefined) { + return sanitizeErrorCode(output.headers[headerKey]); + } + if (data.code !== undefined) { + return sanitizeErrorCode(data.code); + } + if (data["__type"] !== undefined) { + return sanitizeErrorCode(data["__type"]); + } +}; diff --git a/node_modules/@aws-sdk/core/dist-es/submodules/protocols/xml/parseXmlBody.js b/node_modules/@aws-sdk/core/dist-es/submodules/protocols/xml/parseXmlBody.js new file mode 100644 index 00000000..556a9670 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-es/submodules/protocols/xml/parseXmlBody.js @@ -0,0 +1,57 @@ +import { getValueFromTextNode } from "@smithy/smithy-client"; +import { XMLParser } from "fast-xml-parser"; +import { collectBodyString } from "../common"; +export const parseXmlBody = (streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + const parser = new XMLParser({ + attributeNamePrefix: "", + htmlEntities: true, + ignoreAttributes: false, + ignoreDeclaration: true, + parseTagValue: false, + trimValues: false, + tagValueProcessor: (_, val) => (val.trim() === "" && val.includes("\n") ? "" : undefined), + }); + parser.addEntity("#xD", "\r"); + parser.addEntity("#10", "\n"); + let parsedObj; + try { + parsedObj = parser.parse(encoded, true); + } + catch (e) { + if (e && typeof e === "object") { + Object.defineProperty(e, "$responseBodyText", { + value: encoded, + }); + } + throw e; + } + const textNodeName = "#text"; + const key = Object.keys(parsedObj)[0]; + const parsedObjToReturn = parsedObj[key]; + if (parsedObjToReturn[textNodeName]) { + parsedObjToReturn[key] = parsedObjToReturn[textNodeName]; + delete parsedObjToReturn[textNodeName]; + } + return getValueFromTextNode(parsedObjToReturn); + } + return {}; +}); +export const parseXmlErrorBody = async (errorBody, context) => { + const value = await parseXmlBody(errorBody, context); + if (value.Error) { + value.Error.message = value.Error.message ?? 
value.Error.Message; + } + return value; +}; +export const loadRestXmlErrorCode = (output, data) => { + if (data?.Error?.Code !== undefined) { + return data.Error.Code; + } + if (data?.Code !== undefined) { + return data.Code; + } + if (output.statusCode == 404) { + return "NotFound"; + } +}; diff --git a/node_modules/@aws-sdk/core/dist-types/api-extractor-type-index.d.ts b/node_modules/@aws-sdk/core/dist-types/api-extractor-type-index.d.ts new file mode 100644 index 00000000..e83f927b --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/api-extractor-type-index.d.ts @@ -0,0 +1,5 @@ +export * from "./index"; +export * from "./submodules/account-id-endpoint/index"; +export * from "./submodules/client/index"; +export * from "./submodules/httpAuthSchemes/index"; +export * from "./submodules/protocols/index"; diff --git a/node_modules/@aws-sdk/core/dist-types/index.d.ts b/node_modules/@aws-sdk/core/dist-types/index.d.ts new file mode 100644 index 00000000..5d51cdbb --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/index.d.ts @@ -0,0 +1,22 @@ +/** + * Submodules annotated with "Legacy" are from prior to the submodule system. + * They are exported from the package's root index to preserve backwards compatibility. + * + * New development should go in a proper submodule and not be exported from the root index. + */ +/** + * Legacy submodule. + */ +export * from "./submodules/client/index"; +/** + * Legacy submodule. + */ +export * from "./submodules/httpAuthSchemes/index"; +/** + * Legacy submodule. + */ +export * from "./submodules/protocols/index"; +/** + * Warning: do not export any additional submodules from the root of this package. See readme.md for + * guide on developing submodules. + */ diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts new file mode 100644 index 00000000..bf612a22 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts @@ -0,0 +1,27 @@ +import { Provider } from "@smithy/types"; +import { AccountIdEndpointMode } from "./AccountIdEndpointModeConstants"; +/** + * @public + */ +export interface AccountIdEndpointModeInputConfig { + /** + * The account ID endpoint mode to use. 
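+     * Valid values are "disabled", "preferred", and "required" (see
+     * AccountIdEndpointModeConstants); DEFAULT_ACCOUNT_ID_ENDPOINT_MODE is
+     * "preferred".
+     *
+     * @example
+     * // Illustrative usage; `S3Client` is a stand-in for any client that
+     * // supports account-ID-based endpoints:
+     * // new S3Client({ accountIdEndpointMode: "disabled" });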
+ */ + accountIdEndpointMode?: AccountIdEndpointMode | Provider<AccountIdEndpointMode>; +} +/** + * @internal + */ +interface PreviouslyResolved { +} +/** + * @internal + */ +export interface AccountIdEndpointModeResolvedConfig { + accountIdEndpointMode: Provider<AccountIdEndpointMode>; +} +/** + * @internal + */ +export declare const resolveAccountIdEndpointModeConfig: <T>(input: T & AccountIdEndpointModeInputConfig & PreviouslyResolved) => T & AccountIdEndpointModeResolvedConfig; +export {}; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts new file mode 100644 index 00000000..640a7472 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts @@ -0,0 +1,16 @@ +/** + * @public + */ +export type AccountIdEndpointMode = "disabled" | "preferred" | "required"; +/** + * @internal + */ +export declare const DEFAULT_ACCOUNT_ID_ENDPOINT_MODE = "preferred"; +/** + * @internal + */ +export declare const ACCOUNT_ID_ENDPOINT_MODE_VALUES: AccountIdEndpointMode[]; +/** + * @internal + */ +export declare function validateAccountIdEndpointMode(value: any): value is AccountIdEndpointMode; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts new file mode 100644 index 00000000..96b80591 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts @@ -0,0 +1,14 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { AccountIdEndpointMode } from "./AccountIdEndpointModeConstants"; +/** + * @internal + */ +export declare const ENV_ACCOUNT_ID_ENDPOINT_MODE = "AWS_ACCOUNT_ID_ENDPOINT_MODE"; +/** + * @internal + */ +export declare const CONFIG_ACCOUNT_ID_ENDPOINT_MODE = "account_id_endpoint_mode"; +/** + * @internal + */ +export declare const NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS: LoadedConfigSelectors<AccountIdEndpointMode>; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/index.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/index.d.ts new file mode 100644 index 00000000..52af11df --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/index.d.ts @@ -0,0 +1,3 @@ +export * from "./AccountIdEndpointModeConfigResolver"; +export * from "./AccountIdEndpointModeConstants"; +export * from "./NodeAccountIdEndpointModeConfigOptions"; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/client/emitWarningIfUnsupportedVersion.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/client/emitWarningIfUnsupportedVersion.d.ts new file mode 100644 index 00000000..d97bc8c8 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/client/emitWarningIfUnsupportedVersion.d.ts @@ -0,0 +1,12 @@ +export declare const state: { + warningEmitted: boolean; +}; +/** + * @internal + * + * Emits warning if the provided Node.js version string is + * pending deprecation by AWS SDK JSv3. + * + * @param version - The Node.js version string.
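+ *
+ * @example
+ * // Illustrative call site (an assumption about the caller, not shown in
+ * // this diff); the exported `state.warningEmitted` flag above records
+ * // whether the warning has already fired:
+ * // emitWarningIfUnsupportedVersion(process.version);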
+ */ +export declare const emitWarningIfUnsupportedVersion: (version: string) => void; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/client/index.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/client/index.d.ts new file mode 100644 index 00000000..1a2cc9d1 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/client/index.d.ts @@ -0,0 +1,3 @@ +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./setCredentialFeature"; +export * from "./setFeature"; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/client/setCredentialFeature.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/client/setCredentialFeature.d.ts new file mode 100644 index 00000000..b3b4a688 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/client/setCredentialFeature.d.ts @@ -0,0 +1,7 @@ +import type { AttributedAwsCredentialIdentity, AwsSdkCredentialsFeatures } from "@aws-sdk/types"; +/** + * @internal + * + * @returns the credentials with source feature attribution. + */ +export declare function setCredentialFeature(credentials: AttributedAwsCredentialIdentity, feature: F, value: AwsSdkCredentialsFeatures[F]): AttributedAwsCredentialIdentity; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/client/setFeature.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/client/setFeature.d.ts new file mode 100644 index 00000000..93458bf3 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/client/setFeature.d.ts @@ -0,0 +1,12 @@ +import type { AwsHandlerExecutionContext, AwsSdkFeatures } from "@aws-sdk/types"; +/** + * @internal + * Indicates to the request context that a given feature is active. + * + * @param context - handler execution context. + * @param feature - readable name of feature. + * @param value - encoding value of feature. This is required because the + * specification asks the SDK not to include a runtime lookup of all + * the feature identifiers. + */ +export declare function setFeature(context: AwsHandlerExecutionContext, feature: F, value: AwsSdkFeatures[F]): void; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts new file mode 100644 index 00000000..051b17ce --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts @@ -0,0 +1,10 @@ +import { AwsCredentialIdentity, HttpRequest as IHttpRequest } from "@smithy/types"; +import { AwsSdkSigV4Signer } from "./AwsSdkSigV4Signer"; +/** + * @internal + * Note: this is not a signing algorithm implementation. The sign method + * accepts the real signer as an input parameter. 
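+ * Unlike the single-region SigV4 signer it extends, this signer applies when
+ * the signing scope is a region set (the `signingRegionSet` signing property
+ * declared in AwsSdkSigV4Signer.d.ts below), e.g. ["us-east-1", "eu-west-1"]
+ * or the wildcard ["*"].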
+ */ +export declare class AwsSdkSigV4ASigner extends AwsSdkSigV4Signer { + sign(httpRequest: IHttpRequest, identity: AwsCredentialIdentity, signingProperties: Record): Promise; +} diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts new file mode 100644 index 00000000..7c1b5504 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts @@ -0,0 +1,43 @@ +import { AuthScheme, AwsCredentialIdentity, HttpRequest as IHttpRequest, HttpResponse, HttpSigner, RequestSigner } from "@smithy/types"; +import { AwsSdkSigV4AAuthResolvedConfig } from "./resolveAwsSdkSigV4AConfig"; +/** + * @internal + */ +interface AwsSdkSigV4Config extends AwsSdkSigV4AAuthResolvedConfig { + systemClockOffset: number; + signer: (authScheme?: AuthScheme) => Promise; +} +/** + * @internal + */ +interface AwsSdkSigV4AuthSigningProperties { + config: AwsSdkSigV4Config; + signer: RequestSigner; + signingRegion?: string; + signingRegionSet?: string[]; + signingName?: string; +} +/** + * @internal + */ +export declare const validateSigningProperties: (signingProperties: Record) => Promise; +/** + * Note: this is not a signing algorithm implementation. The sign method + * accepts the real signer as an input parameter. + * @internal + */ +export declare class AwsSdkSigV4Signer implements HttpSigner { + sign(httpRequest: IHttpRequest, + /** + * `identity` is bound in {@link resolveAWSSDKSigV4Config} + */ + identity: AwsCredentialIdentity, signingProperties: Record): Promise; + errorHandler(signingProperties: Record): (error: Error) => never; + successHandler(httpResponse: HttpResponse | unknown, signingProperties: Record): void; +} +/** + * @internal + * @deprecated renamed to {@link AwsSdkSigV4Signer} + */ +export declare const AWSSDKSigV4Signer: typeof AwsSdkSigV4Signer; +export {}; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts new file mode 100644 index 00000000..edf3162b --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts @@ -0,0 +1,5 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @public + */ +export declare const NODE_AUTH_SCHEME_PREFERENCE_OPTIONS: LoadedConfigSelectors; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/index.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/index.d.ts new file mode 100644 index 00000000..40712255 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/index.d.ts @@ -0,0 +1,5 @@ +export { AwsSdkSigV4Signer, AWSSDKSigV4Signer, validateSigningProperties } from "./AwsSdkSigV4Signer"; +export { AwsSdkSigV4ASigner } from "./AwsSdkSigV4ASigner"; +export * from "./NODE_AUTH_SCHEME_PREFERENCE_OPTIONS"; +export * from "./resolveAwsSdkSigV4AConfig"; +export * from "./resolveAwsSdkSigV4Config"; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts new file mode 100644 index 00000000..f7416257 --- /dev/null +++ 
b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts @@ -0,0 +1,38 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { Provider } from "@smithy/types"; +/** + * @public + */ +export interface AwsSdkSigV4AAuthInputConfig { + /** + * This option will override the AWS sigv4a + * signing regionSet from any other source. + * + * The lookup order is: + * 1. this value + * 2. configuration file value of sigv4a_signing_region_set. + * 3. environment value of AWS_SIGV4A_SIGNING_REGION_SET. + * 4. signingRegionSet given by endpoint resolution. + * 5. the singular region of the SDK client. + */ + sigv4aSigningRegionSet?: string[] | undefined | Provider; +} +/** + * @internal + */ +export interface AwsSdkSigV4APreviouslyResolved { +} +/** + * @internal + */ +export interface AwsSdkSigV4AAuthResolvedConfig { + sigv4aSigningRegionSet: Provider; +} +/** + * @internal + */ +export declare const resolveAwsSdkSigV4AConfig: (config: T & AwsSdkSigV4AAuthInputConfig & AwsSdkSigV4APreviouslyResolved) => T & AwsSdkSigV4AAuthResolvedConfig; +/** + * @internal + */ +export declare const NODE_SIGV4A_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts new file mode 100644 index 00000000..cf42eecb --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts @@ -0,0 +1,117 @@ +import type { MergeFunctions } from "@aws-sdk/types"; +import { SignatureV4CryptoInit, SignatureV4Init } from "@smithy/signature-v4"; +import { AuthScheme, AwsCredentialIdentity, AwsCredentialIdentityProvider, ChecksumConstructor, HashConstructor, MemoizedProvider, Provider, RegionInfoProvider, RequestSigner } from "@smithy/types"; +/** + * @public + */ +export interface AwsSdkSigV4AuthInputConfig { + /** + * The credentials used to sign requests. + */ + credentials?: AwsCredentialIdentity | AwsCredentialIdentityProvider; + /** + * The signer to use when signing requests. + */ + signer?: RequestSigner | ((authScheme?: AuthScheme) => Promise); + /** + * Whether to escape request path when signing the request. + */ + signingEscapePath?: boolean; + /** + * An offset value in milliseconds to apply to all signing times. + */ + systemClockOffset?: number; + /** + * The region where you want to sign your request against. This + * can be different to the region in the endpoint. + */ + signingRegion?: string; + /** + * The injectable SigV4-compatible signer class constructor. If not supplied, + * regular SignatureV4 constructor will be used. + * + * @internal + */ + signerConstructor?: new (options: SignatureV4Init & SignatureV4CryptoInit) => RequestSigner; +} +/** + * Used to indicate whether a credential provider function was memoized by this resolver. + * @public + */ +export type AwsSdkSigV4Memoized = { + /** + * The credential provider has been memoized by the AWS SDK SigV4 config resolver. + */ + memoized?: boolean; + /** + * The credential provider has the caller client config object bound to its arguments. + */ + configBound?: boolean; + /** + * Function is wrapped with attribution transform. 
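+ *
+ * @example
+ * // Sketch of the flags set by the dist-es resolveAwsSdkSigV4Config resolver
+ * // earlier in this diff when a caller supplies its own provider (the key
+ * // values are placeholders):
+ * // config.credentials = async () => ({ accessKeyId: "AKID", secretAccessKey: "SECRET" });
+ * // config.credentials.memoized === true;    // wrapped by memoizeIdentityProvider
+ * // config.credentials.configBound === true; // callerClientConfig bound to each call
+ * // config.credentials.attributed === true;  // tagged with "CREDENTIALS_CODE"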
+ */ + attributed?: boolean; +}; +/** + * @internal + */ +export interface AwsSdkSigV4PreviouslyResolved { + credentialDefaultProvider?: (input: any) => MemoizedProvider; + region: string | Provider; + sha256: ChecksumConstructor | HashConstructor; + signingName?: string; + regionInfoProvider?: RegionInfoProvider; + defaultSigningName?: string; + serviceId: string; + useFipsEndpoint: Provider; + useDualstackEndpoint: Provider; +} +/** + * @internal + */ +export interface AwsSdkSigV4AuthResolvedConfig { + /** + * Resolved value for input config {@link AwsSdkSigV4AuthInputConfig.credentials} + * This provider MAY memoize the loaded credentials for certain period. + */ + credentials: MergeFunctions> & AwsSdkSigV4Memoized; + /** + * Resolved value for input config {@link AwsSdkSigV4AuthInputConfig.signer} + */ + signer: (authScheme?: AuthScheme) => Promise; + /** + * Resolved value for input config {@link AwsSdkSigV4AuthInputConfig.signingEscapePath} + */ + signingEscapePath: boolean; + /** + * Resolved value for input config {@link AwsSdkSigV4AuthInputConfig.systemClockOffset} + */ + systemClockOffset: number; +} +/** + * @internal + */ +export declare const resolveAwsSdkSigV4Config: (config: T & AwsSdkSigV4AuthInputConfig & AwsSdkSigV4PreviouslyResolved) => T & AwsSdkSigV4AuthResolvedConfig; +/** + * @internal + * @deprecated renamed to {@link AwsSdkSigV4AuthInputConfig} + */ +export interface AWSSDKSigV4AuthInputConfig extends AwsSdkSigV4AuthInputConfig { +} +/** + * @internal + * @deprecated renamed to {@link AwsSdkSigV4PreviouslyResolved} + */ +export interface AWSSDKSigV4PreviouslyResolved extends AwsSdkSigV4PreviouslyResolved { +} +/** + * @internal + * @deprecated renamed to {@link AwsSdkSigV4AuthResolvedConfig} + */ +export interface AWSSDKSigV4AuthResolvedConfig extends AwsSdkSigV4AuthResolvedConfig { +} +/** + * @internal + * @deprecated renamed to {@link resolveAwsSdkSigV4Config} + */ +export declare const resolveAWSSDKSigV4Config: (config: T & AwsSdkSigV4AuthInputConfig & AwsSdkSigV4PreviouslyResolved) => T & AwsSdkSigV4AuthResolvedConfig; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/index.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/index.d.ts new file mode 100644 index 00000000..29d0c3bd --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/index.d.ts @@ -0,0 +1 @@ +export * from "./aws_sdk"; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts new file mode 100644 index 00000000..823921b1 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts @@ -0,0 +1,8 @@ +/** + * Converts a comma-separated string into an array of trimmed strings + * @param str The comma-separated input string to split + * @returns Array of trimmed strings split from the input + * + * @internal + */ +export declare const getArrayForCommaSeparatedString: (str: string) => string[]; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getDateHeader.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getDateHeader.d.ts new file mode 100644 index 00000000..2c9157bb --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getDateHeader.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export 
declare const getDateHeader: (response: unknown) => string | undefined; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts new file mode 100644 index 00000000..4b726900 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * + * Returns a date that is corrected for clock skew. + * + * @param systemClockOffset The offset of the system clock in milliseconds. + */ +export declare const getSkewCorrectedDate: (systemClockOffset: number) => Date; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts new file mode 100644 index 00000000..2d554b8e --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts @@ -0,0 +1,10 @@ +/** + * @internal + * + * If clock is skewed, it returns the difference between serverTime and current time. + * If clock is not skewed, it returns currentSystemClockOffset. + * + * @param clockTime The string value of the server time. + * @param currentSystemClockOffset The current system clock offset. + */ +export declare const getUpdatedSystemClockOffset: (clockTime: string, currentSystemClockOffset: number) => number; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/index.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/index.d.ts new file mode 100644 index 00000000..07c21953 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/index.d.ts @@ -0,0 +1,3 @@ +export * from "./getDateHeader"; +export * from "./getSkewCorrectedDate"; +export * from "./getUpdatedSystemClockOffset"; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts new file mode 100644 index 00000000..970fa15b --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts @@ -0,0 +1,9 @@ +/** + * @internal + * + * Checks if the provided date is within the skew window of 300000ms. + * + * @param clockTime - The time to check for skew in milliseconds. + * @param systemClockOffset - The offset of the system clock in milliseconds. + */ +export declare const isClockSkewed: (clockTime: number, systemClockOffset: number) => boolean; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/protocols/coercing-serializers.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/protocols/coercing-serializers.d.ts new file mode 100644 index 00000000..10d9d396 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/protocols/coercing-serializers.d.ts @@ -0,0 +1,18 @@ +/** + * @internal + * + * Used for awsQueryCompatibility trait. + */ +export declare const _toStr: (val: unknown) => string | undefined; +/** + * @internal + * + * Used for awsQueryCompatibility trait. + */ +export declare const _toBool: (val: unknown) => boolean | undefined; +/** + * @internal + * + * Used for awsQueryCompatibility trait. 
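+ *
+ * @example
+ * // Illustrative behavior, matching the dist-es implementation earlier in
+ * // this diff: _toNum("42") returns 42, while _toNum("42abc") warns and
+ * // returns "42abc" unchanged, because Number("42abc").toString() does not
+ * // round-trip to the original string.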
+ */ +export declare const _toNum: (val: unknown) => number | undefined; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/protocols/common.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/protocols/common.d.ts new file mode 100644 index 00000000..ec78fb22 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/protocols/common.d.ts @@ -0,0 +1,2 @@ +import type { SerdeContext } from "@smithy/types"; +export declare const collectBodyString: (streamBody: any, context: SerdeContext) => Promise<string>; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/protocols/index.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/protocols/index.d.ts new file mode 100644 index 00000000..09a6ac21 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/protocols/index.d.ts @@ -0,0 +1,4 @@ +export * from "./coercing-serializers"; +export * from "./json/awsExpectUnion"; +export * from "./json/parseJsonBody"; +export * from "./xml/parseXmlBody"; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/awsExpectUnion.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/awsExpectUnion.d.ts new file mode 100644 index 00000000..98607ea4 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/awsExpectUnion.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * Forwards to Smithy's expectUnion function, but also ignores + * the `__type` field if it is present. + */ +export declare const awsExpectUnion: (value: unknown) => Record<string, any> | undefined; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/parseJsonBody.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/parseJsonBody.d.ts new file mode 100644 index 00000000..827ffe9c --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/parseJsonBody.d.ts @@ -0,0 +1,13 @@ +import type { HttpResponse, SerdeContext } from "@smithy/types"; +/** + * @internal + */ +export declare const parseJsonBody: (streamBody: any, context: SerdeContext) => any; +/** + * @internal + */ +export declare const parseJsonErrorBody: (errorBody: any, context: SerdeContext) => Promise<any>; +/** + * @internal + */ +export declare const loadRestJsonErrorCode: (output: HttpResponse, data: any) => string | undefined; diff --git a/node_modules/@aws-sdk/core/dist-types/submodules/protocols/xml/parseXmlBody.d.ts b/node_modules/@aws-sdk/core/dist-types/submodules/protocols/xml/parseXmlBody.d.ts new file mode 100644 index 00000000..30cfc30d --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/submodules/protocols/xml/parseXmlBody.d.ts @@ -0,0 +1,13 @@ +import type { HttpResponse, SerdeContext } from "@smithy/types"; +/** + * @internal + */ +export declare const parseXmlBody: (streamBody: any, context: SerdeContext) => any; +/** + * @internal + */ +export declare const parseXmlErrorBody: (errorBody: any, context: SerdeContext) => Promise<any>; +/** + * @internal + */ +export declare const loadRestXmlErrorCode: (output: HttpResponse, data: any) => string | undefined; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/api-extractor-type-index.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/api-extractor-type-index.d.ts new file mode 100644 index 00000000..e83f927b --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/api-extractor-type-index.d.ts @@ -0,0 +1,5 @@ +export * from "./index"; +export * from "./submodules/account-id-endpoint/index"; +export * from "./submodules/client/index"; +export * from 
"./submodules/httpAuthSchemes/index"; +export * from "./submodules/protocols/index"; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..239de7a6 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./submodules/client/index"; +export * from "./submodules/httpAuthSchemes/index"; +export * from "./submodules/protocols/index"; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts new file mode 100644 index 00000000..10d5c219 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts @@ -0,0 +1,15 @@ +import { Provider } from "@smithy/types"; +import { AccountIdEndpointMode } from "./AccountIdEndpointModeConstants"; +export interface AccountIdEndpointModeInputConfig { + accountIdEndpointMode?: + | AccountIdEndpointMode + | Provider; +} +interface PreviouslyResolved {} +export interface AccountIdEndpointModeResolvedConfig { + accountIdEndpointMode: Provider; +} +export declare const resolveAccountIdEndpointModeConfig: ( + input: T & AccountIdEndpointModeInputConfig & PreviouslyResolved +) => T & AccountIdEndpointModeResolvedConfig; +export {}; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts new file mode 100644 index 00000000..27bdce90 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts @@ -0,0 +1,6 @@ +export type AccountIdEndpointMode = "disabled" | "preferred" | "required"; +export declare const DEFAULT_ACCOUNT_ID_ENDPOINT_MODE = "preferred"; +export declare const ACCOUNT_ID_ENDPOINT_MODE_VALUES: AccountIdEndpointMode[]; +export declare function validateAccountIdEndpointMode( + value: any +): value is AccountIdEndpointMode; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts new file mode 100644 index 00000000..9b045668 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts @@ -0,0 +1,7 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { AccountIdEndpointMode } from "./AccountIdEndpointModeConstants"; +export declare const ENV_ACCOUNT_ID_ENDPOINT_MODE = + "AWS_ACCOUNT_ID_ENDPOINT_MODE"; +export declare const CONFIG_ACCOUNT_ID_ENDPOINT_MODE = + "account_id_endpoint_mode"; +export declare const NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/index.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/index.d.ts new file mode 100644 index 00000000..52af11df --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/index.d.ts @@ -0,0 +1,3 @@ +export * from "./AccountIdEndpointModeConfigResolver"; +export * from 
"./AccountIdEndpointModeConstants"; +export * from "./NodeAccountIdEndpointModeConfigOptions"; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/emitWarningIfUnsupportedVersion.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/emitWarningIfUnsupportedVersion.d.ts new file mode 100644 index 00000000..84af5674 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/emitWarningIfUnsupportedVersion.d.ts @@ -0,0 +1,4 @@ +export declare const state: { + warningEmitted: boolean; +}; +export declare const emitWarningIfUnsupportedVersion: (version: string) => void; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/index.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/index.d.ts new file mode 100644 index 00000000..1a2cc9d1 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/index.d.ts @@ -0,0 +1,3 @@ +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./setCredentialFeature"; +export * from "./setFeature"; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setCredentialFeature.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setCredentialFeature.d.ts new file mode 100644 index 00000000..13366194 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setCredentialFeature.d.ts @@ -0,0 +1,11 @@ +import { + AttributedAwsCredentialIdentity, + AwsSdkCredentialsFeatures, +} from "@aws-sdk/types"; +export declare function setCredentialFeature< + F extends keyof AwsSdkCredentialsFeatures +>( + credentials: AttributedAwsCredentialIdentity, + feature: F, + value: AwsSdkCredentialsFeatures[F] +): AttributedAwsCredentialIdentity; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setFeature.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setFeature.d.ts new file mode 100644 index 00000000..84482ee6 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setFeature.d.ts @@ -0,0 +1,6 @@ +import { AwsHandlerExecutionContext, AwsSdkFeatures } from "@aws-sdk/types"; +export declare function setFeature( + context: AwsHandlerExecutionContext, + feature: F, + value: AwsSdkFeatures[F] +): void; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts new file mode 100644 index 00000000..b8c2b742 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts @@ -0,0 +1,12 @@ +import { + AwsCredentialIdentity, + HttpRequest as IHttpRequest, +} from "@smithy/types"; +import { AwsSdkSigV4Signer } from "./AwsSdkSigV4Signer"; +export declare class AwsSdkSigV4ASigner extends AwsSdkSigV4Signer { + sign( + httpRequest: IHttpRequest, + identity: AwsCredentialIdentity, + signingProperties: Record + ): Promise; +} diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts new file mode 100644 index 00000000..0be6b41e --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts @@ -0,0 +1,39 @@ +import { + AuthScheme, + AwsCredentialIdentity, + HttpRequest as IHttpRequest, + HttpResponse, + 
HttpSigner, + RequestSigner, +} from "@smithy/types"; +import { AwsSdkSigV4AAuthResolvedConfig } from "./resolveAwsSdkSigV4AConfig"; +interface AwsSdkSigV4Config extends AwsSdkSigV4AAuthResolvedConfig { + systemClockOffset: number; + signer: (authScheme?: AuthScheme) => Promise; +} +interface AwsSdkSigV4AuthSigningProperties { + config: AwsSdkSigV4Config; + signer: RequestSigner; + signingRegion?: string; + signingRegionSet?: string[]; + signingName?: string; +} +export declare const validateSigningProperties: ( + signingProperties: Record +) => Promise; +export declare class AwsSdkSigV4Signer implements HttpSigner { + sign( + httpRequest: IHttpRequest, + identity: AwsCredentialIdentity, + signingProperties: Record + ): Promise; + errorHandler( + signingProperties: Record + ): (error: Error) => never; + successHandler( + httpResponse: HttpResponse | unknown, + signingProperties: Record + ): void; +} +export declare const AWSSDKSigV4Signer: typeof AwsSdkSigV4Signer; +export {}; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts new file mode 100644 index 00000000..effc1e06 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts @@ -0,0 +1,4 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const NODE_AUTH_SCHEME_PREFERENCE_OPTIONS: LoadedConfigSelectors< + string[] +>; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/index.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/index.d.ts new file mode 100644 index 00000000..6047921c --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/index.d.ts @@ -0,0 +1,9 @@ +export { + AwsSdkSigV4Signer, + AWSSDKSigV4Signer, + validateSigningProperties, +} from "./AwsSdkSigV4Signer"; +export { AwsSdkSigV4ASigner } from "./AwsSdkSigV4ASigner"; +export * from "./NODE_AUTH_SCHEME_PREFERENCE_OPTIONS"; +export * from "./resolveAwsSdkSigV4AConfig"; +export * from "./resolveAwsSdkSigV4Config"; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts new file mode 100644 index 00000000..9f949b08 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts @@ -0,0 +1,18 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { Provider } from "@smithy/types"; +export interface AwsSdkSigV4AAuthInputConfig { + sigv4aSigningRegionSet?: + | string[] + | undefined + | Provider; +} +export interface AwsSdkSigV4APreviouslyResolved {} +export interface AwsSdkSigV4AAuthResolvedConfig { + sigv4aSigningRegionSet: Provider; +} +export declare const resolveAwsSdkSigV4AConfig: ( + config: T & AwsSdkSigV4AAuthInputConfig & AwsSdkSigV4APreviouslyResolved +) => T & AwsSdkSigV4AAuthResolvedConfig; +export declare const NODE_SIGV4A_CONFIG_OPTIONS: LoadedConfigSelectors< + string[] | undefined +>; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts 
b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts new file mode 100644 index 00000000..fc562d99 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts @@ -0,0 +1,65 @@ +import { MergeFunctions } from "@aws-sdk/types"; +import { SignatureV4CryptoInit, SignatureV4Init } from "@smithy/signature-v4"; +import { + AuthScheme, + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + ChecksumConstructor, + HashConstructor, + MemoizedProvider, + Provider, + RegionInfoProvider, + RequestSigner, +} from "@smithy/types"; +export interface AwsSdkSigV4AuthInputConfig { + credentials?: AwsCredentialIdentity | AwsCredentialIdentityProvider; + signer?: + | RequestSigner + | ((authScheme?: AuthScheme) => Promise); + signingEscapePath?: boolean; + systemClockOffset?: number; + signingRegion?: string; + signerConstructor?: new ( + options: SignatureV4Init & SignatureV4CryptoInit + ) => RequestSigner; +} +export type AwsSdkSigV4Memoized = { + memoized?: boolean; + configBound?: boolean; + attributed?: boolean; +}; +export interface AwsSdkSigV4PreviouslyResolved { + credentialDefaultProvider?: ( + input: any + ) => MemoizedProvider; + region: string | Provider; + sha256: ChecksumConstructor | HashConstructor; + signingName?: string; + regionInfoProvider?: RegionInfoProvider; + defaultSigningName?: string; + serviceId: string; + useFipsEndpoint: Provider; + useDualstackEndpoint: Provider; +} +export interface AwsSdkSigV4AuthResolvedConfig { + credentials: MergeFunctions< + AwsCredentialIdentityProvider, + MemoizedProvider + > & + AwsSdkSigV4Memoized; + signer: (authScheme?: AuthScheme) => Promise; + signingEscapePath: boolean; + systemClockOffset: number; +} +export declare const resolveAwsSdkSigV4Config: ( + config: T & AwsSdkSigV4AuthInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & AwsSdkSigV4AuthResolvedConfig; +export interface AWSSDKSigV4AuthInputConfig + extends AwsSdkSigV4AuthInputConfig {} +export interface AWSSDKSigV4PreviouslyResolved + extends AwsSdkSigV4PreviouslyResolved {} +export interface AWSSDKSigV4AuthResolvedConfig + extends AwsSdkSigV4AuthResolvedConfig {} +export declare const resolveAWSSDKSigV4Config: ( + config: T & AwsSdkSigV4AuthInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & AwsSdkSigV4AuthResolvedConfig; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/index.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/index.d.ts new file mode 100644 index 00000000..29d0c3bd --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/index.d.ts @@ -0,0 +1 @@ +export * from "./aws_sdk"; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts new file mode 100644 index 00000000..aee23280 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts @@ -0,0 +1 @@ +export declare const getArrayForCommaSeparatedString: (str: string) => string[]; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getDateHeader.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getDateHeader.d.ts new file mode 100644 index 00000000..73fc5295 --- /dev/null 
+++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getDateHeader.d.ts @@ -0,0 +1 @@ +export declare const getDateHeader: (response: unknown) => string | undefined; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts new file mode 100644 index 00000000..741c5ea3 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts @@ -0,0 +1 @@ +export declare const getSkewCorrectedDate: (systemClockOffset: number) => Date; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts new file mode 100644 index 00000000..eae33117 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts @@ -0,0 +1,4 @@ +export declare const getUpdatedSystemClockOffset: ( + clockTime: string, + currentSystemClockOffset: number +) => number; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/index.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/index.d.ts new file mode 100644 index 00000000..07c21953 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/index.d.ts @@ -0,0 +1,3 @@ +export * from "./getDateHeader"; +export * from "./getSkewCorrectedDate"; +export * from "./getUpdatedSystemClockOffset"; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts new file mode 100644 index 00000000..9f994f87 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts @@ -0,0 +1,4 @@ +export declare const isClockSkewed: ( + clockTime: number, + systemClockOffset: number +) => boolean; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/coercing-serializers.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/coercing-serializers.d.ts new file mode 100644 index 00000000..7657ceb9 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/coercing-serializers.d.ts @@ -0,0 +1,3 @@ +export declare const _toStr: (val: unknown) => string | undefined; +export declare const _toBool: (val: unknown) => boolean | undefined; +export declare const _toNum: (val: unknown) => number | undefined; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/common.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/common.d.ts new file mode 100644 index 00000000..73486db0 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/common.d.ts @@ -0,0 +1,5 @@ +import { SerdeContext } from "@smithy/types"; +export declare const collectBodyString: ( + streamBody: any, + context: SerdeContext +) => Promise; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/index.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/index.d.ts new file mode 100644 index 00000000..09a6ac21 --- /dev/null +++ 
b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/index.d.ts @@ -0,0 +1,4 @@ +export * from "./coercing-serializers"; +export * from "./json/awsExpectUnion"; +export * from "./json/parseJsonBody"; +export * from "./xml/parseXmlBody"; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/awsExpectUnion.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/awsExpectUnion.d.ts new file mode 100644 index 00000000..fdc331e0 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/awsExpectUnion.d.ts @@ -0,0 +1,3 @@ +export declare const awsExpectUnion: ( + value: unknown +) => Record | undefined; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/parseJsonBody.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/parseJsonBody.d.ts new file mode 100644 index 00000000..b4004197 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/parseJsonBody.d.ts @@ -0,0 +1,13 @@ +import { HttpResponse, SerdeContext } from "@smithy/types"; +export declare const parseJsonBody: ( + streamBody: any, + context: SerdeContext +) => any; +export declare const parseJsonErrorBody: ( + errorBody: any, + context: SerdeContext +) => Promise; +export declare const loadRestJsonErrorCode: ( + output: HttpResponse, + data: any +) => string | undefined; diff --git a/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/xml/parseXmlBody.d.ts b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/xml/parseXmlBody.d.ts new file mode 100644 index 00000000..f1518341 --- /dev/null +++ b/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/xml/parseXmlBody.d.ts @@ -0,0 +1,13 @@ +import { HttpResponse, SerdeContext } from "@smithy/types"; +export declare const parseXmlBody: ( + streamBody: any, + context: SerdeContext +) => any; +export declare const parseXmlErrorBody: ( + errorBody: any, + context: SerdeContext +) => Promise; +export declare const loadRestXmlErrorCode: ( + output: HttpResponse, + data: any +) => string | undefined; diff --git a/node_modules/@aws-sdk/core/httpAuthSchemes.d.ts b/node_modules/@aws-sdk/core/httpAuthSchemes.d.ts new file mode 100644 index 00000000..3783b5e7 --- /dev/null +++ b/node_modules/@aws-sdk/core/httpAuthSchemes.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@aws-sdk/core/httpAuthSchemes" { + export * from "@aws-sdk/core/dist-types/submodules/httpAuthSchemes/index.d"; +} diff --git a/node_modules/@aws-sdk/core/httpAuthSchemes.js b/node_modules/@aws-sdk/core/httpAuthSchemes.js new file mode 100644 index 00000000..17685b07 --- /dev/null +++ b/node_modules/@aws-sdk/core/httpAuthSchemes.js @@ -0,0 +1,5 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
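+ * For example, a plain CommonJS consumer calling
+ * require("@aws-sdk/core/httpAuthSchemes") resolves to this file, which simply
+ * re-exports the compiled dist-cjs submodule below.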
+ */ +module.exports = require("./dist-cjs/submodules/httpAuthSchemes/index.js"); diff --git a/node_modules/@aws-sdk/core/node_modules/.bin/fxparser b/node_modules/@aws-sdk/core/node_modules/.bin/fxparser new file mode 120000 index 00000000..75327ed9 --- /dev/null +++ b/node_modules/@aws-sdk/core/node_modules/.bin/fxparser @@ -0,0 +1 @@ +../fast-xml-parser/src/cli/cli.js \ No newline at end of file diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/CHANGELOG.md b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/CHANGELOG.md new file mode 100644 index 00000000..021eab6a --- /dev/null +++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/CHANGELOG.md @@ -0,0 +1,594 @@ +Note: If you find missing information about particular minor version, that version must have been changed without any functional change in this library. + +**4.4.1 / 2024-07-28** +- v5 fix: maximum length limit to currency value +- fix #634: build attributes with oneListGroup and attributesGroupName (#653)(By [Andreas Naziris](https://github.com/a-rasin)) +- fix: get oneListGroup to work as expected for array of strings (#662)(By [Andreas Naziris](https://github.com/a-rasin)) + +**4.4.0 / 2024-05-18** +- fix #654: parse attribute list correctly for self closing stop node. +- fix: validator bug when closing tag is not opened. (#647) (By [Ryosuke Fukatani](https://github.com/RyosukeFukatani)) +- fix #581: typings; return type of `tagValueProcessor` & `attributeValueProcessor` (#582) (By [monholm]()) + +**4.3.6 / 2024-03-16** +- Add support for parsing HTML numeric entities (#645) (By [Jonas Schade ](https://github.com/DerZade)) + +**4.3.5 / 2024-02-24** +- code for v5 is added for experimental use + +**4.3.4 / 2024-01-10** +- fix: Don't escape entities in CDATA sections (#633) (By [wackbyte](https://github.com/wackbyte)) + +**4.3.3 / 2024-01-10** +- Remove unnecessary regex + +**4.3.2 / 2023-10-02** +- fix `jObj.hasOwnProperty` when give input is null (By [Arda TANRIKULU](https://github.com/ardatan)) + +**4.3.1 / 2023-09-24** +- revert back "Fix typings for builder and parser to make return type generic" to avoid failure of existing projects. Need to decide a common approach. + +**4.3.0 / 2023-09-20** +- Fix stopNodes to work with removeNSPrefix (#607) (#608) (By [Craig Andrews]https://github.com/candrews)) +- Fix #610 ignore properties set to Object.prototype +- Fix typings for builder and parser to make return type generic (By [Sarah Dayan](https://github.com/sarahdayan)) + +**4.2.7 / 2023-07-30** +- Fix: builder should set text node correctly when only textnode is present (#589) (By [qianqing](https://github.com/joneqian)) +- Fix: Fix for null and undefined attributes when building xml (#585) (#598). A null or undefined value should be ignored. (By [Eugenio Ceschia](https://github.com/cecia234)) + +**4.2.6 / 2023-07-17** +- Fix: Remove trailing slash from jPath for self-closing tags (#595) (By [Maciej Radzikowski](https://github.com/m-radzikowski)) + +**4.2.5 / 2023-06-22** +- change code implementation + +**4.2.4 / 2023-06-06** +- fix security bug + +**4.2.3 / 2023-06-05** +- fix security bug + +**4.2.2 / 2023-04-18** +- fix #562: fix unpaired tag when it comes in last of a nested tag. 
+  Also throw error when unpaired tag is used as closing tag
+
+**4.2.1 / 2023-04-18**
+- fix: jpath after unpaired tags
+
+**4.2.0 / 2023-04-09**
+- support `updateTag` parser property
+
+**4.1.4 / 2023-04-08**
+- update typings to let user create XMLBuilder instance without options (#556) (By [Patrick](https://github.com/omggga))
+- fix: IsArray option isn't parsing tags with 0 as value correctly #490 (#557) (By [Aleksandr Murashkin](https://github.com/p-kuen))
+- feature: support `oneListGroup` to group repeated children tags under a single group
+
+**4.1.3 / 2023-02-26**
+- fix #546: Support complex entity value
+
+**4.1.2 / 2023-02-12**
+- Security Fix
+
+**4.1.1 / 2023-02-03**
+- Fix #540: ignoreAttributes breaks unpairedTags
+- Refactor XML builder code
+
+**4.1.0 / 2023-02-02**
+- Fix '<' or '>' in DTD comment throwing an error. (#533) (By [Adam Baker](https://github.com/Cwazywierdo))
+- Set "eNotation" to 'true' as default
+
+**4.0.15 / 2023-01-25**
+- make "eNotation" optional
+
+**4.0.14 / 2023-01-22**
+- fixed: add missed typing "eNotation" to parse values
+
+**4.0.13 / 2023-01-07**
+- preserveOrder formatting (By [mdeknowis](https://github.com/mdeknowis))
+- support `transformAttributeName` (By [Erik Rothoff Andersson](https://github.com/erkie))
+
+**4.0.12 / 2022-11-19**
+- fix typescript
+
+**4.0.11 / 2022-10-05**
+- fix #501: parse for entities only once
+
+**4.0.10 / 2022-09-14**
+- fix broken links in demo site (By [Yannick Lang](https://github.com/layaxx))
+- fix #491: tagValueProcessor type definition (By [Andrea Francesco Speziale](https://github.com/andreafspeziale))
+- Add jsdocs for tagValueProcessor
+
+**4.0.9 / 2022-07-10**
+- fix #470: stop-tag can have self-closing tag with same name
+- fix #472: stopNode can have any special tag inside
+- Allow !ATTLIST and !NOTATION with DOCTYPE
+- Add transformTagName option to transform tag names when parsing (#469) (By [Erik Rothoff Andersson](https://github.com/erkie))
+
+**4.0.8 / 2022-05-28**
+- Fix CDATA parsing returning empty string when value = 0 (#451) (By [ndelanou](https://github.com/ndelanou))
+- Fix stopNodes when same tag appears inside node (#456) (By [patrickshipe](https://github.com/patrickshipe))
+- fix #468: prettify own properties only
+
+**4.0.7 / 2022-03-18**
+- support CDATA even if tag order is not preserved
+- support Comments even if tag order is not preserved
+- fix #446: XMLBuilder should not indent XML declaration
+
+**4.0.6 / 2022-03-08**
+- fix: call tagValueProcessor only once for array items
+- fix: missing change for #437
+
+**4.0.5 / 2022-03-06**
+- fix #437: call tagValueProcessor from XML builder
+
+**4.0.4 / 2022-03-03**
+- fix #435: should skip unpaired and self-closing nodes when set as stopNodes
+
+**4.0.3 / 2022-02-15**
+- fix: ReferenceError when bundled in strict mode (#431) (By [Andreas Heissenberger](https://github.com/aheissenberger))
+
+**4.0.2 / 2022-02-04**
+- builder supports `suppressUnpairedNode`
+- parser supports `ignoreDeclaration` and `ignorePiTags`
+- fix: comment is parsed as text value when given as `<!-- ... -->` #423
+- builder supports decoding `&`
+
+**4.0.1 / 2022-01-08**
+- fix builder for pi tag
+- fix: support suppressBooleanAttrs by builder
+
+**4.0.0 / 2022-01-06**
+- Generating different combined, parser only, builder only, validator only browser bundles
+- Keeping cjs modules as they can be imported in cjs and esm modules both. Otherwise refer `esm` branch.
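The 4.0.0 entry above notes that the published CJS modules can be consumed from both module systems. A minimal sketch of the two import styles (illustrative only):

```js
// CommonJS consumer
const { XMLParser } = require("fast-xml-parser");

// ESM consumer (same named exports; the CJS build is import-compatible):
// import { XMLParser } from "fast-xml-parser";

console.log(typeof XMLParser); // "function"
```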
+
+**4.0.0-beta.8 / 2021-12-13**
+- call tagValueProcessor for stop nodes
+
+**4.0.0-beta.7 / 2021-12-09**
+- fix Validator bug when an attribute has no value but '=' only
+- XML Builder should suppress unpaired tags by default.
+- documents update for missing features
+- refactoring to use Object.assign
+- refactoring to remove repeated code
+
+**4.0.0-beta.6 / 2021-12-05**
+- Support PI Tags processing
+- Support `suppressBooleanAttributes` by XML Builder for attributes with value `true`.
+
+**4.0.0-beta.5 / 2021-12-04**
+- fix: when a tag with name "attributes"
+
+**4.0.0-beta.4 / 2021-12-02**
+- Support HTML document parsing
+- skip stop nodes parsing when building the XML from JS object
+- Support external entities without DOCTYPE
+- update dev dependency: strnum v1.0.5 to fix long number issue
+
+**4.0.0-beta.3 / 2021-11-30**
+- support global stopNodes expression like "*.stop"
+- support self-closing and paired unpaired tags
+- fix: CDATA should not be parsed.
+- Fix typings for XMLBuilder (#396) (By [Anders Emil Salvesen](https://github.com/andersem))
+- supports XML entities, HTML entities, DOCTYPE entities
+
+**⚠️ 4.0.0-beta.2 / 2021-11-19**
+- rename `attrMap` to `attibutes` in parser output when `preserveOrder:true`
+- supports unpairedTags
+
+**⚠️ 4.0.0-beta.1 / 2021-11-18**
+- Parser returns an array now
+  - to make the structure common
+  - and to return root level detail
+- renamed `cdataTagName` to `cdataPropName`
+- Added `commentPropName`
+- fix typings
+
+**⚠️ 4.0.0-beta.0 / 2021-11-16**
+- Name change of many configuration properties.
+  - `attrNodeName` to `attributesGroupName`
+  - `attrValueProcessor` to `attributeValueProcessor`
+  - `parseNodeValue` to `parseTagValue`
+  - `ignoreNameSpace` to `removeNSPrefix`
+  - `numParseOptions` to `numberParseOptions`
+  - spelling correction for `suppressEmptyNode`
+- Name change of cli and browser bundle to **fxparser**
+- `isArray` option is added to parse a tag into array
+- `preserveOrder` option is added to render XML in such a way that the resulting js Object maintains the same order of properties as in the XML.
+- Processing behaviour of `tagValueProcessor` and `attributeValueProcessor` is changed, with extra input parameters
+- j2xparser is renamed to XMLBuilder.
+- You need to build XML parser instance for given options first before parsing XML.
+- fix #327, #336: throw error when extra text after XML content
+- fix #330: attribute value can have '\n'
+- fix #350: attributes can be separated by '\n' from tag name
+
+3.21.1 / 2021-10-31
+- Correctly format JSON elements with a text prop but no attribute props (By [haddadnj](https://github.com/haddadnj))
+
+3.21.0 / 2021-10-25
+ - feat: added option `rootNodeName` to set tag name for array input when converting js object to XML.
+ - feat: added option `alwaysCreateTextNode` to force text node creation (by: *@massimo-ua*)
+ - ⚠️ feat: Better error location for unclosed tags. (by *@Gei0r*)
+   - Some error messages would be changed when validating XML. Eg
+   - `{ InvalidXml: "Invalid '[ \"rootNode\"]' found." }` → `{InvalidTag: "Unclosed tag 'rootNode'."}`
+   - `{ InvalidTag: "Closing tag 'rootNode' is expected inplace of 'rootnode'." }` → `{ InvalidTag: "Expected closing tag 'rootNode' (opened in line 1) instead of closing tag 'rootnode'."}`
+ - ⚠️ feat: Column in error response when validating XML
+```js
+{
+  "code": "InvalidAttr",
+  "msg": "Attribute 'abc' is repeated.",
+  "line": 1,
+  "col": 22
+}
+```
+
+3.20.1 / 2021-09-25
+ - update strnum package
+
+3.20.0 / 2021-09-10
+ - Use strnum npm package to parse string to number
+ - breaking change: long number will be parsed to scientific notation.
+
+3.19.0 / 2021-03-14
+ - License changed to MIT original
+ - Fix #321: namespace tag parsing
+
+3.18.0 / 2021-02-05
+ - Support RegEx and function in arrayMode option
+ - Fix #317: validate nested PI tags
+
+3.17.4 / 2020-06-07
+ - Refactor some code to support IE11
+ - Fix: `` space as attribute string
+
+3.17.3 / 2020-05-23
+ - Fix: tag name separated by \n \t
+ - Fix: throw error for unclosed tags
+
+3.17.2 / 2020-05-23
+ - Fixed an issue in processing doctype tag
+ - Fixed tagName where it should not have whitespace chars
+
+3.17.1 / 2020-05-19
+ - Fixed an issue in checking opening tag
+
+3.17.0 / 2020-05-18
+ - parser: fix '<' issue when it comes in attr value
+ - parser: refactoring to remove dependency from regex
+ - validator: fix IE 11 issue for error messages
+ - updated dev dependencies
+ - separated benchmark module to sub-module
+ - breaking change: comments will not be removed from CDATA data
+
+3.16.0 / 2020-01-12
+ - validator: fix for ampersand characters (#215)
+ - refactoring to support unicode chars in tag name
+ - update typing for validator error
+
+3.15.1 / 2019-12-09
+ - validator: fix multiple roots are not allowed
+
+3.15.0 / 2019-11-23
+ - validator: improve error messaging
+ - validator: add line number in case of error
+ - validator: add more error scenarios to make it more descriptive
+
+3.14.0 / 2019-10-25
+ - arrayMode for XML to JS obj parsing
+
+3.13.0 / 2019-10-02
+ - pass tag/attr name to tag/attr value processor
+ - inbuilt optional validation with XML parser
+
+3.12.21 / 2019-10-02
+ - Fix validator for unclosed XMLs
+ - move nimnjs dependency to dev dependency
+ - update dependencies
+
+3.12.20 / 2019-08-16
+ - Revert: Fix #167: '>' in attribute value, as it was causing a severe performance degradation.
+
+3.12.19 / 2019-07-28
+ - Fix js to xml parser should work for date values. (broken: `tagValueProcessor` will receive the original value instead of string always) (breaking change)
+
+3.12.18 / 2019-07-27
+ - remove configstore dependency
+
+3.12.17 / 2019-07-14
+ - Fix #167: '>' in attribute value
+
+3.12.16 / 2019-03-23
+ - Support a new option "stopNodes". (#150)
+   Accept the list of tags which are not required to be parsed. Instead, all the nested tags and data will be assigned as string.
 - Don't show post-install message
+
+3.12.12 / 2019-01-11
+ - fix: IE parseInt, parseFloat error
+
+3.12.11 / 2018-12-24
+ - fix #132: "/" should not be parsed as boolean attr in case of self closing tags
+
+3.12.9 / 2018-11-23
+ - fix #129: validator should not fail when an attribute name is 'length'
+
+3.12.8 / 2018-11-22
+ - fix #128: use 'attrValueProcessor' to process attribute value in json2xml parser
+
+3.12.6 / 2018-11-10
+ - Fix #126: check for type
+
+3.12.4 / 2018-09-12
+ - Fix: include tasks in npm package
+
+3.12.3 / 2018-09-12
+ - Fix CLI issue raised in last PR
+
+3.12.2 / 2018-09-11
+ - Fix formatting for JSON to XML output
+ - Migrate to webpack (PR merged)
+ - fix cli (PR merged)
+
+3.12.0 / 2018-08-06
+ - Support hexadecimal values
+ - Support true number parsing
+
+3.11.2 / 2018-07-23
+ - Update Demo for more options
+ - Update license information
+ - Update readme for formatting, users, and spelling mistakes
+ - Add missing typescript definition for j2xParser
+ - refactoring: change filenames
+
+3.11.1 / 2018-06-05
+ - fix #93: read the text after self closing tag
+
+3.11.0 / 2018-05-20
+ - return defaultOptions if there are no options in buildOptions function
+ - added localeRange declaration in parser.d.ts
+ - Added support of Cyrillic characters in validator XML
+ - fixed bug in validator work when XML data with byte order marker
+
+3.10.0 / 2018-05-13
+ - Added support of Cyrillic characters in parsing XML to JSON
+
+3.9.11 / 2018-05-09
+ - fix https://github.com/NaturalIntelligence/fast-xml-parser/issues/80: fix nimn chars
+ - update package information
+ - fix https://github.com/NaturalIntelligence/fast-xml-parser/issues/86: json 2 xml parser: property with null value should be parsed to self closing tag.
+ - update online demo
+ - revert zombiejs to old version to support old version of node
+ - update dependencies
+
+3.3.10 / 2018-04-23
+ - fix #77: parse even if closing tag has space before '>'
+ - include all css & js lib in demo app
+ - remove babel dependencies until needed
+
+3.3.9 / 2018-04-18
+ - fix #74: TS2314 TypeScript compiler error
+
+3.3.8 / 2018-04-17
+ - fix #73: IE doesn't support Object.assign
+
+3.3.7 / 2018-04-14
+ - fix: use let instead of const in for loop of validator
+ - Merge pull request https://github.com/NaturalIntelligence/fast-xml-parser/issues/71 from bb/master: first draft of typings for typescript (https://github.com/NaturalIntelligence/fast-xml-parser/issues/69)
+ - Merge pull request https://github.com/NaturalIntelligence/fast-xml-parser/issues/70 from bb/patch-1: fix some typos in readme
+
+3.3.6 / 2018-03-21
+ - change arrow functions to full notation for IE compatibility
+
+3.3.5 / 2018-03-15
+ - fix https://github.com/NaturalIntelligence/fast-xml-parser/issues/67: attrNodeName invalid behavior
+ - fix: remove decodeHTML char condition
+
+3.3.4 / 2018-03-14
+ - remove dependency on "he" package
+ - refactor code to separate methods in separate files.
+ - draft code for transforming XML to json string. It is not officially documented due to performance issue.
+
+3.3.0 / 2018-03-05
+ - use common default options for XML parsing for consistency. And add `parseToNimn` method.
 - update nexttodo
+ - update README about XML to Nimn transformation and remove special notes about 3.x release
+ - update CONTRIBUTING.md mentioning nexttodo
+ - add negative case for XML PIs
+ - validate xml processing instruction tags https://github.com/NaturalIntelligence/fast-xml-parser/issues/62
+ - nimndata: handle array with object
+ - nimndata: node with nested node and text node
+ - nimndata: handle attributes and text node
+ - nimndata: add options, handle array
+ - add xml to nimn data converter
+ - x2j: direct access property with tagname
+ - update changelog
+ - fix validator when single quote presents in value enclosed with double quotes or vice versa
+ - Revert "remove unneded nimnjs dependency, move opencollective to devDependencies and replace it with more light opencollective-postinstall"
+   This reverts commit d47aa7181075d82db4fee97fd8ea32b056fe3f46.
+ - Merge pull request https://github.com/NaturalIntelligence/fast-xml-parser/issues/63 from HaroldPutman/suppress-undefined
+   Keep undefined nodes out of the XML output: This is useful when you are deleting nodes from the JSON and rewriting XML.
+
+3.2.4 / 2018-03-01
+ - fix #59: fix in validator when open quote presents in attribute value
+ - Create nexttodo.md
+ - exclude static from bitHound tests
+ - add package lock
+
+3.2.3 / 2018-02-28
+ - Merge pull request from Delagen/master: fix namespaces can contain the same characters as xml names
+
+3.2.2 / 2018-02-22
+ - fix: attribute xmlns should not be removed if ignoreNameSpace is false
+ - create CONTRIBUTING.md
+
+3.2.1 / 2018-02-17
+ - fix: empty attribute should be parsed
+
+3.2.0 / 2018-02-16
+ - Merge pull request: Dev to Master
+ - Update README and version
+ - j2x: add performance test
+ - j2x: Remove extra empty line before closing tag
+ - j2x: suppress empty nodes to self closing node if configured
+ - j2x: provide option to give indentation depth
+ - j2x: make optional formatting
+ - j2x: encodeHTMLchat
+ - j2x: handle cdata tag
+ - j2x: handle grouped attributes
+ - convert json to xml
+   - nested object
+   - array
+   - attributes
+   - text value
+ - small refactoring
+ - Merge pull request: Update cli.js to let user validate XML file or data
+ - Add option for rendering CDATA as separate property
+
+3.0.1 / 2018-02-09
+ - fix CRLF: replace it with single space in attributes value only.
+
+3.0.0 / 2018-02-08
+ - change online tool with new changes
+ - update info about new options
+ - separate tag value processing to separate function
+ - make HTML decoding optional
+ - give an option to allow boolean attributes
+ - change cli options as per v3
+ - Correct comparison table format on README
+ - update v3 information
+ - some performance improvement changes
+ - Make regex object local to the method and move some common methods to util
+ - Change parser to
+   - handle multiple instances of CDATA
+   - make trimming of value optional
+   - HTML decode attribute and text value
+   - refactor code to separate files
+ - Ignore newline chars without RE (in validator)
+ - validate for XML prolog
+ - Validate DOCTYPE without RE
+ - Update validator to return error response
+ - Update README to add detail about V3
+ - Separate xmlNode model class
+ - include vscode debug config
+ - fix for repeated object
+ - fix attribute regex for boolean attributes
+ - Fix validator for invalid attributes
+
+2.9.4 / 2018-02-02
+ - Merge pull request: Decode HTML characters
+ - refactor source folder name
+ - ignore bundle / browser js to be published to npm
+
+2.9.3 / 2018-01-26
+ - Merge pull request: Correctly remove CRLF line breaks
+ - Enable to parse attribute in online editor
+ - Fix testing demo app test
+ - Describe parsing options
+ - Add options for online demo
+
+2.9.2 / 2018-01-18
+ - Remove check if tag starting with "XML"
+ - Fix: when there are spaces before / after CDATA
+
+2.9.1 / 2018-01-16
+ - Fix: newline should be replaced with single space
+ - Fix: for single and multiline comments
+ - validate xml with CDATA
+ - Fix: the issue when there is no space between 2 attributes
+ - Fix https://github.com/NaturalIntelligence/fast-xml-parser/issues/33: when there is newline char in attr val, it doesn't parse
+ - Merge pull request: fix ignoreNamespace
+ - fix: don't wrap attributes if only namespace attrs
+ - fix: use portfinder to run tests, update deps
+ - fix: don't treat namespaces as attributes when ignoreNamespace enabled
+
+2.9.0 / 2018-01-10
+ - Rewrite the validator to handle large files. Ignore DOCTYPE validation.
+ - Fix: When attribute value has equal sign
+
+2.8.3 / 2017-12-15
+ - Fix: when a tag has value along with subtags
+
+2.8.2 / 2017-12-04
+ - Fix value parsing for IE
+
+2.8.1 / 2017-12-01
+ - fix: validator should return false instead of err when invalid XML
+
+2.8.0 / 2017-11-29
+ - Add CLI option to ignore value conversion
+ - Fix variable name when filename is given on CLI
+ - Update CLI help text
+ - Merge pull request: xml2js: Accept standard input
+ - Test Node 8
+ - Update dependencies
+ - Bundle readToEnd
+ - Add ability to read from standard input
+
+2.7.4 / 2017-09-22
+ - Merge pull request: Allow wrapping attributes with a subobject to be compatible with other parsers' output
+
+2.7.3 / 2017-08-02
+ - fix: handle CDATA with regex
+
+2.7.2 / 2017-07-30
+ - Change travis config for yarn caching
+ - fix validator: when tag property is same as array property
+ - Merge pull request: Failing test case in validator for valid SVG
+
+2.7.1 / 2017-07-26
+ - Fix: Handle val 0
+
+2.7.0 / 2017-07-25
+ - Fix test for arrayMode
+ - Merge pull request: Add arrayMode option to parse any nodes as arrays
+
+2.6.0 / 2017-07-14
+ - code improvement
+ - Add unit tests for value conversion for attr
+ - Merge pull request: option of an attribute value conversion to a number (textAttrConversion) the same way as the textNodeConversion option does. Default value is false.
+
+2.5.1 / 2017-07-01
+ - Fix XML element name pattern
+ - Fix XML element name pattern while parsing
+ - Fix validation for xml tag element
+
+2.5.0 / 2017-06-25
+ - Improve Validator performance
+ - update attr matching regex
+ - Add perf tests
+ - Improve attr regex to handle all cases
+
+2.4.4 / 2017-06-08
+ - Bug fix: when an attribute has single or double quote in value
+
+2.4.3 / 2017-06-05
+ - Bug fix: when multiple CDATA tags are given
+ - Merge pull request: add option "textNodeConversion"
+ - add option "textNodeConversion"
+
+2.4.1 / 2017-04-14
+ - fix tests
+ - Bug fix: preserve initial space of node value
+ - Handle CDATA
+
+2.3.1 / 2017-03-15
+ - Bug fix: when single self closing tag
+ - Merge pull request: fix .codeclimate.yml
+ - Update .codeclimate.yml - Fixed config so it does not error anymore.
+ - Update .codeclimate.yml
+
+2.3.0 / 2017-02-26
+ - Code improvement
+ - add bithound config
+ - Update usage
+ - Update travis to generate bundle js before running tests
+ - 1. Browserify, 2. add more tests for validator
+ - Add validator
+ - Fix CLI default parameter bug
+
+2.2.1 / 2017-02-05
+ - Bug fix: CLI default option
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/LICENSE b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/LICENSE
new file mode 100644
index 00000000..d7da622a
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2017 Amit Kumar Gupta
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/README.md b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/README.md
new file mode 100644
index 00000000..18918389
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/README.md
@@ -0,0 +1,236 @@
+# [fast-xml-parser](https://www.npmjs.com/package/fast-xml-parser)
+[![NPM quality][quality-image]][quality-url]
+[![Coverage Status](https://coveralls.io/repos/github/NaturalIntelligence/fast-xml-parser/badge.svg?branch=master)](https://coveralls.io/github/NaturalIntelligence/fast-xml-parser?branch=master)
+[Try me](https://naturalintelligence.github.io/fast-xml-parser/)
+[![NPM total downloads](https://img.shields.io/npm/dt/fast-xml-parser.svg)](https://npm.im/fast-xml-parser)
+
+Validate XML, Parse XML to JS Object, or Build XML from JS Object without C/C++ based libraries and no callback.
+
+---
+
+## Users
+
+[more](./USERs.md)
+
+The list of users is mostly published by GitHub or communicated directly. Feel free to contact us if you find any information wrong.
+
+---
+
+## Main Features
+
+* Validate XML data syntactically
+* Parse XML to JS Object
+* Build XML from JS Object
+* Compatible with Node.js packages, in browser, and in CLI (click the Try me link above for a demo)
+* Faster than any other pure JS implementation
+* It can handle big files (tested up to 100mb)
+* Controlled parsing using various options
+* XML entities, HTML entities, and DOCTYPE entities are supported
+* Unpaired tags (as in HTML) and stop nodes are supported (see the usage sketch below)
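A minimal round-trip sketch of the three features listed above (validate, parse, build), using the public API declared in src/fxp.d.ts later in this diff; the sample XML is illustrative:

```js
const { XMLParser, XMLBuilder, XMLValidator } = require("fast-xml-parser");

const xml = `<note heading="reminder"><body>Call home</body></note>`;
console.log(XMLValidator.validate(xml)); // true for well-formed XML

const parser = new XMLParser({ ignoreAttributes: false });
const obj = parser.parse(xml);
console.log(obj.note.body);         // "Call home"
console.log(obj.note["@_heading"]); // "reminder" (default attribute prefix)

const builder = new XMLBuilder({ ignoreAttributes: false });
console.log(builder.build(obj));    // XML string rebuilt from the JS object
```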
+
+### Bundle size
+
+| Bundle Name | Size |
+| ------------------ | ---- |
+| fxbuilder.min.js | 6.5K |
+| fxparser.min.js | 20K |
+| fxp.min.js | 26K |
+| fxvalidator.min.js | 5.7K |
+
+### Documents
+
+Documentation is organized by major version (v3, v4, v5):
+
+* v3: Getting Started, XML Parser, XML Builder, XML Validator, Entities, HTML Document Parsing, PI Tag processing
+* v4: Getting Started, Features, Options, Output Builders, Value Parsers
+
+**note**: version 5 is released along with version 4 for experimental use. Based on its demand it will be developed further, and the features may differ in the final release.
+
+## Performance
+(negative values indicate errors)
+
+### XML Parser
+* Y-axis: requests per second
+* X-axis: file size
+
+### XML Builder
+* Y-axis: requests per second
+
+## Usage Trend
+
+[Usage Trend of fast-xml-parser](https://npm-compare.com/fast-xml-parser#timeRange=THREE_YEARS)
+
+## Supporters
+### Contributors
+
+This project exists thanks to [all](graphs/contributors) the people who contribute. [[Contribute](docs/CONTRIBUTING.md)].
+
+### Backers
+
+Thank you to all our backers! 🙏 [[Become a backer](https://opencollective.com/fast-xml-parser#backer)]
+
+# License
+* MIT License
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/package.json b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/package.json
new file mode 100644
index 00000000..1fd52c17
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/package.json
@@ -0,0 +1,74 @@
+{
+  "name": "fast-xml-parser",
+  "version": "4.4.1",
+  "description": "Validate XML, Parse XML, Build XML without C/C++ based libraries",
+  "main": "./src/fxp.js",
+  "scripts": {
+    "test": "nyc --reporter=lcov --reporter=text jasmine spec/*spec.js",
+    "test-types": "tsc --noEmit spec/typings/typings-test.ts",
+    "unit": "jasmine",
+    "coverage": "nyc report --reporter html --reporter text -t .nyc_output --report-dir .nyc_output/summary",
+    "perf": "node ./benchmark/perfTest3.js",
+    "lint": "eslint src/*.js spec/*.js",
+    "bundle": "webpack --config webpack-prod.config.js",
+    "prettier": "prettier --write src/**/*.js",
+    "publish-please": "publish-please",
+    "checkReadiness": "publish-please --dry-run"
+  },
+  "bin": {
+    "fxparser": "./src/cli/cli.js"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/NaturalIntelligence/fast-xml-parser"
+  },
+  "keywords": [
+    "fast",
+    "xml",
+    "json",
+    "parser",
+    "xml2js",
+    "x2js",
+    "xml2json",
+    "js",
+    "cli",
+    "validator",
+    "validate",
+    "transformer",
+    "assert",
+    "js2xml",
+    "json2xml",
+    "html"
+  ],
+  "author": "Amit Gupta (https://solothought.com)",
+  "license": "MIT",
+  "devDependencies": {
+    "@babel/core": "^7.13.10",
+    "@babel/plugin-transform-runtime": "^7.13.10",
+    "@babel/preset-env": "^7.13.10",
+    "@babel/register": "^7.13.8",
+    "@types/node": "20",
+    "babel-loader": "^8.2.2",
+    "cytorus": "^0.2.9",
+    "eslint": "^8.3.0",
+    "he": "^1.2.0",
+    "jasmine": "^3.6.4",
+    "nyc": "^15.1.0",
+    "prettier": "^1.19.1",
+    "publish-please": "^5.5.2",
+    "typescript": "5",
+    "webpack": "^5.64.4",
+    "webpack-cli": "^4.9.1"
+  },
+  "typings": "src/fxp.d.ts",
+  "funding": [{
+    "type": "github",
+    "url": "https://github.com/sponsors/NaturalIntelligence"
+  },{
+    "type": "paypal",
+    "url": "https://paypal.me/naturalintelligence"
+  }],
+  "dependencies": {
+    "strnum": "^1.0.5"
+  }
+}
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/cli/cli.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/cli/cli.js
new file mode 100755
index 00000000..984534ca
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/cli/cli.js
@@ -0,0 +1,93 @@
+#!/usr/bin/env node
+'use strict';
+/*eslint-disable no-console*/
+const fs = require('fs');
+const path = require('path');
+const {XMLParser, XMLValidator} = require("../fxp");
+const readToEnd = require('./read').readToEnd;
+
+const version = require('./../../package.json').version;
+if (process.argv[2] === '--help' || process.argv[2] === '-h') {
+  console.log(require("./man"));
+} else if (process.argv[2] === '--version') {
+  console.log(version);
+} else {
+  const options = {
+    removeNSPrefix: true,
+    ignoreAttributes: false,
+    parseTagValue: true,
+    parseAttributeValue: true,
+  };
+  let fileName = '';
+  let outputFileName;
+  let validate = false;
+  let validateOnly = false;
+  for (let i = 2; i < process.argv.length; i++) {
+    if (process.argv[i] === '-ns') {
+      options.removeNSPrefix = false;
+    } else if (process.argv[i] === '-a') {
+      options.ignoreAttributes = true;
+    } else if (process.argv[i] === '-c') {
+      options.parseTagValue = false;
+      options.parseAttributeValue = false;
+    } else if (process.argv[i] === '-o') {
+      outputFileName = process.argv[++i];
+    } else if (process.argv[i] === '-v') {
+      validate = true;
+    } else if (process.argv[i] === '-V') {
+      validateOnly = true;
+    } else {
+      //filename
+      fileName = process.argv[i];
+    }
+  }
+
+  const callback = function(xmlData) {
+    let output = '';
+    if (validate) {
+      const parser = new XMLParser(options);
+      output = parser.parse(xmlData,validate);
+    } else if (validateOnly) {
+      output = XMLValidator.validate(xmlData);
+      process.exitCode = output === true ? 0 : 1;
+    } else {
+      const parser = new XMLParser(options);
+      output = JSON.stringify(parser.parse(xmlData,validate), null, 4);
+    }
+    if (outputFileName) {
+      writeToFile(outputFileName, output);
+    } else {
+      console.log(output);
+    }
+  };
+
+  try {
+    if (!fileName) {
+      readToEnd(process.stdin, function(err, data) {
+        if (err) {
+          throw err;
+        }
+        callback(data.toString());
+      });
+    } else {
+      fs.readFile(fileName, function(err, data) {
+        if (err) {
+          throw err;
+        }
+        callback(data.toString());
+      });
+    }
+  } catch (e) {
+    console.log('Seems an invalid file or stream. ' + e);
+  }
+}
+
+function writeToFile(fileName, data) {
+  fs.writeFile(fileName, data, function(err) {
+    if (err) {
+      throw err;
+    }
+    console.log('JSON output has been written to ' + fileName);
+  });
+}
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/cli/man.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/cli/man.js
new file mode 100644
index 00000000..89947cc7
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/cli/man.js
@@ -0,0 +1,12 @@
+module.exports = `Fast XML Parser 4.0.0
+----------------
+$ fxparser [-ns|-a|-c|-v|-V] [-o outputfile.json]
+$ cat xmlfile.xml | fxparser [-ns|-a|-c|-v|-V] [-o outputfile.json]
+
+Options
+----------------
+-ns: remove namespace from tag and attribute names.
+-a: don't parse attributes.
+-c: don't parse values to primitive types.
+-v: validate before parsing.
+-V: validate only.`
\ No newline at end of file
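A sketch of what the CLI's default path does for a file argument (the input file name is hypothetical; the options object mirrors the one assembled in cli.js above, and `true` corresponds to the `-v` flag):

```js
const fs = require("fs");
const { XMLParser } = require("fast-xml-parser");

// Same defaults the CLI builds before reading flags.
const parser = new XMLParser({
  removeNSPrefix: true,
  ignoreAttributes: false,
  parseTagValue: true,
  parseAttributeValue: true,
});

const xmlData = fs.readFileSync("input.xml", "utf8"); // hypothetical file
// Passing `true` validates before parsing, like running with -v.
console.log(JSON.stringify(parser.parse(xmlData, true), null, 4));
```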
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/cli/read.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/cli/read.js
new file mode 100644
index 00000000..642da527
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/cli/read.js
@@ -0,0 +1,92 @@
+'use strict';
+
+// Copyright 2013 Timothy J Fontaine
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the 'Software'), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE
+
+/*
+
+Read any stream all the way to the end and trigger a single cb
+
+const http = require('http');
+
+const rte = require('readtoend');
+
+http.get('http://nodejs.org', function(response) {
+  rte.readToEnd(response, function(err, body) {
+    console.log(body);
+  });
+});
+
+*/
+
+let stream = require('stream');
+const util = require('util');
+
+if (!stream.Transform) {
+  stream = require('readable-stream');
+}
+
+function ReadToEnd(opts) {
+  if (!(this instanceof ReadToEnd)) {
+    return new ReadToEnd(opts);
+  }
+
+  stream.Transform.call(this, opts);
+
+  this._rte_encoding = opts.encoding || 'utf8';
+
+  this._buff = '';
+}
+
+module.exports = ReadToEnd;
+util.inherits(ReadToEnd, stream.Transform);
+
+ReadToEnd.prototype._transform = function(chunk, encoding, done) {
+  this._buff += chunk.toString(this._rte_encoding);
+  this.push(chunk);
+  done();
+};
+
+ReadToEnd.prototype._flush = function(done) {
+  this.emit('complete', undefined, this._buff);
+  done();
+};
+
+ReadToEnd.readToEnd = function(stream, options, cb) {
+  if (!cb) {
+    cb = options;
+    options = {};
+  }
+
+  const dest = new ReadToEnd(options);
+
+  stream.pipe(dest);
+
+  stream.on('error', function(err) {
+    stream.unpipe(dest);
+    cb(err);
+  });
+
+  dest.on('complete', cb);
+
+  dest.resume();
+
+  return dest;
+};
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/fxp.d.ts b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/fxp.d.ts
new file mode 100644
index 00000000..bddcfefe
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/fxp.d.ts
@@ -0,0 +1,402 @@
+type X2jOptions = {
+  /**
+   * Preserve the order of tags in resulting JS object
+   *
+   * Defaults to `false`
+   */
+  preserveOrder?: boolean;
+
+  /**
+   * Give a prefix to the attribute name in the resulting JS object
+   *
+   * Defaults to '@_'
+   */
+  attributeNamePrefix?: string;
+
+  /**
+   * A name to group all attributes of a tag under, or `false` to disable
+   *
+   * Defaults to `false`
+   */
+  attributesGroupName?: false | string;
+
+  /**
+   * The name of the text node in the resulting JS object
+   *
+   * Defaults to `#text`
+   */
+  textNodeName?: string;
+
+  /**
+   * Whether to ignore attributes when parsing
+   *
+   * Defaults to `true`
+   */
+  ignoreAttributes?: boolean;
+
+  /**
+   * Whether to remove namespace string from tag and attribute names
+   *
+   * Defaults to `false`
+   */
+  removeNSPrefix?: boolean;
+
+  /**
+   * Whether to allow attributes without value
+   *
+   * Defaults to `false`
+   */
+  allowBooleanAttributes?: boolean;
+
+  /**
+   * Whether to parse tag value with `strnum` package
+   *
+   * Defaults to `true`
+   */
+  parseTagValue?: boolean;
+
+  /**
+   * Whether to parse attribute value with `strnum` package
+   *
+   * Defaults to `false`
+   */
+  parseAttributeValue?: boolean;
+
+  /**
+   * Whether to remove surrounding whitespace from tag or attribute value
+   *
+   * Defaults to `true`
+   */
+  trimValues?: boolean;
+
+  /**
+   * Give a property name to set CDATA values to instead of merging to tag's text value
+   *
+   * Defaults to `false`
+   */
+  cdataPropName?: false | string;
+
+  /**
+   * If set, parse comments and set as this property
+   *
+   * Defaults to `false`
+   */
+  commentPropName?: false | string;
+
+  /**
+   * Control how tag value should be parsed. Called only if tag value is not empty
+   *
+   * @returns {undefined|null} `undefined` or `null` to set original value.
+   * @returns {unknown}
+   *
+   * 1. Different value or value with different data type to set new value.
+   * 2. Same value to set parsed value if `parseTagValue: true`.
+   *
+   * Defaults to `(tagName, val, jPath, hasAttributes, isLeafNode) => val`
+   */
+  tagValueProcessor?: (tagName: string, tagValue: string, jPath: string, hasAttributes: boolean, isLeafNode: boolean) => unknown;
+
+  /**
+   * Control how attribute value should be parsed
+   *
+   * @param attrName
+   * @param attrValue
+   * @param jPath
+   * @returns {undefined|null} `undefined` or `null` to set original value
+   * @returns {unknown}
+   *
+   * Defaults to `(attrName, val, jPath) => val`
+   */
+  attributeValueProcessor?: (attrName: string, attrValue: string, jPath: string) => unknown;
+
+  /**
+   * Options to pass to `strnum` for parsing numbers
+   *
+   * Defaults to `{ hex: true, leadingZeros: true, eNotation: true }`
+   */
+  numberParseOptions?: strnumOptions;
+
+  /**
+   * Nodes to stop parsing at
+   *
+   * Defaults to `[]`
+   */
+  stopNodes?: string[];
+
+  /**
+   * List of tags without closing tags
+   *
+   * Defaults to `[]`
+   */
+  unpairedTags?: string[];
+
+  /**
+   * Whether to always create a text node
+   *
+   * Defaults to `false`
+   */
+  alwaysCreateTextNode?: boolean;
+
+  /**
+   * Determine whether a tag should be parsed as an array
+   *
+   * @param tagName
+   * @param jPath
+   * @param isLeafNode
+   * @param isAttribute
+   * @returns {boolean}
+   *
+   * Defaults to `() => false`
+   */
+  isArray?: (tagName: string, jPath: string, isLeafNode: boolean, isAttribute: boolean) => boolean;
+
+  /**
+   * Whether to process default and DOCTYPE entities
+   *
+   * Defaults to `true`
+   */
+  processEntities?: boolean;
+
+  /**
+   * Whether to process HTML entities
+   *
+   * Defaults to `false`
+   */
+  htmlEntities?: boolean;
+
+  /**
+   * Whether to ignore the declaration tag from output
+   *
+   * Defaults to `false`
+   */
+  ignoreDeclaration?: boolean;
+
+  /**
+   * Whether to ignore Pi tags
+   *
+   * Defaults to `false`
+   */
+  ignorePiTags?: boolean;
+
+  /**
+   * Transform tag names
+   *
+   * Defaults to `false`
+   */
+  transformTagName?: ((tagName: string) => string) | false;
+
+  /**
+   * Transform attribute names
+   *
+   * Defaults to `false`
+   */
+  transformAttributeName?: ((attributeName: string) => string) | false;
+
+  /**
+   * Change the tag name when a different name is returned. Skip the tag from parsed result when false is returned.
+   * Modify `attrs` object to control attributes for the given tag.
+   *
+   * @returns {string} new tag name.
+   * @returns false to skip the tag
+   *
+   * Defaults to `(tagName, jPath, attrs) => tagName`
+   */
+  updateTag?: (tagName: string, jPath: string, attrs: {[k: string]: string}) => string | boolean;
+};
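To make the `X2jOptions` surface above concrete, a small sketch combining a few of the documented options (values are illustrative):

```js
const { XMLParser } = require("fast-xml-parser");

const parser = new XMLParser({
  ignoreAttributes: false,
  attributeNamePrefix: "$",                 // attributes appear as "$name"
  isArray: (tagName) => tagName === "item", // always parse <item> as an array
  stopNodes: ["*.script"],                  // keep <script> content unparsed
});

const out = parser.parse("<list><item>1</item><item>2</item></list>");
console.log(Array.isArray(out.list.item)); // true -> [1, 2]
```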
+
+type strnumOptions = {
+  hex: boolean;
+  leadingZeros: boolean,
+  skipLike?: RegExp,
+  eNotation?: boolean
+}
+
+type validationOptions = {
+  /**
+   * Whether to allow attributes without value
+   *
+   * Defaults to `false`
+   */
+  allowBooleanAttributes?: boolean;
+
+  /**
+   * List of tags without closing tags
+   *
+   * Defaults to `[]`
+   */
+  unpairedTags?: string[];
+};
+
+type XmlBuilderOptions = {
+  /**
+   * Give a prefix to the attribute name in the resulting JS object
+   *
+   * Defaults to '@_'
+   */
+  attributeNamePrefix?: string;
+
+  /**
+   * A name to group all attributes of a tag under, or `false` to disable
+   *
+   * Defaults to `false`
+   */
+  attributesGroupName?: false | string;
+
+  /**
+   * The name of the text node in the resulting JS object
+   *
+   * Defaults to `#text`
+   */
+  textNodeName?: string;
+
+  /**
+   * Whether to ignore attributes when parsing
+   *
+   * Defaults to `true`
+   */
+  ignoreAttributes?: boolean;
+
+  /**
+   * Give a property name to set CDATA values to instead of merging to tag's text value
+   *
+   * Defaults to `false`
+   */
+  cdataPropName?: false | string;
+
+  /**
+   * If set, parse comments and set as this property
+   *
+   * Defaults to `false`
+   */
+  commentPropName?: false | string;
+
+  /**
+   * Whether to make output pretty instead of single line
+   *
+   * Defaults to `false`
+   */
+  format?: boolean;
+
+  /**
+   * If `format` is set to `true`, sets the indent string
+   *
+   * Defaults to ` `
+   */
+  indentBy?: string;
+
+  /**
+   * Give a name to a top-level array
+   *
+   * Defaults to `undefined`
+   */
+  arrayNodeName?: string;
+
+  /**
+   * Create empty tags for tags with no text value
+   *
+   * Defaults to `false`
+   */
+  suppressEmptyNode?: boolean;
+
+  /**
+   * Suppress an unpaired tag
+   *
+   * Defaults to `true`
+   */
+  suppressUnpairedNode?: boolean;
+
+  /**
+   * Don't put a value for boolean attributes
+   *
+   * Defaults to `true`
+   */
+  suppressBooleanAttributes?: boolean;
+
+  /**
+   * Preserve the order of tags in resulting JS object
+   *
+   * Defaults to `false`
+   */
+  preserveOrder?: boolean;
+
+  /**
+   * List of tags without closing tags
+   *
+   * Defaults to `[]`
+   */
+  unpairedTags?: string[];
+
+  /**
+   * Nodes to stop parsing at
+   *
+   * Defaults to `[]`
+   */
+  stopNodes?: string[];
+
+  /**
+   * Control how tag value should be written. Called only if tag value is not empty
+   *
+   * @returns {undefined|null} `undefined` or `null` to set original value.
+   * @returns {unknown}
+   *
+   * Defaults to `(name, value) => value`
+   */
+  tagValueProcessor?: (name: string, value: unknown) => unknown;
+
+  /**
+   * Control how attribute value should be written
+   *
+   * @returns {undefined|null} `undefined` or `null` to set original value
+   * @returns {unknown}
+   *
+   * Defaults to `(name, value) => value`
+   */
+  attributeValueProcessor?: (name: string, value: unknown) => unknown;
+
+  /**
+   * Whether to process default and DOCTYPE entities
+   *
+   * Defaults to `true`
+   */
+  processEntities?: boolean;
+
+  oneListGroup?: boolean;
+};
+
+type ESchema = string | object | Array<string | object>;
+
+type ValidationError = {
+  err: {
+    code: string;
+    msg: string,
+    line: number,
+    col: number
+  };
+};
+
+export class XMLParser {
+  constructor(options?: X2jOptions);
+  parse(xmlData: string | Buffer, validationOptions?: validationOptions | boolean): any;
+  /**
+   * Add Entity which is not by default supported by this library
+   * @param entityIdentifier {string} Eg: 'ent' for &ent;
+   * @param entityValue {string} Eg: '\r'
+   */
+  addEntity(entityIdentifier: string, entityValue: string): void;
+}
+
+export class XMLValidator {
+  static validate(xmlData: string, options?: validationOptions): true | ValidationError;
+}
+export class XMLBuilder {
+  constructor(options?: XmlBuilderOptions);
+  build(jObj: any): any;
+}
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/fxp.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/fxp.js
new file mode 100644
index 00000000..9cfa0ac0
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/fxp.js
@@ -0,0 +1,11 @@
+'use strict';
+
+const validator = require('./validator');
+const XMLParser = require('./xmlparser/XMLParser');
+const XMLBuilder = require('./xmlbuilder/json2xml');
+
+module.exports = {
+  XMLParser: XMLParser,
+  XMLValidator: validator,
+  XMLBuilder: XMLBuilder
+}
\ No newline at end of file
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/util.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/util.js
new file mode 100644
index 00000000..df0a60d5
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/util.js
@@ -0,0 +1,72 @@
+'use strict';
+
+const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD';
+const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040';
+const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*'
+const regexName = new RegExp('^' + nameRegexp + '$');
+
+const getAllMatches = function(string, regex) {
+  const matches = [];
+  let match = regex.exec(string);
+  while (match) {
+    const allmatches = [];
+    allmatches.startIndex = regex.lastIndex - match[0].length;
+    const len = match.length;
+    for (let index = 0; index < len; index++) {
+      allmatches.push(match[index]);
+    }
+    matches.push(allmatches);
+    match = regex.exec(string);
+  }
+  return matches;
+};
+
+const isName = function(string) {
+  const match = regexName.exec(string);
+  return !(match === null || typeof match === 'undefined');
+};
+
+exports.isExist = function(v) {
+  return typeof v !== 'undefined';
+};
+
+exports.isEmptyObject = function(obj) {
+  return Object.keys(obj).length === 0;
+};
+
+/**
+ * Copy all the properties of `a` into `target`.
+ * @param {*} target
+ * @param {*} a
+ */
+exports.merge = function(target, a, arrayMode) {
+  if (a) {
+    const keys = Object.keys(a); // will return an array of own properties
+    const len = keys.length; //don't make it inline
+    for (let i = 0; i < len; i++) {
+      if (arrayMode === 'strict') {
+        target[keys[i]] = [ a[keys[i]] ];
+      } else {
+        target[keys[i]] = a[keys[i]];
+      }
+    }
+  }
+};
+/* exports.merge =function (b,a){
+  return Object.assign(b,a);
+} */
+
+exports.getValue = function(v) {
+  if (exports.isExist(v)) {
+    return v;
+  } else {
+    return '';
+  }
+};
+
+// const fakeCall = function(a) {return a;};
+// const fakeCallNoReturn = function() {};
+
+exports.isName = isName;
+exports.getAllMatches = getAllMatches;
+exports.nameRegexp = nameRegexp;
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/CharsSymbol.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/CharsSymbol.js
new file mode 100644
index 00000000..fa5ce9e8
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/CharsSymbol.js
@@ -0,0 +1,16 @@
+module.exports = {
+  "<"    : "<",    //tag start
+  ">"    : ">",    //tag end
+  "/"    : "/",    //close tag
+  "!"    : "!",    //comment or doctype
+  "!--"  : "!--",  //comment
+  "-->"  : "-->",  //comment end
+  "?"    : "?",    //pi
+  "?>"   : "?>",   //pi end
+  "?xml" : "?xml", //xml declaration pi
+  "!["   : "![",   //cdata
+  "]]>"  : "]]>",  //cdata end
+  "["    : "[",
+  "-"    : "-",
+  "D"    : "D",
+}
\ No newline at end of file
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/EntitiesParser.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/EntitiesParser.js
new file mode 100644
index 00000000..62cc02ff
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/EntitiesParser.js
@@ -0,0 +1,107 @@
+const ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"};
+const htmlEntities = {
+  "space": { regex: /&(nbsp|#160);/g, val: " " },
+  // "lt" : { regex: /&(lt|#60);/g, val: "<" },
+  // "gt" : { regex: /&(gt|#62);/g, val: ">" },
+  // "amp" : { regex: /&(amp|#38);/g, val: "&" },
+  // "quot" : { regex: /&(quot|#34);/g, val: "\"" },
+  // "apos" : { regex: /&(apos|#39);/g, val: "'" },
+  "cent" : { regex: /&(cent|#162);/g, val: "¢" },
+  "pound" : { regex: /&(pound|#163);/g, val: "£" },
+  "yen" : { regex: /&(yen|#165);/g, val: "¥" },
+  "euro" : { regex: /&(euro|#8364);/g, val: "€" },
+  "copyright" : { regex: /&(copy|#169);/g, val: "©" },
+  "reg" : { regex: /&(reg|#174);/g, val: "®" },
+  "inr" : { regex: /&(inr|#8377);/g, val: "₹" },
+  "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) },
+  "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) },
+};
+
+class EntitiesParser{
+  constructor(replaceHtmlEntities) {
+    this.replaceHtmlEntities = replaceHtmlEntities;
+    this.docTypeEntities = {};
+    this.lastEntities = {
+      "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"},
+      "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"},
+      "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"},
+      "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""},
+    };
+  }
+
+  addExternalEntities(externalEntities){
+    const entKeys = Object.keys(externalEntities);
+    for (let i = 0; i < entKeys.length; i++) {
+      const ent = entKeys[i];
+      this.addExternalEntity(ent,externalEntities[ent])
+    }
+  }
+  addExternalEntity(key,val){
+    validateEntityName(key);
+    if(val.indexOf("&") !== -1) {
+      console.warn(`Entity ${key} is not added as '&' is found in value`);
+      return;
+    }else{
+      this.lastEntities[key] = {
+        regex: new RegExp("&"+key+";","g"),
+        val : val
+      }
+    }
+  }
+
+  addDocTypeEntities(entities){
+    const entKeys = Object.keys(entities);
+    for (let i = 0; i < entKeys.length; i++) {
+      const ent = entKeys[i];
+      this.docTypeEntities[ent] = {
+        regex: new RegExp("&"+ent+";","g"),
+        val : entities[ent]
+      }
+    }
+  }
+
+  parse(val){
+    return this.replaceEntitiesValue(val)
+  }
+
+  /**
+   * 1. Replace DOCTYPE entities
+   * 2. Replace external entities
+   * 3. Replace HTML entities if asked
+   * @param {string} val
+   */
+  replaceEntitiesValue(val){
+    if(typeof val === "string" && val.length > 0){
+      for(let entityName in this.docTypeEntities){
+        const entity = this.docTypeEntities[entityName];
+        val = val.replace( entity.regex, entity.val);
+      }
+      for(let entityName in this.lastEntities){
+        const entity = this.lastEntities[entityName];
+        val = val.replace( entity.regex, entity.val);
+      }
+      if(this.replaceHtmlEntities){
+        for(let entityName in htmlEntities){
+          const entity = htmlEntities[entityName];
+          val = val.replace( entity.regex, entity.val);
+        }
+      }
+      val = val.replace( ampEntity.regex, ampEntity.val);
+    }
+    return val;
+  }
+};
+
+//an entity name should not contain special characters that may be used in regex
+//Eg !?\\\/[]$%{}^&*()<>
+const specialChar = "!?\\\/[]$%{}^&*()<>|+";
+
+function validateEntityName(name){
+  for (let i = 0; i < specialChar.length; i++) {
+    const ch = specialChar[i];
+    if(name.indexOf(ch) !== -1) throw new Error(`Invalid character ${ch} in entity name`);
+  }
+  return name;
+}
+
+module.exports = EntitiesParser;
\ No newline at end of file
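A usage sketch of the replacement order implemented by `replaceEntitiesValue` above (DOCTYPE entities first, then registered entities, then HTML entities if enabled, and `&amp;` last, so earlier substitutions cannot re-trigger on '&'). Requiring the file directly from its source path is an assumption for illustration:

```js
const EntitiesParser = require("fast-xml-parser/src/v5/EntitiesParser");

const ep = new EntitiesParser(true); // true -> also replace HTML entities
ep.addDocTypeEntities({ writer: "Amit" });

console.log(ep.parse("&writer; &copy; &#65; &amp;")); // "Amit © A &"
```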
return; + }else{ + this.lastEntities[ent] = { + regex: new RegExp("&"+key+";","g"), + val : val + } + } + } + + addDocTypeEntities(entities){ + const entKeys = Object.keys(entities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.docTypeEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : entities[ent] + } + } + } + + parse(val){ + return this.replaceEntitiesValue(val) + } + + /** + * 1. Replace DOCTYPE entities + * 2. Replace external entities + * 3. Replace HTML entities if asked + * @param {string} val + */ + replaceEntitiesValue(val){ + if(typeof val === "string" && val.length > 0){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.replaceHtmlEntities){ + for(let entityName in htmlEntities){ + const entity = htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( ampEntity.regex, ampEntity.val); + } + return val; + } +}; + +//an entity name should not contains special characters that may be used in regex +//Eg !?\\\/[]$%{}^&*()<> +const specialChar = "!?\\\/[]$%{}^&*()<>|+"; + +function validateEntityName(name){ + for (let i = 0; i < specialChar.length; i++) { + const ch = specialChar[i]; + if(name.indexOf(ch) !== -1) throw new Error(`Invalid character ${ch} in entity name`); + } + return name; +} + +module.exports = EntitiesParser; \ No newline at end of file diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/OptionsBuilder.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/OptionsBuilder.js new file mode 100755 index 00000000..be1f1d4f --- /dev/null +++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/OptionsBuilder.js @@ -0,0 +1,64 @@ + +const JsObjOutputBuilder = require("./OutputBuilders/JsObjBuilder"); + +const defaultOptions = { + preserveOrder: false, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + //ignoreRootElement : false, + stopNodes: [], //nested tags will not be parsed even for errors + // isArray: () => false, //User will set it + htmlEntities: false, + // skipEmptyListItem: false + tags:{ + unpaired: [], + nameFor:{ + cdata: false, + comment: false, + text: '#text' + }, + separateTextProperty: false, + }, + attributes:{ + ignore: false, + booleanType: true, + entities: true, + }, + + // select: ["img[src]"], + // stop: ["anim", "[ads]"] + only: [], // rest tags will be skipped. It will result in flat array + hierarchy: false, //will be used when a particular tag is set to be parsed. + skip: [], // will be skipped from parse result. on('skip') will be triggered + + select: [], // on('select', tag => tag ) will be called if match + stop: [], //given tagPath will not be parsed. innerXML will be set as string value + OutputBuilder: new JsObjOutputBuilder(), +}; + +const buildOptions = function(options) { + const finalOptions = { ... 
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/OutputBuilders/BaseOutputBuilder.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/OutputBuilders/BaseOutputBuilder.js
new file mode 100644
index 00000000..be2d478e
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/OutputBuilders/BaseOutputBuilder.js
@@ -0,0 +1,71 @@
+class BaseOutputBuilder{
+  constructor(){
+    // this.attributes = {};
+  }
+
+  addAttribute(name, value){
+    if(this.options.onAttribute){
+      //TODO: better to pass tag path
+      const v = this.options.onAttribute(name, value, this.tagName);
+      if(v) this.attributes[v.name] = v.value;
+    }else{
+      name = this.options.attributes.prefix + name + this.options.attributes.suffix;
+      this.attributes[name] = this.parseValue(value, this.options.attributes.valueParsers);
+    }
+  }
+
+  /**
+   * parse value by chain of parsers
+   * @param {string} val
+   * @returns {any} parsed value if matching parser found
+   */
+  parseValue = function(val, valParsers){
+    for (let i = 0; i < valParsers.length; i++) {
+      let valParser = valParsers[i];
+      if(typeof valParser === "string"){
+        valParser = this.registeredParsers[valParser];
+      }
+      if(valParser){
+        val = valParser.parse(val);
+      }
+    }
+    return val;
+  }
+
+  /**
+   * To add a nested empty tag.
+ * @param {string} key + * @param {any} val + */ + _addChild(key, val){} + + /** + * skip the comment if property is not set + */ + addComment(text){ + if(this.options.nameFor.comment) + this._addChild(this.options.nameFor.comment, text); + } + + //store CDATA separately if property is set + //otherwise add to tag's value + addCdata(text){ + if (this.options.nameFor.cdata) { + this._addChild(this.options.nameFor.cdata, text); + } else { + this.addRawValue(text || ""); + } + } + + addRawValue = text => this.addValue(text); + + addDeclaration(){ + if(!this.options.declaration){ + }else{ + this.addPi("?xml"); + } + this.attributes = {} + } +} + +module.exports = BaseOutputBuilder; \ No newline at end of file diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsArrBuilder.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsArrBuilder.js new file mode 100644 index 00000000..c63f6274 --- /dev/null +++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsArrBuilder.js @@ -0,0 +1,103 @@ +const {buildOptions,registerCommonValueParsers} = require("./ParserOptionsBuilder"); + +class OutputBuilder{ + constructor(options){ + this.options = buildOptions(options); + this.registeredParsers = registerCommonValueParsers(this.options); + } + + registerValueParser(name,parserInstance){//existing name will override the parser without warning + this.registeredParsers[name] = parserInstance; + } + + getInstance(parserOptions){ + return new JsArrBuilder(parserOptions, this.options, this.registeredParsers); + } +} + +const rootName = '!js_arr'; +const BaseOutputBuilder = require("./BaseOutputBuilder"); + +class JsArrBuilder extends BaseOutputBuilder{ + + constructor(parserOptions, options,registeredParsers) { + super(); + this.tagsStack = []; + this.parserOptions = parserOptions; + this.options = options; + this.registeredParsers = registeredParsers; + + this.root = new Node(rootName); + this.currentNode = this.root; + this.attributes = {}; + } + + addTag(tag){ + //when a new tag is added, it should be added as child of current node + //TODO: shift this check to the parser + if(tag.name === "__proto__") tag.name = "#__proto__"; + + this.tagsStack.push(this.currentNode); + this.currentNode = new Node(tag.name, this.attributes); + this.attributes = {}; + } + + /** + * Check if the node should be added by checking user's preference + * @param {Node} node + * @returns boolean: true if the node should not be added + */ + closeTag(){ + const node = this.currentNode; + this.currentNode = this.tagsStack.pop(); //set parent node in scope + if(this.options.onClose !== undefined){ + //TODO TagPathMatcher + const resultTag = this.options.onClose(node, + new TagPathMatcher(this.tagsStack,node)); + + if(resultTag) return; + } + this.currentNode.child.push(node); //to parent node + } + + //Called by parent class methods + _addChild(key, val){ + // if(key === "__proto__") tagName = "#__proto__"; + this.currentNode.child.push( {[key]: val }); + // this.currentNode.leafType = false; + } + + /** + * Add text value child node + * @param {string} text + */ + addValue(text){ + this.currentNode.child.push( {[this.options.nameFor.text]: this.parseValue(text, this.options.tags.valueParsers) }); + } + + addPi(name){ + //TODO: set pi flag + if(!this.options.ignorePiTags){ + const node = new Node(name, this.attributes); + this.currentNode[":@"] = this.attributes; + this.currentNode.child.push(node); + } + this.attributes = {}; + } + 
getOutput(){ + return this.root.child[0]; + } +} + + + +class Node{ + constructor(tagname, attributes){ + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments + if(attributes && Object.keys(attributes).length > 0) + this[":@"] = attributes; + } +} + +module.exports = OutputBuilder; \ No newline at end of file diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsMinArrBuilder.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsMinArrBuilder.js new file mode 100644 index 00000000..e0dc1e94 --- /dev/null +++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsMinArrBuilder.js @@ -0,0 +1,102 @@ +const {buildOptions,registerCommonValueParsers} = require("./ParserOptionsBuilder"); + +class OutputBuilder{ + constructor(options){ + this.options = buildOptions(options); + this.registeredParsers = registerCommonValueParsers(this.options); + } + + registerValueParser(name,parserInstance){//existing name will override the parser without warning + this.registeredParsers[name] = parserInstance; + } + + getInstance(parserOptions){ + return new JsMinArrBuilder(parserOptions, this.options, this.registeredParsers); + } +} + +const BaseOutputBuilder = require("./BaseOutputBuilder"); +const rootName = '^'; + +class JsMinArrBuilder extends BaseOutputBuilder{ + + constructor(parserOptions, options,registeredParsers) { + super(); + this.tagsStack = []; + this.parserOptions = parserOptions; + this.options = options; + this.registeredParsers = registeredParsers; + + this.root = {[rootName]: []}; + this.currentNode = this.root; + this.currentNodeTagName = rootName; + this.attributes = {}; + } + + addTag(tag){ + //when a new tag is added, it should be added as child of current node + //TODO: shift this check to the parser + if(tag.name === "__proto__") tag.name = "#__proto__"; + + this.tagsStack.push([this.currentNodeTagName,this.currentNode]); //this.currentNode is parent node here + this.currentNodeTagName = tag.name; + this.currentNode = { [tag.name]:[]} + if(Object.keys(this.attributes).length > 0){ + this.currentNode[":@"] = this.attributes; + this.attributes = {}; + } + } + + /** + * Check if the node should be added by checking user's preference + * @param {Node} node + * @returns boolean: true if the node should not be added + */ + closeTag(){ + const node = this.currentNode; + const nodeName = this.currentNodeTagName; + const arr = this.tagsStack.pop(); //set parent node in scope + this.currentNodeTagName = arr[0]; + this.currentNode = arr[1]; + + if(this.options.onClose !== undefined){ + //TODO TagPathMatcher + const resultTag = this.options.onClose(node, + new TagPathMatcher(this.tagsStack,node)); + + if(resultTag) return; + } + this.currentNode[this.currentNodeTagName].push(node); //to parent node + } + + //Called by parent class methods + _addChild(key, val){ + // if(key === "__proto__") tagName = "#__proto__"; + this.currentNode.push( {[key]: val }); + // this.currentNode.leafType = false; + } + + /** + * Add text value child node + * @param {string} text + */ + addValue(text){ + this.currentNode[this.currentNodeTagName].push( {[this.options.nameFor.text]: this.parseValue(text, this.options.tags.valueParsers) }); + } + + addPi(name){ + if(!this.options.ignorePiTags){ + const node = { [name]:[]} + if(this.attributes){ + node[":@"] = this.attributes; + } + this.currentNode.push(node); + } + this.attributes = {}; + } + getOutput(){ + return this.root[rootName]; + } +} + 
+module.exports = OutputBuilder; \ No newline at end of file diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsObjBuilder.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsObjBuilder.js new file mode 100644 index 00000000..37036c52 --- /dev/null +++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsObjBuilder.js @@ -0,0 +1,156 @@ + + +const {buildOptions,registerCommonValueParsers} = require("./ParserOptionsBuilder"); + +class OutputBuilder{ + constructor(builderOptions){ + this.options = buildOptions(builderOptions); + this.registeredParsers = registerCommonValueParsers(this.options); + } + + registerValueParser(name,parserInstance){//existing name will override the parser without warning + this.registeredParsers[name] = parserInstance; + } + + getInstance(parserOptions){ + return new JsObjBuilder(parserOptions, this.options, this.registeredParsers); + } +} + +const BaseOutputBuilder = require("./BaseOutputBuilder"); +const rootName = '^'; + +class JsObjBuilder extends BaseOutputBuilder{ + + constructor(parserOptions, builderOptions,registeredParsers) { + super(); + //hold the raw detail of a tag and sequence with reference to the output + this.tagsStack = []; + this.parserOptions = parserOptions; + this.options = builderOptions; + this.registeredParsers = registeredParsers; + + this.root = {}; + this.parent = this.root; + this.tagName = rootName; + this.value = {}; + this.textValue = ""; + this.attributes = {}; + } + + addTag(tag){ + + let value = ""; + if( !isEmpty(this.attributes)){ + value = {}; + if(this.options.attributes.groupBy){ + value[this.options.attributes.groupBy] = this.attributes; + }else{ + value = this.attributes; + } + } + + this.tagsStack.push([this.tagName, this.textValue, this.value]); //parent tag, parent text value, parent tag value (jsobj) + this.tagName = tag.name; + this.value = value; + this.textValue = ""; + this.attributes = {}; + } + + /** + * Check if the node should be added by checking user's preference + * @param {Node} node + * @returns boolean: true if the node should not be added + */ + closeTag(){ + const tagName = this.tagName; + let value = this.value; + let textValue = this.textValue; + + //update tag text value + if(typeof value !== "object" && !Array.isArray(value)){ + value = this.parseValue(textValue.trim(), this.options.tags.valueParsers); + }else if(textValue.length > 0){ + value[this.options.nameFor.text] = this.parseValue(textValue.trim(), this.options.tags.valueParsers); + } + + + let resultTag= { + tagName: tagName, + value: value + }; + + if(this.options.onTagClose !== undefined){ + //TODO TagPathMatcher + resultTag = this.options.onClose(tagName, value, this.textValue, new TagPathMatcher(this.tagsStack,node)); + + if(!resultTag) return; + } + + //set parent node in scope + let arr = this.tagsStack.pop(); + let parentTag = arr[2]; + parentTag=this._addChildTo(resultTag.tagName, resultTag.value, parentTag); + + this.tagName = arr[0]; + this.textValue = arr[1]; + this.value = parentTag; + } + + _addChild(key, val){ + if(typeof this.value === "string"){ + this.value = { [this.options.nameFor.text] : this.value }; + } + + this._addChildTo(key, val, this.value); + // this.currentNode.leafType = false; + this.attributes = {}; + } + + _addChildTo(key, val, node){ + if(typeof node === 'string') node = {}; + if(!node[key]){ + node[key] = val; + }else{ //Repeated + if(!Array.isArray(node[key])){ //but not stored as array + node[key] = 
[node[key]]; + } + node[key].push(val); + } + return node; + } + + + /** + * Add text value child node + * @param {string} text + */ + addValue(text){ + //TODO: use bytes join + if(this.textValue.length > 0) this.textValue += " " + text; + else this.textValue = text; + } + + addPi(name){ + let value = ""; + if( !isEmpty(this.attributes)){ + value = {}; + if(this.options.attributes.groupBy){ + value[this.options.attributes.groupBy] = this.attributes; + }else{ + value = this.attributes; + } + } + this._addChild(name, value); + + } + getOutput(){ + return this.value; + } +} + +function isEmpty(obj) { + return Object.keys(obj).length === 0; +} + +module.exports = OutputBuilder; \ No newline at end of file diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/OutputBuilders/ParserOptionsBuilder.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/OutputBuilders/ParserOptionsBuilder.js new file mode 100644 index 00000000..c71ea94a --- /dev/null +++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/OutputBuilders/ParserOptionsBuilder.js @@ -0,0 +1,99 @@ +const trimParser = require("../valueParsers/trim") +const booleanParser = require("../valueParsers/booleanParser") +const currencyParser = require("../valueParsers/currency") +const numberParser = require("../valueParsers/number") + +const defaultOptions={ + nameFor:{ + text: "#text", + comment: "", + cdata: "", + }, + // onTagClose: () => {}, + // onAttribute: () => {}, + piTag: false, + declaration: false, //"?xml" + tags: { + valueParsers: [ + // "trim", + // "boolean", + // "number", + // "currency", + // "date", + ] + }, + attributes:{ + prefix: "@_", + suffix: "", + groupBy: "", + + valueParsers: [ + // "trim", + // "boolean", + // "number", + // "currency", + // "date", + ] + }, + dataType:{ + + } +} + +//TODO +const withJoin = ["trim","join", /*"entities",*/"number","boolean","currency"/*, "date"*/] +const withoutJoin = ["trim", /*"entities",*/"number","boolean","currency"/*, "date"*/] + +function buildOptions(options){ + //clone + const finalOptions = { ... 
defaultOptions}; + + //add config missed in cloning + finalOptions.tags.valueParsers.push(...withJoin) + if(!this.preserveOrder) + finalOptions.tags.valueParsers.push(...withoutJoin); + + //add config missed in cloning + finalOptions.attributes.valueParsers.push(...withJoin) + + //override configuration + copyProperties(finalOptions,options); + return finalOptions; +} + +function copyProperties(target, source) { + for (let key in source) { + if (source.hasOwnProperty(key)) { + if (typeof source[key] === 'object' && !Array.isArray(source[key])) { + // Recursively copy nested properties + if (typeof target[key] === 'undefined') { + target[key] = {}; + } + copyProperties(target[key], source[key]); + } else { + // Copy non-nested properties + target[key] = source[key]; + } + } + } +} + +function registerCommonValueParsers(options){ + return { + "trim": new trimParser(), + // "join": this.entityParser.parse, + "boolean": new booleanParser(), + "number": new numberParser({ + hex: true, + leadingZeros: true, + eNotation: true + }), + "currency": new currencyParser(), + // "date": this.entityParser.parse, + } +} + +module.exports = { + buildOptions : buildOptions, + registerCommonValueParsers: registerCommonValueParsers +} \ No newline at end of file diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/Report.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/Report.js new file mode 100644 index 00000000..e69de29b diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/TagPath.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/TagPath.js new file mode 100644 index 00000000..d901cc3c --- /dev/null +++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/TagPath.js @@ -0,0 +1,81 @@ +class TagPath{ + constructor(pathStr){ + let text = ""; + let tName = ""; + let pos; + let aName = ""; + let aVal = ""; + this.stack = [] + + for (let i = 0; i < pathStr.length; i++) { + let ch = pathStr[i]; + if(ch === " ") { + if(text.length === 0) continue; + tName = text; text = ""; + }else if(ch === "["){ + if(tName.length === 0){ + tName = text; text = ""; + } + i++; + for (; i < pathStr.length; i++) { + ch = pathStr[i]; + if(ch=== "=") continue; + else if(ch=== "]") {aName = text.trim(); text=""; break; i--;} + else if(ch === "'" || ch === '"'){ + let attrEnd = pathStr.indexOf(ch,i+1); + aVal = pathStr.substring(i+1, attrEnd); + i = attrEnd; + }else{ + text +=ch; + } + } + }else if(ch !== " " && text.length === 0 && tName.length > 0){//reading tagName + //save previous tag + this.stack.push(new TagPathNode(tName,pos,aName,aVal)); + text = ch; tName = ""; aName = ""; aVal = ""; + }else{ + text+=ch; + } + } + + //last tag in the path + if(tName.length >0 || text.length>0){ + this.stack.push(new TagPathNode(text||tName,pos,aName,aVal)); + } + } + + match(tagStack,node){ + if(this.stack[0].name !== "*"){ + if(this.stack.length !== tagStack.length +1) return false; + + //loop through tagPath and tagStack and match + for (let i = 0; i < this.tagStack.length; i++) { + if(!this.stack[i].match(tagStack[i])) return false; + } + } + if(!this.stack[this.stack.length - 1].match(node)) return false; + return true; + } +} + +class TagPathNode{ + constructor(name,position,attrName,attrVal){ + this.name = name; + this.position = position; + this.attrName = attrName, + this.attrVal = attrVal; + } + + match(node){ + let matching = true; + matching = node.name === this.name; + if(this.position) matching = node.position === this.position; + 
+    if(this.attrName) matching = node.attrs[this.attrName] !== undefined;
+    if(this.attrVal) matching = node.attrs[this.attrName] === this.attrVal;
+    return matching;
+  }
+}
+
+module.exports = TagPath;
+
+// console.log((new TagPath("* b[b]")).stack);
+// console.log((new TagPath("a[a] b[b] c")).stack);
+// console.log((new TagPath(" b [ b= 'cf sdadwa' ] a ")).stack);
\ No newline at end of file
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/TagPathMatcher.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/TagPathMatcher.js
new file mode 100644
index 00000000..af236070
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/TagPathMatcher.js
@@ -0,0 +1,15 @@
+const TagPath = require("./TagPath");
+
+class TagPathMatcher{
+  constructor(stack, node){
+    this.stack = stack;
+    this.node = node;
+  }
+
+  match(path){
+    const tagPath = new TagPath(path);
+    return tagPath.match(this.stack, this.node);
+  }
+}
+
+module.exports = TagPathMatcher;
\ No newline at end of file
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/XMLParser.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/XMLParser.js
new file mode 100755
index 00000000..6de58ed1
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/XMLParser.js
@@ -0,0 +1,85 @@
+const { buildOptions} = require("./OptionsBuilder");
+const Xml2JsParser = require("./Xml2JsParser");
+
+class XMLParser{
+
+  constructor(options){
+    this.externalEntities = {};
+    this.options = buildOptions(options);
+    // console.log(this.options)
+  }
+
+  /**
+   * Parse XML data string to JS object
+   * @param {string|Buffer} xmlData
+   */
+  parse(xmlData){
+    if(Array.isArray(xmlData) && xmlData.byteLength !== undefined){
+      return this.parseBytesArr(xmlData);
+    }else if( xmlData.toString){
+      xmlData = xmlData.toString();
+    }else{
+      throw new Error("XML data is accepted in String or Bytes[] form.")
+    }
+    // if( validationOption){
+    //   if(validationOption === true) validationOption = {}; //validate with default options
+
+    //   const result = validator.validate(xmlData, validationOption);
+    //   if (result !== true) {
+    //     throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` )
+    //   }
+    // }
+    const parser = new Xml2JsParser(this.options);
+    parser.entityParser.addExternalEntities(this.externalEntities);
+    return parser.parse(xmlData);
+  }
+
+  /**
+   * Parse XML data bytes array to JS object
+   * @param {Buffer} xmlData
+   */
+  parseBytesArr(xmlData){
+    if(!(Array.isArray(xmlData) && xmlData.byteLength !== undefined)){
+      throw new Error("XML data is accepted in Bytes[] form.")
+    }
+    const parser = new Xml2JsParser(this.options);
+    parser.entityParser.addExternalEntities(this.externalEntities);
+    return parser.parseBytesArr(xmlData);
+  }
+
+  /**
+   * Parse XML data stream to JS object
+   * @param {fs.ReadableStream} xmlDataStream
+   */
+  parseStream(xmlDataStream){
+    if(!isStream(xmlDataStream)) throw new Error("FXP: Invalid stream input");
+
+    const orderedObjParser = new Xml2JsParser(this.options);
+    orderedObjParser.entityParser.addExternalEntities(this.externalEntities);
+    return orderedObjParser.parseStream(xmlDataStream);
+  }
+
+  /**
+   * Add an entity which is not by default supported by this library
+   * @param {string} key
+   * @param {string} value
+   */
+  addEntity(key, value){
+    if(value.indexOf("&") !== -1){
+      throw new Error("Entity value can't have '&'")
+    }else if(key.indexOf("&") 
!== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; + } + } +} + +function isStream(stream){ + if(stream && typeof stream.read === "function" && typeof stream.on === "function" && typeof stream.readableEnded === "boolean") return true; + return false; +} + +module.exports = XMLParser; \ No newline at end of file diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/Xml2JsParser.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/Xml2JsParser.js new file mode 100644 index 00000000..c4baab45 --- /dev/null +++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/Xml2JsParser.js @@ -0,0 +1,237 @@ +const StringSource = require("./inputSource/StringSource"); +const BufferSource = require("./inputSource/BufferSource"); +const {readTagExp,readClosingTagName} = require("./XmlPartReader"); +const {readComment, readCdata,readDocType,readPiTag} = require("./XmlSpecialTagsReader"); +const TagPath = require("./TagPath"); +const TagPathMatcher = require("./TagPathMatcher"); +const EntitiesParser = require('./EntitiesParser'); + +//To hold the data of current tag +//This is usually used to compare jpath expression against current tag +class TagDetail{ + constructor(name){ + this.name = name; + this.position = 0; + // this.attributes = {}; + } +} + +class Xml2JsParser { + constructor(options) { + this.options = options; + + this.currentTagDetail = null; + this.tagTextData = ""; + this.tagsStack = []; + this.entityParser = new EntitiesParser(options.htmlEntities); + this.stopNodes = []; + for (let i = 0; i < this.options.stopNodes.length; i++) { + this.stopNodes.push(new TagPath(this.options.stopNodes[i])); + } + } + + parse(strData) { + this.source = new StringSource(strData); + this.parseXml(); + return this.outputBuilder.getOutput(); + } + parseBytesArr(data) { + this.source = new BufferSource(data ); + this.parseXml(); + return this.outputBuilder.getOutput(); + } + + parseXml() { + //TODO: Separate TagValueParser as separate class. So no scope issue in node builder class + + //OutputBuilder should be set in XML Parser + this.outputBuilder = this.options.OutputBuilder.getInstance(this.options); + this.root = { root: true}; + this.currentTagDetail = this.root; + + while(this.source.canRead()){ + let ch = this.source.readCh(); + if (ch === "") break; + + if(ch === "<"){//tagStart + let nextChar = this.source.readChAt(0); + if (nextChar === "" ) throw new Error("Unexpected end of source"); + + + if(nextChar === "!" || nextChar === "?"){ + this.source.updateBufferBoundary(); + //previously collected text should be added to current node + this.addTextNode(); + + this.readSpecialTag(nextChar);// Read DOCTYPE, comment, CDATA, PI tag + }else if(nextChar === "/"){ + this.source.updateBufferBoundary(); + this.readClosingTag(); + // console.log(this.source.buffer.length, this.source.readable); + // console.log(this.tagsStack.length); + }else{//opening tag + this.readOpeningTag(); + } + }else{ + this.tagTextData += ch; + } + }//End While loop + if(this.tagsStack.length > 0 || ( this.tagTextData !== "undefined" && this.tagTextData.trimEnd().length > 0) ) throw new Error("Unexpected data in the end of document"); + } + + /** + * read closing paired tag. Set parent tag in scope. 
+ * skip a node on user's choice
+ */
+  readClosingTag(){
+    const tagName = this.processTagName(readClosingTagName(this.source));
+    // console.log(tagName, this.tagsStack.length);
+    this.validateClosingTag(tagName);
+    // All the text data collected so far belongs to the current tag.
+    if(!this.currentTagDetail.root) this.addTextNode();
+    this.outputBuilder.closeTag();
+    // Since the tag is closed now, the parent tag comes in scope
+    this.currentTagDetail = this.tagsStack.pop();
+  }
+
+  validateClosingTag(tagName){
+    // This can't be an unpaired tag, or a stop tag.
+    if(this.isUnpaired(tagName) || this.isStopNode(tagName)) throw new Error(`Unexpected closing tag '${tagName}'`);
+    // This must match the last opening tag
+    else if(tagName !== this.currentTagDetail.name)
+      throw new Error(`Unexpected closing tag '${tagName}' expecting '${this.currentTagDetail.name}'`)
+  }
+
+  /**
+   * Read paired, unpaired, self-closing, stop and special tags.
+   * Create a new node.
+   * Push a paired tag on the stack.
+   */
+  readOpeningTag(){
+    //save previously collected text data to the current node
+    this.addTextNode();
+
+    //create new tag
+    let tagExp = readTagExp(this);
+
+    // process and keep off the tagsStack: unpaired tags, self-closing tags, and stop nodes
+    const tagDetail = new TagDetail(tagExp.tagName);
+    if(this.isUnpaired(tagExp.tagName)) {
+      //TODO: this will lead to 2 extra stack operations
+      this.outputBuilder.addTag(tagDetail);
+      this.outputBuilder.closeTag();
+    } else if(tagExp.selfClosing){
+      this.outputBuilder.addTag(tagDetail);
+      this.outputBuilder.closeTag();
+    } else if(this.isStopNode(tagDetail)){
+      // TODO: let user set a stop node boundary detector for complex contents like script tag
+      //TODO: pass tag name only to avoid string operations
+      const content = this.source.readUptoCloseTag(`</${tagExp.tagName}`);
+      this.outputBuilder.addTag(tagDetail);
+      this.outputBuilder.addRawValue(content); //stop node content is kept as-is
+      this.outputBuilder.closeTag();
+    } else {//paired tag
+      this.tagsStack.push(this.currentTagDetail);
+      this.currentTagDetail = tagDetail;
+      this.outputBuilder.addTag(tagDetail);
+    }
+  }
+
+  addTextNode(){
+    // if(this.currentTagDetail){
+      if(this.tagTextData.length > 0){
+        //TODO: shift parsing to output builder
+
+        this.outputBuilder.addValue(this.replaceEntities(this.tagTextData));
+      }
+      this.tagTextData = "";
+    // }
+  }
+
+  processAttrName(name){
+    if(name === "__proto__") name = "#__proto__";
+    name = resolveNameSpace(name, this.options.removeNSPrefix);
+    return name;
+  }
+
+  processTagName(name){
+    if(name === "__proto__") name = "#__proto__";
+    name = resolveNameSpace(name, this.options.removeNSPrefix);
+    return name;
+  }
+
+  /**
+   * Generate tags path from tagsStack
+   */
+  tagsPath(tagName){
+    //TODO: return TagPath Object. 
User can call match method with path + return ""; + } + + isUnpaired(tagName){ + return this.options.tags.unpaired.indexOf(tagName) !== -1; + } + + /** + * valid expressions are + * tag nested + * * nested + * tag nested[attribute] + * tag nested[attribute=""] + * tag nested[attribute!=""] + * tag nested:0 //for future + * @param {string} tagName + * @returns + */ + isStopNode(node){ + for (let i = 0; i < this.stopNodes.length; i++) { + const givenPath = this.stopNodes[i]; + if(givenPath.match(this.tagsStack, node)) return true; + } + return false + } + + replaceEntities(text){ + //TODO: if option is set then replace entities + return this.entityParser.parse(text) + } +} + +function resolveNameSpace(name, removeNSPrefix) { + if (removeNSPrefix) { + const parts = name.split(':'); + if(parts.length === 2){ + if (parts[0] === 'xmlns') return ''; + else return parts[1]; + }else reportError(`Multiple namespaces ${name}`) + } + return name; +} + +module.exports = Xml2JsParser; \ No newline at end of file diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/XmlPartReader.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/XmlPartReader.js new file mode 100644 index 00000000..56b180e9 --- /dev/null +++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/XmlPartReader.js @@ -0,0 +1,212 @@ +'use strict'; + +/** + * find paired tag for a stop node + * @param {string} xmlDoc + * @param {string} tagName + * @param {number} i : start index + */ +function readStopNode(xmlDoc, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + + for (; i < xmlDoc.length; i++) { + if( xmlDoc[i] === "<"){ + if (xmlDoc[i+1] === "/") {//close tag + const closeIndex = findSubStrIndex(xmlDoc, ">", i, `${tagName} is not closed`); + let closeTagName = xmlDoc.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlDoc.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlDoc[i+1] === '?') { + const closeIndex = findSubStrIndex(xmlDoc, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } else if(xmlDoc.substr(i + 1, 3) === '!--') { + const closeIndex = findSubStrIndex(xmlDoc, "-->", i+3, "StopNode is not closed.") + i=closeIndex; + } else if(xmlDoc.substr(i + 1, 2) === '![') { + const closeIndex = findSubStrIndex(xmlDoc, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = readTagExp(xmlDoc, i, '>') + + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} + +/** + * Read closing tag name + * @param {Source} source + * @returns tag name + */ +function readClosingTagName(source){ + let text = ""; //temporary data + while(source.canRead()){ + let ch = source.readCh(); + // if (ch === null || ch === undefined) break; + // source.updateBuffer(); + + if (ch === ">") return text.trimEnd(); + else text += ch; + } + throw new Error(`Unexpected end of source. Reading '${substr}'`); +} + +/** + * Read XML tag and build attributes map + * This function can be used to read normal tag, pi tag. + * This function can't be used to read comment, CDATA, DOCTYPE. 
+ * Eg + * @param {string} xmlDoc + * @param {number} startIndex starting index + * @returns tag expression includes tag name & attribute string + */ +function readTagExp(parser) { + let inSingleQuotes = false; + let inDoubleQuotes = false; + let i; + let EOE = false; + + for (i = 0; parser.source.canRead(i); i++) { + const char = parser.source.readChAt(i); + + if (char === "'" && !inDoubleQuotes) { + inSingleQuotes = !inSingleQuotes; + } else if (char === '"' && !inSingleQuotes) { + inDoubleQuotes = !inDoubleQuotes; + } else if (char === '>' && !inSingleQuotes && !inDoubleQuotes) { + // If not inside quotes, stop reading at '>' + EOE = true; + break; + } + + } + if(inSingleQuotes || inDoubleQuotes){ + throw new Error("Invalid attribute expression. Quote is not properly closed"); + }else if(!EOE) throw new Error("Unexpected closing of source. Waiting for '>'"); + + + const exp = parser.source.readStr(i); + parser.source.updateBufferBoundary(i + 1); + return buildTagExpObj(exp, parser) +} + +function readPiExp(parser) { + let inSingleQuotes = false; + let inDoubleQuotes = false; + let i; + let EOE = false; + + for (i = 0; parser.source.canRead(i) ; i++) { + const currentChar = parser.source.readChAt(i); + const nextChar = parser.source.readChAt(i+1); + + if (currentChar === "'" && !inDoubleQuotes) { + inSingleQuotes = !inSingleQuotes; + } else if (currentChar === '"' && !inSingleQuotes) { + inDoubleQuotes = !inDoubleQuotes; + } + + if (!inSingleQuotes && !inDoubleQuotes) { + if (currentChar === '?' && nextChar === '>') { + EOE = true; + break; // Exit the loop when '?>' is found + } + } + } + if(inSingleQuotes || inDoubleQuotes){ + throw new Error("Invalid attribute expression. Quote is not properly closed in PI tag expression"); + }else if(!EOE) throw new Error("Unexpected closing of source. 
Waiting for '?>'");
+
+  if(!parser.options.attributes.ignore){
+    //TODO: use regex to verify attributes if not set to ignore
+  }
+
+  const exp = parser.source.readStr(i);
+  parser.source.updateBufferBoundary(i + 1);
+  return buildTagExpObj(exp, parser)
+}
+
+function buildTagExpObj(exp, parser){
+  const tagExp = {
+    tagName: "",
+    selfClosing: false
+  };
+  let attrsExp = "";
+
+  if(exp[exp.length -1] === "/") tagExp.selfClosing = true;
+
+  //separate tag name
+  let i = 0;
+  for (; i < exp.length; i++) {
+    const char = exp[i];
+    if(char === " "){
+      tagExp.tagName = exp.substring(0, i);
+      attrsExp = exp.substring(i + 1);
+      break;
+    }
+  }
+  //only a tag name, no attributes
+  if(tagExp.tagName.length === 0 && i === exp.length) tagExp.tagName = exp;
+
+  tagExp.tagName = tagExp.tagName.trimEnd();
+
+  if(!parser.options.attributes.ignore && attrsExp.length > 0){
+    parseAttributesExp(attrsExp, parser)
+  }
+
+  return tagExp;
+}
+
+const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm');
+
+function parseAttributesExp(attrStr, parser) {
+  const matches = getAllMatches(attrStr, attrsRegx);
+  const len = matches.length; //don't make it inline
+  for (let i = 0; i < len; i++) {
+    let attrName = parser.processAttrName(matches[i][1]);
+    let attrVal = parser.replaceEntities(matches[i][4] || true);
+
+    parser.outputBuilder.addAttribute(attrName, attrVal);
+  }
+}
+
+const getAllMatches = function(string, regex) {
+  const matches = [];
+  let match = regex.exec(string);
+  while (match) {
+    const allmatches = [];
+    allmatches.startIndex = regex.lastIndex - match[0].length;
+    const len = match.length;
+    for (let index = 0; index < len; index++) {
+      allmatches.push(match[index]);
+    }
+    matches.push(allmatches);
+    match = regex.exec(string);
+  }
+  return matches;
+};
+
+module.exports = {
+  readStopNode: readStopNode,
+  readClosingTagName: readClosingTagName,
+  readTagExp: readTagExp,
+  readPiExp: readPiExp,
+}
\ No newline at end of file
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/XmlSpecialTagsReader.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/XmlSpecialTagsReader.js
new file mode 100644
index 00000000..0fba196a
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/XmlSpecialTagsReader.js
@@ -0,0 +1,118 @@
+const {readPiExp} = require("./XmlPartReader");
+
+function readCdata(parser){
+  //<![CDATA[ .. ]]>
+  const text = parser.source.readUpto("]]>");
+  parser.outputBuilder.addCdata(text);
+}
+
+function readPiTag(parser){
+  //<?tag attrs?>
+  const tagExp = readPiExp(parser);
+  if(!tagExp) throw new Error("Invalid Pi Tag expression.");
+
+  if (tagExp.tagName === "?xml") {//TODO: test if tagName is just xml
+    parser.outputBuilder.addDeclaration();
+  } else {
+    parser.outputBuilder.addPi("?"+tagExp.tagName);
+  }
+}
+
+function readComment(parser){
+  //<!-- .. -->
+  const text = parser.source.readUpto("-->");
+  parser.outputBuilder.addComment(text);
+}
+
+const DOCTYPE_tags = {
+  "EL":/^EMENT\s+([^\s>]+)\s+(ANY|EMPTY|\(.+\)\s*$)/m,
+  "AT":/^TLIST\s+[^\s]+\s+[^\s]+\s+[^\s]+\s+[^\s]+\s+$/m,
+  "NO":/^TATION.+$/m
+}
+
+function readDocType(parser){
+  //<!DOCTYPE name ..> or <!DOCTYPE name [ ..internal subset.. ]>
+  let hasBody = false, lastch = "";
+  while(parser.source.canRead()){
+    let ch = parser.source.readCh();
+    if(hasBody && ch === '<'){//an internal-subset declaration, Eg <!ENTITY
+      parser.source.readCh();//skip '!'
+      const str = parser.source.readStr(2);
+      parser.source.updateBufferBoundary(2);
+      if(str === "EN" && parser.source.readStr(4) === "TITY"){//<!ENTITY
+        parser.source.updateBufferBoundary(4);
+        registerEntity(parser);
+      }else{
+        const dTagExp = parser.source.readUpto(">");
+        const regx = DOCTYPE_tags[str];
+        if(regx){
+          const match = dTagExp.match(regx);
+          if(!match) throw new Error("Invalid DOCTYPE");
+        }else throw new Error("Invalid DOCTYPE");
+      }
+    }else if( ch === '>' && lastch === "]"){//end of doctype
+      return;
+    }else if( ch === '>'){//end of doctype
+      return;
+    }else if( ch === '['){
+      hasBody = true;
+    }else{
+      lastch = ch;
+    }
+  }//End While loop
+
+}
+
+function registerEntity(parser){
+  //read the remaining <!ENTITY name "value"> expression
+  let attrBoundary="";
+  let name ="", val ="", text ="";
+  while(parser.source.canRead()){
+    let ch = parser.
source.readCh(); + + if(attrBoundary){ + if (ch === attrBoundary){ + val = text; + text = "" + } + }else if(ch === " " || ch === "\t"){ + if(!name){ + name = text.trimStart(); + text = ""; + } + }else if (ch === '"' || ch === "'") {//start of attrBoundary + attrBoundary = ch; + }else if(ch === ">"){ + parser.entityParser.addExternalEntity(name,val); + return; + }else{ + text+=ch; + } + } +} + +module.exports = { + readCdata: readCdata, + readComment:readComment, + readDocType:readDocType, + readPiTag:readPiTag +} \ No newline at end of file diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/inputSource/BufferSource.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/inputSource/BufferSource.js new file mode 100644 index 00000000..b83ce460 --- /dev/null +++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/inputSource/BufferSource.js @@ -0,0 +1,118 @@ +const Constants = { + space: 32, + tab: 9 +} +class BufferSource{ + constructor(bytesArr){ + this.line = 1; + this.cols = 0; + this.buffer = bytesArr; + this.startIndex = 0; + } + + + + readCh() { + return String.fromCharCode(this.buffer[this.startIndex++]); + } + + readChAt(index) { + return String.fromCharCode(this.buffer[this.startIndex+index]); + } + + readStr(n,from){ + if(typeof from === "undefined") from = this.startIndex; + return this.buffer.slice(from, from + n).toString(); + } + + readUpto(stopStr) { + const inputLength = this.buffer.length; + const stopLength = stopStr.length; + const stopBuffer = Buffer.from(stopStr); + + for (let i = this.startIndex; i < inputLength; i++) { + let match = true; + for (let j = 0; j < stopLength; j++) { + if (this.buffer[i + j] !== stopBuffer[j]) { + match = false; + break; + } + } + + if (match) { + const result = this.buffer.slice(this.startIndex, i).toString(); + this.startIndex = i + stopLength; + return result; + } + } + + throw new Error(`Unexpected end of source. Reading '${stopStr}'`); +} + +readUptoCloseTag(stopStr) { //stopStr: "'){ //TODO: if it should be equivalent ASCII + match = 2; + //tag boundary found + // this.startIndex + } + }else{ + match = 1; + for (let j = 0; j < stopLength; j++) { + if (this.buffer[i + j] !== stopBuffer[j]) { + match = 0; + break; + } + } + } + if (match === 2) {//matched closing part + const result = this.buffer.slice(this.startIndex, stopIndex - 1 ).toString(); + this.startIndex = i + 1; + return result; + } + } + + throw new Error(`Unexpected end of source. 
Reading '${stopStr}'`); +} + + readFromBuffer(n, shouldUpdate) { + let ch; + if (n === 1) { + ch = this.buffer[this.startIndex]; + if (ch === 10) { + this.line++; + this.cols = 1; + } else { + this.cols++; + } + ch = String.fromCharCode(ch); + } else { + this.cols += n; + ch = this.buffer.slice(this.startIndex, this.startIndex + n).toString(); + } + if (shouldUpdate) this.updateBuffer(n); + return ch; + } + + updateBufferBoundary(n = 1) { //n: number of characters read + this.startIndex += n; + } + + canRead(n){ + n = n || this.startIndex; + return this.buffer.length - n + 1 > 0; + } + +} + +module.exports = BufferSource; \ No newline at end of file diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/inputSource/StringSource.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/inputSource/StringSource.js new file mode 100644 index 00000000..a996528b --- /dev/null +++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/inputSource/StringSource.js @@ -0,0 +1,123 @@ +const whiteSpaces = [" ", "\n", "\t"]; + + +class StringSource{ + constructor(str){ + this.line = 1; + this.cols = 0; + this.buffer = str; + //a boundary pointer to indicate where from the buffer dat should be read + // data before this pointer can be deleted to free the memory + this.startIndex = 0; + } + + readCh() { + return this.buffer[this.startIndex++]; + } + + readChAt(index) { + return this.buffer[this.startIndex+index]; + } + + readStr(n,from){ + if(typeof from === "undefined") from = this.startIndex; + return this.buffer.substring(from, from + n); + } + + readUpto(stopStr) { + const inputLength = this.buffer.length; + const stopLength = stopStr.length; + + for (let i = this.startIndex; i < inputLength; i++) { + let match = true; + for (let j = 0; j < stopLength; j++) { + if (this.buffer[i + j] !== stopStr[j]) { + match = false; + break; + } + } + + if (match) { + const result = this.buffer.substring(this.startIndex, i); + this.startIndex = i + stopLength; + return result; + } + } + + throw new Error(`Unexpected end of source. Reading '${stopStr}'`); + } + + readUptoCloseTag(stopStr) { //stopStr: "'){ + match = 2; + //tag boundary found + // this.startIndex + } + }else{ + match = 1; + for (let j = 0; j < stopLength; j++) { + if (this.buffer[i + j] !== stopStr[j]) { + match = 0; + break; + } + } + } + if (match === 2) {//matched closing part + const result = this.buffer.substring(this.startIndex, stopIndex - 1 ); + this.startIndex = i + 1; + return result; + } + } + + throw new Error(`Unexpected end of source. 
Reading '${stopStr}'`);
+  }
+
+  readFromBuffer(n, updateIndex){
+    let ch;
+    if(n===1){
+      ch = this.buffer[this.startIndex];
+      // if(ch === "\n") {
+      //   this.line++;
+      //   this.cols = 1;
+      // }else{
+      //   this.cols++;
+      // }
+    }else{
+      ch = this.buffer.substring(this.startIndex, this.startIndex + n);
+      // if("".indexOf("\n") !== -1){
+      //   //TODO: handle the scenario when there are multiple lines
+      //   //TODO: col should be set to number of chars after last '\n'
+      //   // this.cols = 1;
+      // }else{
+      //   this.cols += n;
+      // }
+    }
+    if(updateIndex) this.updateBufferBoundary(n);
+    return ch;
+  }
+
+  //TODO: rename to updateBufferReadIndex
+
+  updateBufferBoundary(n = 1) { //n: number of characters read
+    this.startIndex += n;
+  }
+
+  canRead(n){
+    n = n || this.startIndex;
+    return this.buffer.length - n + 1 > 0;
+  }
+
+}
+
+module.exports = StringSource;
\ No newline at end of file
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/EntitiesParser.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/EntitiesParser.js
new file mode 100644
index 00000000..62cc02ff
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/EntitiesParser.js
@@ -0,0 +1,107 @@
+const ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"};
+const htmlEntities = {
+  "space": { regex: /&(nbsp|#160);/g, val: " " },
+  // "lt" : { regex: /&(lt|#60);/g, val: "<" },
+  // "gt" : { regex: /&(gt|#62);/g, val: ">" },
+  // "amp" : { regex: /&(amp|#38);/g, val: "&" },
+  // "quot" : { regex: /&(quot|#34);/g, val: "\"" },
+  // "apos" : { regex: /&(apos|#39);/g, val: "'" },
+  "cent" : { regex: /&(cent|#162);/g, val: "¢" },
+  "pound" : { regex: /&(pound|#163);/g, val: "£" },
+  "yen" : { regex: /&(yen|#165);/g, val: "¥" },
+  "euro" : { regex: /&(euro|#8364);/g, val: "€" },
+  "copyright" : { regex: /&(copy|#169);/g, val: "©" },
+  "reg" : { regex: /&(reg|#174);/g, val: "®" },
+  "inr" : { regex: /&(inr|#8377);/g, val: "₹" },
+  "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) },
+  "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) },
+};
+
+class EntitiesParser{
+  constructor(replaceHtmlEntities) {
+    this.replaceHtmlEntities = replaceHtmlEntities;
+    this.docTypeEntities = {};
+    this.lastEntities = {
+      "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"},
+      "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"},
+      "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"},
+      "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""},
+    };
+  }
+
+  addExternalEntities(externalEntities){
+    const entKeys = Object.keys(externalEntities);
+    for (let i = 0; i < entKeys.length; i++) {
+      const ent = entKeys[i];
+      this.addExternalEntity(ent, externalEntities[ent])
+    }
+  }
+
+  addExternalEntity(key, val){
+    validateEntityName(key);
+    if(val.indexOf("&") !== -1) {
+      console.warn(`Entity ${key} is not added as '&' is found in value;`);
+      return;
+    }else{
+      this.lastEntities[key] = {
+        regex: new RegExp("&"+key+";","g"),
+        val : val
+      }
+    }
+  }
+
+  addDocTypeEntities(entities){
+    const entKeys = Object.keys(entities);
+    for (let i = 0; i < entKeys.length; i++) {
+      const ent = entKeys[i];
+      this.docTypeEntities[ent] = {
+        regex: new RegExp("&"+ent+";","g"),
+        val : entities[ent]
+      }
+    }
+  }
+
+  parse(val){
+    return this.replaceEntitiesValue(val)
+  }
+
+  /**
+   * 1. Replace DOCTYPE entities
+   * 2. Replace external entities
+   * 3. Replace HTML entities if asked
+   * @param {string} val
+   */
+  replaceEntitiesValue(val){
+    if(typeof val === "string" && val.length > 0){
+      for(let entityName in this.docTypeEntities){
+        const entity = this.docTypeEntities[entityName];
+        val = val.replace( entity.regex, entity.val);
+      }
+      for(let entityName in this.lastEntities){
+        const entity = this.lastEntities[entityName];
+        val = val.replace( entity.regex, entity.val);
+      }
+      if(this.replaceHtmlEntities){
+        for(let entityName in htmlEntities){
+          const entity = htmlEntities[entityName];
+          val = val.replace( entity.regex, entity.val);
+        }
+      }
+      val = val.replace( ampEntity.regex, ampEntity.val);
+    }
+    return val;
+  }
+};
+
+//an entity name should not contain special characters that may be used in regex
+//Eg !?\\\/[]$%{}^&*()<>
+const specialChar = "!?\\\/[]$%{}^&*()<>|+";
+
+function validateEntityName(name){
+  for (let i = 0; i < specialChar.length; i++) {
+    const ch = specialChar[i];
+    if(name.indexOf(ch) !== -1) throw new Error(`Invalid character ${ch} in entity name`);
+  }
+  return name;
+}
+
+module.exports = EntitiesParser;
\ No newline at end of file
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParser.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParser.js
new file mode 100644
index 00000000..f8f5d12a
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParser.js
@@ -0,0 +1,23 @@
+class boolParser{
+  constructor(trueList, falseList){
+    if(trueList)
+      this.trueList = trueList;
+    else
+      this.trueList = ["true"];
+
+    if(falseList)
+      this.falseList = falseList;
+    else
+      this.falseList = ["false"];
+  }
+  parse(val){
+    if (typeof val === 'string') {
+      //TODO: performance: don't convert
+      const temp = val.toLowerCase();
+      if(this.trueList.indexOf(temp) !== -1) return true;
+      else if(this.falseList.indexOf(temp) !== -1 ) return false;
+    }
+    return val;
+  }
+}
+module.exports = boolParser;
\ No newline at end of file
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParserExt.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParserExt.js
new file mode 100644
index 00000000..21b80502
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParserExt.js
@@ -0,0 +1,20 @@
+function boolParserExt(val){
+  if(Array.isArray(val)){
+    for (let i = 0; i < val.length; i++) {
+      val[i] = parse(val[i])
+    }
+  }else{
+    val = parse(val)
+  }
+  return val;
+}
+
+function parse(val){
+  if (typeof val === 'string') {
+    const temp = val.toLowerCase();
+    if(temp === 'true' || temp ==="yes" || temp==="1") return true;
+    else if(temp === 'false' || temp ==="no" || temp==="0") return false;
+  }
+  return val;
+}
+module.exports = boolParserExt;
\ No newline at end of file
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/currency.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/currency.js
new file mode 100644
index 00000000..82e21e75
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/currency.js
@@ -0,0 +1,40 @@
+const defaultOptions = {
+  maxLength: 200,
+  // locale: "en-IN"
+}
+const localeMap = {
+  "$":"en-US",
+  "€":"de-DE",
+  "£":"en-GB",
+  "¥":"ja-JP",
+  "₹":"en-IN",
+}
+const sign = "(?:-|\\+)?";
+const digitsAndSeparator = "(?:\\d+|\\d{1,3}(?:,\\d{3})+)";
+const decimalPart = "(?:\\.\\d{1,2})?";
+const symbol = "(?:\\$|€|¥|₹)?";
+
+const currencyCheckRegex = /^\s*(?:-|\+)?(?:\d+|\d{1,3}(?:,\d{3})+)?(?:\.\d{1,2})?\s*(?:\$|€|¥|₹)?\s*$/u;
+
+class CurrencyParser{
+  constructor(options){
+    this.options = options || defaultOptions;
+  }
+  parse(val){
+    if (typeof val === 'string' && val.length <= this.options.maxLength) {
+      if(val.indexOf(",,") === -1 && val.indexOf("..") === -1){
+        const match = val.match(currencyCheckRegex);
+        if(match){
+          const locale = this.options.locale || localeMap[match[2]||match[5]||"₹"];
+          const formatter = new Intl.NumberFormat(locale)
+          val = val.replace(/[^0-9,.]/g, '').trim();
+          val = Number(val.replace(formatter.format(1000)[1], ''));
+        }
+      }
+    }
+    return val;
+  }
+}
+CurrencyParser.defaultOptions = defaultOptions;
+
+module.exports = CurrencyParser;
\ No newline at end of file
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/join.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/join.js
new file mode 100644
index 00000000..d7f2027d
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/join.js
@@ -0,0 +1,14 @@
+/**
+ * Join an array of values into a single string
+ * @param {array} val
+ * @param {string} by
+ * @returns
+ */
+function join(val, by=" "){
+  if(Array.isArray(val)){
+    return val.join(by);
+  }
+  return val;
+}
+
+module.exports = join;
\ No newline at end of file
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/number.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/number.js
new file mode 100644
index 00000000..bef3803c
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/number.js
@@ -0,0 +1,16 @@
+const toNumber = require("strnum");
+
+
+class numParser{
+  constructor(options){
+    this.options = options;
+  }
+  parse(val){
+    if (typeof val === 'string') {
+      val = toNumber(val, this.options);
+    }
+    return val;
+  }
+}
+
+module.exports = numParser;
\ No newline at end of file
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/trim.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/trim.js
new file mode 100644
index 00000000..ecce49a1
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/v5/valueParsers/trim.js
@@ -0,0 +1,8 @@
+class trimmer{
+  parse(val){
+    if(typeof val === "string") return val.trim();
+    else return val;
+  }
+}
+
+module.exports = trimmer;
\ No newline at end of file
diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/validator.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/validator.js
new file mode 100644
index 00000000..3b1b2efb
--- /dev/null
+++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/validator.js
@@ -0,0 +1,425 @@
+'use strict';
+
+const util = require('./util');
+
+const defaultOptions = {
+  allowBooleanAttributes: false, //A tag can have attributes without any value
+  unpairedTags: []
+};
+
+//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g");
+exports.validate = function (xmlData, options) {
+  options = Object.assign({}, defaultOptions, options);
+
+  //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line
+  //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag
+  //xmlData = xmlData.replace(/(<!DOCTYPE[\s\w\W]*>)/g,"");//Remove DOCTYPE
+  const tags = [];
+  let tagFound = false;
+
+  //indicates that the root tag has been closed (aka. 
depth 0 has been reached) + let reachedRoot = false; + + if (xmlData[0] === '\ufeff') { + // check for byte order mark (BOM) + xmlData = xmlData.substr(1); + } + + for (let i = 0; i < xmlData.length; i++) { + + if (xmlData[i] === '<' && xmlData[i+1] === '?') { + i+=2; + i = readPI(xmlData,i); + if (i.err) return i; + }else if (xmlData[i] === '<') { + //starting of tag + //read until you reach to '>' avoiding any '>' in attribute value + let tagStartPos = i; + i++; + + if (xmlData[i] === '!') { + i = readCommentAndCDATA(xmlData, i); + continue; + } else { + let closingTag = false; + if (xmlData[i] === '/') { + //closing tag + closingTag = true; + i++; + } + //read tagname + let tagName = ''; + for (; i < xmlData.length && + xmlData[i] !== '>' && + xmlData[i] !== ' ' && + xmlData[i] !== '\t' && + xmlData[i] !== '\n' && + xmlData[i] !== '\r'; i++ + ) { + tagName += xmlData[i]; + } + tagName = tagName.trim(); + //console.log(tagName); + + if (tagName[tagName.length - 1] === '/') { + //self closing tag without attributes + tagName = tagName.substring(0, tagName.length - 1); + //continue; + i--; + } + if (!validateTagName(tagName)) { + let msg; + if (tagName.trim().length === 0) { + msg = "Invalid space after '<'."; + } else { + msg = "Tag '"+tagName+"' is an invalid name."; + } + return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); + } + + const result = readAttributeStr(xmlData, i); + if (result === false) { + return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); + } + let attrStr = result.value; + i = result.index; + + if (attrStr[attrStr.length - 1] === '/') { + //self closing tag + const attrStrStart = i - attrStr.length; + attrStr = attrStr.substring(0, attrStr.length - 1); + const isValid = validateAttributeString(attrStr, options); + if (isValid === true) { + tagFound = true; + //continue; //text may presents after self closing tag + } else { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); + } + } else if (closingTag) { + if (!result.tagClosed) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); + } else if (attrStr.trim().length > 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos)); + } else if (tags.length === 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' has not been opened.", getLineNumberForPosition(xmlData, tagStartPos)); + } else { + const otg = tags.pop(); + if (tagName !== otg.tagName) { + let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); + return getErrorObject('InvalidTag', + "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", + getLineNumberForPosition(xmlData, tagStartPos)); + } + + //when there are no more tags, we reached the root level. 
+ if (tags.length == 0) { + reachedRoot = true; + } + } + } else { + const isValid = validateAttributeString(attrStr, options); + if (isValid !== true) { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); + } + + //if the root level has been reached before ... + if (reachedRoot === true) { + return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); + } else if(options.unpairedTags.indexOf(tagName) !== -1){ + //don't push into stack + } else { + tags.push({tagName, tagStartPos}); + } + tagFound = true; + } + + //skip tag text value + //It may include comments and CDATA value + for (i++; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + if (xmlData[i + 1] === '!') { + //comment or CADATA + i++; + i = readCommentAndCDATA(xmlData, i); + continue; + } else if (xmlData[i+1] === '?') { + i = readPI(xmlData, ++i); + if (i.err) return i; + } else{ + break; + } + } else if (xmlData[i] === '&') { + const afterAmp = validateAmpersand(xmlData, i); + if (afterAmp == -1) + return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); + i = afterAmp; + }else{ + if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { + return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); + } + } + } //end of reading tag text value + if (xmlData[i] === '<') { + i--; + } + } + } else { + if ( isWhiteSpace(xmlData[i])) { + continue; + } + return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not expected.", getLineNumberForPosition(xmlData, i)); + } + } + + if (!tagFound) { + return getErrorObject('InvalidXml', 'Start tag expected.', 1); + }else if (tags.length == 1) { + return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); + }else if (tags.length > 0) { + return getErrorObject('InvalidXml', "Invalid '"+ + JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ + "' found.", {line: 1, col: 1}); + } + + return true; +}; + +function isWhiteSpace(char){ + return char === ' ' || char === '\t' || char === '\n' || char === '\r'; +} +/** + * Read Processing insstructions and skip + * @param {*} xmlData + * @param {*} i + */ +function readPI(xmlData, i) { + const start = i; + for (; i < xmlData.length; i++) { + if (xmlData[i] == '?' || xmlData[i] == ' ') { + //tagname + const tagname = xmlData.substr(start, i - start); + if (i > 5 && tagname === 'xml') { + return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); + } else if (xmlData[i] == '?' 
&& xmlData[i + 1] == '>') { + //check if valid attribut string + i++; + break; + } else { + continue; + } + } + } + return i; +} + +function readCommentAndCDATA(xmlData, i) { + if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') { + //comment + for (i += 3; i < xmlData.length; i++) { + if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } else if ( + xmlData.length > i + 8 && + xmlData[i + 1] === 'D' && + xmlData[i + 2] === 'O' && + xmlData[i + 3] === 'C' && + xmlData[i + 4] === 'T' && + xmlData[i + 5] === 'Y' && + xmlData[i + 6] === 'P' && + xmlData[i + 7] === 'E' + ) { + let angleBracketsCount = 1; + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + angleBracketsCount++; + } else if (xmlData[i] === '>') { + angleBracketsCount--; + if (angleBracketsCount === 0) { + break; + } + } + } + } else if ( + xmlData.length > i + 9 && + xmlData[i + 1] === '[' && + xmlData[i + 2] === 'C' && + xmlData[i + 3] === 'D' && + xmlData[i + 4] === 'A' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'A' && + xmlData[i + 7] === '[' + ) { + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } + + return i; +} + +const doubleQuote = '"'; +const singleQuote = "'"; + +/** + * Keep reading xmlData until '<' is found outside the attribute value. + * @param {string} xmlData + * @param {number} i + */ +function readAttributeStr(xmlData, i) { + let attrStr = ''; + let startChar = ''; + let tagClosed = false; + for (; i < xmlData.length; i++) { + if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) { + if (startChar === '') { + startChar = xmlData[i]; + } else if (startChar !== xmlData[i]) { + //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa + } else { + startChar = ''; + } + } else if (xmlData[i] === '>') { + if (startChar === '') { + tagClosed = true; + break; + } + } + attrStr += xmlData[i]; + } + if (startChar !== '') { + return false; + } + + return { + value: attrStr, + index: i, + tagClosed: tagClosed + }; +} + +/** + * Select all the attributes whether valid or invalid. 
+ */ +const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g'); + +//attr, ="sd", a="amit's", a="sd"b="saf", ab cd="" + +function validateAttributeString(attrStr, options) { + //console.log("start:"+attrStr+":end"); + + //if(attrStr.trim().length === 0) return true; //empty string + + const matches = util.getAllMatches(attrStr, validAttrStrRegxp); + const attrNames = {}; + + for (let i = 0; i < matches.length; i++) { + if (matches[i][1].length === 0) { + //nospace before attribute name: a="sd"b="saf" + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i])) + } else if (matches[i][3] !== undefined && matches[i][4] === undefined) { + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i])); + } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) { + //independent attribute: ab + return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(matches[i])); + } + /* else if(matches[i][6] === undefined){//attribute without value: ab= + return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}}; + } */ + const attrName = matches[i][2]; + if (!validateAttrName(attrName)) { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i])); + } + if (!attrNames.hasOwnProperty(attrName)) { + //check for duplicate attribute. + attrNames[attrName] = 1; + } else { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i])); + } + } + + return true; +} + +function validateNumberAmpersand(xmlData, i) { + let re = /\d/; + if (xmlData[i] === 'x') { + i++; + re = /[\da-fA-F]/; + } + for (; i < xmlData.length; i++) { + if (xmlData[i] === ';') + return i; + if (!xmlData[i].match(re)) + break; + } + return -1; +} + +function validateAmpersand(xmlData, i) { + // https://www.w3.org/TR/xml/#dt-charref + i++; + if (xmlData[i] === ';') + return -1; + if (xmlData[i] === '#') { + i++; + return validateNumberAmpersand(xmlData, i); + } + let count = 0; + for (; i < xmlData.length; i++, count++) { + if (xmlData[i].match(/\w/) && count < 20) + continue; + if (xmlData[i] === ';') + break; + return -1; + } + return i; +} + +function getErrorObject(code, message, lineNumber) { + return { + err: { + code: code, + msg: message, + line: lineNumber.line || lineNumber, + col: lineNumber.col, + }, + }; +} + +function validateAttrName(attrName) { + return util.isName(attrName); +} + +// const startsWithXML = /^xml/i; + +function validateTagName(tagname) { + return util.isName(tagname) /* && !tagname.match(startsWithXML) */; +} + +//this function returns the line number for the character at the given index +function getLineNumberForPosition(xmlData, index) { + const lines = xmlData.substring(0, index).split(/\r?\n/); + return { + line: lines.length, + + // column number is last line's length + 1, because column numbering starts at 1: + col: lines[lines.length - 1].length + 1 + }; +} + +//this function returns the position of the first character of match within attrStr +function getPositionFromMatch(match) { + return match.startIndex + match[1].length; +} diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlbuilder/json2xml.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlbuilder/json2xml.js new 
file mode 100644 index 00000000..f30604a4 --- /dev/null +++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlbuilder/json2xml.js @@ -0,0 +1,281 @@ +'use strict'; +//parse Empty Node as self closing node +const buildFromOrderedJs = require('./orderedJs2Xml'); + +const defaultOptions = { + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + cdataPropName: false, + format: false, + indentBy: ' ', + suppressEmptyNode: false, + suppressUnpairedNode: true, + suppressBooleanAttributes: true, + tagValueProcessor: function(key, a) { + return a; + }, + attributeValueProcessor: function(attrName, a) { + return a; + }, + preserveOrder: false, + commentPropName: false, + unpairedTags: [], + entities: [ + { regex: new RegExp("&", "g"), val: "&" },//it must be on top + { regex: new RegExp(">", "g"), val: ">" }, + { regex: new RegExp("<", "g"), val: "<" }, + { regex: new RegExp("\'", "g"), val: "'" }, + { regex: new RegExp("\"", "g"), val: """ } + ], + processEntities: true, + stopNodes: [], + // transformTagName: false, + // transformAttributeName: false, + oneListGroup: false +}; + +function Builder(options) { + this.options = Object.assign({}, defaultOptions, options); + if (this.options.ignoreAttributes || this.options.attributesGroupName) { + this.isAttribute = function(/*a*/) { + return false; + }; + } else { + this.attrPrefixLen = this.options.attributeNamePrefix.length; + this.isAttribute = isAttribute; + } + + this.processTextOrObjNode = processTextOrObjNode + + if (this.options.format) { + this.indentate = indentate; + this.tagEndChar = '>\n'; + this.newLine = '\n'; + } else { + this.indentate = function() { + return ''; + }; + this.tagEndChar = '>'; + this.newLine = ''; + } +} + +Builder.prototype.build = function(jObj) { + if(this.options.preserveOrder){ + return buildFromOrderedJs(jObj, this.options); + }else { + if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){ + jObj = { + [this.options.arrayNodeName] : jObj + } + } + return this.j2x(jObj, 0).val; + } +}; + +Builder.prototype.j2x = function(jObj, level) { + let attrStr = ''; + let val = ''; + for (let key in jObj) { + if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue; + if (typeof jObj[key] === 'undefined') { + // supress undefined node only if it is not an attribute + if (this.isAttribute(key)) { + val += ''; + } + } else if (jObj[key] === null) { + // null attribute should be ignored by the attribute list, but should not cause the tag closing + if (this.isAttribute(key)) { + val += ''; + } else if (key[0] === '?') { + val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar; + } else { + val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (jObj[key] instanceof Date) { + val += this.buildTextValNode(jObj[key], key, '', level); + } else if (typeof jObj[key] !== 'object') { + //premitive type + const attr = this.isAttribute(key); + if (attr) { + attrStr += this.buildAttrPairStr(attr, '' + jObj[key]); + }else { + //tag value + if (key === this.options.textNodeName) { + let newval = this.options.tagValueProcessor(key, '' + jObj[key]); + val += this.replaceEntitiesValue(newval); + } else { + val += this.buildTextValNode(jObj[key], key, '', level); + } + } + } else if (Array.isArray(jObj[key])) { + //repeated nodes + const arrLen = jObj[key].length; + let listTagVal = ""; + let listTagAttr = ""; + for (let j = 0; j < arrLen; j++) { + const item = jObj[key][j]; + if (typeof item === 'undefined') { + // supress undefined node + } else if (item === null) { + if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' + this.tagEndChar; + else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (typeof item === 'object') { + if(this.options.oneListGroup){ + const result = this.j2x(item, level + 1); + listTagVal += result.val; + if (this.options.attributesGroupName && item.hasOwnProperty(this.options.attributesGroupName)) { + listTagAttr += result.attrStr + } + }else{ + listTagVal += this.processTextOrObjNode(item, key, level) + } + } else { + if (this.options.oneListGroup) { + let textValue = this.options.tagValueProcessor(key, item); + textValue = this.replaceEntitiesValue(textValue); + listTagVal += textValue; + } else { + listTagVal += this.buildTextValNode(item, key, '', level); + } + } + } + if(this.options.oneListGroup){ + listTagVal = this.buildObjectNode(listTagVal, key, listTagAttr, level); + } + val += listTagVal; + } else { + //nested node + if (this.options.attributesGroupName && key === this.options.attributesGroupName) { + const Ks = Object.keys(jObj[key]); + const L = Ks.length; + for (let j = 0; j < L; j++) { + attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]); + } + } else { + val += this.processTextOrObjNode(jObj[key], key, level) + } + } + } + return {attrStr: attrStr, val: val}; +}; + +Builder.prototype.buildAttrPairStr = function(attrName, val){ + val = this.options.attributeValueProcessor(attrName, '' + val); + val = this.replaceEntitiesValue(val); + if (this.options.suppressBooleanAttributes && val === "true") { + return ' ' + attrName; + } else return ' ' + attrName + '="' + val + '"'; +} + +function processTextOrObjNode (object, key, level) { + const result = this.j2x(object, level + 1); + if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) { + return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level); + } else { + return this.buildObjectNode(result.val, key, result.attrStr, level); + } +} + +Builder.prototype.buildObjectNode = function(val, key, attrStr, level) { + if(val === ""){ + if(key[0] === "?") return this.indentate(level) + '<' + key + attrStr+ '?' 
+ this.tagEndChar; + else { + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + } +}else{ + + let tagEndExp = '</' + key + this.tagEndChar; + let piClosingChar = ""; + + if(key[0] === "?") { + piClosingChar = "?"; + tagEndExp = ""; + } + + // attrStr is an empty string in case the attribute came as undefined or null + if ((attrStr || attrStr === '') && val.indexOf('<') === -1) { + return ( this.indentate(level) + '<' + key + attrStr + piClosingChar + '>' + val + tagEndExp ); + } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) { + return this.indentate(level) + `<!--${val}-->` + this.newLine; +}else { + return ( + this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar + + val + + this.indentate(level) + tagEndExp ); + } + } +} + +Builder.prototype.closeTag = function(key){ + let closeTag = ""; + if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired + if(!this.options.suppressUnpairedNode) closeTag = "/" + }else if(this.options.suppressEmptyNode){ //empty + closeTag = "/"; + }else{ + closeTag = `></${key}`; + } + return closeTag; +} + +function indentate(level) { + return this.options.indentBy.repeat(level); +} + +function isAttribute(name /*, options*/) { + if (name.startsWith(this.options.attributeNamePrefix) && name !== this.options.textNodeName) { + return name.substr(this.attrPrefixLen); + } else { + return false; + } +} + +Builder.prototype.buildTextValNode = function(val, key, attrStr, level) { + if (this.options.cdataPropName !== false && key === this.options.cdataPropName) { + return this.indentate(level) + `<![CDATA[${val}]]>` + this.newLine; + }else if (this.options.commentPropName !== false && key === this.options.commentPropName) { + return this.indentate(level) + `<!--${val}-->` + this.newLine; + }else if(key[0] === "?") {//PI tag + return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar; + }else{ + let textValue = this.options.tagValueProcessor(key, val); + textValue = this.replaceEntitiesValue(textValue); + + if( textValue === ''){ + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + }else{ + return this.indentate(level) + '<' + key + attrStr + '>' + + textValue + + '</' + key + this.tagEndChar; + } + } +} + +Builder.prototype.replaceEntitiesValue = function(textValue){ + if(textValue && textValue.length > 0 && this.options.processEntities){ + for (let i=0; i<this.options.entities.length; i++) { + const entity = this.options.entities[i]; + textValue = textValue.replace(entity.regex, entity.val); + } + } + return textValue; +} + +module.exports = Builder; diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlbuilder/orderedJs2Xml.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlbuilder/orderedJs2Xml.js new file mode 100644 --- /dev/null +++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlbuilder/orderedJs2Xml.js +const EOL = "\n"; + +function toXml(jArray, options) { + let indentation = ""; + if (options.format && options.indentBy.length > 0) { + indentation = EOL; + } + return arrToStr(jArray, options, "", indentation); +} + +function arrToStr(arr, options, jPath, indentation) { + let xmlStr = ""; + let isPreviousElementTag = false; + + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const tagName = propName(tagObj); + if(tagName === undefined) continue; + + let newJPath = ""; + if (jPath.length === 0) newJPath = tagName + else newJPath = `${jPath}.${tagName}`; + + if (tagName === options.textNodeName) { + let tagText = tagObj[tagName]; + if (!isStopNode(newJPath, options)) { + tagText = options.tagValueProcessor(tagName, tagText); + tagText = replaceEntitiesValue(tagText, options); + } + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += tagText; + isPreviousElementTag = false; + continue; + } else if (tagName === options.cdataPropName) { + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += `<![CDATA[${tagObj[tagName][0][options.textNodeName]}]]>`; + isPreviousElementTag = false; + continue; + } else if (tagName === options.commentPropName) { + xmlStr += indentation + `<!--${tagObj[tagName][0][options.textNodeName]}-->`; + isPreviousElementTag = true; + continue; + } else if (tagName[0] === "?") { + const attStr = attr_to_str(tagObj[":@"], options); + const tempInd = tagName === "?xml" ? "" : indentation; + let piTextNodeName = tagObj[tagName][0][options.textNodeName]; + piTextNodeName = piTextNodeName.length !== 0 ?
" " + piTextNodeName : ""; //remove extra spacing + xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`; + isPreviousElementTag = true; + continue; + } + let newIdentation = indentation; + if (newIdentation !== "") { + newIdentation += options.indentBy; + } + const attStr = attr_to_str(tagObj[":@"], options); + const tagStart = indentation + `<${tagName}${attStr}`; + const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation); + if (options.unpairedTags.indexOf(tagName) !== -1) { + if (options.suppressUnpairedNode) xmlStr += tagStart + ">"; + else xmlStr += tagStart + "/>"; + } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) { + xmlStr += tagStart + "/>"; + } else if (tagValue && tagValue.endsWith(">")) { + xmlStr += tagStart + `>${tagValue}${indentation}`; + } else { + xmlStr += tagStart + ">"; + if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("`; + } + isPreviousElementTag = true; + } + + return xmlStr; +} + +function propName(obj) { + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(!obj.hasOwnProperty(key)) continue; + if (key !== ":@") return key; + } +} + +function attr_to_str(attrMap, options) { + let attrStr = ""; + if (attrMap && !options.ignoreAttributes) { + for (let attr in attrMap) { + if(!attrMap.hasOwnProperty(attr)) continue; + let attrVal = options.attributeValueProcessor(attr, attrMap[attr]); + attrVal = replaceEntitiesValue(attrVal, options); + if (attrVal === true && options.suppressBooleanAttributes) { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`; + } else { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`; + } + } + } + return attrStr; +} + +function isStopNode(jPath, options) { + jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1); + let tagName = jPath.substr(jPath.lastIndexOf(".") + 1); + for (let index in options.stopNodes) { + if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." 
+ tagName) return true; + } + return false; +} + +function replaceEntitiesValue(textValue, options) { + if (textValue && textValue.length > 0 && options.processEntities) { + for (let i = 0; i < options.entities.length; i++) { + const entity = options.entities[i]; + textValue = textValue.replace(entity.regex, entity.val); + } + } + return textValue; +} +module.exports = toXml; diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlbuilder/prettifyJs2Xml.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlbuilder/prettifyJs2Xml.js new file mode 100644 index 00000000..e69de29b diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlparser/DocTypeReader.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlparser/DocTypeReader.js new file mode 100644 index 00000000..bcf9dee4 --- /dev/null +++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlparser/DocTypeReader.js @@ -0,0 +1,152 @@ +const util = require('../util'); + +//TODO: handle comments +function readDocType(xmlData, i){ + + const entities = {}; + if( xmlData[i + 3] === 'O' && + xmlData[i + 4] === 'C' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'Y' && + xmlData[i + 7] === 'P' && + xmlData[i + 8] === 'E') + { + i = i+9; + let angleBracketsCount = 1; + let hasBody = false, comment = false; + let exp = ""; + for(;i<xmlData.length;i++){ + if (xmlData[i] === '<' && !comment) { //Determine the tag type + if( hasBody && isEntity(xmlData, i)){ + i += 7; + let entityName, val; + [entityName, val,i] = readEntityExp(xmlData,i+1); + if(val.indexOf("&") === -1) //Parameter entities are not supported + entities[ validateEntityName(entityName) ] = { + regx : RegExp( `&${entityName};`,"g"), + val: val + }; + } + else if( hasBody && isElement(xmlData, i)) i += 8;//Not supported + else if( hasBody && isAttlist(xmlData, i)) i += 8;//Not supported + else if( hasBody && isNotation(xmlData, i)) i += 9;//Not supported + else if( isComment) comment = true; + else throw new Error("Invalid DOCTYPE"); + + angleBracketsCount++; + exp = ""; + } else if (xmlData[i] === '>') { //Read tag content + if(comment){ + if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){ + comment = false; + angleBracketsCount--; + } + }else{ + angleBracketsCount--; + } + if (angleBracketsCount === 0) { + break; + } + }else if( xmlData[i] === '['){ + hasBody = true; + }else{ + exp += xmlData[i]; + } + } + if(angleBracketsCount !== 0){ + throw new Error(`Unclosed DOCTYPE`); + } + }else{ + throw new Error(`Invalid Tag instead of DOCTYPE`); + } + return {entities, i}; +} + +function readEntityExp(xmlData,i){ + //External entities are not supported + // <!ENTITY ext SYSTEM "http://normal-website.com" > + + //Parameter entities are not supported + // <!ENTITY entityname "&anotherElement;"> + + //Internal entities are supported + // <!ENTITY entityname "replacement text"> + + //read EntityName + let entityName = ""; + for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) { + // if(xmlData[i] === " ") continue; + // else + entityName += xmlData[i]; + } + entityName = entityName.trim(); + if(entityName.indexOf(" ") !== -1) throw new Error("External entites are not supported"); + +  //read Entity Value + const startChar = xmlData[i++]; + let val = "" + for (; i < xmlData.length && xmlData[i] !== startChar ; i++) { + val += xmlData[i]; + } + return [entityName, val, i]; +} + +function isComment(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === '-' && + xmlData[i+3] === '-') return true + return false +} +function isEntity(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'N' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'I' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'Y') return true + return false +} +function isElement(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'L' && + xmlData[i+4] === 'E' && + xmlData[i+5] === 'M' && + xmlData[i+6] === 'E' && + xmlData[i+7] === 'N' && + xmlData[i+8] === 'T') return true + return false +} + +function isAttlist(xmlData, i){ + if(xmlData[i+1] === '!'
&& + xmlData[i+2] === 'A' && + xmlData[i+3] === 'T' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'L' && + xmlData[i+6] === 'I' && + xmlData[i+7] === 'S' && + xmlData[i+8] === 'T') return true + return false +} +function isNotation(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'N' && + xmlData[i+3] === 'O' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'A' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'I' && + xmlData[i+8] === 'O' && + xmlData[i+9] === 'N') return true + return false +} + +function validateEntityName(name){ + if (util.isName(name)) + return name; + else + throw new Error(`Invalid entity name ${name}`); +} + +module.exports = readDocType; diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlparser/OptionsBuilder.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlparser/OptionsBuilder.js new file mode 100644 index 00000000..bca37769 --- /dev/null +++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlparser/OptionsBuilder.js @@ -0,0 +1,48 @@ + +const defaultOptions = { + preserveOrder: false, + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + allowBooleanAttributes: false, //a tag can have attributes without any value + //ignoreRootElement : false, + parseTagValue: true, + parseAttributeValue: false, + trimValues: true, //Trim string values of tag and attributes + cdataPropName: false, + numberParseOptions: { + hex: true, + leadingZeros: true, + eNotation: true + }, + tagValueProcessor: function(tagName, val) { + return val; + }, + attributeValueProcessor: function(attrName, val) { + return val; + }, + stopNodes: [], //nested tags will not be parsed even for errors + alwaysCreateTextNode: false, + isArray: () => false, + commentPropName: false, + unpairedTags: [], + processEntities: true, + htmlEntities: false, + ignoreDeclaration: false, + ignorePiTags: false, + transformTagName: false, + transformAttributeName: false, + updateTag: function(tagName, jPath, attrs){ + return tagName + }, + // skipEmptyListItem: false +}; + +const buildOptions = function(options) { + return Object.assign({}, defaultOptions, options); +}; + +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; \ No newline at end of file diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlparser/OrderedObjParser.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlparser/OrderedObjParser.js new file mode 100644 index 00000000..ffd3f24f --- /dev/null +++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlparser/OrderedObjParser.js @@ -0,0 +1,601 @@ +'use strict'; +///@ts-check + +const util = require('../util'); +const xmlNode = require('./xmlNode'); +const readDocType = require("./DocTypeReader"); +const toNumber = require("strnum"); + +// const regx = +// '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' +// .replace(/NAME/g, util.nameRegexp); + +//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g"); +//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g"); + +class OrderedObjParser{ + constructor(options){ + this.options = options; + this.currentNode = null; + this.tagsNodeStack = []; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : 
"'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; + this.htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, + }; + this.addExternalEntities = addExternalEntities; + this.parseXml = parseXml; + this.parseTextData = parseTextData; + this.resolveNameSpace = resolveNameSpace; + this.buildAttributesMap = buildAttributesMap; + this.isItStopNode = isItStopNode; + this.replaceEntitiesValue = replaceEntitiesValue; + this.readStopNodeData = readStopNodeData; + this.saveTextToParentTag = saveTextToParentTag; + this.addChild = addChild; + } + +} + +function addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.lastEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : externalEntities[ent] + } + } +} + +/** + * @param {string} val + * @param {string} tagName + * @param {string} jPath + * @param {boolean} dontTrim + * @param {boolean} hasAttributes + * @param {boolean} isLeafNode + * @param {boolean} escapeEntities + */ +function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) { + if (val !== undefined) { + if (this.options.trimValues && !dontTrim) { + val = val.trim(); + } + if(val.length > 0){ + if(!escapeEntities) val = this.replaceEntitiesValue(val); + + const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode); + if(newval === null || newval === undefined){ + //don't parse + return val; + }else if(typeof newval !== typeof val || newval !== val){ + //overwrite + return newval; + }else if(this.options.trimValues){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + const trimmedVal = val.trim(); + if(trimmedVal === val){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + return val; + } + } + } + } +} + +function resolveNameSpace(tagname) { + if (this.options.removeNSPrefix) { + const tags = tagname.split(':'); + const prefix = tagname.charAt(0) === '/' ? 
'/' : ''; + if (tags[0] === 'xmlns') { + return ''; + } + if (tags.length === 2) { + tagname = prefix + tags[1]; + } + } + return tagname; +} + +//TODO: change regex to capture NS +//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm"); +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); + +function buildAttributesMap(attrStr, jPath, tagName) { + if (!this.options.ignoreAttributes && typeof attrStr === 'string') { + // attrStr = attrStr.replace(/\r?\n/g, ' '); + //attrStr = attrStr || attrStr.trim(); + + const matches = util.getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + const attrs = {}; + for (let i = 0; i < len; i++) { + const attrName = this.resolveNameSpace(matches[i][1]); + let oldVal = matches[i][4]; + let aName = this.options.attributeNamePrefix + attrName; + if (attrName.length) { + if (this.options.transformAttributeName) { + aName = this.options.transformAttributeName(aName); + } + if(aName === "__proto__") aName = "#__proto__"; + if (oldVal !== undefined) { + if (this.options.trimValues) { + oldVal = oldVal.trim(); + } + oldVal = this.replaceEntitiesValue(oldVal); + const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath); + if(newVal === null || newVal === undefined){ + //don't parse + attrs[aName] = oldVal; + }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){ + //overwrite + attrs[aName] = newVal; + }else{ + //parse + attrs[aName] = parseValue( + oldVal, + this.options.parseAttributeValue, + this.options.numberParseOptions + ); + } + } else if (this.options.allowBooleanAttributes) { + attrs[aName] = true; + } + } + } + if (!Object.keys(attrs).length) { + return; + } + if (this.options.attributesGroupName) { + const attrCollection = {}; + attrCollection[this.options.attributesGroupName] = attrs; + return attrCollection; + } + return attrs + } +} + +const parseXml = function(xmlData) { + xmlData = xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line + const xmlObj = new xmlNode('!xml'); + let currentNode = xmlObj; + let textData = ""; + let jPath = ""; + for(let i=0; i< xmlData.length; i++){//for each char in XML data + const ch = xmlData[i]; + if(ch === '<'){ + // const nextIndex = i+1; + // const _2ndChar = xmlData[nextIndex]; + if( xmlData[i+1] === '/') {//Closing Tag + const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.") + let tagName = xmlData.substring(i+2,closeIndex).trim(); + + if(this.options.removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + } + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + if(currentNode){ + textData = this.saveTextToParentTag(textData, currentNode, jPath); + } + + //check if last tag of nested tag was unpaired tag + const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1); + if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){ + throw new Error(`Unpaired tag can not be used as closing tag: </${tagName}>`); + } + let propIndex = 0 + if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){ + propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1) + this.tagsNodeStack.pop(); + }else{ + propIndex = jPath.lastIndexOf("."); + } + jPath = jPath.substring(0, propIndex); + + currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag scope + textData = ""; + i = closeIndex; + } else if( xmlData[i+1] === '?') {
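+ //'?' introduces the XML declaration or a processing instruction, e.g. <?xml version="1.0"?>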
+ + let tagData = readTagExp(xmlData,i, false, "?>"); + if(!tagData) throw new Error("Pi Tag is not closed."); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + if( (this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags){ + + }else{ + + const childNode = new xmlNode(tagData.tagName); + childNode.add(this.options.textNodeName, ""); + + if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName); + } + this.addChild(currentNode, childNode, jPath) + + } + + + i = tagData.closeIndex + 1; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const endIndex = findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.") + if(this.options.commentPropName){ + const comment = xmlData.substring(i + 4, endIndex - 2); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]); + } + i = endIndex; + } else if( xmlData.substr(i + 1, 2) === '!D') { + const result = readDocType(xmlData, i); + this.docTypeEntities = result.entities; + i = result.i; + }else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2; + const tagExp = xmlData.substring(i + 9,closeIndex); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true, true); + if(val == undefined) val = ""; + + //cdata should be set even if it is 0 length string + if(this.options.cdataPropName){ + currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]); + }else{ + currentNode.add(this.options.textNodeName, val); + } + + i = closeIndex + 2; + }else {//Opening tag + let result = readTagExp(xmlData,i, this.options.removeNSPrefix); + let tagName= result.tagName; + const rawTagName = result.rawTagName; + let tagExp = result.tagExp; + let attrExpPresent = result.attrExpPresent; + let closeIndex = result.closeIndex; + + if (this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + //save text as child node + if (currentNode && textData) { + if(currentNode.tagname !== '!xml'){ + //when nested tag is found + textData = this.saveTextToParentTag(textData, currentNode, jPath, false); + } + } + + //check if last tag was unpaired tag + const lastTag = currentNode; + if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){ + currentNode = this.tagsNodeStack.pop(); + jPath = jPath.substring(0, jPath.lastIndexOf(".")); + } + if(tagName !== xmlObj.tagname){ + jPath += jPath ? "." 
+ tagName : tagName; + } + if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) { + let tagContent = ""; + //self-closing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + i = result.closeIndex; + } + //unpaired tag + else if(this.options.unpairedTags.indexOf(tagName) !== -1){ + + i = result.closeIndex; + } + //normal tag + else{ + //read until closing tag is found + const result = this.readStopNodeData(xmlData, rawTagName, closeIndex + 1); + if(!result) throw new Error(`Unexpected end of ${rawTagName}`); + i = result.i; + tagContent = result.tagContent; + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + if(tagContent) { + tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true); + } + + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + childNode.add(this.options.textNodeName, tagContent); + + this.addChild(currentNode, childNode, jPath) + }else{ + //selfClosing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + } + //opening tag + else{ + const childNode = new xmlNode( tagName); + this.tagsNodeStack.push(currentNode); + + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + currentNode = childNode; + } + textData = ""; + i = closeIndex; + } + } + }else{ + textData += xmlData[i]; + } + } + return xmlObj.child; +} + +function addChild(currentNode, childNode, jPath){ + const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"]) + if(result === false){ + }else if(typeof result === "string"){ + childNode.tagname = result + currentNode.addChild(childNode); + }else{ + currentNode.addChild(childNode); + } +} + +const replaceEntitiesValue = function(val){ + + if(this.options.processEntities){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.options.htmlEntities){ + for(let entityName in this.htmlEntities){ + const entity = this.htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( this.ampEntity.regex, this.ampEntity.val); + } + return val; +} +function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) { + if (textData) { //store previously collected data as textNode + if(isLeafNode === undefined) isLeafNode = 
Object.keys(currentNode.child).length === 0 + + textData = this.parseTextData(textData, + currentNode.tagname, + jPath, + false, + currentNode[":@"] ? Object.keys(currentNode[":@"]).length !== 0 : false, + isLeafNode); + + if (textData !== undefined && textData !== "") + currentNode.add(this.options.textNodeName, textData); + textData = ""; + } + return textData; +} + +//TODO: use jPath to simplify the logic +/** + * + * @param {string[]} stopNodes + * @param {string} jPath + * @param {string} currentTagName + */ +function isItStopNode(stopNodes, jPath, currentTagName){ + const allNodesExp = "*." + currentTagName; + for (const stopNodePath in stopNodes) { + const stopNodeExp = stopNodes[stopNodePath]; + if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true; + } + return false; +} + +/** + * Returns the tag Expression and where it is ending handling single-double quotes situation + * @param {string} xmlData + * @param {number} i starting index + * @returns + */ +function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){ + let attrBoundary; + let tagExp = ""; + for (let index = i; index < xmlData.length; index++) { + let ch = xmlData[index]; + if (attrBoundary) { + if (ch === attrBoundary) attrBoundary = "";//reset + } else if (ch === '"' || ch === "'") { + attrBoundary = ch; + } else if (ch === closingChar[0]) { + if(closingChar[1]){ + if(xmlData[index + 1] === closingChar[1]){ + return { + data: tagExp, + index: index + } + } + }else{ + return { + data: tagExp, + index: index + } + } + } else if (ch === '\t') { + ch = " " + } + tagExp += ch; + } +} + +function findClosingIndex(xmlData, str, i, errMsg){ + const closingIndex = xmlData.indexOf(str, i); + if(closingIndex === -1){ + throw new Error(errMsg) + }else{ + return closingIndex + str.length - 1; + } +} + +function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){ + const result = tagExpWithClosingIndex(xmlData, i+1, closingChar); + if(!result) return; + let tagExp = result.data; + const closeIndex = result.index; + const separatorIndex = tagExp.search(/\s/); + let tagName = tagExp; + let attrExpPresent = true; + if(separatorIndex !== -1){//separate tag name and attributes expression + tagName = tagExp.substring(0, separatorIndex); + tagExp = tagExp.substring(separatorIndex + 1).trimStart(); + } + + const rawTagName = tagName; + if(removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + attrExpPresent = tagName !== result.data.substr(colonIndex + 1); + } + } + + return { + tagName: tagName, + tagExp: tagExp, + closeIndex: closeIndex, + attrExpPresent: attrExpPresent, + rawTagName: rawTagName, + } +} +/** + * find paired tag for a stop node + * @param {string} xmlData + * @param {string} tagName + * @param {number} i + */ +function readStopNodeData(xmlData, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + + for (; i < xmlData.length; i++) { + if( xmlData[i] === "<"){ + if (xmlData[i+1] === "/") {//close tag + const closeIndex = findClosingIndex(xmlData, ">", i, `${tagName} is not closed`); + let closeTagName = xmlData.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlData.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlData[i+1] === '?') { + const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } 
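+ //comments and CDATA inside a stop node are skipped in full, so angle brackets within them never affect the open-tag count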
else if(xmlData.substr(i + 1, 3) === '!--') { + const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = readTagExp(xmlData, i, '>') + + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} + +function parseValue(val, shouldParse, options) { + if (shouldParse && typeof val === 'string') { + //console.log(options) + const newval = val.trim(); + if(newval === 'true' ) return true; + else if(newval === 'false' ) return false; + else return toNumber(val, options); + } else { + if (util.isExist(val)) { + return val; + } else { + return ''; + } + } +} + + +module.exports = OrderedObjParser; diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlparser/XMLParser.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlparser/XMLParser.js new file mode 100644 index 00000000..ffaf59b5 --- /dev/null +++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlparser/XMLParser.js @@ -0,0 +1,58 @@ +const { buildOptions} = require("./OptionsBuilder"); +const OrderedObjParser = require("./OrderedObjParser"); +const { prettify} = require("./node2json"); +const validator = require('../validator'); + +class XMLParser{ + + constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + + } + /** + * Parse XML dats to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parse(xmlData,validationOption){ + if(typeof xmlData === "string"){ + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") + } + if( validationOption){ + if(validationOption === true) validationOption = {}; //validate with default options + + const result = validator.validate(xmlData, validationOption); + if (result !== true) { + throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) + } + } + const orderedObjParser = new OrderedObjParser(this.options); + orderedObjParser.addExternalEntities(this.externalEntities); + const orderedResult = orderedObjParser.parseXml(xmlData); + if(this.options.preserveOrder || orderedResult === undefined) return orderedResult; + else return prettify(orderedResult, this.options); + } + + /** + * Add Entity which is not by default supported by this library + * @param {string} key + * @param {string} value + */ + addEntity(key, value){ + if(value.indexOf("&") !== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. 
use '#xD' for '&#xD;'") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; + } + } +} + +module.exports = XMLParser; \ No newline at end of file diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlparser/node2json.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlparser/node2json.js new file mode 100644 index 00000000..30455738 --- /dev/null +++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlparser/node2json.js @@ -0,0 +1,113 @@ +'use strict'; + +/** + * + * @param {array} node + * @param {any} options + * @returns + */ +function prettify(node, options){ + return compress( node, options); +} + +/** + * + * @param {array} arr + * @param {object} options + * @param {string} jPath + * @returns object + */ +function compress(arr, options, jPath){ + let text; + const compressedObj = {}; + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const property = propName(tagObj); + let newJpath = ""; + if(jPath === undefined) newJpath = property; + else newJpath = jPath + "." + property; + + if(property === options.textNodeName){ + if(text === undefined) text = tagObj[property]; + else text += "" + tagObj[property]; + }else if(property === undefined){ + continue; + }else if(tagObj[property]){ + + let val = compress(tagObj[property], options, newJpath); + const isLeaf = isLeafTag(val, options); + + if(tagObj[":@"]){ + assignAttributes( val, tagObj[":@"], newJpath, options); + }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){ + val = val[options.textNodeName]; + }else if(Object.keys(val).length === 0){ + if(options.alwaysCreateTextNode) val[options.textNodeName] = ""; + else val = ""; + } + + if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) { + if(!Array.isArray(compressedObj[property])) { + compressedObj[property] = [ compressedObj[property] ]; + } + compressedObj[property].push(val); + }else{ + //TODO: if a node is not an array, then check if it should be an array + //also determine if it is a leaf node + if (options.isArray(property, newJpath, isLeaf )) { + compressedObj[property] = [val]; + }else{ + compressedObj[property] = val; + } + } + } + + } + // if(text && text.length > 0) compressedObj[options.textNodeName] = text; + if(typeof text === "string"){ + if(text.length > 0) compressedObj[options.textNodeName] = text; + }else if(text !== undefined) compressedObj[options.textNodeName] = text; + return compressedObj; +} + +function propName(obj){ + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(key !== ":@") return key; + } +} + +function assignAttributes(obj, attrMap, jpath, options){ + if (attrMap) { + const keys = Object.keys(attrMap); + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + const atrrName = keys[i]; + if (options.isArray(atrrName, jpath + "."
+ atrrName, true, true)) { + obj[atrrName] = [ attrMap[atrrName] ]; + } else { + obj[atrrName] = attrMap[atrrName]; + } + } + } +} + +function isLeafTag(obj, options){ + const { textNodeName } = options; + const propCount = Object.keys(obj).length; + + if (propCount === 0) { + return true; + } + + if ( + propCount === 1 && + (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0) + ) { + return true; + } + + return false; +} +exports.prettify = prettify; diff --git a/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlparser/xmlNode.js b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlparser/xmlNode.js new file mode 100644 index 00000000..93195242 --- /dev/null +++ b/node_modules/@aws-sdk/core/node_modules/fast-xml-parser/src/xmlparser/xmlNode.js @@ -0,0 +1,25 @@ +'use strict'; + +class XmlNode{ + constructor(tagname) { + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments in order + this[":@"] = {}; //attributes map + } + add(key,val){ + // this.child.push( {name : key, val: val, isCdata: isCdata }); + if(key === "__proto__") key = "#__proto__"; + this.child.push( {[key]: val }); + } + addChild(node) { + if(node.tagname === "__proto__") node.tagname = "#__proto__"; + if(node[":@"] && Object.keys(node[":@"]).length > 0){ + this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] }); + }else{ + this.child.push( { [node.tagname]: node.child }); + } + }; +}; + + +module.exports = XmlNode; \ No newline at end of file diff --git a/node_modules/@aws-sdk/core/package.json b/node_modules/@aws-sdk/core/package.json new file mode 100644 index 00000000..a41d77ae --- /dev/null +++ b/node_modules/@aws-sdk/core/package.json @@ -0,0 +1,119 @@ +{ + "name": "@aws-sdk/core", + "version": "3.799.0", + "description": "Core functions & classes shared by multiple AWS SDK clients.", + "scripts": { + "build": "yarn lint && concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline core && rimraf ./dist-cjs/api-extractor-type-index.js", + "build:es": "tsc -p tsconfig.es.json && rimraf ./dist-es/api-extractor-type-index.js", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "lint": "node ../../scripts/validation/submodules-linter.js --pkg core", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "exports": { + ".": { + "types": "./dist-types/index.d.ts", + "module": "./dist-es/index.js", + "node": "./dist-cjs/index.js", + "import": "./dist-es/index.js", + "require": "./dist-cjs/index.js" + }, + "./package.json": { + "module": "./package.json", + "node": "./package.json", + "import": "./package.json", + "require": "./package.json" + }, + "./client": { + "types": "./dist-types/submodules/client/index.d.ts", + "module": "./dist-es/submodules/client/index.js", + "node": "./dist-cjs/submodules/client/index.js", + "import": "./dist-es/submodules/client/index.js", + "require": "./dist-cjs/submodules/client/index.js" + }, + 
"./httpAuthSchemes": { + "types": "./dist-types/submodules/httpAuthSchemes/index.d.ts", + "module": "./dist-es/submodules/httpAuthSchemes/index.js", + "node": "./dist-cjs/submodules/httpAuthSchemes/index.js", + "import": "./dist-es/submodules/httpAuthSchemes/index.js", + "require": "./dist-cjs/submodules/httpAuthSchemes/index.js" + }, + "./account-id-endpoint": { + "types": "./dist-types/submodules/account-id-endpoint/index.d.ts", + "module": "./dist-es/submodules/account-id-endpoint/index.js", + "node": "./dist-cjs/submodules/account-id-endpoint/index.js", + "import": "./dist-es/submodules/account-id-endpoint/index.js", + "require": "./dist-cjs/submodules/account-id-endpoint/index.js" + }, + "./protocols": { + "types": "./dist-types/submodules/protocols/index.d.ts", + "module": "./dist-es/submodules/protocols/index.js", + "node": "./dist-cjs/submodules/protocols/index.js", + "import": "./dist-es/submodules/protocols/index.js", + "require": "./dist-cjs/submodules/protocols/index.js" + } + }, + "files": [ + "./account-id-endpoint.d.ts", + "./account-id-endpoint.js", + "./client.d.ts", + "./client.js", + "./httpAuthSchemes.d.ts", + "./httpAuthSchemes.js", + "./protocols.d.ts", + "./protocols.js", + "dist-*/**" + ], + "sideEffects": false, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/core": "^3.3.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "fast-xml-parser": "4.4.1", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/core", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/core" + } +} diff --git a/node_modules/@aws-sdk/core/protocols.d.ts b/node_modules/@aws-sdk/core/protocols.d.ts new file mode 100644 index 00000000..7a363347 --- /dev/null +++ b/node_modules/@aws-sdk/core/protocols.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@aws-sdk/core/protocols" { + export * from "@aws-sdk/core/dist-types/submodules/protocols/index.d"; +} diff --git a/node_modules/@aws-sdk/core/protocols.js b/node_modules/@aws-sdk/core/protocols.js new file mode 100644 index 00000000..e2916e8f --- /dev/null +++ b/node_modules/@aws-sdk/core/protocols.js @@ -0,0 +1,5 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +module.exports = require("./dist-cjs/submodules/protocols/index.js"); diff --git a/node_modules/@aws-sdk/credential-provider-env/LICENSE b/node_modules/@aws-sdk/credential-provider-env/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-env/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@aws-sdk/credential-provider-env/README.md b/node_modules/@aws-sdk/credential-provider-env/README.md new file mode 100644 index 00000000..61a64361 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-env/README.md @@ -0,0 +1,11 @@ +# @aws-sdk/credential-provider-env + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-env/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-env) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-env.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-env) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@aws-sdk/credential-providers](https://www.npmjs.com/package/@aws-sdk/credential-providers) +instead. diff --git a/node_modules/@aws-sdk/credential-provider-env/dist-cjs/index.js b/node_modules/@aws-sdk/credential-provider-env/dist-cjs/index.js new file mode 100644 index 00000000..c906a6bb --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-env/dist-cjs/index.js @@ -0,0 +1,76 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + ENV_ACCOUNT_ID: () => ENV_ACCOUNT_ID, + ENV_CREDENTIAL_SCOPE: () => ENV_CREDENTIAL_SCOPE, + ENV_EXPIRATION: () => ENV_EXPIRATION, + ENV_KEY: () => ENV_KEY, + ENV_SECRET: () => ENV_SECRET, + ENV_SESSION: () => ENV_SESSION, + fromEnv: () => fromEnv +}); +module.exports = __toCommonJS(index_exports); + +// src/fromEnv.ts +var import_client = require("@aws-sdk/core/client"); +var import_property_provider = require("@smithy/property-provider"); +var ENV_KEY = "AWS_ACCESS_KEY_ID"; +var ENV_SECRET = "AWS_SECRET_ACCESS_KEY"; +var ENV_SESSION = "AWS_SESSION_TOKEN"; +var ENV_EXPIRATION = "AWS_CREDENTIAL_EXPIRATION"; +var ENV_CREDENTIAL_SCOPE = 
"AWS_CREDENTIAL_SCOPE"; +var ENV_ACCOUNT_ID = "AWS_ACCOUNT_ID"; +var fromEnv = /* @__PURE__ */ __name((init) => async () => { + init?.logger?.debug("@aws-sdk/credential-provider-env - fromEnv"); + const accessKeyId = process.env[ENV_KEY]; + const secretAccessKey = process.env[ENV_SECRET]; + const sessionToken = process.env[ENV_SESSION]; + const expiry = process.env[ENV_EXPIRATION]; + const credentialScope = process.env[ENV_CREDENTIAL_SCOPE]; + const accountId = process.env[ENV_ACCOUNT_ID]; + if (accessKeyId && secretAccessKey) { + const credentials = { + accessKeyId, + secretAccessKey, + ...sessionToken && { sessionToken }, + ...expiry && { expiration: new Date(expiry) }, + ...credentialScope && { credentialScope }, + ...accountId && { accountId } + }; + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_ENV_VARS", "g"); + return credentials; + } + throw new import_property_provider.CredentialsProviderError("Unable to find environment variable credentials.", { logger: init?.logger }); +}, "fromEnv"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + ENV_KEY, + ENV_SECRET, + ENV_SESSION, + ENV_EXPIRATION, + ENV_CREDENTIAL_SCOPE, + ENV_ACCOUNT_ID, + fromEnv +}); + diff --git a/node_modules/@aws-sdk/credential-provider-env/dist-es/fromEnv.js b/node_modules/@aws-sdk/credential-provider-env/dist-es/fromEnv.js new file mode 100644 index 00000000..a6a29283 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-env/dist-es/fromEnv.js @@ -0,0 +1,30 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { CredentialsProviderError } from "@smithy/property-provider"; +export const ENV_KEY = "AWS_ACCESS_KEY_ID"; +export const ENV_SECRET = "AWS_SECRET_ACCESS_KEY"; +export const ENV_SESSION = "AWS_SESSION_TOKEN"; +export const ENV_EXPIRATION = "AWS_CREDENTIAL_EXPIRATION"; +export const ENV_CREDENTIAL_SCOPE = "AWS_CREDENTIAL_SCOPE"; +export const ENV_ACCOUNT_ID = "AWS_ACCOUNT_ID"; +export const fromEnv = (init) => async () => { + init?.logger?.debug("@aws-sdk/credential-provider-env - fromEnv"); + const accessKeyId = process.env[ENV_KEY]; + const secretAccessKey = process.env[ENV_SECRET]; + const sessionToken = process.env[ENV_SESSION]; + const expiry = process.env[ENV_EXPIRATION]; + const credentialScope = process.env[ENV_CREDENTIAL_SCOPE]; + const accountId = process.env[ENV_ACCOUNT_ID]; + if (accessKeyId && secretAccessKey) { + const credentials = { + accessKeyId, + secretAccessKey, + ...(sessionToken && { sessionToken }), + ...(expiry && { expiration: new Date(expiry) }), + ...(credentialScope && { credentialScope }), + ...(accountId && { accountId }), + }; + setCredentialFeature(credentials, "CREDENTIALS_ENV_VARS", "g"); + return credentials; + } + throw new CredentialsProviderError("Unable to find environment variable credentials.", { logger: init?.logger }); +}; diff --git a/node_modules/@aws-sdk/credential-provider-env/dist-es/index.js b/node_modules/@aws-sdk/credential-provider-env/dist-es/index.js new file mode 100644 index 00000000..17bf6daa --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-env/dist-es/index.js @@ -0,0 +1 @@ +export * from "./fromEnv"; diff --git a/node_modules/@aws-sdk/credential-provider-env/dist-types/fromEnv.d.ts b/node_modules/@aws-sdk/credential-provider-env/dist-types/fromEnv.d.ts new file mode 100644 index 00000000..541aa697 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-env/dist-types/fromEnv.d.ts @@ -0,0 +1,36 @@ +import type { CredentialProviderOptions } from 
"@aws-sdk/types"; +import { AwsCredentialIdentityProvider } from "@smithy/types"; +export interface FromEnvInit extends CredentialProviderOptions { +} +/** + * @internal + */ +export declare const ENV_KEY = "AWS_ACCESS_KEY_ID"; +/** + * @internal + */ +export declare const ENV_SECRET = "AWS_SECRET_ACCESS_KEY"; +/** + * @internal + */ +export declare const ENV_SESSION = "AWS_SESSION_TOKEN"; +/** + * @internal + */ +export declare const ENV_EXPIRATION = "AWS_CREDENTIAL_EXPIRATION"; +/** + * @internal + */ +export declare const ENV_CREDENTIAL_SCOPE = "AWS_CREDENTIAL_SCOPE"; +/** + * @internal + */ +export declare const ENV_ACCOUNT_ID = "AWS_ACCOUNT_ID"; +/** + * @internal + * + * Source AWS credentials from known environment variables. If either the + * `AWS_ACCESS_KEY_ID` or `AWS_SECRET_ACCESS_KEY` environment variable is not + * set in this process, the provider will return a rejected promise. + */ +export declare const fromEnv: (init?: FromEnvInit) => AwsCredentialIdentityProvider; diff --git a/node_modules/@aws-sdk/credential-provider-env/dist-types/index.d.ts b/node_modules/@aws-sdk/credential-provider-env/dist-types/index.d.ts new file mode 100644 index 00000000..fe76e31b --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-env/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./fromEnv"; diff --git a/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/fromEnv.d.ts b/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/fromEnv.d.ts new file mode 100644 index 00000000..55c454e6 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/fromEnv.d.ts @@ -0,0 +1,12 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentityProvider } from "@smithy/types"; +export interface FromEnvInit extends CredentialProviderOptions {} +export declare const ENV_KEY = "AWS_ACCESS_KEY_ID"; +export declare const ENV_SECRET = "AWS_SECRET_ACCESS_KEY"; +export declare const ENV_SESSION = "AWS_SESSION_TOKEN"; +export declare const ENV_EXPIRATION = "AWS_CREDENTIAL_EXPIRATION"; +export declare const ENV_CREDENTIAL_SCOPE = "AWS_CREDENTIAL_SCOPE"; +export declare const ENV_ACCOUNT_ID = "AWS_ACCOUNT_ID"; +export declare const fromEnv: ( + init?: FromEnvInit +) => AwsCredentialIdentityProvider; diff --git a/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..17bf6daa --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./fromEnv"; diff --git a/node_modules/@aws-sdk/credential-provider-env/package.json b/node_modules/@aws-sdk/credential-provider-env/package.json new file mode 100644 index 00000000..a66a0de5 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-env/package.json @@ -0,0 +1,62 @@ +{ + "name": "@aws-sdk/credential-provider-env", + "version": "3.799.0", + "description": "AWS credential provider that sources credentials from known environment variables", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-env", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + 
"build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-env", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-env" + } +} diff --git a/node_modules/@aws-sdk/credential-provider-http/README.md b/node_modules/@aws-sdk/credential-provider-http/README.md new file mode 100644 index 00000000..e8f19f8d --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/README.md @@ -0,0 +1,10 @@ +# @aws-sdk/credential-provider-http + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-http/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-http) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-http.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-http) + +> An internal transitively required package. + +## Usage + +See https://www.npmjs.com/package/@aws-sdk/credential-providers diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/checkUrl.js b/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/checkUrl.js new file mode 100644 index 00000000..c4adb5f9 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/checkUrl.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.checkUrl = void 0; +const property_provider_1 = require("@smithy/property-provider"); +const LOOPBACK_CIDR_IPv4 = "127.0.0.0/8"; +const LOOPBACK_CIDR_IPv6 = "::1/128"; +const ECS_CONTAINER_HOST = "169.254.170.2"; +const EKS_CONTAINER_HOST_IPv4 = "169.254.170.23"; +const EKS_CONTAINER_HOST_IPv6 = "[fd00:ec2::23]"; +const checkUrl = (url, logger) => { + if (url.protocol === "https:") { + return; + } + if (url.hostname === ECS_CONTAINER_HOST || + url.hostname === EKS_CONTAINER_HOST_IPv4 || + url.hostname === EKS_CONTAINER_HOST_IPv6) { + return; + } + if (url.hostname.includes("[")) { + if (url.hostname === "[::1]" || url.hostname === "[0000:0000:0000:0000:0000:0000:0000:0001]") { + return; + } + } + else { + if (url.hostname === "localhost") { + return; + } + const ipComponents = url.hostname.split("."); + const inRange = (component) => { + const num = parseInt(component, 10); + return 0 <= num && num <= 255; + }; + if (ipComponents[0] === "127" && + inRange(ipComponents[1]) && + inRange(ipComponents[2]) && + inRange(ipComponents[3]) && + ipComponents.length === 4) { + return; + } + } + throw new property_provider_1.CredentialsProviderError(`URL not accepted. 
It must either be HTTPS or match one of the following: + - loopback CIDR 127.0.0.0/8 or [::1/128] + - ECS container host 169.254.170.2 + - EKS container host 169.254.170.23 or [fd00:ec2::23]`, { logger }); +}; +exports.checkUrl = checkUrl; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.browser.js b/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.browser.js new file mode 100644 index 00000000..d7c0efa1 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.browser.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromHttp = void 0; +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const property_provider_1 = require("@smithy/property-provider"); +const checkUrl_1 = require("./checkUrl"); +const requestHelpers_1 = require("./requestHelpers"); +const retry_wrapper_1 = require("./retry-wrapper"); +const fromHttp = (options = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-http - fromHttp"); + let host; + const full = options.credentialsFullUri; + if (full) { + host = full; + } + else { + throw new property_provider_1.CredentialsProviderError("No HTTP credential provider host provided.", { logger: options.logger }); + } + const url = new URL(host); + (0, checkUrl_1.checkUrl)(url, options.logger); + const requestHandler = new fetch_http_handler_1.FetchHttpHandler(); + return (0, retry_wrapper_1.retryWrapper)(async () => { + const request = (0, requestHelpers_1.createGetRequest)(url); + if (options.authorizationToken) { + request.headers.Authorization = options.authorizationToken; + } + const result = await requestHandler.handle(request); + return (0, requestHelpers_1.getCredentials)(result.response); + }, options.maxRetries ?? 3, options.timeout ?? 1000); +}; +exports.fromHttp = fromHttp; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.js b/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.js new file mode 100644 index 00000000..6e0269a5 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.js @@ -0,0 +1,68 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromHttp = void 0; +const tslib_1 = require("tslib"); +const client_1 = require("@aws-sdk/core/client"); +const node_http_handler_1 = require("@smithy/node-http-handler"); +const property_provider_1 = require("@smithy/property-provider"); +const promises_1 = tslib_1.__importDefault(require("fs/promises")); +const checkUrl_1 = require("./checkUrl"); +const requestHelpers_1 = require("./requestHelpers"); +const retry_wrapper_1 = require("./retry-wrapper"); +const AWS_CONTAINER_CREDENTIALS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +const DEFAULT_LINK_LOCAL_HOST = "http://169.254.170.2"; +const AWS_CONTAINER_CREDENTIALS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +const AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE = "AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE"; +const AWS_CONTAINER_AUTHORIZATION_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +const fromHttp = (options = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-http - fromHttp"); + let host; + const relative = options.awsContainerCredentialsRelativeUri ?? process.env[AWS_CONTAINER_CREDENTIALS_RELATIVE_URI]; + const full = options.awsContainerCredentialsFullUri ?? 
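The `checkUrl` helper above enforces the provider's allow-list: any HTTPS URL, the IPv4 loopback range 127.0.0.0/8, IPv6 loopback, and the fixed ECS/EKS link-local container hosts; everything else throws. A sketch of those rules — `checkUrl` is internal, so the deep import path here is an assumption for illustration only:

```ts
// checkUrl is not re-exported from the package root; this deep import into
// the published dist output is assumed, not a documented entry point.
import { checkUrl } from "@aws-sdk/credential-provider-http/dist-cjs/fromHttp/checkUrl";

checkUrl(new URL("https://creds.example.com/"));          // ok: any HTTPS URL
checkUrl(new URL("http://169.254.170.2/v2/credentials")); // ok: ECS container host
checkUrl(new URL("http://169.254.170.23/creds"));         // ok: EKS container host (IPv4)
checkUrl(new URL("http://127.0.0.42/creds"));             // ok: loopback 127.0.0.0/8
checkUrl(new URL("http://[::1]/creds"));                  // ok: IPv6 loopback
checkUrl(new URL("http://example.com/creds"));            // throws CredentialsProviderError
```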
process.env[AWS_CONTAINER_CREDENTIALS_FULL_URI]; + const token = options.awsContainerAuthorizationToken ?? process.env[AWS_CONTAINER_AUTHORIZATION_TOKEN]; + const tokenFile = options.awsContainerAuthorizationTokenFile ?? process.env[AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE]; + const warn = options.logger?.constructor?.name === "NoOpLogger" || !options.logger ? console.warn : options.logger.warn; + if (relative && full) { + warn("@aws-sdk/credential-provider-http: " + + "you have set both awsContainerCredentialsRelativeUri and awsContainerCredentialsFullUri."); + warn("awsContainerCredentialsFullUri will take precedence."); + } + if (token && tokenFile) { + warn("@aws-sdk/credential-provider-http: " + + "you have set both awsContainerAuthorizationToken and awsContainerAuthorizationTokenFile."); + warn("awsContainerAuthorizationToken will take precedence."); + } + if (full) { + host = full; + } + else if (relative) { + host = `${DEFAULT_LINK_LOCAL_HOST}${relative}`; + } + else { + throw new property_provider_1.CredentialsProviderError(`No HTTP credential provider host provided. +Set AWS_CONTAINER_CREDENTIALS_FULL_URI or AWS_CONTAINER_CREDENTIALS_RELATIVE_URI.`, { logger: options.logger }); + } + const url = new URL(host); + (0, checkUrl_1.checkUrl)(url, options.logger); + const requestHandler = new node_http_handler_1.NodeHttpHandler({ + requestTimeout: options.timeout ?? 1000, + connectionTimeout: options.timeout ?? 1000, + }); + return (0, retry_wrapper_1.retryWrapper)(async () => { + const request = (0, requestHelpers_1.createGetRequest)(url); + if (token) { + request.headers.Authorization = token; + } + else if (tokenFile) { + request.headers.Authorization = (await promises_1.default.readFile(tokenFile)).toString(); + } + try { + const result = await requestHandler.handle(request); + return (0, requestHelpers_1.getCredentials)(result.response).then((creds) => (0, client_1.setCredentialFeature)(creds, "CREDENTIALS_HTTP", "z")); + } + catch (e) { + throw new property_provider_1.CredentialsProviderError(String(e), { logger: options.logger }); + } + }, options.maxRetries ?? 3, options.timeout ?? 
1000); +}; +exports.fromHttp = fromHttp; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttpTypes.js b/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttpTypes.js new file mode 100644 index 00000000..c8ad2e54 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttpTypes.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/requestHelpers.js b/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/requestHelpers.js new file mode 100644 index 00000000..5229d794 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/requestHelpers.js @@ -0,0 +1,54 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getCredentials = exports.createGetRequest = void 0; +const property_provider_1 = require("@smithy/property-provider"); +const protocol_http_1 = require("@smithy/protocol-http"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_stream_1 = require("@smithy/util-stream"); +function createGetRequest(url) { + return new protocol_http_1.HttpRequest({ + protocol: url.protocol, + hostname: url.hostname, + port: Number(url.port), + path: url.pathname, + query: Array.from(url.searchParams.entries()).reduce((acc, [k, v]) => { + acc[k] = v; + return acc; + }, {}), + fragment: url.hash, + }); +} +exports.createGetRequest = createGetRequest; +async function getCredentials(response, logger) { + const stream = (0, util_stream_1.sdkStreamMixin)(response.body); + const str = await stream.transformToString(); + if (response.statusCode === 200) { + const parsed = JSON.parse(str); + if (typeof parsed.AccessKeyId !== "string" || + typeof parsed.SecretAccessKey !== "string" || + typeof parsed.Token !== "string" || + typeof parsed.Expiration !== "string") { + throw new property_provider_1.CredentialsProviderError("HTTP credential provider response not of the required format, an object matching: " + + "{ AccessKeyId: string, SecretAccessKey: string, Token: string, Expiration: string(rfc3339) }", { logger }); + } + return { + accessKeyId: parsed.AccessKeyId, + secretAccessKey: parsed.SecretAccessKey, + sessionToken: parsed.Token, + expiration: (0, smithy_client_1.parseRfc3339DateTime)(parsed.Expiration), + }; + } + if (response.statusCode >= 400 && response.statusCode < 500) { + let parsedBody = {}; + try { + parsedBody = JSON.parse(str); + } + catch (e) { } + throw Object.assign(new property_provider_1.CredentialsProviderError(`Server responded with status: ${response.statusCode}`, { logger }), { + Code: parsedBody.Code, + Message: parsedBody.Message, + }); + } + throw new property_provider_1.CredentialsProviderError(`Server responded with status: ${response.statusCode}`, { logger }); +} +exports.getCredentials = getCredentials; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/retry-wrapper.js b/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/retry-wrapper.js new file mode 100644 index 00000000..b99b2efa --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/retry-wrapper.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.retryWrapper = void 0; +const retryWrapper = (toRetry, maxRetries, delayMs) => { + return async () => { + for (let i = 0; i < maxRetries; ++i) { + try { + return 
await toRetry(); + } + catch (e) { + await new Promise((resolve) => setTimeout(resolve, delayMs)); + } + } + return await toRetry(); + }; +}; +exports.retryWrapper = retryWrapper; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.browser.js b/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.browser.js new file mode 100644 index 00000000..9300747a --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.browser.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromHttp = void 0; +var fromHttp_browser_1 = require("./fromHttp/fromHttp.browser"); +Object.defineProperty(exports, "fromHttp", { enumerable: true, get: function () { return fromHttp_browser_1.fromHttp; } }); diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.js b/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.js new file mode 100644 index 00000000..0286ea03 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromHttp = void 0; +var fromHttp_1 = require("./fromHttp/fromHttp"); +Object.defineProperty(exports, "fromHttp", { enumerable: true, get: function () { return fromHttp_1.fromHttp; } }); diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/checkUrl.js b/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/checkUrl.js new file mode 100644 index 00000000..2a42ed78 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/checkUrl.js @@ -0,0 +1,42 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +const LOOPBACK_CIDR_IPv4 = "127.0.0.0/8"; +const LOOPBACK_CIDR_IPv6 = "::1/128"; +const ECS_CONTAINER_HOST = "169.254.170.2"; +const EKS_CONTAINER_HOST_IPv4 = "169.254.170.23"; +const EKS_CONTAINER_HOST_IPv6 = "[fd00:ec2::23]"; +export const checkUrl = (url, logger) => { + if (url.protocol === "https:") { + return; + } + if (url.hostname === ECS_CONTAINER_HOST || + url.hostname === EKS_CONTAINER_HOST_IPv4 || + url.hostname === EKS_CONTAINER_HOST_IPv6) { + return; + } + if (url.hostname.includes("[")) { + if (url.hostname === "[::1]" || url.hostname === "[0000:0000:0000:0000:0000:0000:0000:0001]") { + return; + } + } + else { + if (url.hostname === "localhost") { + return; + } + const ipComponents = url.hostname.split("."); + const inRange = (component) => { + const num = parseInt(component, 10); + return 0 <= num && num <= 255; + }; + if (ipComponents[0] === "127" && + inRange(ipComponents[1]) && + inRange(ipComponents[2]) && + inRange(ipComponents[3]) && + ipComponents.length === 4) { + return; + } + } + throw new CredentialsProviderError(`URL not accepted. 
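The `retryWrapper` above makes up to `maxRetries` guarded attempts, sleeping a fixed `delayMs` after each failure, then one final unguarded attempt whose error (if any) propagates to the caller; with the default of 3 this yields 4 total attempts, matching the `maxRetries` option documentation further down. A small sketch under that reading (the deep import is assumed, as this module is internal):

```ts
import { retryWrapper } from "@aws-sdk/credential-provider-http/dist-cjs/fromHttp/retry-wrapper";

let attempts = 0;
const flaky = async () => {
  attempts += 1;
  if (attempts < 4) throw new Error(`attempt ${attempts} failed`);
  return { accessKeyId: "AKIDEXAMPLE", secretAccessKey: "example-secret" };
};

// Three guarded attempts fail, each followed by a fixed 1000 ms delay;
// the fourth, unguarded attempt succeeds, so the provider resolves after ~3 s.
const provider = retryWrapper(flaky, 3, 1000);
await provider();
```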
It must either be HTTPS or match one of the following: + - loopback CIDR 127.0.0.0/8 or [::1/128] + - ECS container host 169.254.170.2 + - EKS container host 169.254.170.23 or [fd00:ec2::23]`, { logger }); +}; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.browser.js b/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.browser.js new file mode 100644 index 00000000..7189b922 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.browser.js @@ -0,0 +1,27 @@ +import { FetchHttpHandler } from "@smithy/fetch-http-handler"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import { checkUrl } from "./checkUrl"; +import { createGetRequest, getCredentials } from "./requestHelpers"; +import { retryWrapper } from "./retry-wrapper"; +export const fromHttp = (options = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-http - fromHttp"); + let host; + const full = options.credentialsFullUri; + if (full) { + host = full; + } + else { + throw new CredentialsProviderError("No HTTP credential provider host provided.", { logger: options.logger }); + } + const url = new URL(host); + checkUrl(url, options.logger); + const requestHandler = new FetchHttpHandler(); + return retryWrapper(async () => { + const request = createGetRequest(url); + if (options.authorizationToken) { + request.headers.Authorization = options.authorizationToken; + } + const result = await requestHandler.handle(request); + return getCredentials(result.response); + }, options.maxRetries ?? 3, options.timeout ?? 1000); +}; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.js b/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.js new file mode 100644 index 00000000..36dd8a34 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.js @@ -0,0 +1,63 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { NodeHttpHandler } from "@smithy/node-http-handler"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import fs from "fs/promises"; +import { checkUrl } from "./checkUrl"; +import { createGetRequest, getCredentials } from "./requestHelpers"; +import { retryWrapper } from "./retry-wrapper"; +const AWS_CONTAINER_CREDENTIALS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +const DEFAULT_LINK_LOCAL_HOST = "http://169.254.170.2"; +const AWS_CONTAINER_CREDENTIALS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +const AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE = "AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE"; +const AWS_CONTAINER_AUTHORIZATION_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +export const fromHttp = (options = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-http - fromHttp"); + let host; + const relative = options.awsContainerCredentialsRelativeUri ?? process.env[AWS_CONTAINER_CREDENTIALS_RELATIVE_URI]; + const full = options.awsContainerCredentialsFullUri ?? process.env[AWS_CONTAINER_CREDENTIALS_FULL_URI]; + const token = options.awsContainerAuthorizationToken ?? process.env[AWS_CONTAINER_AUTHORIZATION_TOKEN]; + const tokenFile = options.awsContainerAuthorizationTokenFile ?? process.env[AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE]; + const warn = options.logger?.constructor?.name === "NoOpLogger" || !options.logger ? 
console.warn : options.logger.warn; + if (relative && full) { + warn("@aws-sdk/credential-provider-http: " + + "you have set both awsContainerCredentialsRelativeUri and awsContainerCredentialsFullUri."); + warn("awsContainerCredentialsFullUri will take precedence."); + } + if (token && tokenFile) { + warn("@aws-sdk/credential-provider-http: " + + "you have set both awsContainerAuthorizationToken and awsContainerAuthorizationTokenFile."); + warn("awsContainerAuthorizationToken will take precedence."); + } + if (full) { + host = full; + } + else if (relative) { + host = `${DEFAULT_LINK_LOCAL_HOST}${relative}`; + } + else { + throw new CredentialsProviderError(`No HTTP credential provider host provided. +Set AWS_CONTAINER_CREDENTIALS_FULL_URI or AWS_CONTAINER_CREDENTIALS_RELATIVE_URI.`, { logger: options.logger }); + } + const url = new URL(host); + checkUrl(url, options.logger); + const requestHandler = new NodeHttpHandler({ + requestTimeout: options.timeout ?? 1000, + connectionTimeout: options.timeout ?? 1000, + }); + return retryWrapper(async () => { + const request = createGetRequest(url); + if (token) { + request.headers.Authorization = token; + } + else if (tokenFile) { + request.headers.Authorization = (await fs.readFile(tokenFile)).toString(); + } + try { + const result = await requestHandler.handle(request); + return getCredentials(result.response).then((creds) => setCredentialFeature(creds, "CREDENTIALS_HTTP", "z")); + } + catch (e) { + throw new CredentialsProviderError(String(e), { logger: options.logger }); + } + }, options.maxRetries ?? 3, options.timeout ?? 1000); +}; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttpTypes.js b/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttpTypes.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttpTypes.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/requestHelpers.js b/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/requestHelpers.js new file mode 100644 index 00000000..9e271cea --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/requestHelpers.js @@ -0,0 +1,49 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { HttpRequest } from "@smithy/protocol-http"; +import { parseRfc3339DateTime } from "@smithy/smithy-client"; +import { sdkStreamMixin } from "@smithy/util-stream"; +export function createGetRequest(url) { + return new HttpRequest({ + protocol: url.protocol, + hostname: url.hostname, + port: Number(url.port), + path: url.pathname, + query: Array.from(url.searchParams.entries()).reduce((acc, [k, v]) => { + acc[k] = v; + return acc; + }, {}), + fragment: url.hash, + }); +} +export async function getCredentials(response, logger) { + const stream = sdkStreamMixin(response.body); + const str = await stream.transformToString(); + if (response.statusCode === 200) { + const parsed = JSON.parse(str); + if (typeof parsed.AccessKeyId !== "string" || + typeof parsed.SecretAccessKey !== "string" || + typeof parsed.Token !== "string" || + typeof parsed.Expiration !== "string") { + throw new CredentialsProviderError("HTTP credential provider response not of the required format, an object matching: " + + "{ AccessKeyId: string, SecretAccessKey: string, Token: string, Expiration: string(rfc3339) }", { logger }); + } + return { + accessKeyId: parsed.AccessKeyId, + 
secretAccessKey: parsed.SecretAccessKey, + sessionToken: parsed.Token, + expiration: parseRfc3339DateTime(parsed.Expiration), + }; + } + if (response.statusCode >= 400 && response.statusCode < 500) { + let parsedBody = {}; + try { + parsedBody = JSON.parse(str); + } + catch (e) { } + throw Object.assign(new CredentialsProviderError(`Server responded with status: ${response.statusCode}`, { logger }), { + Code: parsedBody.Code, + Message: parsedBody.Message, + }); + } + throw new CredentialsProviderError(`Server responded with status: ${response.statusCode}`, { logger }); +} diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/retry-wrapper.js b/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/retry-wrapper.js new file mode 100644 index 00000000..7006f3c0 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/retry-wrapper.js @@ -0,0 +1,13 @@ +export const retryWrapper = (toRetry, maxRetries, delayMs) => { + return async () => { + for (let i = 0; i < maxRetries; ++i) { + try { + return await toRetry(); + } + catch (e) { + await new Promise((resolve) => setTimeout(resolve, delayMs)); + } + } + return await toRetry(); + }; +}; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-es/index.browser.js b/node_modules/@aws-sdk/credential-provider-http/dist-es/index.browser.js new file mode 100644 index 00000000..98204c57 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-es/index.browser.js @@ -0,0 +1 @@ +export { fromHttp } from "./fromHttp/fromHttp.browser"; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-es/index.js b/node_modules/@aws-sdk/credential-provider-http/dist-es/index.js new file mode 100644 index 00000000..29113862 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-es/index.js @@ -0,0 +1 @@ +export { fromHttp } from "./fromHttp/fromHttp"; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/checkUrl.d.ts b/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/checkUrl.d.ts new file mode 100644 index 00000000..933b12c4 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/checkUrl.d.ts @@ -0,0 +1,9 @@ +import { Logger } from "@smithy/types"; +/** + * @internal + * + * @param url - to be validated. + * @param logger - passed to CredentialsProviderError. + * @throws if not acceptable to this provider. + */ +export declare const checkUrl: (url: URL, logger?: Logger) => void; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.browser.d.ts b/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.browser.d.ts new file mode 100644 index 00000000..cb3a03b7 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.browser.d.ts @@ -0,0 +1,6 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import type { FromHttpOptions } from "./fromHttpTypes"; +/** + * Creates a provider that gets credentials via HTTP request. 
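As `getCredentials` above shows, only a 200 response whose JSON body matches the `HttpProviderCredentials` shape is accepted, with `Expiration` parsed as RFC 3339; on 4xx responses any `Code`/`Message` fields in the body are attached to the thrown error. A hypothetical payload a local credentials endpoint would need to return:

```ts
// Hypothetical response body; the PascalCase field names and the RFC 3339
// Expiration format are what getCredentials validates before mapping to
// { accessKeyId, secretAccessKey, sessionToken, expiration: Date }.
const exampleBody = JSON.stringify({
  AccessKeyId: "AKIDEXAMPLE",
  SecretAccessKey: "example-secret",
  Token: "example-session-token",
  Expiration: "2030-01-01T00:00:00Z",
});
```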
+ */ +export declare const fromHttp: (options?: FromHttpOptions) => AwsCredentialIdentityProvider; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.d.ts b/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.d.ts new file mode 100644 index 00000000..cb3a03b7 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.d.ts @@ -0,0 +1,6 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import type { FromHttpOptions } from "./fromHttpTypes"; +/** + * Creates a provider that gets credentials via HTTP request. + */ +export declare const fromHttp: (options?: FromHttpOptions) => AwsCredentialIdentityProvider; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttpTypes.d.ts b/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttpTypes.d.ts new file mode 100644 index 00000000..b751dedf --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttpTypes.d.ts @@ -0,0 +1,69 @@ +import type { CredentialProviderOptions } from "@aws-sdk/types"; +/** + * @public + * + * Input for the fromHttp function in the HTTP Credentials Provider for Node.js. + */ +export interface FromHttpOptions extends CredentialProviderOptions { + /** + * If this value is provided, it will be used as-is. + * + * For browser environments, use instead {@link credentialsFullUri}. + */ + awsContainerCredentialsFullUri?: string; + /** + * If this value is provided instead of the full URI, it + * will be appended to the default link local host of 169.254.170.2. + * + * Not supported in browsers. + */ + awsContainerCredentialsRelativeUri?: string; + /** + * Will be read on each credentials request to + * add an Authorization request header value. + * + * Not supported in browsers. + */ + awsContainerAuthorizationTokenFile?: string; + /** + * An alternative to awsContainerAuthorizationTokenFile, + * this is the token value itself. + * + * For browser environments, use instead {@link authorizationToken}. + */ + awsContainerAuthorizationToken?: string; + /** + * BROWSER ONLY. + * + * In browsers, a relative URI is not allowed, and a full URI must be provided. + * HTTPS is required. + * + * This value is required for the browser environment. + */ + credentialsFullUri?: string; + /** + * BROWSER ONLY. + * + * Providing this value will set an "Authorization" request + * header value on the GET request. + */ + authorizationToken?: string; + /** + * Default is 3 retry attempts or 4 total attempts. + */ + maxRetries?: number; + /** + * Default is 1000ms. Time in milliseconds to spend waiting between retry attempts. 
+ */ + timeout?: number; +} +/** + * @public + */ +export type HttpProviderCredentials = { + AccessKeyId: string; + SecretAccessKey: string; + Token: string; + AccountId?: string; + Expiration: string; +}; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/requestHelpers.d.ts b/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/requestHelpers.d.ts new file mode 100644 index 00000000..6d1c16e7 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/requestHelpers.d.ts @@ -0,0 +1,11 @@ +import { AwsCredentialIdentity } from "@aws-sdk/types"; +import { HttpRequest } from "@smithy/protocol-http"; +import { HttpResponse, Logger } from "@smithy/types"; +/** + * @internal + */ +export declare function createGetRequest(url: URL): HttpRequest; +/** + * @internal + */ +export declare function getCredentials(response: HttpResponse, logger?: Logger): Promise<AwsCredentialIdentity>; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/retry-wrapper.d.ts b/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/retry-wrapper.d.ts new file mode 100644 index 00000000..bf63addd --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/retry-wrapper.d.ts @@ -0,0 +1,10 @@ +/** + * @internal + */ +export interface RetryableProvider<T> { + (): Promise<T>; +} +/** + * @internal + */ +export declare const retryWrapper: <T>(toRetry: RetryableProvider<T>, maxRetries: number, delayMs: number) => RetryableProvider<T>; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-types/index.browser.d.ts b/node_modules/@aws-sdk/credential-provider-http/dist-types/index.browser.d.ts new file mode 100644 index 00000000..2a9e4eca --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-types/index.browser.d.ts @@ -0,0 +1,2 @@ +export { fromHttp } from "./fromHttp/fromHttp.browser"; +export type { FromHttpOptions, HttpProviderCredentials } from "./fromHttp/fromHttpTypes"; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-types/index.d.ts b/node_modules/@aws-sdk/credential-provider-http/dist-types/index.d.ts new file mode 100644 index 00000000..b1e99857 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export { fromHttp } from "./fromHttp/fromHttp"; +export type { FromHttpOptions, HttpProviderCredentials } from "./fromHttp/fromHttpTypes"; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/checkUrl.d.ts b/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/checkUrl.d.ts new file mode 100644 index 00000000..9f518b0d --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/checkUrl.d.ts @@ -0,0 +1,2 @@ +import { Logger } from "@smithy/types"; +export declare const checkUrl: (url: URL, logger?: Logger) => void; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.browser.d.ts b/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.browser.d.ts new file mode 100644 index 00000000..00f1506f --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.browser.d.ts @@ -0,0 +1,5 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import { FromHttpOptions } from "./fromHttpTypes"; +export declare const fromHttp: ( + options?: FromHttpOptions +) => AwsCredentialIdentityProvider;
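For the container flow implemented in fromHttp.js above, a hedged wiring sketch; the relative URI is a placeholder and is appended to the default link-local host http://169.254.170.2, as with ECS task roles:

```ts
import { fromHttp } from "@aws-sdk/credential-provider-http";

// Placeholder endpoint; on ECS this variable is injected by the agent.
process.env.AWS_CONTAINER_CREDENTIALS_RELATIVE_URI = "/v2/credentials/example";

const provider = fromHttp({
  maxRetries: 3, // default: 3 retries, i.e. 4 total attempts
  timeout: 1000, // default: 1000 ms request timeout and retry delay
});
const credentials = await provider();
```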
diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.d.ts b/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.d.ts new file mode 100644 index 00000000..00f1506f --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.d.ts @@ -0,0 +1,5 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import { FromHttpOptions } from "./fromHttpTypes"; +export declare const fromHttp: ( + options?: FromHttpOptions +) => AwsCredentialIdentityProvider; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttpTypes.d.ts b/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttpTypes.d.ts new file mode 100644 index 00000000..767b6b0b --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttpTypes.d.ts @@ -0,0 +1,18 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +export interface FromHttpOptions extends CredentialProviderOptions { + awsContainerCredentialsFullUri?: string; + awsContainerCredentialsRelativeUri?: string; + awsContainerAuthorizationTokenFile?: string; + awsContainerAuthorizationToken?: string; + credentialsFullUri?: string; + authorizationToken?: string; + maxRetries?: number; + timeout?: number; +} +export type HttpProviderCredentials = { + AccessKeyId: string; + SecretAccessKey: string; + Token: string; + AccountId?: string; + Expiration: string; +}; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/requestHelpers.d.ts b/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/requestHelpers.d.ts new file mode 100644 index 00000000..68a3285f --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/requestHelpers.d.ts @@ -0,0 +1,8 @@ +import { AwsCredentialIdentity } from "@aws-sdk/types"; +import { HttpRequest } from "@smithy/protocol-http"; +import { HttpResponse, Logger } from "@smithy/types"; +export declare function createGetRequest(url: URL): HttpRequest; +export declare function getCredentials( + response: HttpResponse, + logger?: Logger +): Promise<AwsCredentialIdentity>; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/retry-wrapper.d.ts b/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/retry-wrapper.d.ts new file mode 100644 index 00000000..f992038a --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/retry-wrapper.d.ts @@ -0,0 +1,8 @@ +export interface RetryableProvider<T> { + (): Promise<T>; +} +export declare const retryWrapper: <T>( + toRetry: RetryableProvider<T>, + maxRetries: number, + delayMs: number +) => RetryableProvider<T>; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.browser.d.ts b/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.browser.d.ts new file mode 100644 index 00000000..40696b90 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.browser.d.ts @@ -0,0 +1,5 @@ +export { fromHttp } from "./fromHttp/fromHttp.browser"; +export { + FromHttpOptions, + HttpProviderCredentials, +} from "./fromHttp/fromHttpTypes"; diff --git a/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..560256f7 --- /dev/null +++
b/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +export { fromHttp } from "./fromHttp/fromHttp"; +export { + FromHttpOptions, + HttpProviderCredentials, +} from "./fromHttp/fromHttpTypes"; diff --git a/node_modules/@aws-sdk/credential-provider-http/package.json b/node_modules/@aws-sdk/credential-provider-http/package.json new file mode 100644 index 00000000..2ad154b8 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-http/package.json @@ -0,0 +1,69 @@ +{ + "name": "@aws-sdk/credential-provider-http", + "version": "3.799.0", + "description": "AWS credential provider for containers and HTTP sources", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "browser": "./dist-es/index.browser.js", + "react-native": "./dist-es/index.browser.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-http", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-http", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-http" + } +} diff --git a/node_modules/@aws-sdk/credential-provider-ini/LICENSE b/node_modules/@aws-sdk/credential-provider-ini/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/node_modules/@aws-sdk/credential-provider-ini/README.md b/node_modules/@aws-sdk/credential-provider-ini/README.md new file mode 100644 index 00000000..b4f3af1b --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/README.md @@ -0,0 +1,11 @@ +# @aws-sdk/credential-provider-ini + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-ini/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-ini) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-ini.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-ini) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@aws-sdk/credential-providers](https://www.npmjs.com/package/@aws-sdk/credential-providers) +instead. diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-cjs/index.js b/node_modules/@aws-sdk/credential-provider-ini/dist-cjs/index.js new file mode 100644 index 00000000..e9b60495 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-cjs/index.js @@ -0,0 +1,276 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + fromIni: () => fromIni +}); +module.exports = __toCommonJS(index_exports); + +// src/fromIni.ts + + +// src/resolveProfileData.ts + + +// src/resolveAssumeRoleCredentials.ts + + +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); + +// src/resolveCredentialSource.ts +var import_client = require("@aws-sdk/core/client"); +var import_property_provider = require("@smithy/property-provider"); +var resolveCredentialSource = /* @__PURE__ */ __name((credentialSource, profileName, logger) => { + const sourceProvidersMap = { + EcsContainer: /* @__PURE__ */ __name(async (options) => { + const { fromHttp } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-http"))); + const { fromContainerMetadata } = await Promise.resolve().then(() => __toESM(require("@smithy/credential-provider-imds"))); + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is EcsContainer"); + return async () => (0, import_property_provider.chain)(fromHttp(options ?? {}), fromContainerMetadata(options))().then(setNamedProvider); + }, "EcsContainer"), + Ec2InstanceMetadata: /* @__PURE__ */ __name(async (options) => { + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is Ec2InstanceMetadata"); + const { fromInstanceMetadata } = await Promise.resolve().then(() => __toESM(require("@smithy/credential-provider-imds"))); + return async () => fromInstanceMetadata(options)().then(setNamedProvider); + }, "Ec2InstanceMetadata"), + Environment: /* @__PURE__ */ __name(async (options) => { + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is Environment"); + const { fromEnv } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-env"))); + return async () => fromEnv(options)().then(setNamedProvider); + }, "Environment") + }; + if (credentialSource in sourceProvidersMap) { + return sourceProvidersMap[credentialSource]; + } else { + throw new import_property_provider.CredentialsProviderError( + `Unsupported credential source in profile ${profileName}. 
Got ${credentialSource}, expected EcsContainer or Ec2InstanceMetadata or Environment.`, + { logger } + ); + } +}, "resolveCredentialSource"); +var setNamedProvider = /* @__PURE__ */ __name((creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_NAMED_PROVIDER", "p"), "setNamedProvider"); + +// src/resolveAssumeRoleCredentials.ts +var isAssumeRoleProfile = /* @__PURE__ */ __name((arg, { profile = "default", logger } = {}) => { + return Boolean(arg) && typeof arg === "object" && typeof arg.role_arn === "string" && ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1 && ["undefined", "string"].indexOf(typeof arg.external_id) > -1 && ["undefined", "string"].indexOf(typeof arg.mfa_serial) > -1 && (isAssumeRoleWithSourceProfile(arg, { profile, logger }) || isCredentialSourceProfile(arg, { profile, logger })); +}, "isAssumeRoleProfile"); +var isAssumeRoleWithSourceProfile = /* @__PURE__ */ __name((arg, { profile, logger }) => { + const withSourceProfile = typeof arg.source_profile === "string" && typeof arg.credential_source === "undefined"; + if (withSourceProfile) { + logger?.debug?.(` ${profile} isAssumeRoleWithSourceProfile source_profile=${arg.source_profile}`); + } + return withSourceProfile; +}, "isAssumeRoleWithSourceProfile"); +var isCredentialSourceProfile = /* @__PURE__ */ __name((arg, { profile, logger }) => { + const withProviderProfile = typeof arg.credential_source === "string" && typeof arg.source_profile === "undefined"; + if (withProviderProfile) { + logger?.debug?.(` ${profile} isCredentialSourceProfile credential_source=${arg.credential_source}`); + } + return withProviderProfile; +}, "isCredentialSourceProfile"); +var resolveAssumeRoleCredentials = /* @__PURE__ */ __name(async (profileName, profiles, options, visitedProfiles = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-ini - resolveAssumeRoleCredentials (STS)"); + const profileData = profiles[profileName]; + const { source_profile, region } = profileData; + if (!options.roleAssumer) { + const { getDefaultRoleAssumer } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/nested-clients/sts"))); + options.roleAssumer = getDefaultRoleAssumer( + { + ...options.clientConfig, + credentialProviderLogger: options.logger, + parentClientConfig: { + ...options?.parentClientConfig, + region: region ?? options?.parentClientConfig?.region + } + }, + options.clientPlugins + ); + } + if (source_profile && source_profile in visitedProfiles) { + throw new import_property_provider.CredentialsProviderError( + `Detected a cycle attempting to resolve credentials for profile ${(0, import_shared_ini_file_loader.getProfileName)(options)}. Profiles visited: ` + Object.keys(visitedProfiles).join(", "), + { logger: options.logger } + ); + } + options.logger?.debug( + `@aws-sdk/credential-provider-ini - finding credential resolver using ${source_profile ? `source_profile=[${source_profile}]` : `profile=[${profileName}]`}` + ); + const sourceCredsProvider = source_profile ? resolveProfileData( + source_profile, + profiles, + options, + { + ...visitedProfiles, + [source_profile]: true + }, + isCredentialSourceWithoutRoleArn(profiles[source_profile] ?? 
{}) + ) : (await resolveCredentialSource(profileData.credential_source, profileName, options.logger)(options))(); + if (isCredentialSourceWithoutRoleArn(profileData)) { + return sourceCredsProvider.then((creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_SOURCE_PROFILE", "o")); + } else { + const params = { + RoleArn: profileData.role_arn, + RoleSessionName: profileData.role_session_name || `aws-sdk-js-${Date.now()}`, + ExternalId: profileData.external_id, + DurationSeconds: parseInt(profileData.duration_seconds || "3600", 10) + }; + const { mfa_serial } = profileData; + if (mfa_serial) { + if (!options.mfaCodeProvider) { + throw new import_property_provider.CredentialsProviderError( + `Profile ${profileName} requires multi-factor authentication, but no MFA code callback was provided.`, + { logger: options.logger, tryNextLink: false } + ); + } + params.SerialNumber = mfa_serial; + params.TokenCode = await options.mfaCodeProvider(mfa_serial); + } + const sourceCreds = await sourceCredsProvider; + return options.roleAssumer(sourceCreds, params).then( + (creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_SOURCE_PROFILE", "o") + ); + } +}, "resolveAssumeRoleCredentials"); +var isCredentialSourceWithoutRoleArn = /* @__PURE__ */ __name((section) => { + return !section.role_arn && !!section.credential_source; +}, "isCredentialSourceWithoutRoleArn"); + +// src/resolveProcessCredentials.ts + +var isProcessProfile = /* @__PURE__ */ __name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.credential_process === "string", "isProcessProfile"); +var resolveProcessCredentials = /* @__PURE__ */ __name(async (options, profile) => Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-process"))).then( + ({ fromProcess }) => fromProcess({ + ...options, + profile + })().then((creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_PROCESS", "v")) +), "resolveProcessCredentials"); + +// src/resolveSsoCredentials.ts + +var resolveSsoCredentials = /* @__PURE__ */ __name(async (profile, profileData, options = {}) => { + const { fromSSO } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-sso"))); + return fromSSO({ + profile, + logger: options.logger, + parentClientConfig: options.parentClientConfig, + clientConfig: options.clientConfig + })().then((creds) => { + if (profileData.sso_session) { + return (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_SSO", "r"); + } else { + return (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_SSO_LEGACY", "t"); + } + }); +}, "resolveSsoCredentials"); +var isSsoProfile = /* @__PURE__ */ __name((arg) => arg && (typeof arg.sso_start_url === "string" || typeof arg.sso_account_id === "string" || typeof arg.sso_session === "string" || typeof arg.sso_region === "string" || typeof arg.sso_role_name === "string"), "isSsoProfile"); + +// src/resolveStaticCredentials.ts + +var isStaticCredsProfile = /* @__PURE__ */ __name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.aws_access_key_id === "string" && typeof arg.aws_secret_access_key === "string" && ["undefined", "string"].indexOf(typeof arg.aws_session_token) > -1 && ["undefined", "string"].indexOf(typeof arg.aws_account_id) > -1, "isStaticCredsProfile"); +var resolveStaticCredentials = /* @__PURE__ */ __name(async (profile, options) => { + options?.logger?.debug("@aws-sdk/credential-provider-ini - resolveStaticCredentials"); + const 
credentials = { + accessKeyId: profile.aws_access_key_id, + secretAccessKey: profile.aws_secret_access_key, + sessionToken: profile.aws_session_token, + ...profile.aws_credential_scope && { credentialScope: profile.aws_credential_scope }, + ...profile.aws_account_id && { accountId: profile.aws_account_id } + }; + return (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_PROFILE", "n"); +}, "resolveStaticCredentials"); + +// src/resolveWebIdentityCredentials.ts + +var isWebIdentityProfile = /* @__PURE__ */ __name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.web_identity_token_file === "string" && typeof arg.role_arn === "string" && ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1, "isWebIdentityProfile"); +var resolveWebIdentityCredentials = /* @__PURE__ */ __name(async (profile, options) => Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-web-identity"))).then( + ({ fromTokenFile }) => fromTokenFile({ + webIdentityTokenFile: profile.web_identity_token_file, + roleArn: profile.role_arn, + roleSessionName: profile.role_session_name, + roleAssumerWithWebIdentity: options.roleAssumerWithWebIdentity, + logger: options.logger, + parentClientConfig: options.parentClientConfig + })().then((creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_STS_WEB_ID_TOKEN", "q")) +), "resolveWebIdentityCredentials"); + +// src/resolveProfileData.ts +var resolveProfileData = /* @__PURE__ */ __name(async (profileName, profiles, options, visitedProfiles = {}, isAssumeRoleRecursiveCall = false) => { + const data = profiles[profileName]; + if (Object.keys(visitedProfiles).length > 0 && isStaticCredsProfile(data)) { + return resolveStaticCredentials(data, options); + } + if (isAssumeRoleRecursiveCall || isAssumeRoleProfile(data, { profile: profileName, logger: options.logger })) { + return resolveAssumeRoleCredentials(profileName, profiles, options, visitedProfiles); + } + if (isStaticCredsProfile(data)) { + return resolveStaticCredentials(data, options); + } + if (isWebIdentityProfile(data)) { + return resolveWebIdentityCredentials(data, options); + } + if (isProcessProfile(data)) { + return resolveProcessCredentials(options, profileName); + } + if (isSsoProfile(data)) { + return await resolveSsoCredentials(profileName, data, options); + } + throw new import_property_provider.CredentialsProviderError( + `Could not resolve credentials using profile: [${profileName}] in configuration/credentials file(s).`, + { logger: options.logger } + ); +}, "resolveProfileData"); + +// src/fromIni.ts +var fromIni = /* @__PURE__ */ __name((_init = {}) => async ({ callerClientConfig } = {}) => { + const init = { + ..._init, + parentClientConfig: { + ...callerClientConfig, + ..._init.parentClientConfig + } + }; + init.logger?.debug("@aws-sdk/credential-provider-ini - fromIni"); + const profiles = await (0, import_shared_ini_file_loader.parseKnownFiles)(init); + return resolveProfileData( + (0, import_shared_ini_file_loader.getProfileName)({ + profile: _init.profile ?? 
callerClientConfig?.profile + }), + profiles, + init + ); +}, "fromIni"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromIni +}); + diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-es/fromIni.js b/node_modules/@aws-sdk/credential-provider-ini/dist-es/fromIni.js new file mode 100644 index 00000000..ccf03972 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-es/fromIni.js @@ -0,0 +1,16 @@ +import { getProfileName, parseKnownFiles } from "@smithy/shared-ini-file-loader"; +import { resolveProfileData } from "./resolveProfileData"; +export const fromIni = (_init = {}) => async ({ callerClientConfig } = {}) => { + const init = { + ..._init, + parentClientConfig: { + ...callerClientConfig, + ..._init.parentClientConfig, + }, + }; + init.logger?.debug("@aws-sdk/credential-provider-ini - fromIni"); + const profiles = await parseKnownFiles(init); + return resolveProfileData(getProfileName({ + profile: _init.profile ?? callerClientConfig?.profile, + }), profiles, init); +}; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-es/index.js b/node_modules/@aws-sdk/credential-provider-ini/dist-es/index.js new file mode 100644 index 00000000..b0191315 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-es/index.js @@ -0,0 +1 @@ +export * from "./fromIni"; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveAssumeRoleCredentials.js b/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveAssumeRoleCredentials.js new file mode 100644 index 00000000..14113180 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveAssumeRoleCredentials.js @@ -0,0 +1,80 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import { getProfileName } from "@smithy/shared-ini-file-loader"; +import { resolveCredentialSource } from "./resolveCredentialSource"; +import { resolveProfileData } from "./resolveProfileData"; +export const isAssumeRoleProfile = (arg, { profile = "default", logger } = {}) => { + return (Boolean(arg) && + typeof arg === "object" && + typeof arg.role_arn === "string" && + ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1 && + ["undefined", "string"].indexOf(typeof arg.external_id) > -1 && + ["undefined", "string"].indexOf(typeof arg.mfa_serial) > -1 && + (isAssumeRoleWithSourceProfile(arg, { profile, logger }) || isCredentialSourceProfile(arg, { profile, logger }))); +}; +const isAssumeRoleWithSourceProfile = (arg, { profile, logger }) => { + const withSourceProfile = typeof arg.source_profile === "string" && typeof arg.credential_source === "undefined"; + if (withSourceProfile) { + logger?.debug?.(` ${profile} isAssumeRoleWithSourceProfile source_profile=${arg.source_profile}`); + } + return withSourceProfile; +}; +const isCredentialSourceProfile = (arg, { profile, logger }) => { + const withProviderProfile = typeof arg.credential_source === "string" && typeof arg.source_profile === "undefined"; + if (withProviderProfile) { + logger?.debug?.(` ${profile} isCredentialSourceProfile credential_source=${arg.credential_source}`); + } + return withProviderProfile; +}; +export const resolveAssumeRoleCredentials = async (profileName, profiles, options, visitedProfiles = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-ini - resolveAssumeRoleCredentials (STS)"); + const profileData = profiles[profileName]; + const { source_profile, region 
} = profileData; + if (!options.roleAssumer) { + const { getDefaultRoleAssumer } = await import("@aws-sdk/nested-clients/sts"); + options.roleAssumer = getDefaultRoleAssumer({ + ...options.clientConfig, + credentialProviderLogger: options.logger, + parentClientConfig: { + ...options?.parentClientConfig, + region: region ?? options?.parentClientConfig?.region, + }, + }, options.clientPlugins); + } + if (source_profile && source_profile in visitedProfiles) { + throw new CredentialsProviderError(`Detected a cycle attempting to resolve credentials for profile` + + ` ${getProfileName(options)}. Profiles visited: ` + + Object.keys(visitedProfiles).join(", "), { logger: options.logger }); + } + options.logger?.debug(`@aws-sdk/credential-provider-ini - finding credential resolver using ${source_profile ? `source_profile=[${source_profile}]` : `profile=[${profileName}]`}`); + const sourceCredsProvider = source_profile + ? resolveProfileData(source_profile, profiles, options, { + ...visitedProfiles, + [source_profile]: true, + }, isCredentialSourceWithoutRoleArn(profiles[source_profile] ?? {})) + : (await resolveCredentialSource(profileData.credential_source, profileName, options.logger)(options))(); + if (isCredentialSourceWithoutRoleArn(profileData)) { + return sourceCredsProvider.then((creds) => setCredentialFeature(creds, "CREDENTIALS_PROFILE_SOURCE_PROFILE", "o")); + } + else { + const params = { + RoleArn: profileData.role_arn, + RoleSessionName: profileData.role_session_name || `aws-sdk-js-${Date.now()}`, + ExternalId: profileData.external_id, + DurationSeconds: parseInt(profileData.duration_seconds || "3600", 10), + }; + const { mfa_serial } = profileData; + if (mfa_serial) { + if (!options.mfaCodeProvider) { + throw new CredentialsProviderError(`Profile ${profileName} requires multi-factor authentication, but no MFA code callback was provided.`, { logger: options.logger, tryNextLink: false }); + } + params.SerialNumber = mfa_serial; + params.TokenCode = await options.mfaCodeProvider(mfa_serial); + } + const sourceCreds = await sourceCredsProvider; + return options.roleAssumer(sourceCreds, params).then((creds) => setCredentialFeature(creds, "CREDENTIALS_PROFILE_SOURCE_PROFILE", "o")); + } +}; +const isCredentialSourceWithoutRoleArn = (section) => { + return !section.role_arn && !!section.credential_source; +}; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveCredentialSource.js b/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveCredentialSource.js new file mode 100644 index 00000000..b004933e --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveCredentialSource.js @@ -0,0 +1,30 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { chain, CredentialsProviderError } from "@smithy/property-provider"; +export const resolveCredentialSource = (credentialSource, profileName, logger) => { + const sourceProvidersMap = { + EcsContainer: async (options) => { + const { fromHttp } = await import("@aws-sdk/credential-provider-http"); + const { fromContainerMetadata } = await import("@smithy/credential-provider-imds"); + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is EcsContainer"); + return async () => chain(fromHttp(options ?? 
{}), fromContainerMetadata(options))().then(setNamedProvider); + }, + Ec2InstanceMetadata: async (options) => { + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is Ec2InstanceMetadata"); + const { fromInstanceMetadata } = await import("@smithy/credential-provider-imds"); + return async () => fromInstanceMetadata(options)().then(setNamedProvider); + }, + Environment: async (options) => { + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is Environment"); + const { fromEnv } = await import("@aws-sdk/credential-provider-env"); + return async () => fromEnv(options)().then(setNamedProvider); + }, + }; + if (credentialSource in sourceProvidersMap) { + return sourceProvidersMap[credentialSource]; + } + else { + throw new CredentialsProviderError(`Unsupported credential source in profile ${profileName}. Got ${credentialSource}, ` + + `expected EcsContainer or Ec2InstanceMetadata or Environment.`, { logger }); + } +}; +const setNamedProvider = (creds) => setCredentialFeature(creds, "CREDENTIALS_PROFILE_NAMED_PROVIDER", "p"); diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProcessCredentials.js b/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProcessCredentials.js new file mode 100644 index 00000000..5a9f9753 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProcessCredentials.js @@ -0,0 +1,6 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +export const isProcessProfile = (arg) => Boolean(arg) && typeof arg === "object" && typeof arg.credential_process === "string"; +export const resolveProcessCredentials = async (options, profile) => import("@aws-sdk/credential-provider-process").then(({ fromProcess }) => fromProcess({ + ...options, + profile, +})().then((creds) => setCredentialFeature(creds, "CREDENTIALS_PROFILE_PROCESS", "v"))); diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProfileData.js b/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProfileData.js new file mode 100644 index 00000000..3e64e9e1 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProfileData.js @@ -0,0 +1,28 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { isAssumeRoleProfile, resolveAssumeRoleCredentials } from "./resolveAssumeRoleCredentials"; +import { isProcessProfile, resolveProcessCredentials } from "./resolveProcessCredentials"; +import { isSsoProfile, resolveSsoCredentials } from "./resolveSsoCredentials"; +import { isStaticCredsProfile, resolveStaticCredentials } from "./resolveStaticCredentials"; +import { isWebIdentityProfile, resolveWebIdentityCredentials } from "./resolveWebIdentityCredentials"; +export const resolveProfileData = async (profileName, profiles, options, visitedProfiles = {}, isAssumeRoleRecursiveCall = false) => { + const data = profiles[profileName]; + if (Object.keys(visitedProfiles).length > 0 && isStaticCredsProfile(data)) { + return resolveStaticCredentials(data, options); + } + if (isAssumeRoleRecursiveCall || isAssumeRoleProfile(data, { profile: profileName, logger: options.logger })) { + return resolveAssumeRoleCredentials(profileName, profiles, options, visitedProfiles); + } + if (isStaticCredsProfile(data)) { + return resolveStaticCredentials(data, options); + } + if (isWebIdentityProfile(data)) { + return resolveWebIdentityCredentials(data, options); + } + if (isProcessProfile(data)) { + return resolveProcessCredentials(options, profileName); + } + if 
(isSsoProfile(data)) { + return await resolveSsoCredentials(profileName, data, options); + } + throw new CredentialsProviderError(`Could not resolve credentials using profile: [${profileName}] in configuration/credentials file(s).`, { logger: options.logger }); +}; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveSsoCredentials.js b/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveSsoCredentials.js new file mode 100644 index 00000000..5da74da1 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveSsoCredentials.js @@ -0,0 +1,23 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +export const resolveSsoCredentials = async (profile, profileData, options = {}) => { + const { fromSSO } = await import("@aws-sdk/credential-provider-sso"); + return fromSSO({ + profile, + logger: options.logger, + parentClientConfig: options.parentClientConfig, + clientConfig: options.clientConfig, + })().then((creds) => { + if (profileData.sso_session) { + return setCredentialFeature(creds, "CREDENTIALS_PROFILE_SSO", "r"); + } + else { + return setCredentialFeature(creds, "CREDENTIALS_PROFILE_SSO_LEGACY", "t"); + } + }); +}; +export const isSsoProfile = (arg) => arg && + (typeof arg.sso_start_url === "string" || + typeof arg.sso_account_id === "string" || + typeof arg.sso_session === "string" || + typeof arg.sso_region === "string" || + typeof arg.sso_role_name === "string"); diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveStaticCredentials.js b/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveStaticCredentials.js new file mode 100644 index 00000000..c04435fa --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveStaticCredentials.js @@ -0,0 +1,18 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +export const isStaticCredsProfile = (arg) => Boolean(arg) && + typeof arg === "object" && + typeof arg.aws_access_key_id === "string" && + typeof arg.aws_secret_access_key === "string" && + ["undefined", "string"].indexOf(typeof arg.aws_session_token) > -1 && + ["undefined", "string"].indexOf(typeof arg.aws_account_id) > -1; +export const resolveStaticCredentials = async (profile, options) => { + options?.logger?.debug("@aws-sdk/credential-provider-ini - resolveStaticCredentials"); + const credentials = { + accessKeyId: profile.aws_access_key_id, + secretAccessKey: profile.aws_secret_access_key, + sessionToken: profile.aws_session_token, + ...(profile.aws_credential_scope && { credentialScope: profile.aws_credential_scope }), + ...(profile.aws_account_id && { accountId: profile.aws_account_id }), + }; + return setCredentialFeature(credentials, "CREDENTIALS_PROFILE", "n"); +}; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveWebIdentityCredentials.js b/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveWebIdentityCredentials.js new file mode 100644 index 00000000..10adfe76 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveWebIdentityCredentials.js @@ -0,0 +1,14 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +export const isWebIdentityProfile = (arg) => Boolean(arg) && + typeof arg === "object" && + typeof arg.web_identity_token_file === "string" && + typeof arg.role_arn === "string" && + ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1; +export const resolveWebIdentityCredentials = async (profile, options) => 
import("@aws-sdk/credential-provider-web-identity").then(({ fromTokenFile }) => fromTokenFile({ + webIdentityTokenFile: profile.web_identity_token_file, + roleArn: profile.role_arn, + roleSessionName: profile.role_session_name, + roleAssumerWithWebIdentity: options.roleAssumerWithWebIdentity, + logger: options.logger, + parentClientConfig: options.parentClientConfig, +})().then((creds) => setCredentialFeature(creds, "CREDENTIALS_PROFILE_STS_WEB_ID_TOKEN", "q"))); diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-types/fromIni.d.ts b/node_modules/@aws-sdk/credential-provider-ini/dist-types/fromIni.d.ts new file mode 100644 index 00000000..5554125e --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-types/fromIni.d.ts @@ -0,0 +1,55 @@ +import type { AssumeRoleWithWebIdentityParams } from "@aws-sdk/credential-provider-web-identity"; +import type { CredentialProviderOptions } from "@aws-sdk/types"; +import type { RuntimeConfigAwsCredentialIdentityProvider } from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import type { AwsCredentialIdentity, Pluggable } from "@smithy/types"; +import { AssumeRoleParams } from "./resolveAssumeRoleCredentials"; +/** + * @public + */ +export interface FromIniInit extends SourceProfileInit, CredentialProviderOptions { + /** + * A function that returns a promise fulfilled with an MFA token code for + * the provided MFA Serial code. If a profile requires an MFA code and + * `mfaCodeProvider` is not a valid function, the credential provider + * promise will be rejected. + * + * @param mfaSerial The serial code of the MFA device specified. + */ + mfaCodeProvider?: (mfaSerial: string) => Promise; + /** + * A function that assumes a role and returns a promise fulfilled with + * credentials for the assumed role. + * + * @param sourceCreds The credentials with which to assume a role. + * @param params + */ + roleAssumer?: (sourceCreds: AwsCredentialIdentity, params: AssumeRoleParams) => Promise; + /** + * A function that assumes a role with web identity and returns a promise fulfilled with + * credentials for the assumed role. + * + * @param sourceCreds The credentials with which to assume a role. + * @param params + */ + roleAssumerWithWebIdentity?: (params: AssumeRoleWithWebIdentityParams) => Promise; + /** + * STSClientConfig or SSOClientConfig to be used for creating inner client + * for auth operations. + * @internal + */ + clientConfig?: any; + clientPlugins?: Pluggable[]; + /** + * When true, always reload credentials from the file system instead of using cached values. + * This is useful when you need to detect changes to the credentials file. + */ + ignoreCache?: boolean; +} +/** + * @internal + * + * Creates a credential provider that will read from ini files and supports + * role assumption and multi-factor authentication. 
+ */ +export declare const fromIni: (_init?: FromIniInit) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-types/index.d.ts b/node_modules/@aws-sdk/credential-provider-ini/dist-types/index.d.ts new file mode 100644 index 00000000..75680c07 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./fromIni"; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveAssumeRoleCredentials.d.ts b/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveAssumeRoleCredentials.d.ts new file mode 100644 index 00000000..dd9a8969 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveAssumeRoleCredentials.d.ts @@ -0,0 +1,47 @@ +import { Logger, ParsedIniData } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +/** + * @internal + * + * @see http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/STS.html#assumeRole-property + * TODO update the above to link to V3 docs + */ +export interface AssumeRoleParams { + /** + * The identifier of the role to be assumed. + */ + RoleArn: string; + /** + * A name for the assumed role session. + */ + RoleSessionName: string; + /** + * A unique identifier that is used by third parties when assuming roles in + * their customers' accounts. + */ + ExternalId?: string; + /** + * The identification number of the MFA device that is associated with the + * user who is making the `AssumeRole` call. + */ + SerialNumber?: string; + /** + * The value provided by the MFA device. + */ + TokenCode?: string; + /** + * The duration, in seconds, of the role session. + */ + DurationSeconds?: number; +} +/** + * @internal + */ +export declare const isAssumeRoleProfile: (arg: any, { profile, logger }?: { + profile?: string | undefined; + logger?: Logger | undefined; +}) => boolean; +/** + * @internal + */ +export declare const resolveAssumeRoleCredentials: (profileName: string, profiles: ParsedIniData, options: FromIniInit, visitedProfiles?: Record<string, true>) => Promise<import("@smithy/types").AwsCredentialIdentity>; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveCredentialSource.d.ts b/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveCredentialSource.d.ts new file mode 100644 index 00000000..6f1c9b70 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveCredentialSource.d.ts @@ -0,0 +1,12 @@ +import type { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentityProvider, Logger } from "@smithy/types"; +/** + * @internal + * + * Resolve the `credential_source` entry from the profile, and return the + * credential providers respectively. No memoization is needed for the + * credential source providers because memoization should be added outside the + * fromIni() provider. The source credential needs to be refreshed every time + * fromIni() is called.
+ */ +export declare const resolveCredentialSource: (credentialSource: string, profileName: string, logger?: Logger) => (options?: CredentialProviderOptions) => Promise<AwsCredentialIdentityProvider>; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProcessCredentials.d.ts b/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProcessCredentials.d.ts new file mode 100644 index 00000000..71945187 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProcessCredentials.d.ts @@ -0,0 +1,16 @@ +import { Credentials, Profile } from "@aws-sdk/types"; +import { FromIniInit } from "./fromIni"; +/** + * @internal + */ +export interface ProcessProfile extends Profile { + credential_process: string; +} +/** + * @internal + */ +export declare const isProcessProfile: (arg: any) => arg is ProcessProfile; +/** + * @internal + */ +export declare const resolveProcessCredentials: (options: FromIniInit, profile: string) => Promise<Credentials>; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProfileData.d.ts b/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProfileData.d.ts new file mode 100644 index 00000000..e59ca936 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProfileData.d.ts @@ -0,0 +1,6 @@ +import type { AwsCredentialIdentity, ParsedIniData } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +/** + * @internal + */ +export declare const resolveProfileData: (profileName: string, profiles: ParsedIniData, options: FromIniInit, visitedProfiles?: Record<string, true>, isAssumeRoleRecursiveCall?: boolean) => Promise<AwsCredentialIdentity>; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveSsoCredentials.d.ts b/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveSsoCredentials.d.ts new file mode 100644 index 00000000..1909a510 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveSsoCredentials.d.ts @@ -0,0 +1,12 @@ +import type { SsoProfile } from "@aws-sdk/credential-provider-sso"; +import type { IniSection, Profile } from "@smithy/types"; +import type { FromIniInit } from "./fromIni"; +/** + * @internal + */ +export declare const resolveSsoCredentials: (profile: string, profileData: IniSection, options?: FromIniInit) => Promise<import("@smithy/types").AwsCredentialIdentity>; +/** + * @internal + * duplicated from \@aws-sdk/credential-provider-sso to defer import.
+ */ +export declare const isSsoProfile: (arg: Profile) => arg is Partial<SsoProfile>; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveStaticCredentials.d.ts b/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveStaticCredentials.d.ts new file mode 100644 index 00000000..e04cf26d --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveStaticCredentials.d.ts @@ -0,0 +1,20 @@ +import { AwsCredentialIdentity, Profile } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +/** + * @internal + */ +export interface StaticCredsProfile extends Profile { + aws_access_key_id: string; + aws_secret_access_key: string; + aws_session_token?: string; + aws_credential_scope?: string; + aws_account_id?: string; +} +/** + * @internal + */ +export declare const isStaticCredsProfile: (arg: any) => arg is StaticCredsProfile; +/** + * @internal + */ +export declare const resolveStaticCredentials: (profile: StaticCredsProfile, options?: FromIniInit) => Promise<AwsCredentialIdentity>; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveWebIdentityCredentials.d.ts b/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveWebIdentityCredentials.d.ts new file mode 100644 index 00000000..acb1d454 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveWebIdentityCredentials.d.ts @@ -0,0 +1,18 @@ +import { AwsCredentialIdentity, Profile } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +/** + * @internal + */ +export interface WebIdentityProfile extends Profile { + web_identity_token_file: string; + role_arn: string; + role_session_name?: string; +} +/** + * @internal + */ +export declare const isWebIdentityProfile: (arg: any) => arg is WebIdentityProfile; +/** + * @internal + */ +export declare const resolveWebIdentityCredentials: (profile: WebIdentityProfile, options: FromIniInit) => Promise<AwsCredentialIdentity>; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/fromIni.d.ts b/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/fromIni.d.ts new file mode 100644 index 00000000..9d640a05 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/fromIni.d.ts @@ -0,0 +1,24 @@ +import { AssumeRoleWithWebIdentityParams } from "@aws-sdk/credential-provider-web-identity"; +import { CredentialProviderOptions } from "@aws-sdk/types"; +import { RuntimeConfigAwsCredentialIdentityProvider } from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import { AwsCredentialIdentity, Pluggable } from "@smithy/types"; +import { AssumeRoleParams } from "./resolveAssumeRoleCredentials"; +export interface FromIniInit + extends SourceProfileInit, + CredentialProviderOptions { + mfaCodeProvider?: (mfaSerial: string) => Promise<string>; + roleAssumer?: ( + sourceCreds: AwsCredentialIdentity, + params: AssumeRoleParams + ) => Promise<AwsCredentialIdentity>; + roleAssumerWithWebIdentity?: ( + params: AssumeRoleWithWebIdentityParams + ) => Promise<AwsCredentialIdentity>; + clientConfig?: any; + clientPlugins?: Pluggable<any, any>[]; + ignoreCache?: boolean; +} +export declare const fromIni: ( + _init?: FromIniInit +) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..b0191315 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./fromIni"; diff --git
a/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveAssumeRoleCredentials.d.ts b/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveAssumeRoleCredentials.d.ts new file mode 100644 index 00000000..eb782f39 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveAssumeRoleCredentials.d.ts @@ -0,0 +1,26 @@ +import { Logger, ParsedIniData } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +export interface AssumeRoleParams { + RoleArn: string; + RoleSessionName: string; + ExternalId?: string; + SerialNumber?: string; + TokenCode?: string; + DurationSeconds?: number; +} +export declare const isAssumeRoleProfile: ( + arg: any, + { + profile, + logger, + }?: { + profile?: string | undefined; + logger?: Logger | undefined; + } +) => boolean; +export declare const resolveAssumeRoleCredentials: ( + profileName: string, + profiles: ParsedIniData, + options: FromIniInit, + visitedProfiles?: Record<string, true> +) => Promise<import("@smithy/types").AwsCredentialIdentity>; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveCredentialSource.d.ts b/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveCredentialSource.d.ts new file mode 100644 index 00000000..21a7f9fe --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveCredentialSource.d.ts @@ -0,0 +1,9 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentityProvider, Logger } from "@smithy/types"; +export declare const resolveCredentialSource: ( + credentialSource: string, + profileName: string, + logger?: Logger +) => ( + options?: CredentialProviderOptions +) => Promise<AwsCredentialIdentityProvider>; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProcessCredentials.d.ts b/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProcessCredentials.d.ts new file mode 100644 index 00000000..dbd55835 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProcessCredentials.d.ts @@ -0,0 +1,10 @@ +import { Credentials, Profile } from "@aws-sdk/types"; +import { FromIniInit } from "./fromIni"; +export interface ProcessProfile extends Profile { + credential_process: string; +} +export declare const isProcessProfile: (arg: any) => arg is ProcessProfile; +export declare const resolveProcessCredentials: ( + options: FromIniInit, + profile: string +) => Promise<Credentials>; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProfileData.d.ts b/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProfileData.d.ts new file mode 100644 index 00000000..d821bb40 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProfileData.d.ts @@ -0,0 +1,9 @@ +import { AwsCredentialIdentity, ParsedIniData } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +export declare const resolveProfileData: ( + profileName: string, + profiles: ParsedIniData, + options: FromIniInit, + visitedProfiles?: Record<string, true>, + isAssumeRoleRecursiveCall?: boolean +) => Promise<AwsCredentialIdentity>; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveSsoCredentials.d.ts b/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveSsoCredentials.d.ts new file mode 100644 index 00000000..88bec34b --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveSsoCredentials.d.ts @@ -0,0 +1,9 @@ +import { SsoProfile } from "@aws-sdk/credential-provider-sso"; +import {
IniSection, Profile } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +export declare const resolveSsoCredentials: ( + profile: string, + profileData: IniSection, + options?: FromIniInit +) => Promise<import("@smithy/types").AwsCredentialIdentity>; +export declare const isSsoProfile: (arg: Profile) => arg is Partial<SsoProfile>; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveStaticCredentials.d.ts b/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveStaticCredentials.d.ts new file mode 100644 index 00000000..5f5daa92 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveStaticCredentials.d.ts @@ -0,0 +1,16 @@ +import { AwsCredentialIdentity, Profile } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +export interface StaticCredsProfile extends Profile { + aws_access_key_id: string; + aws_secret_access_key: string; + aws_session_token?: string; + aws_credential_scope?: string; + aws_account_id?: string; +} +export declare const isStaticCredsProfile: ( + arg: any +) => arg is StaticCredsProfile; +export declare const resolveStaticCredentials: ( + profile: StaticCredsProfile, + options?: FromIniInit +) => Promise<AwsCredentialIdentity>; diff --git a/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveWebIdentityCredentials.d.ts b/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveWebIdentityCredentials.d.ts new file mode 100644 index 00000000..4179f942 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveWebIdentityCredentials.d.ts @@ -0,0 +1,14 @@ +import { AwsCredentialIdentity, Profile } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +export interface WebIdentityProfile extends Profile { + web_identity_token_file: string; + role_arn: string; + role_session_name?: string; +} +export declare const isWebIdentityProfile: ( + arg: any +) => arg is WebIdentityProfile; +export declare const resolveWebIdentityCredentials: ( + profile: WebIdentityProfile, + options: FromIniInit +) => Promise<AwsCredentialIdentity>; diff --git a/node_modules/@aws-sdk/credential-provider-ini/package.json b/node_modules/@aws-sdk/credential-provider-ini/package.json new file mode 100644 index 00000000..713141c6 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-ini/package.json @@ -0,0 +1,72 @@ +{ + "name": "@aws-sdk/credential-provider-ini", + "version": "3.799.0", + "description": "AWS credential provider that sources credentials from ~/.aws/credentials and ~/.aws/config", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-ini", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-env": "3.799.0", +
"@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.799.0", + "@aws-sdk/credential-provider-web-identity": "3.799.0", + "@aws-sdk/nested-clients": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-ini", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-ini" + } +} diff --git a/node_modules/@aws-sdk/credential-provider-node/LICENSE b/node_modules/@aws-sdk/credential-provider-node/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-node/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@aws-sdk/credential-provider-node/README.md b/node_modules/@aws-sdk/credential-provider-node/README.md new file mode 100644 index 00000000..7957cc0b --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-node/README.md @@ -0,0 +1,104 @@ +# @aws-sdk/credential-provider-node + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-node/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-node) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-node.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-node) + +## AWS Credential Provider for Node.JS + +This module provides a factory function, `defaultProvider`, that will attempt to +source AWS credentials from a Node.JS environment. 
It will attempt to find
+credentials from the following sources (listed in order of precedence):
+
+- Environment variables exposed via `process.env`
+- SSO credentials from token cache
+- Web identity token credentials
+- Shared credentials and config ini files
+- The EC2/ECS Instance Metadata Service
+
+The default credential provider will invoke one provider at a time and only
+continue to the next if no credentials have been located. For example, if the
+process finds values defined via the `AWS_ACCESS_KEY_ID` and
+`AWS_SECRET_ACCESS_KEY` environment variables, the files at `~/.aws/credentials`
+and `~/.aws/config` will not be read, nor will any messages be sent to the
+Instance Metadata Service.
+
+If invalid configuration is encountered (such as a profile in
+`~/.aws/credentials` specifying as its `source_profile` the name of a profile
+that does not exist), then the chained provider will be rejected with an error
+and will not invoke the next provider in the list.
+
+_IMPORTANT_: if you intend to acquire credentials using EKS
+[IAM Roles for Service Accounts](https://docs.aws.amazon.com/eks/latest/userguide/iam-roles-for-service-accounts.html),
+then you must explicitly specify a value for `roleAssumerWithWebIdentity`. There is a
+default function available in the `@aws-sdk/client-sts` package. An example of using
+this:
+
+```js
+const { getDefaultRoleAssumerWithWebIdentity } = require("@aws-sdk/client-sts");
+const { defaultProvider } = require("@aws-sdk/credential-provider-node");
+const { S3Client, GetObjectCommand } = require("@aws-sdk/client-s3");
+
+const provider = defaultProvider({
+  roleAssumerWithWebIdentity: getDefaultRoleAssumerWithWebIdentity({
+    // You must explicitly pass a region if you are not using us-east-1
+    region: "eu-west-1"
+  }),
+});
+
+const client = new S3Client({ credentialDefaultProvider: provider });
+```
+
+_IMPORTANT_: We provide a wrapper of this provider in the `@aws-sdk/credential-providers`
+package to save you from importing `getDefaultRoleAssumerWithWebIdentity()` or
+`getDefaultRoleAssumer()` from the STS package. Similarly, you can do:
+
+```js
+const { fromNodeProviderChain } = require("@aws-sdk/credential-providers");
+const { S3Client } = require("@aws-sdk/client-s3");
+
+const credentials = fromNodeProviderChain();
+
+const client = new S3Client({ credentials });
+```
+
+## Supported configuration
+
+You may customize how credentials are resolved by providing an options hash to
+the `defaultProvider` factory function. The following options are supported
+(a combined example follows the list):
+
+- `profile` - The configuration profile to use. If not specified, the provider
+  will use the value in the `AWS_PROFILE` environment variable or a default of
+  `default`.
+- `filepath` - The path to the shared credentials file. If not specified, the
+  provider will use the value in the `AWS_SHARED_CREDENTIALS_FILE` environment
+  variable or a default of `~/.aws/credentials`.
+- `configFilepath` - The path to the shared config file. If not specified, the
+  provider will use the value in the `AWS_CONFIG_FILE` environment variable or a
+  default of `~/.aws/config`.
+- `mfaCodeProvider` - A function that returns a promise fulfilled with an
+  MFA token code for the provided MFA serial code. If a profile requires an MFA
+  code and `mfaCodeProvider` is not a valid function, the credential provider
+  promise will be rejected.
+- `roleAssumer` - A function that assumes a role and returns a promise
+  fulfilled with credentials for the assumed role. If not specified, no role
+  will be assumed, and an error will be thrown.
+- `roleArn` - ARN to assume. If not specified, the provider will use the value
+  in the `AWS_ROLE_ARN` environment variable.
+- `webIdentityTokenFile` - File location of where the `OIDC` token is stored.
+  If not specified, the provider will use the value in the `AWS_WEB_IDENTITY_TOKEN_FILE`
+  environment variable.
+- `roleAssumerWithWebIdentity` - A function that assumes a role with web identity and
+  returns a promise fulfilled with credentials for the assumed role.
+- `timeout` - The connection timeout (in milliseconds) to apply to any remote
+  requests. If not specified, a default value of `1000` (one second) is used.
+- `maxRetries` - The maximum number of times any HTTP connections should be
+  retried. If not specified, a default value of `0` will be used.
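+
+For illustration, here is a minimal sketch (not part of the upstream README) that
+combines several of the options above; the profile name and tuning values are
+arbitrary assumptions:
+
+```js
+const { defaultProvider } = require("@aws-sdk/credential-provider-node");
+const { S3Client } = require("@aws-sdk/client-s3");
+
+// Resolve the hypothetical "ci" profile instead of AWS_PROFILE / "default",
+// fail remote (IMDS/ECS) lookups after 500 ms, and retry them up to twice.
+const provider = defaultProvider({
+  profile: "ci",
+  timeout: 500,
+  maxRetries: 2,
+});
+
+// A memoized provider is a valid `credentials` input for any SDK client.
+const client = new S3Client({ credentials: provider });
+```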
+
+## Related packages:
+
+- [AWS Credential Provider for Node.JS - Environment Variables](../credential-provider-env)
+- [AWS Credential Provider for Node.JS - SSO](../credential-provider-sso)
+- [AWS Credential Provider for Node.JS - Web Identity](../credential-provider-web-identity)
+- [AWS Credential Provider for Node.JS - Shared Configuration Files](../credential-provider-ini)
+- [AWS Credential Provider for Node.JS - Instance and Container Metadata](../credential-provider-imds)
+- [AWS Shared Configuration File Loader](../shared-ini-file-loader)
diff --git a/node_modules/@aws-sdk/credential-provider-node/dist-cjs/index.js b/node_modules/@aws-sdk/credential-provider-node/dist-cjs/index.js
new file mode 100644
index 00000000..be4d2b68
--- /dev/null
+++ b/node_modules/@aws-sdk/credential-provider-node/dist-cjs/index.js
@@ -0,0 +1,147 @@
+"use strict";
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// src/index.ts
+var index_exports = {};
+__export(index_exports, {
+  credentialsTreatedAsExpired: () => credentialsTreatedAsExpired,
+  credentialsWillNeedRefresh: () => credentialsWillNeedRefresh,
+  defaultProvider: () => defaultProvider
+});
+module.exports = __toCommonJS(index_exports);
+
+// src/defaultProvider.ts
+var import_credential_provider_env = require("@aws-sdk/credential-provider-env");
+
+var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader");
+
+// src/remoteProvider.ts
+var import_property_provider = require("@smithy/property-provider");
+var ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED";
+var remoteProvider = /* @__PURE__ */ __name(async (init) => {
+  const { ENV_CMDS_FULL_URI, ENV_CMDS_RELATIVE_URI, fromContainerMetadata, fromInstanceMetadata } = await Promise.resolve().then(() => __toESM(require("@smithy/credential-provider-imds")));
+  if (process.env[ENV_CMDS_RELATIVE_URI] || process.env[ENV_CMDS_FULL_URI]) {
+    init.logger?.debug("@aws-sdk/credential-provider-node - remoteProvider::fromHttp/fromContainerMetadata");
+    const { fromHttp } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-http")));
+    return (0, import_property_provider.chain)(fromHttp(init), fromContainerMetadata(init));
+  }
+  if (process.env[ENV_IMDS_DISABLED] && process.env[ENV_IMDS_DISABLED] !== "false") {
+    return async () => {
+      throw new import_property_provider.CredentialsProviderError("EC2 Instance Metadata Service access disabled", { logger: init.logger });
+    };
+  }
+  init.logger?.debug("@aws-sdk/credential-provider-node - remoteProvider::fromInstanceMetadata");
+  return fromInstanceMetadata(init);
+}, "remoteProvider");
+
+// src/defaultProvider.ts
+var multipleCredentialSourceWarningEmitted = false;
+var defaultProvider = /* @__PURE__ */ __name((init = {}) => (0, import_property_provider.memoize)(
+  (0, import_property_provider.chain)(
+    async () => {
+      const profile = init.profile ?? process.env[import_shared_ini_file_loader.ENV_PROFILE];
+      if (profile) {
+        const envStaticCredentialsAreSet = process.env[import_credential_provider_env.ENV_KEY] && process.env[import_credential_provider_env.ENV_SECRET];
+        if (envStaticCredentialsAreSet) {
+          if (!multipleCredentialSourceWarningEmitted) {
+            const warnFn = init.logger?.warn && init.logger?.constructor?.name !== "NoOpLogger" ? init.logger.warn : console.warn;
+            warnFn(
+              `@aws-sdk/credential-provider-node - defaultProvider::fromEnv WARNING:
+    Multiple credential sources detected:
+    Both AWS_PROFILE and the pair AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY static credentials are set.
+    This SDK will proceed with the AWS_PROFILE value.
+
+    However, a future version may change this behavior to prefer the ENV static credentials.
+    Please ensure that your environment only sets either the AWS_PROFILE or the
+    AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY pair.
+` + ); + multipleCredentialSourceWarningEmitted = true; + } + } + throw new import_property_provider.CredentialsProviderError("AWS_PROFILE is set, skipping fromEnv provider.", { + logger: init.logger, + tryNextLink: true + }); + } + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromEnv"); + return (0, import_credential_provider_env.fromEnv)(init)(); + }, + async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromSSO"); + const { ssoStartUrl, ssoAccountId, ssoRegion, ssoRoleName, ssoSession } = init; + if (!ssoStartUrl && !ssoAccountId && !ssoRegion && !ssoRoleName && !ssoSession) { + throw new import_property_provider.CredentialsProviderError( + "Skipping SSO provider in default chain (inputs do not include SSO fields).", + { logger: init.logger } + ); + } + const { fromSSO } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-sso"))); + return fromSSO(init)(); + }, + async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromIni"); + const { fromIni } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-ini"))); + return fromIni(init)(); + }, + async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromProcess"); + const { fromProcess } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-process"))); + return fromProcess(init)(); + }, + async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromTokenFile"); + const { fromTokenFile } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-web-identity"))); + return fromTokenFile(init)(); + }, + async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::remoteProvider"); + return (await remoteProvider(init))(); + }, + async () => { + throw new import_property_provider.CredentialsProviderError("Could not load credentials from any providers", { + tryNextLink: false, + logger: init.logger + }); + } + ), + credentialsTreatedAsExpired, + credentialsWillNeedRefresh +), "defaultProvider"); +var credentialsWillNeedRefresh = /* @__PURE__ */ __name((credentials) => credentials?.expiration !== void 0, "credentialsWillNeedRefresh"); +var credentialsTreatedAsExpired = /* @__PURE__ */ __name((credentials) => credentials?.expiration !== void 0 && credentials.expiration.getTime() - Date.now() < 3e5, "credentialsTreatedAsExpired"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + defaultProvider, + credentialsWillNeedRefresh, + credentialsTreatedAsExpired +}); + diff --git a/node_modules/@aws-sdk/credential-provider-node/dist-es/defaultProvider.js b/node_modules/@aws-sdk/credential-provider-node/dist-es/defaultProvider.js new file mode 100644 index 00000000..d582cf8a --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-node/dist-es/defaultProvider.js @@ -0,0 +1,62 @@ +import { ENV_KEY, ENV_SECRET, fromEnv } from "@aws-sdk/credential-provider-env"; +import { chain, CredentialsProviderError, memoize } from "@smithy/property-provider"; +import { ENV_PROFILE } from "@smithy/shared-ini-file-loader"; +import { remoteProvider } from "./remoteProvider"; +let multipleCredentialSourceWarningEmitted = false; +export const defaultProvider = (init = {}) => memoize(chain(async () => { + const profile = init.profile ?? 
process.env[ENV_PROFILE]; + if (profile) { + const envStaticCredentialsAreSet = process.env[ENV_KEY] && process.env[ENV_SECRET]; + if (envStaticCredentialsAreSet) { + if (!multipleCredentialSourceWarningEmitted) { + const warnFn = init.logger?.warn && init.logger?.constructor?.name !== "NoOpLogger" ? init.logger.warn : console.warn; + warnFn(`@aws-sdk/credential-provider-node - defaultProvider::fromEnv WARNING: + Multiple credential sources detected: + Both AWS_PROFILE and the pair AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY static credentials are set. + This SDK will proceed with the AWS_PROFILE value. + + However, a future version may change this behavior to prefer the ENV static credentials. + Please ensure that your environment only sets either the AWS_PROFILE or the + AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY pair. +`); + multipleCredentialSourceWarningEmitted = true; + } + } + throw new CredentialsProviderError("AWS_PROFILE is set, skipping fromEnv provider.", { + logger: init.logger, + tryNextLink: true, + }); + } + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromEnv"); + return fromEnv(init)(); +}, async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromSSO"); + const { ssoStartUrl, ssoAccountId, ssoRegion, ssoRoleName, ssoSession } = init; + if (!ssoStartUrl && !ssoAccountId && !ssoRegion && !ssoRoleName && !ssoSession) { + throw new CredentialsProviderError("Skipping SSO provider in default chain (inputs do not include SSO fields).", { logger: init.logger }); + } + const { fromSSO } = await import("@aws-sdk/credential-provider-sso"); + return fromSSO(init)(); +}, async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromIni"); + const { fromIni } = await import("@aws-sdk/credential-provider-ini"); + return fromIni(init)(); +}, async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromProcess"); + const { fromProcess } = await import("@aws-sdk/credential-provider-process"); + return fromProcess(init)(); +}, async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromTokenFile"); + const { fromTokenFile } = await import("@aws-sdk/credential-provider-web-identity"); + return fromTokenFile(init)(); +}, async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::remoteProvider"); + return (await remoteProvider(init))(); +}, async () => { + throw new CredentialsProviderError("Could not load credentials from any providers", { + tryNextLink: false, + logger: init.logger, + }); +}), credentialsTreatedAsExpired, credentialsWillNeedRefresh); +export const credentialsWillNeedRefresh = (credentials) => credentials?.expiration !== undefined; +export const credentialsTreatedAsExpired = (credentials) => credentials?.expiration !== undefined && credentials.expiration.getTime() - Date.now() < 300000; diff --git a/node_modules/@aws-sdk/credential-provider-node/dist-es/index.js b/node_modules/@aws-sdk/credential-provider-node/dist-es/index.js new file mode 100644 index 00000000..c82818e5 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-node/dist-es/index.js @@ -0,0 +1 @@ +export * from "./defaultProvider"; diff --git a/node_modules/@aws-sdk/credential-provider-node/dist-es/remoteProvider.js b/node_modules/@aws-sdk/credential-provider-node/dist-es/remoteProvider.js new file mode 100644 index 00000000..c455bc14 --- /dev/null +++ 
b/node_modules/@aws-sdk/credential-provider-node/dist-es/remoteProvider.js
@@ -0,0 +1,17 @@
+import { chain, CredentialsProviderError } from "@smithy/property-provider";
+export const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED";
+export const remoteProvider = async (init) => {
+    const { ENV_CMDS_FULL_URI, ENV_CMDS_RELATIVE_URI, fromContainerMetadata, fromInstanceMetadata } = await import("@smithy/credential-provider-imds");
+    if (process.env[ENV_CMDS_RELATIVE_URI] || process.env[ENV_CMDS_FULL_URI]) {
+        init.logger?.debug("@aws-sdk/credential-provider-node - remoteProvider::fromHttp/fromContainerMetadata");
+        const { fromHttp } = await import("@aws-sdk/credential-provider-http");
+        return chain(fromHttp(init), fromContainerMetadata(init));
+    }
+    if (process.env[ENV_IMDS_DISABLED] && process.env[ENV_IMDS_DISABLED] !== "false") {
+        return async () => {
+            throw new CredentialsProviderError("EC2 Instance Metadata Service access disabled", { logger: init.logger });
+        };
+    }
+    init.logger?.debug("@aws-sdk/credential-provider-node - remoteProvider::fromInstanceMetadata");
+    return fromInstanceMetadata(init);
+};
diff --git a/node_modules/@aws-sdk/credential-provider-node/dist-types/defaultProvider.d.ts b/node_modules/@aws-sdk/credential-provider-node/dist-types/defaultProvider.d.ts
new file mode 100644
index 00000000..fd401502
--- /dev/null
+++ b/node_modules/@aws-sdk/credential-provider-node/dist-types/defaultProvider.d.ts
@@ -0,0 +1,58 @@
+import type { FromHttpOptions } from "@aws-sdk/credential-provider-http";
+import type { FromIniInit } from "@aws-sdk/credential-provider-ini";
+import type { FromProcessInit } from "@aws-sdk/credential-provider-process";
+import type { FromSSOInit, SsoCredentialsParameters } from "@aws-sdk/credential-provider-sso";
+import type { FromTokenFileInit } from "@aws-sdk/credential-provider-web-identity";
+import type { RemoteProviderInit } from "@smithy/credential-provider-imds";
+import { AwsCredentialIdentity, MemoizedProvider } from "@smithy/types";
+/**
+ * @public
+ */
+export type DefaultProviderInit = FromIniInit & FromHttpOptions & RemoteProviderInit & FromProcessInit & (FromSSOInit & Partial<SsoCredentialsParameters>) & FromTokenFileInit;
+/**
+ * Creates a credential provider that will attempt to find credentials from the
+ * following sources (listed in order of precedence):
+ *   * Environment variables exposed via `process.env`
+ *   * SSO credentials from token cache
+ *   * Web identity token credentials
+ *   * Shared credentials and config ini files
+ *   * The EC2/ECS Instance Metadata Service
+ *
+ * The default credential provider will invoke one provider at a time and only
+ * continue to the next if no credentials have been located. For example, if
+ * the process finds values defined via the `AWS_ACCESS_KEY_ID` and
+ * `AWS_SECRET_ACCESS_KEY` environment variables, the files at
+ * `~/.aws/credentials` and `~/.aws/config` will not be read, nor will any
+ * messages be sent to the Instance Metadata Service.
+ *
+ * @param init Configuration that is passed to each individual
+ * provider
+ *
+ * @see {@link fromEnv} The function used to source credentials from
+ * environment variables.
+ * @see {@link fromSSO} The function used to source credentials from
+ * resolved SSO token cache.
+ * @see {@link fromTokenFile} The function used to source credentials from
+ * token file.
+ * @see {@link fromIni} The function used to source credentials from INI
+ * files.
+ * @see {@link fromProcess} The function used to source credentials from
+ * credential_process in INI files.
+ * @see {@link fromInstanceMetadata} The function used to source credentials from the
+ * EC2 Instance Metadata Service.
+ * @see {@link fromContainerMetadata} The function used to source credentials from the
+ * ECS Container Metadata Service.
+ */
+export declare const defaultProvider: (init?: DefaultProviderInit) => MemoizedProvider<AwsCredentialIdentity>;
+/**
+ * @internal
+ *
+ * @returns credentials have expiration.
+ */
+export declare const credentialsWillNeedRefresh: (credentials: AwsCredentialIdentity) => boolean;
+/**
+ * @internal
+ *
+ * @returns credentials with less than 5 minutes left.
+ */
+export declare const credentialsTreatedAsExpired: (credentials: AwsCredentialIdentity) => boolean;
diff --git a/node_modules/@aws-sdk/credential-provider-node/dist-types/index.d.ts b/node_modules/@aws-sdk/credential-provider-node/dist-types/index.d.ts
new file mode 100644
index 00000000..c82818e5
--- /dev/null
+++ b/node_modules/@aws-sdk/credential-provider-node/dist-types/index.d.ts
@@ -0,0 +1 @@
+export * from "./defaultProvider";
diff --git a/node_modules/@aws-sdk/credential-provider-node/dist-types/remoteProvider.d.ts b/node_modules/@aws-sdk/credential-provider-node/dist-types/remoteProvider.d.ts
new file mode 100644
index 00000000..4022a4e7
--- /dev/null
+++ b/node_modules/@aws-sdk/credential-provider-node/dist-types/remoteProvider.d.ts
@@ -0,0 +1,11 @@
+import type { FromHttpOptions } from "@aws-sdk/credential-provider-http";
+import type { RemoteProviderInit } from "@smithy/credential-provider-imds";
+import type { AwsCredentialIdentityProvider } from "@smithy/types";
+/**
+ * @internal
+ */
+export declare const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED";
+/**
+ * @internal
+ */
+export declare const remoteProvider: (init: RemoteProviderInit | FromHttpOptions) => Promise<AwsCredentialIdentityProvider>;
diff --git a/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/defaultProvider.d.ts b/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/defaultProvider.d.ts
new file mode 100644
index 00000000..e1f1a8df
--- /dev/null
+++ b/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/defaultProvider.d.ts
@@ -0,0 +1,25 @@
+import { FromHttpOptions } from "@aws-sdk/credential-provider-http";
+import { FromIniInit } from "@aws-sdk/credential-provider-ini";
+import { FromProcessInit } from "@aws-sdk/credential-provider-process";
+import {
+  FromSSOInit,
+  SsoCredentialsParameters,
+} from "@aws-sdk/credential-provider-sso";
+import { FromTokenFileInit } from "@aws-sdk/credential-provider-web-identity";
+import { RemoteProviderInit } from "@smithy/credential-provider-imds";
+import { AwsCredentialIdentity, MemoizedProvider } from "@smithy/types";
+export type DefaultProviderInit = FromIniInit &
+  FromHttpOptions &
+  RemoteProviderInit &
+  FromProcessInit &
+  (FromSSOInit & Partial<SsoCredentialsParameters>) &
+  FromTokenFileInit;
+export declare const defaultProvider: (
+  init?: DefaultProviderInit
+) => MemoizedProvider<AwsCredentialIdentity>;
+export declare const credentialsWillNeedRefresh: (
+  credentials: AwsCredentialIdentity
+) => boolean;
+export declare const credentialsTreatedAsExpired: (
+  credentials: AwsCredentialIdentity
+) => boolean;
diff --git a/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/index.d.ts
new file mode 100644
index 00000000..c82818e5
--- /dev/null
+++ b/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/index.d.ts
@@ -0,0 +1 @@
+export * from "./defaultProvider";
diff --git a/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/remoteProvider.d.ts b/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/remoteProvider.d.ts
new file mode 100644
index 00000000..90948cc8
--- /dev/null
+++ b/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/remoteProvider.d.ts
@@ -0,0 +1,7 @@
+import { FromHttpOptions } from "@aws-sdk/credential-provider-http";
+import { RemoteProviderInit } from "@smithy/credential-provider-imds";
+import { AwsCredentialIdentityProvider } from "@smithy/types";
+export declare const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED";
+export declare const remoteProvider: (
+  init: RemoteProviderInit | FromHttpOptions
+) => Promise<AwsCredentialIdentityProvider>;
diff --git a/node_modules/@aws-sdk/credential-provider-node/package.json b/node_modules/@aws-sdk/credential-provider-node/package.json
new file mode 100644
index 00000000..c115edd8
--- /dev/null
+++ b/node_modules/@aws-sdk/credential-provider-node/package.json
@@ -0,0 +1,70 @@
+{
+  "name": "@aws-sdk/credential-provider-node",
+  "version": "3.799.0",
+  "description": "AWS credential provider that sources credentials from a Node.JS environment. ",
+  "engines": {
+    "node": ">=18.0.0"
+  },
+  "main": "./dist-cjs/index.js",
+  "module": "./dist-es/index.js",
+  "scripts": {
+    "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'",
+    "build:cjs": "node ../../scripts/compilation/inline credential-provider-node",
+    "build:es": "tsc -p tsconfig.es.json",
+    "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build",
+    "build:types": "tsc -p tsconfig.types.json",
+    "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4",
+    "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo",
+    "test": "yarn g:vitest run",
+    "test:integration": "yarn g:jest -c jest.config.integ.js",
+    "test:watch": "yarn g:vitest watch"
+  },
+  "keywords": [
+    "aws",
+    "credentials"
+  ],
+  "author": {
+    "name": "AWS SDK for JavaScript Team",
+    "url": "https://aws.amazon.com/javascript/"
+  },
+  "license": "Apache-2.0",
+  "dependencies": {
+    "@aws-sdk/credential-provider-env": "3.799.0",
+    "@aws-sdk/credential-provider-http": "3.799.0",
+    "@aws-sdk/credential-provider-ini": "3.799.0",
+    "@aws-sdk/credential-provider-process": "3.799.0",
+    "@aws-sdk/credential-provider-sso": "3.799.0",
+    "@aws-sdk/credential-provider-web-identity": "3.799.0",
+    "@aws-sdk/types": "3.775.0",
+    "@smithy/credential-provider-imds": "^4.0.2",
+    "@smithy/property-provider": "^4.0.2",
+    "@smithy/shared-ini-file-loader": "^4.0.2",
+    "@smithy/types": "^4.2.0",
+    "tslib": "^2.6.2"
+  },
+  "devDependencies": {
+    "@tsconfig/recommended": "1.0.1",
+    "@types/node": "^18.19.69",
+    "concurrently": "7.0.0",
+    "downlevel-dts": "0.10.1",
+    "rimraf": "3.0.2",
+    "typescript": "~5.2.2"
+  },
+  "types": "./dist-types/index.d.ts",
+  "typesVersions": {
+    "<4.0": {
+      "dist-types/*": [
+        "dist-types/ts3.4/*"
+      ]
+    }
+  },
+  "files": [
+    "dist-*/**"
+  ],
+  "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-node",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/aws/aws-sdk-js-v3.git",
+    "directory": "packages/credential-provider-node"
+  }
+}
diff --git a/node_modules/@aws-sdk/credential-provider-process/LICENSE b/node_modules/@aws-sdk/credential-provider-process/LICENSE
new file mode 100644
index 00000000..f9a66739
--- /dev/null
+++ b/node_modules/@aws-sdk/credential-provider-process/LICENSE
@@ -0,0 +1,201 @@
+Apache License
+                           Version 2.0, January 2004
+
http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
\ No newline at end of file
diff --git a/node_modules/@aws-sdk/credential-provider-process/README.md b/node_modules/@aws-sdk/credential-provider-process/README.md
new file mode 100644
index 00000000..4e9d9bd4
--- /dev/null
+++ b/node_modules/@aws-sdk/credential-provider-process/README.md
@@ -0,0 +1,11 @@
+# @aws-sdk/credential-provider-process
+
+[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-process/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-process)
+[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-process.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-process)
+
+> An internal package
+
+## Usage
+
+You probably shouldn't, at least directly. Please use [@aws-sdk/credential-providers](https://www.npmjs.com/package/@aws-sdk/credential-providers)
+instead.
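+
+For illustration, a minimal sketch (not part of the upstream README) of wiring a
+`credential_process` profile through the wrapper package; the profile name and
+script path are made-up assumptions, and the JSON shape mirrors what the
+`getValidatedProcessCredentials` helper below requires:
+
+```js
+// Hypothetical ~/.aws/config entry:
+//
+//   [profile ci]
+//   credential_process = /usr/local/bin/fetch-ci-creds
+//
+// The script must print JSON such as:
+//   {"Version": 1, "AccessKeyId": "...", "SecretAccessKey": "...",
+//    "SessionToken": "...", "Expiration": "2030-01-01T00:00:00Z"}
+// Version must be 1, AccessKeyId and SecretAccessKey are required,
+// and an already-expired Expiration is rejected.
+
+const { fromProcess } = require("@aws-sdk/credential-providers");
+const { S3Client } = require("@aws-sdk/client-s3");
+
+const client = new S3Client({
+  credentials: fromProcess({ profile: "ci" }),
+});
+```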
diff --git a/node_modules/@aws-sdk/credential-provider-process/dist-cjs/index.js b/node_modules/@aws-sdk/credential-provider-process/dist-cjs/index.js
new file mode 100644
index 00000000..57146de7
--- /dev/null
+++ b/node_modules/@aws-sdk/credential-provider-process/dist-cjs/index.js
@@ -0,0 +1,114 @@
+"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// src/index.ts
+var index_exports = {};
+__export(index_exports, {
+  fromProcess: () => fromProcess
+});
+module.exports = __toCommonJS(index_exports);
+
+// src/fromProcess.ts
+var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader");
+
+// src/resolveProcessCredentials.ts
+var import_property_provider = require("@smithy/property-provider");
+var import_child_process = require("child_process");
+var import_util = require("util");
+
+// src/getValidatedProcessCredentials.ts
+var import_client = require("@aws-sdk/core/client");
+var getValidatedProcessCredentials = /* @__PURE__ */ __name((profileName, data, profiles) => {
+  if (data.Version !== 1) {
+    throw Error(`Profile ${profileName} credential_process did not return Version 1.`);
+  }
+  if (data.AccessKeyId === void 0 || data.SecretAccessKey === void 0) {
+    throw Error(`Profile ${profileName} credential_process returned invalid credentials.`);
+  }
+  if (data.Expiration) {
+    const currentTime = /* @__PURE__ */ new Date();
+    const expireTime = new Date(data.Expiration);
+    if (expireTime < currentTime) {
+      throw Error(`Profile ${profileName} credential_process returned expired credentials.`);
+    }
+  }
+  let accountId = data.AccountId;
+  if (!accountId && profiles?.[profileName]?.aws_account_id) {
+    accountId = profiles[profileName].aws_account_id;
+  }
+  const credentials = {
+    accessKeyId: data.AccessKeyId,
+    secretAccessKey: data.SecretAccessKey,
+    ...data.SessionToken && { sessionToken: data.SessionToken },
+    ...data.Expiration && { expiration: new Date(data.Expiration) },
+    ...data.CredentialScope && { credentialScope: data.CredentialScope },
+    ...accountId && { accountId }
+  };
+  (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_PROCESS", "w");
+  return credentials;
+}, "getValidatedProcessCredentials");
+
+// src/resolveProcessCredentials.ts
+var resolveProcessCredentials = /* @__PURE__ */ __name(async (profileName, profiles, logger) => {
+  const profile = profiles[profileName];
+  if (profiles[profileName]) {
+    const credentialProcess = profile["credential_process"];
+    if (credentialProcess !== void 0) {
+      const execPromise = (0, import_util.promisify)(import_child_process.exec);
+      try {
+        const { stdout } = await execPromise(credentialProcess);
+        let data;
+        try {
+          data = JSON.parse(stdout.trim());
+        } catch {
+          throw Error(`Profile ${profileName} credential_process returned invalid JSON.`);
+        }
+        return getValidatedProcessCredentials(profileName, data, profiles);
+      } catch (error) {
+        throw new import_property_provider.CredentialsProviderError(error.message, { logger });
+      }
+    } else {
+      throw new import_property_provider.CredentialsProviderError(`Profile ${profileName} did not contain credential_process.`, { logger });
+    }
+  } else {
+    throw new import_property_provider.CredentialsProviderError(`Profile ${profileName} could not be found in shared credentials file.`, {
+      logger
+    });
+  }
+}, "resolveProcessCredentials");
+
+// src/fromProcess.ts
+var fromProcess = /* @__PURE__ */ __name((init = {}) => async ({ callerClientConfig } = {}) => {
+  init.logger?.debug("@aws-sdk/credential-provider-process - fromProcess");
+  const profiles = await (0, import_shared_ini_file_loader.parseKnownFiles)(init);
+  return resolveProcessCredentials(
+    (0, import_shared_ini_file_loader.getProfileName)({
+      profile: init.profile ??
callerClientConfig?.profile + }), + profiles, + init.logger + ); +}, "fromProcess"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromProcess +}); + diff --git a/node_modules/@aws-sdk/credential-provider-process/dist-es/ProcessCredentials.js b/node_modules/@aws-sdk/credential-provider-process/dist-es/ProcessCredentials.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-process/dist-es/ProcessCredentials.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/credential-provider-process/dist-es/fromProcess.js b/node_modules/@aws-sdk/credential-provider-process/dist-es/fromProcess.js new file mode 100644 index 00000000..9e1e8009 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-process/dist-es/fromProcess.js @@ -0,0 +1,9 @@ +import { getProfileName, parseKnownFiles } from "@smithy/shared-ini-file-loader"; +import { resolveProcessCredentials } from "./resolveProcessCredentials"; +export const fromProcess = (init = {}) => async ({ callerClientConfig } = {}) => { + init.logger?.debug("@aws-sdk/credential-provider-process - fromProcess"); + const profiles = await parseKnownFiles(init); + return resolveProcessCredentials(getProfileName({ + profile: init.profile ?? callerClientConfig?.profile, + }), profiles, init.logger); +}; diff --git a/node_modules/@aws-sdk/credential-provider-process/dist-es/getValidatedProcessCredentials.js b/node_modules/@aws-sdk/credential-provider-process/dist-es/getValidatedProcessCredentials.js new file mode 100644 index 00000000..caa0dd1b --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-process/dist-es/getValidatedProcessCredentials.js @@ -0,0 +1,30 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +export const getValidatedProcessCredentials = (profileName, data, profiles) => { + if (data.Version !== 1) { + throw Error(`Profile ${profileName} credential_process did not return Version 1.`); + } + if (data.AccessKeyId === undefined || data.SecretAccessKey === undefined) { + throw Error(`Profile ${profileName} credential_process returned invalid credentials.`); + } + if (data.Expiration) { + const currentTime = new Date(); + const expireTime = new Date(data.Expiration); + if (expireTime < currentTime) { + throw Error(`Profile ${profileName} credential_process returned expired credentials.`); + } + } + let accountId = data.AccountId; + if (!accountId && profiles?.[profileName]?.aws_account_id) { + accountId = profiles[profileName].aws_account_id; + } + const credentials = { + accessKeyId: data.AccessKeyId, + secretAccessKey: data.SecretAccessKey, + ...(data.SessionToken && { sessionToken: data.SessionToken }), + ...(data.Expiration && { expiration: new Date(data.Expiration) }), + ...(data.CredentialScope && { credentialScope: data.CredentialScope }), + ...(accountId && { accountId }), + }; + setCredentialFeature(credentials, "CREDENTIALS_PROCESS", "w"); + return credentials; +}; diff --git a/node_modules/@aws-sdk/credential-provider-process/dist-es/index.js b/node_modules/@aws-sdk/credential-provider-process/dist-es/index.js new file mode 100644 index 00000000..b921d353 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-process/dist-es/index.js @@ -0,0 +1 @@ +export * from "./fromProcess"; diff --git a/node_modules/@aws-sdk/credential-provider-process/dist-es/resolveProcessCredentials.js b/node_modules/@aws-sdk/credential-provider-process/dist-es/resolveProcessCredentials.js new file mode 100644 index 
00000000..334e0afc --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-process/dist-es/resolveProcessCredentials.js @@ -0,0 +1,35 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { exec } from "child_process"; +import { promisify } from "util"; +import { getValidatedProcessCredentials } from "./getValidatedProcessCredentials"; +export const resolveProcessCredentials = async (profileName, profiles, logger) => { + const profile = profiles[profileName]; + if (profiles[profileName]) { + const credentialProcess = profile["credential_process"]; + if (credentialProcess !== undefined) { + const execPromise = promisify(exec); + try { + const { stdout } = await execPromise(credentialProcess); + let data; + try { + data = JSON.parse(stdout.trim()); + } + catch { + throw Error(`Profile ${profileName} credential_process returned invalid JSON.`); + } + return getValidatedProcessCredentials(profileName, data, profiles); + } + catch (error) { + throw new CredentialsProviderError(error.message, { logger }); + } + } + else { + throw new CredentialsProviderError(`Profile ${profileName} did not contain credential_process.`, { logger }); + } + } + else { + throw new CredentialsProviderError(`Profile ${profileName} could not be found in shared credentials file.`, { + logger, + }); + } +}; diff --git a/node_modules/@aws-sdk/credential-provider-process/dist-types/ProcessCredentials.d.ts b/node_modules/@aws-sdk/credential-provider-process/dist-types/ProcessCredentials.d.ts new file mode 100644 index 00000000..a4e6b46b --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-process/dist-types/ProcessCredentials.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export type ProcessCredentials = { + Version: number; + AccessKeyId: string; + SecretAccessKey: string; + SessionToken?: string; + Expiration?: number; + CredentialScope?: string; + AccountId?: string; +}; diff --git a/node_modules/@aws-sdk/credential-provider-process/dist-types/fromProcess.d.ts b/node_modules/@aws-sdk/credential-provider-process/dist-types/fromProcess.d.ts new file mode 100644 index 00000000..21776303 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-process/dist-types/fromProcess.d.ts @@ -0,0 +1,14 @@ +import type { CredentialProviderOptions, RuntimeConfigAwsCredentialIdentityProvider } from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +/** + * @internal + */ +export interface FromProcessInit extends SourceProfileInit, CredentialProviderOptions { +} +/** + * @internal + * + * Creates a credential provider that will read from a credential_process specified + * in ini files. 
+ */
+export declare const fromProcess: (init?: FromProcessInit) => RuntimeConfigAwsCredentialIdentityProvider;
diff --git a/node_modules/@aws-sdk/credential-provider-process/dist-types/getValidatedProcessCredentials.d.ts b/node_modules/@aws-sdk/credential-provider-process/dist-types/getValidatedProcessCredentials.d.ts
new file mode 100644
index 00000000..1e86d6b5
--- /dev/null
+++ b/node_modules/@aws-sdk/credential-provider-process/dist-types/getValidatedProcessCredentials.d.ts
@@ -0,0 +1,6 @@
+import { AwsCredentialIdentity, ParsedIniData } from "@smithy/types";
+import { ProcessCredentials } from "./ProcessCredentials";
+/**
+ * @internal
+ */
+export declare const getValidatedProcessCredentials: (profileName: string, data: ProcessCredentials, profiles: ParsedIniData) => AwsCredentialIdentity;
diff --git a/node_modules/@aws-sdk/credential-provider-process/dist-types/index.d.ts b/node_modules/@aws-sdk/credential-provider-process/dist-types/index.d.ts
new file mode 100644
index 00000000..adad9394
--- /dev/null
+++ b/node_modules/@aws-sdk/credential-provider-process/dist-types/index.d.ts
@@ -0,0 +1,4 @@
+/**
+ * @internal
+ */
+export * from "./fromProcess";
diff --git a/node_modules/@aws-sdk/credential-provider-process/dist-types/resolveProcessCredentials.d.ts b/node_modules/@aws-sdk/credential-provider-process/dist-types/resolveProcessCredentials.d.ts
new file mode 100644
index 00000000..4f69ca70
--- /dev/null
+++ b/node_modules/@aws-sdk/credential-provider-process/dist-types/resolveProcessCredentials.d.ts
@@ -0,0 +1,5 @@
+import { AwsCredentialIdentity, Logger, ParsedIniData } from "@smithy/types";
+/**
+ * @internal
+ */
+export declare const resolveProcessCredentials: (profileName: string, profiles: ParsedIniData, logger?: Logger) => Promise<AwsCredentialIdentity>;
diff --git a/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/ProcessCredentials.d.ts b/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/ProcessCredentials.d.ts
new file mode 100644
index 00000000..45acf5e8
--- /dev/null
+++ b/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/ProcessCredentials.d.ts
@@ -0,0 +1,9 @@
+export type ProcessCredentials = {
+  Version: number;
+  AccessKeyId: string;
+  SecretAccessKey: string;
+  SessionToken?: string;
+  Expiration?: number;
+  CredentialScope?: string;
+  AccountId?: string;
+};
diff --git a/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/fromProcess.d.ts b/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/fromProcess.d.ts
new file mode 100644
index 00000000..8e396564
--- /dev/null
+++ b/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/fromProcess.d.ts
@@ -0,0 +1,11 @@
+import {
+  CredentialProviderOptions,
+  RuntimeConfigAwsCredentialIdentityProvider,
+} from "@aws-sdk/types";
+import { SourceProfileInit } from "@smithy/shared-ini-file-loader";
+export interface FromProcessInit
+  extends SourceProfileInit,
+    CredentialProviderOptions {}
+export declare const fromProcess: (
+  init?: FromProcessInit
+) => RuntimeConfigAwsCredentialIdentityProvider;
diff --git a/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/getValidatedProcessCredentials.d.ts b/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/getValidatedProcessCredentials.d.ts
new file mode 100644
index 00000000..f44c81c2
--- /dev/null
+++ b/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/getValidatedProcessCredentials.d.ts
@@ -0,0 +1,7 @@
+import { AwsCredentialIdentity, ParsedIniData } from "@smithy/types";
"@smithy/types"; +import { ProcessCredentials } from "./ProcessCredentials"; +export declare const getValidatedProcessCredentials: ( + profileName: string, + data: ProcessCredentials, + profiles: ParsedIniData +) => AwsCredentialIdentity; diff --git a/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..b921d353 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./fromProcess"; diff --git a/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/resolveProcessCredentials.d.ts b/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/resolveProcessCredentials.d.ts new file mode 100644 index 00000000..a204db4f --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/resolveProcessCredentials.d.ts @@ -0,0 +1,6 @@ +import { AwsCredentialIdentity, Logger, ParsedIniData } from "@smithy/types"; +export declare const resolveProcessCredentials: ( + profileName: string, + profiles: ParsedIniData, + logger?: Logger +) => Promise; diff --git a/node_modules/@aws-sdk/credential-provider-process/package.json b/node_modules/@aws-sdk/credential-provider-process/package.json new file mode 100644 index 00000000..fb1f3831 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-process/package.json @@ -0,0 +1,63 @@ +{ + "name": "@aws-sdk/credential-provider-process", + "version": "3.799.0", + "description": "AWS credential provider that sources credential_process from ~/.aws/credentials and ~/.aws/config", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-process", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-process", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-process" + } +} diff --git a/node_modules/@aws-sdk/credential-provider-sso/LICENSE b/node_modules/@aws-sdk/credential-provider-sso/LICENSE new file mode 100644 index 00000000..f9a66739 --- /dev/null +++ 
b/node_modules/@aws-sdk/credential-provider-sso/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@aws-sdk/credential-provider-sso/README.md b/node_modules/@aws-sdk/credential-provider-sso/README.md new file mode 100644 index 00000000..aba3fa80 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/README.md @@ -0,0 +1,11 @@ +# @aws-sdk/credential-provider-sso + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-sso/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-sso) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-sso.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-sso) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@aws-sdk/credential-providers](https://www.npmjs.com/package/@aws-sdk/credential-providers) +instead. diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-cjs/index.js b/node_modules/@aws-sdk/credential-provider-sso/dist-cjs/index.js new file mode 100644 index 00000000..ab7549a6 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-cjs/index.js @@ -0,0 +1,246 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __esm = (fn, res) => function __init() { + return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res; +}; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/loadSso.ts +var loadSso_exports = {}; +__export(loadSso_exports, { + GetRoleCredentialsCommand: () => import_client_sso.GetRoleCredentialsCommand, + SSOClient: () => import_client_sso.SSOClient +}); +var import_client_sso; +var init_loadSso = __esm({ + "src/loadSso.ts"() { + "use strict"; + import_client_sso = require("@aws-sdk/client-sso"); + } +}); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + fromSSO: () => fromSSO, + isSsoProfile: () => isSsoProfile, + validateSsoProfile: () => validateSsoProfile +}); +module.exports = __toCommonJS(index_exports); + +// src/fromSSO.ts + + + +// src/isSsoProfile.ts +var isSsoProfile = /* @__PURE__ */ __name((arg) => arg && (typeof arg.sso_start_url === "string" || typeof arg.sso_account_id === "string" || typeof arg.sso_session === "string" || typeof arg.sso_region === "string" || typeof arg.sso_role_name === "string"), "isSsoProfile"); + +// src/resolveSSOCredentials.ts +var import_client = require("@aws-sdk/core/client"); 
+var import_token_providers = require("@aws-sdk/token-providers"); +var import_property_provider = require("@smithy/property-provider"); +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); +var SHOULD_FAIL_CREDENTIAL_CHAIN = false; +var resolveSSOCredentials = /* @__PURE__ */ __name(async ({ + ssoStartUrl, + ssoSession, + ssoAccountId, + ssoRegion, + ssoRoleName, + ssoClient, + clientConfig, + parentClientConfig, + profile, + logger +}) => { + let token; + const refreshMessage = `To refresh this SSO session run aws sso login with the corresponding profile.`; + if (ssoSession) { + try { + const _token = await (0, import_token_providers.fromSso)({ profile })(); + token = { + accessToken: _token.token, + expiresAt: new Date(_token.expiration).toISOString() + }; + } catch (e) { + throw new import_property_provider.CredentialsProviderError(e.message, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + } else { + try { + token = await (0, import_shared_ini_file_loader.getSSOTokenFromFile)(ssoStartUrl); + } catch (e) { + throw new import_property_provider.CredentialsProviderError(`The SSO session associated with this profile is invalid. ${refreshMessage}`, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + } + if (new Date(token.expiresAt).getTime() - Date.now() <= 0) { + throw new import_property_provider.CredentialsProviderError(`The SSO session associated with this profile has expired. ${refreshMessage}`, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + const { accessToken } = token; + const { SSOClient: SSOClient2, GetRoleCredentialsCommand: GetRoleCredentialsCommand2 } = await Promise.resolve().then(() => (init_loadSso(), loadSso_exports)); + const sso = ssoClient || new SSOClient2( + Object.assign({}, clientConfig ?? {}, { + logger: clientConfig?.logger ?? parentClientConfig?.logger, + region: clientConfig?.region ?? ssoRegion + }) + ); + let ssoResp; + try { + ssoResp = await sso.send( + new GetRoleCredentialsCommand2({ + accountId: ssoAccountId, + roleName: ssoRoleName, + accessToken + }) + ); + } catch (e) { + throw new import_property_provider.CredentialsProviderError(e, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + const { + roleCredentials: { accessKeyId, secretAccessKey, sessionToken, expiration, credentialScope, accountId } = {} + } = ssoResp; + if (!accessKeyId || !secretAccessKey || !sessionToken || !expiration) { + throw new import_property_provider.CredentialsProviderError("SSO returns an invalid temporary credential.", { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + const credentials = { + accessKeyId, + secretAccessKey, + sessionToken, + expiration: new Date(expiration), + ...credentialScope && { credentialScope }, + ...accountId && { accountId } + }; + if (ssoSession) { + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_SSO", "s"); + } else { + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_SSO_LEGACY", "u"); + } + return credentials; +}, "resolveSSOCredentials"); + +// src/validateSsoProfile.ts + +var validateSsoProfile = /* @__PURE__ */ __name((profile, logger) => { + const { sso_start_url, sso_account_id, sso_region, sso_role_name } = profile; + if (!sso_start_url || !sso_account_id || !sso_region || !sso_role_name) { + throw new import_property_provider.CredentialsProviderError( + `Profile is configured with invalid SSO credentials. Required parameters "sso_account_id", "sso_region", "sso_role_name", "sso_start_url". 
Got ${Object.keys(profile).join( + ", " + )} +Reference: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.html`, + { tryNextLink: false, logger } + ); + } + return profile; +}, "validateSsoProfile"); + +// src/fromSSO.ts +var fromSSO = /* @__PURE__ */ __name((init = {}) => async ({ callerClientConfig } = {}) => { + init.logger?.debug("@aws-sdk/credential-provider-sso - fromSSO"); + const { ssoStartUrl, ssoAccountId, ssoRegion, ssoRoleName, ssoSession } = init; + const { ssoClient } = init; + const profileName = (0, import_shared_ini_file_loader.getProfileName)({ + profile: init.profile ?? callerClientConfig?.profile + }); + if (!ssoStartUrl && !ssoAccountId && !ssoRegion && !ssoRoleName && !ssoSession) { + const profiles = await (0, import_shared_ini_file_loader.parseKnownFiles)(init); + const profile = profiles[profileName]; + if (!profile) { + throw new import_property_provider.CredentialsProviderError(`Profile ${profileName} was not found.`, { logger: init.logger }); + } + if (!isSsoProfile(profile)) { + throw new import_property_provider.CredentialsProviderError(`Profile ${profileName} is not configured with SSO credentials.`, { + logger: init.logger + }); + } + if (profile?.sso_session) { + const ssoSessions = await (0, import_shared_ini_file_loader.loadSsoSessionData)(init); + const session = ssoSessions[profile.sso_session]; + const conflictMsg = ` configurations in profile ${profileName} and sso-session ${profile.sso_session}`; + if (ssoRegion && ssoRegion !== session.sso_region) { + throw new import_property_provider.CredentialsProviderError(`Conflicting SSO region` + conflictMsg, { + tryNextLink: false, + logger: init.logger + }); + } + if (ssoStartUrl && ssoStartUrl !== session.sso_start_url) { + throw new import_property_provider.CredentialsProviderError(`Conflicting SSO start_url` + conflictMsg, { + tryNextLink: false, + logger: init.logger + }); + } + profile.sso_region = session.sso_region; + profile.sso_start_url = session.sso_start_url; + } + const { sso_start_url, sso_account_id, sso_region, sso_role_name, sso_session } = validateSsoProfile( + profile, + init.logger + ); + return resolveSSOCredentials({ + ssoStartUrl: sso_start_url, + ssoSession: sso_session, + ssoAccountId: sso_account_id, + ssoRegion: sso_region, + ssoRoleName: sso_role_name, + ssoClient, + clientConfig: init.clientConfig, + parentClientConfig: init.parentClientConfig, + profile: profileName + }); + } else if (!ssoStartUrl || !ssoAccountId || !ssoRegion || !ssoRoleName) { + throw new import_property_provider.CredentialsProviderError( + 'Incomplete configuration. 
The fromSSO() argument hash must include "ssoStartUrl", "ssoAccountId", "ssoRegion", "ssoRoleName"', + { tryNextLink: false, logger: init.logger } + ); + } else { + return resolveSSOCredentials({ + ssoStartUrl, + ssoSession, + ssoAccountId, + ssoRegion, + ssoRoleName, + ssoClient, + clientConfig: init.clientConfig, + parentClientConfig: init.parentClientConfig, + profile: profileName + }); + } +}, "fromSSO"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromSSO, + isSsoProfile, + validateSsoProfile +}); + diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-es/fromSSO.js b/node_modules/@aws-sdk/credential-provider-sso/dist-es/fromSSO.js new file mode 100644 index 00000000..75f18603 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-es/fromSSO.js @@ -0,0 +1,73 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { getProfileName, loadSsoSessionData, parseKnownFiles } from "@smithy/shared-ini-file-loader"; +import { isSsoProfile } from "./isSsoProfile"; +import { resolveSSOCredentials } from "./resolveSSOCredentials"; +import { validateSsoProfile } from "./validateSsoProfile"; +export const fromSSO = (init = {}) => async ({ callerClientConfig } = {}) => { + init.logger?.debug("@aws-sdk/credential-provider-sso - fromSSO"); + const { ssoStartUrl, ssoAccountId, ssoRegion, ssoRoleName, ssoSession } = init; + const { ssoClient } = init; + const profileName = getProfileName({ + profile: init.profile ?? callerClientConfig?.profile, + }); + if (!ssoStartUrl && !ssoAccountId && !ssoRegion && !ssoRoleName && !ssoSession) { + const profiles = await parseKnownFiles(init); + const profile = profiles[profileName]; + if (!profile) { + throw new CredentialsProviderError(`Profile ${profileName} was not found.`, { logger: init.logger }); + } + if (!isSsoProfile(profile)) { + throw new CredentialsProviderError(`Profile ${profileName} is not configured with SSO credentials.`, { + logger: init.logger, + }); + } + if (profile?.sso_session) { + const ssoSessions = await loadSsoSessionData(init); + const session = ssoSessions[profile.sso_session]; + const conflictMsg = ` configurations in profile ${profileName} and sso-session ${profile.sso_session}`; + if (ssoRegion && ssoRegion !== session.sso_region) { + throw new CredentialsProviderError(`Conflicting SSO region` + conflictMsg, { + tryNextLink: false, + logger: init.logger, + }); + } + if (ssoStartUrl && ssoStartUrl !== session.sso_start_url) { + throw new CredentialsProviderError(`Conflicting SSO start_url` + conflictMsg, { + tryNextLink: false, + logger: init.logger, + }); + } + profile.sso_region = session.sso_region; + profile.sso_start_url = session.sso_start_url; + } + const { sso_start_url, sso_account_id, sso_region, sso_role_name, sso_session } = validateSsoProfile(profile, init.logger); + return resolveSSOCredentials({ + ssoStartUrl: sso_start_url, + ssoSession: sso_session, + ssoAccountId: sso_account_id, + ssoRegion: sso_region, + ssoRoleName: sso_role_name, + ssoClient: ssoClient, + clientConfig: init.clientConfig, + parentClientConfig: init.parentClientConfig, + profile: profileName, + }); + } + else if (!ssoStartUrl || !ssoAccountId || !ssoRegion || !ssoRoleName) { + throw new CredentialsProviderError("Incomplete configuration. 
The fromSSO() argument hash must include " + + '"ssoStartUrl", "ssoAccountId", "ssoRegion", "ssoRoleName"', { tryNextLink: false, logger: init.logger }); + } + else { + return resolveSSOCredentials({ + ssoStartUrl, + ssoSession, + ssoAccountId, + ssoRegion, + ssoRoleName, + ssoClient, + clientConfig: init.clientConfig, + parentClientConfig: init.parentClientConfig, + profile: profileName, + }); + } +}; diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-es/index.js b/node_modules/@aws-sdk/credential-provider-sso/dist-es/index.js new file mode 100644 index 00000000..7215fb68 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-es/index.js @@ -0,0 +1,4 @@ +export * from "./fromSSO"; +export * from "./isSsoProfile"; +export * from "./types"; +export * from "./validateSsoProfile"; diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-es/isSsoProfile.js b/node_modules/@aws-sdk/credential-provider-sso/dist-es/isSsoProfile.js new file mode 100644 index 00000000..e6554380 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-es/isSsoProfile.js @@ -0,0 +1,6 @@ +export const isSsoProfile = (arg) => arg && + (typeof arg.sso_start_url === "string" || + typeof arg.sso_account_id === "string" || + typeof arg.sso_session === "string" || + typeof arg.sso_region === "string" || + typeof arg.sso_role_name === "string"); diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-es/loadSso.js b/node_modules/@aws-sdk/credential-provider-sso/dist-es/loadSso.js new file mode 100644 index 00000000..6a4414f3 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-es/loadSso.js @@ -0,0 +1,2 @@ +import { GetRoleCredentialsCommand, SSOClient } from "@aws-sdk/client-sso"; +export { GetRoleCredentialsCommand, SSOClient }; diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-es/resolveSSOCredentials.js b/node_modules/@aws-sdk/credential-provider-sso/dist-es/resolveSSOCredentials.js new file mode 100644 index 00000000..979d9b20 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-es/resolveSSOCredentials.js @@ -0,0 +1,84 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { fromSso as getSsoTokenProvider } from "@aws-sdk/token-providers"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import { getSSOTokenFromFile } from "@smithy/shared-ini-file-loader"; +const SHOULD_FAIL_CREDENTIAL_CHAIN = false; +export const resolveSSOCredentials = async ({ ssoStartUrl, ssoSession, ssoAccountId, ssoRegion, ssoRoleName, ssoClient, clientConfig, parentClientConfig, profile, logger, }) => { + let token; + const refreshMessage = `To refresh this SSO session run aws sso login with the corresponding profile.`; + if (ssoSession) { + try { + const _token = await getSsoTokenProvider({ profile })(); + token = { + accessToken: _token.token, + expiresAt: new Date(_token.expiration).toISOString(), + }; + } + catch (e) { + throw new CredentialsProviderError(e.message, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger, + }); + } + } + else { + try { + token = await getSSOTokenFromFile(ssoStartUrl); + } + catch (e) { + throw new CredentialsProviderError(`The SSO session associated with this profile is invalid. ${refreshMessage}`, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger, + }); + } + } + if (new Date(token.expiresAt).getTime() - Date.now() <= 0) { + throw new CredentialsProviderError(`The SSO session associated with this profile has expired. 
${refreshMessage}`, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger, + }); + } + const { accessToken } = token; + const { SSOClient, GetRoleCredentialsCommand } = await import("./loadSso"); + const sso = ssoClient || + new SSOClient(Object.assign({}, clientConfig ?? {}, { + logger: clientConfig?.logger ?? parentClientConfig?.logger, + region: clientConfig?.region ?? ssoRegion, + })); + let ssoResp; + try { + ssoResp = await sso.send(new GetRoleCredentialsCommand({ + accountId: ssoAccountId, + roleName: ssoRoleName, + accessToken, + })); + } + catch (e) { + throw new CredentialsProviderError(e, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger, + }); + } + const { roleCredentials: { accessKeyId, secretAccessKey, sessionToken, expiration, credentialScope, accountId } = {}, } = ssoResp; + if (!accessKeyId || !secretAccessKey || !sessionToken || !expiration) { + throw new CredentialsProviderError("SSO returns an invalid temporary credential.", { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger, + }); + } + const credentials = { + accessKeyId, + secretAccessKey, + sessionToken, + expiration: new Date(expiration), + ...(credentialScope && { credentialScope }), + ...(accountId && { accountId }), + }; + if (ssoSession) { + setCredentialFeature(credentials, "CREDENTIALS_SSO", "s"); + } + else { + setCredentialFeature(credentials, "CREDENTIALS_SSO_LEGACY", "u"); + } + return credentials; +}; diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-es/types.js b/node_modules/@aws-sdk/credential-provider-sso/dist-es/types.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-es/validateSsoProfile.js b/node_modules/@aws-sdk/credential-provider-sso/dist-es/validateSsoProfile.js new file mode 100644 index 00000000..94174b68 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-es/validateSsoProfile.js @@ -0,0 +1,9 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +export const validateSsoProfile = (profile, logger) => { + const { sso_start_url, sso_account_id, sso_region, sso_role_name } = profile; + if (!sso_start_url || !sso_account_id || !sso_region || !sso_role_name) { + throw new CredentialsProviderError(`Profile is configured with invalid SSO credentials. Required parameters "sso_account_id", ` + + `"sso_region", "sso_role_name", "sso_start_url". Got ${Object.keys(profile).join(", ")}\nReference: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.html`, { tryNextLink: false, logger }); + } + return profile; +}; diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-types/fromSSO.d.ts b/node_modules/@aws-sdk/credential-provider-sso/dist-types/fromSSO.d.ts new file mode 100644 index 00000000..47521a60 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-types/fromSSO.d.ts @@ -0,0 +1,68 @@ +import type { CredentialProviderOptions, RuntimeConfigAwsCredentialIdentityProvider } from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import type { SSOClient, SSOClientConfig } from "./loadSso"; +/** + * @internal + */ +export interface SsoCredentialsParameters { + /** + * The URL to the AWS SSO service. + */ + ssoStartUrl: string; + /** + * SSO session identifier. + * Presence implies usage of the SSOTokenProvider. 
+ */ + ssoSession?: string; + /** + * The ID of the AWS account to use for temporary credentials. + */ + ssoAccountId: string; + /** + * The AWS region to use for temporary credentials. + */ + ssoRegion: string; + /** + * The name of the AWS role to assume. + */ + ssoRoleName: string; +} +/** + * @internal + */ +export interface FromSSOInit extends SourceProfileInit, CredentialProviderOptions { + ssoClient?: SSOClient; + clientConfig?: SSOClientConfig; +} +/** + * @internal + * + * Creates a credential provider that will read from a credential_process specified + * in ini files. + * + * The SSO credential provider must support both + * + * 1. the legacy profile format, + * @example + * ``` + * [profile sample-profile] + * sso_account_id = 012345678901 + * sso_region = us-east-1 + * sso_role_name = SampleRole + * sso_start_url = https://www.....com/start + * ``` + * + * 2. and the profile format for SSO Token Providers. + * @example + * ``` + * [profile sso-profile] + * sso_session = dev + * sso_account_id = 012345678901 + * sso_role_name = SampleRole + * + * [sso-session dev] + * sso_region = us-east-1 + * sso_start_url = https://www.....com/start + * ``` + */ +export declare const fromSSO: (init?: FromSSOInit & Partial<SsoCredentialsParameters>) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-types/index.d.ts b/node_modules/@aws-sdk/credential-provider-sso/dist-types/index.d.ts new file mode 100644 index 00000000..d851c152 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-types/index.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + */ +export * from "./fromSSO"; +/** + * @internal + */ +export * from "./isSsoProfile"; +/** + * @internal + */ +export * from "./types"; +/** + * @internal + */ +export * from "./validateSsoProfile"; diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-types/isSsoProfile.d.ts b/node_modules/@aws-sdk/credential-provider-sso/dist-types/isSsoProfile.d.ts new file mode 100644 index 00000000..77c1fb23 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-types/isSsoProfile.d.ts @@ -0,0 +1,6 @@ +import { Profile } from "@smithy/types"; +import { SsoProfile } from "./types"; +/** + * @internal + */ +export declare const isSsoProfile: (arg: Profile) => arg is Partial<SsoProfile>; diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-types/loadSso.d.ts b/node_modules/@aws-sdk/credential-provider-sso/dist-types/loadSso.d.ts new file mode 100644 index 00000000..f44232fe --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-types/loadSso.d.ts @@ -0,0 +1,3 @@ +import { GetRoleCredentialsCommand, SSOClient } from "@aws-sdk/client-sso"; +export { GetRoleCredentialsCommand, SSOClient }; +export type { SSOClientConfig, GetRoleCredentialsCommandOutput } from "@aws-sdk/client-sso"; diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-types/resolveSSOCredentials.d.ts b/node_modules/@aws-sdk/credential-provider-sso/dist-types/resolveSSOCredentials.d.ts new file mode 100644 index 00000000..e4e3fccb --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-types/resolveSSOCredentials.d.ts @@ -0,0 +1,6 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +import { FromSSOInit, SsoCredentialsParameters } from "./fromSSO"; +/** + * @internal + */ +export declare const resolveSSOCredentials: ({ ssoStartUrl, ssoSession, ssoAccountId, ssoRegion, ssoRoleName, ssoClient, clientConfig, parentClientConfig, profile, logger, }: FromSSOInit & SsoCredentialsParameters) => 
Promise<AwsCredentialIdentity>; diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/fromSSO.d.ts b/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/fromSSO.d.ts new file mode 100644 index 00000000..bf50b63c --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/fromSSO.d.ts @@ -0,0 +1,22 @@ +import { + CredentialProviderOptions, + RuntimeConfigAwsCredentialIdentityProvider, +} from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import { SSOClient, SSOClientConfig } from "./loadSso"; +export interface SsoCredentialsParameters { + ssoStartUrl: string; + ssoSession?: string; + ssoAccountId: string; + ssoRegion: string; + ssoRoleName: string; +} +export interface FromSSOInit + extends SourceProfileInit, + CredentialProviderOptions { + ssoClient?: SSOClient; + clientConfig?: SSOClientConfig; +} +export declare const fromSSO: ( + init?: FromSSOInit & Partial<SsoCredentialsParameters> +) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..7215fb68 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +export * from "./fromSSO"; +export * from "./isSsoProfile"; +export * from "./types"; +export * from "./validateSsoProfile"; diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/isSsoProfile.d.ts b/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/isSsoProfile.d.ts new file mode 100644 index 00000000..b4e8bddd --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/isSsoProfile.d.ts @@ -0,0 +1,3 @@ +import { Profile } from "@smithy/types"; +import { SsoProfile } from "./types"; +export declare const isSsoProfile: (arg: Profile) => arg is Partial<SsoProfile>; diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/loadSso.d.ts b/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/loadSso.d.ts new file mode 100644 index 00000000..2d3249fc --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/loadSso.d.ts @@ -0,0 +1,6 @@ +import { GetRoleCredentialsCommand, SSOClient } from "@aws-sdk/client-sso"; +export { GetRoleCredentialsCommand, SSOClient }; +export { + SSOClientConfig, + GetRoleCredentialsCommandOutput, +} from "@aws-sdk/client-sso"; diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/resolveSSOCredentials.d.ts b/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/resolveSSOCredentials.d.ts new file mode 100644 index 00000000..cc165545 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/resolveSSOCredentials.d.ts @@ -0,0 +1,14 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +import { FromSSOInit, SsoCredentialsParameters } from "./fromSSO"; +export declare const resolveSSOCredentials: ({ + ssoStartUrl, + ssoSession, + ssoAccountId, + ssoRegion, + ssoRoleName, + ssoClient, + clientConfig, + parentClientConfig, + profile, + logger, +}: FromSSOInit & SsoCredentialsParameters) => Promise<AwsCredentialIdentity>; diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/types.d.ts b/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/types.d.ts new file mode 100644 index 00000000..4a3986b3 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/types.d.ts @@ -0,0 +1,14 @@ +import { 
Profile } from "@smithy/types"; +export interface SSOToken { + accessToken: string; + expiresAt: string; + region?: string; + startUrl?: string; +} +export interface SsoProfile extends Profile { + sso_start_url: string; + sso_session?: string; + sso_account_id: string; + sso_region: string; + sso_role_name: string; +} diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/validateSsoProfile.d.ts b/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/validateSsoProfile.d.ts new file mode 100644 index 00000000..6572fc43 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/validateSsoProfile.d.ts @@ -0,0 +1,6 @@ +import { Logger } from "@smithy/types"; +import { SsoProfile } from "./types"; +export declare const validateSsoProfile: ( + profile: Partial<SsoProfile>, + logger?: Logger +) => SsoProfile; diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-types/types.d.ts b/node_modules/@aws-sdk/credential-provider-sso/dist-types/types.d.ts new file mode 100644 index 00000000..551d6784 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-types/types.d.ts @@ -0,0 +1,22 @@ +import { Profile } from "@smithy/types"; +/** + * @internal + * + * Cached SSO token retrieved from SSO login flow. + */ +export interface SSOToken { + accessToken: string; + expiresAt: string; + region?: string; + startUrl?: string; +} +/** + * @internal + */ +export interface SsoProfile extends Profile { + sso_start_url: string; + sso_session?: string; + sso_account_id: string; + sso_region: string; + sso_role_name: string; +} diff --git a/node_modules/@aws-sdk/credential-provider-sso/dist-types/validateSsoProfile.d.ts b/node_modules/@aws-sdk/credential-provider-sso/dist-types/validateSsoProfile.d.ts new file mode 100644 index 00000000..8b0ab31f --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/dist-types/validateSsoProfile.d.ts @@ -0,0 +1,6 @@ +import { Logger } from "@smithy/types"; +import { SsoProfile } from "./types"; +/** + * @internal + */ +export declare const validateSsoProfile: (profile: Partial<SsoProfile>, logger?: Logger) => SsoProfile; diff --git a/node_modules/@aws-sdk/credential-provider-sso/package.json b/node_modules/@aws-sdk/credential-provider-sso/package.json new file mode 100644 index 00000000..3212ebd4 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-sso/package.json @@ -0,0 +1,65 @@ +{ + "name": "@aws-sdk/credential-provider-sso", + "version": "3.799.0", + "description": "AWS credential provider that exchanges a resolved SSO login token file for temporary AWS credentials", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-sso", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/client-sso": "3.799.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/token-providers": "3.799.0", + "@aws-sdk/types": 
"3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-sso", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-sso" + } +} diff --git a/node_modules/@aws-sdk/credential-provider-web-identity/LICENSE b/node_modules/@aws-sdk/credential-provider-web-identity/LICENSE new file mode 100644 index 00000000..f9a66739 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-web-identity/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@aws-sdk/credential-provider-web-identity/README.md b/node_modules/@aws-sdk/credential-provider-web-identity/README.md new file mode 100644 index 00000000..e4858a41 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-web-identity/README.md @@ -0,0 +1,11 @@ +# @aws-sdk/credential-provider-web-identity + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-web-identity/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-web-identity) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-web-identity.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-web-identity) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@aws-sdk/credential-providers](https://www.npmjs.com/package/@aws-sdk/credential-providers) +instead. 
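Reviewer note: both vendored READMEs above steer applications to the public `@aws-sdk/credential-providers` aggregate package rather than these internal `credential-provider-*` packages. A minimal sketch of that recommended usage follows; the S3 client, region, profile name, and bucket listing are illustrative assumptions for this PR's storage migration, not values taken from this diff:

```ts
import { ListBucketsCommand, S3Client } from "@aws-sdk/client-s3";
import { fromSSO, fromTokenFile } from "@aws-sdk/credential-providers";

// SSO credentials resolved from ~/.aws/config (assumes a configured
// [profile my-sso-profile] section; run `aws sso login` beforehand).
const ssoClient = new S3Client({
  region: "us-east-1",
  credentials: fromSSO({ profile: "my-sso-profile" }),
});

// Web-identity credentials resolved from the AWS_WEB_IDENTITY_TOKEN_FILE
// and AWS_ROLE_ARN environment variables (e.g. EKS service-account roles).
const webIdentityClient = new S3Client({
  region: "us-east-1",
  credentials: fromTokenFile(),
});

// Either client can then be used normally.
await ssoClient.send(new ListBucketsCommand({}));
await webIdentityClient.send(new ListBucketsCommand({}));
```

Using the aggregate package keeps the internal providers free to change without notice, which is why the READMEs warn against importing them directly.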
diff --git a/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromTokenFile.js b/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromTokenFile.js new file mode 100644 index 00000000..2be77277 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromTokenFile.js @@ -0,0 +1,32 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromTokenFile = void 0; +const client_1 = require("@aws-sdk/core/client"); +const property_provider_1 = require("@smithy/property-provider"); +const fs_1 = require("fs"); +const fromWebToken_1 = require("./fromWebToken"); +const ENV_TOKEN_FILE = "AWS_WEB_IDENTITY_TOKEN_FILE"; +const ENV_ROLE_ARN = "AWS_ROLE_ARN"; +const ENV_ROLE_SESSION_NAME = "AWS_ROLE_SESSION_NAME"; +const fromTokenFile = (init = {}) => async () => { + init.logger?.debug("@aws-sdk/credential-provider-web-identity - fromTokenFile"); + const webIdentityTokenFile = init?.webIdentityTokenFile ?? process.env[ENV_TOKEN_FILE]; + const roleArn = init?.roleArn ?? process.env[ENV_ROLE_ARN]; + const roleSessionName = init?.roleSessionName ?? process.env[ENV_ROLE_SESSION_NAME]; + if (!webIdentityTokenFile || !roleArn) { + throw new property_provider_1.CredentialsProviderError("Web identity configuration not specified", { + logger: init.logger, + }); + } + const credentials = await (0, fromWebToken_1.fromWebToken)({ + ...init, + webIdentityToken: (0, fs_1.readFileSync)(webIdentityTokenFile, { encoding: "ascii" }), + roleArn, + roleSessionName, + })(); + if (webIdentityTokenFile === process.env[ENV_TOKEN_FILE]) { + (0, client_1.setCredentialFeature)(credentials, "CREDENTIALS_ENV_VARS_STS_WEB_ID_TOKEN", "h"); + } + return credentials; +}; +exports.fromTokenFile = fromTokenFile; diff --git a/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromWebToken.js b/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromWebToken.js new file mode 100644 index 00000000..f8eafde8 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromWebToken.js @@ -0,0 +1,52 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromWebToken = void 0; +const fromWebToken = (init) => async (awsIdentityProperties) => { + init.logger?.debug("@aws-sdk/credential-provider-web-identity - fromWebToken"); + const { roleArn, roleSessionName, webIdentityToken, providerId, policyArns, policy, durationSeconds } = init; + let { roleAssumerWithWebIdentity } = init; + if (!roleAssumerWithWebIdentity) { + const { getDefaultRoleAssumerWithWebIdentity } = await Promise.resolve().then(() => __importStar(require("@aws-sdk/nested-clients/sts"))); + roleAssumerWithWebIdentity = getDefaultRoleAssumerWithWebIdentity({ + ...init.clientConfig, + credentialProviderLogger: init.logger, + parentClientConfig: { + ...awsIdentityProperties?.callerClientConfig, + ...init.parentClientConfig, + }, + }, init.clientPlugins); + } + return roleAssumerWithWebIdentity({ + RoleArn: roleArn, + RoleSessionName: roleSessionName ?? `aws-sdk-js-session-${Date.now()}`, + WebIdentityToken: webIdentityToken, + ProviderId: providerId, + PolicyArns: policyArns, + Policy: policy, + DurationSeconds: durationSeconds, + }); +}; +exports.fromWebToken = fromWebToken; diff --git a/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/index.js b/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/index.js new file mode 100644 index 00000000..5dc29db5 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/index.js @@ -0,0 +1,28 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +module.exports = __toCommonJS(index_exports); +__reExport(index_exports, require("././fromTokenFile"), module.exports); +__reExport(index_exports, require("././fromWebToken"), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromTokenFile, + fromWebToken +}); + diff --git a/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromTokenFile.js b/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromTokenFile.js new file mode 100644 index 00000000..64a50325 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromTokenFile.js @@ -0,0 +1,28 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { 
CredentialsProviderError } from "@smithy/property-provider"; +import { readFileSync } from "fs"; +import { fromWebToken } from "./fromWebToken"; +const ENV_TOKEN_FILE = "AWS_WEB_IDENTITY_TOKEN_FILE"; +const ENV_ROLE_ARN = "AWS_ROLE_ARN"; +const ENV_ROLE_SESSION_NAME = "AWS_ROLE_SESSION_NAME"; +export const fromTokenFile = (init = {}) => async () => { + init.logger?.debug("@aws-sdk/credential-provider-web-identity - fromTokenFile"); + const webIdentityTokenFile = init?.webIdentityTokenFile ?? process.env[ENV_TOKEN_FILE]; + const roleArn = init?.roleArn ?? process.env[ENV_ROLE_ARN]; + const roleSessionName = init?.roleSessionName ?? process.env[ENV_ROLE_SESSION_NAME]; + if (!webIdentityTokenFile || !roleArn) { + throw new CredentialsProviderError("Web identity configuration not specified", { + logger: init.logger, + }); + } + const credentials = await fromWebToken({ + ...init, + webIdentityToken: readFileSync(webIdentityTokenFile, { encoding: "ascii" }), + roleArn, + roleSessionName, + })(); + if (webIdentityTokenFile === process.env[ENV_TOKEN_FILE]) { + setCredentialFeature(credentials, "CREDENTIALS_ENV_VARS_STS_WEB_ID_TOKEN", "h"); + } + return credentials; +}; diff --git a/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromWebToken.js b/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromWebToken.js new file mode 100644 index 00000000..268e0aab --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromWebToken.js @@ -0,0 +1,25 @@ +export const fromWebToken = (init) => async (awsIdentityProperties) => { + init.logger?.debug("@aws-sdk/credential-provider-web-identity - fromWebToken"); + const { roleArn, roleSessionName, webIdentityToken, providerId, policyArns, policy, durationSeconds } = init; + let { roleAssumerWithWebIdentity } = init; + if (!roleAssumerWithWebIdentity) { + const { getDefaultRoleAssumerWithWebIdentity } = await import("@aws-sdk/nested-clients/sts"); + roleAssumerWithWebIdentity = getDefaultRoleAssumerWithWebIdentity({ + ...init.clientConfig, + credentialProviderLogger: init.logger, + parentClientConfig: { + ...awsIdentityProperties?.callerClientConfig, + ...init.parentClientConfig, + }, + }, init.clientPlugins); + } + return roleAssumerWithWebIdentity({ + RoleArn: roleArn, + RoleSessionName: roleSessionName ?? 
`aws-sdk-js-session-${Date.now()}`, + WebIdentityToken: webIdentityToken, + ProviderId: providerId, + PolicyArns: policyArns, + Policy: policy, + DurationSeconds: durationSeconds, + }); +}; diff --git a/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/index.js b/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/index.js new file mode 100644 index 00000000..0e900c0a --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./fromTokenFile"; +export * from "./fromWebToken";
diff --git a/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromTokenFile.d.ts b/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromTokenFile.d.ts new file mode 100644 index 00000000..58f885f7 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromTokenFile.d.ts @@ -0,0 +1,18 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +import type { AwsCredentialIdentityProvider } from "@smithy/types"; +import { FromWebTokenInit } from "./fromWebToken"; +/** + * @public + */ +export interface FromTokenFileInit extends Partial<Omit<FromWebTokenInit, "roleSessionName">>, CredentialProviderOptions { + /** + * File location of where the `OIDC` token is stored. + */ + webIdentityTokenFile?: string; +} +/** + * @internal + * + * Represents OIDC credentials from a file on disk. + */ +export declare const fromTokenFile: (init?: FromTokenFileInit) => AwsCredentialIdentityProvider;
diff --git a/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromWebToken.d.ts b/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromWebToken.d.ts new file mode 100644 index 00000000..6b5e066c --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromWebToken.d.ts @@ -0,0 +1,145 @@ +import type { CredentialProviderOptions, RuntimeConfigAwsCredentialIdentityProvider } from "@aws-sdk/types"; +import type { AwsCredentialIdentity, Pluggable } from "@smithy/types"; +/** + * @public + */ +export interface AssumeRoleWithWebIdentityParams { + /** + * The Amazon Resource Name (ARN) of the role that the caller is assuming. + */ + RoleArn: string;
+ /** + * An identifier for the assumed role session. Typically, you pass the name or identifier + * that is associated with the user who is using your application. That way, the temporary + * security credentials that your application will use are associated with that user. This + * session name is included as part of the ARN and assumed role ID in the + * AssumedRoleUser response element. + * The regex used to validate this parameter is a string of characters + * consisting of upper- and lower-case alphanumeric characters with no spaces. You can + * also include underscores or any of the following characters: =,.@- + */ + RoleSessionName: string;
+ /** + * The OAuth 2.0 access token or OpenID Connect ID token that is provided by the identity + * provider. Your application must get this token by authenticating the user who is using your + * application with a web identity provider before the application makes an + * AssumeRoleWithWebIdentity call. + */ + WebIdentityToken: string;
+ /** + * The fully qualified host component of the domain name of the identity provider. + * Specify this value only for OAuth 2.0 access tokens. Currently + * www.amazon.com and graph.facebook.com are the only supported + * identity providers for OAuth 2.0 access tokens. Do not include URL schemes and port + * numbers. + * Do not specify this value for OpenID Connect ID tokens. + */ + ProviderId?: string;
+ /** + * The Amazon Resource Names (ARNs) of the IAM managed policies that you want to use as + * managed session policies. The policies must exist in the same account as the role. + * This parameter is optional. You can provide up to 10 managed policy ARNs. However, the + * plain text that you use for both inline and managed session policies can't exceed 2,048 + * characters. For more information about ARNs, see Amazon Resource Names (ARNs) and AWS + * Service Namespaces in the AWS General Reference. + * An AWS conversion compresses the passed session policies and session tags into a + * packed binary format that has a separate limit. Your request can fail for this limit + * even if your plain text meets the other requirements. The PackedPolicySize + * response element indicates by percentage how close the policies and tags for your + * request are to the upper size limit. + * Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent AWS API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide. + */ + PolicyArns?: { + arn?: string; + }[];
+ /** + * An IAM policy in JSON format that you want to use as an inline session policy. + * This parameter is optional. Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent AWS API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide. + * The plain text that you use for both inline and managed session policies can't exceed + * 2,048 characters. The JSON policy characters can be any ASCII character from the space + * character to the end of the valid character list (\u0020 through \u00FF). It can also + * include the tab (\u0009), linefeed (\u000A), and carriage return (\u000D) + * characters. + * An AWS conversion compresses the passed session policies and session tags into a + * packed binary format that has a separate limit. Your request can fail for this limit + * even if your plain text meets the other requirements. The PackedPolicySize + * response element indicates by percentage how close the policies and tags for your + * request are to the upper size limit. + */ + Policy?: string;
+ /** + * The duration, in seconds, of the role session. The value can range from 900 seconds (15 + * minutes) up to the maximum session duration setting for the role. This setting can have a + * value from 1 hour to 12 hours. If you specify a value higher than this setting, the + * operation fails. For example, if you specify a session duration of 12 hours, but your + * administrator set the maximum session duration to 6 hours, your operation fails. To learn + * how to view the maximum value for your role, see View the + * Maximum Session Duration Setting for a Role in the + * IAM User Guide. + * By default, the value is set to 3600 seconds. + * The DurationSeconds parameter is separate from the duration of a console + * session that you might request using the returned credentials. The request to the + * federation endpoint for a console sign-in token takes a SessionDuration + * parameter that specifies the maximum length of the console session. For more + * information, see Creating a URL + * that Enables Federated Users to Access the AWS Management Console in the + * IAM User Guide.
+ */ + DurationSeconds?: number; +} +type LowerCaseKey<T> = { + [K in keyof T as `${Uncapitalize<string & K>}`]: T[K]; +}; +/** + * @public + */ +export interface FromWebTokenInit extends Omit<LowerCaseKey<AssumeRoleWithWebIdentityParams>, "roleSessionName">, CredentialProviderOptions { + /** + * The IAM session name used to distinguish sessions. + */ + roleSessionName?: string; + /** + * A function that assumes a role with web identity and returns a promise fulfilled with + * credentials for the assumed role. + * + * @param params input parameter of sts:AssumeRoleWithWebIdentity API. + */ + roleAssumerWithWebIdentity?: (params: AssumeRoleWithWebIdentityParams) => Promise<AwsCredentialIdentity>; + /** + * STSClientConfig to be used for creating STS Client for assuming role. + * @internal + */ + clientConfig?: any; + /** + * @internal + */ + clientPlugins?: Pluggable[]; +} +/** + * @internal + */ +export declare const fromWebToken: (init: FromWebTokenInit) => RuntimeConfigAwsCredentialIdentityProvider; +export {};
diff --git a/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/index.d.ts b/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/index.d.ts new file mode 100644 index 00000000..36c15dcd --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./fromTokenFile"; +/** + * @internal + */ +export * from "./fromWebToken";
diff --git a/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromTokenFile.d.ts b/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromTokenFile.d.ts new file mode 100644 index 00000000..4f673560 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromTokenFile.d.ts @@ -0,0 +1,16 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import { FromWebTokenInit } from "./fromWebToken"; +export interface FromTokenFileInit + extends Partial< + Pick< + FromWebTokenInit, + Exclude<keyof FromWebTokenInit, "roleSessionName"> + > + >, + CredentialProviderOptions { + webIdentityTokenFile?: string; +} +export declare const fromTokenFile: ( + init?: FromTokenFileInit +) => AwsCredentialIdentityProvider;
diff --git a/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromWebToken.d.ts b/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromWebToken.d.ts new file mode 100644 index 00000000..73529a14 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromWebToken.d.ts @@ -0,0 +1,39 @@ +import { + CredentialProviderOptions, + RuntimeConfigAwsCredentialIdentityProvider, +} from "@aws-sdk/types"; +import { AwsCredentialIdentity, Pluggable } from "@smithy/types"; +export interface AssumeRoleWithWebIdentityParams { + RoleArn: string; + RoleSessionName: string; + WebIdentityToken: string; + ProviderId?: string; + PolicyArns?: { + arn?: string; + }[]; + Policy?: string; + DurationSeconds?: number; +} +type LowerCaseKey<T> = { + [K in keyof T as `${Uncapitalize<string & K>}`]: T[K]; +}; +export interface FromWebTokenInit + extends Pick< + LowerCaseKey<AssumeRoleWithWebIdentityParams>, + Exclude< + keyof LowerCaseKey<AssumeRoleWithWebIdentityParams>, + "roleSessionName" + > + >, + CredentialProviderOptions { + roleSessionName?: string; + roleAssumerWithWebIdentity?: ( + params: AssumeRoleWithWebIdentityParams + ) => Promise<AwsCredentialIdentity>; + clientConfig?: any; + clientPlugins?: Pluggable[]; +} +export declare const fromWebToken: ( + init: FromWebTokenInit +) => RuntimeConfigAwsCredentialIdentityProvider; +export {}; diff --git
a/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..0e900c0a --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./fromTokenFile"; +export * from "./fromWebToken"; diff --git a/node_modules/@aws-sdk/credential-provider-web-identity/package.json b/node_modules/@aws-sdk/credential-provider-web-identity/package.json new file mode 100644 index 00000000..7a2d0ac7 --- /dev/null +++ b/node_modules/@aws-sdk/credential-provider-web-identity/package.json @@ -0,0 +1,71 @@ +{ + "name": "@aws-sdk/credential-provider-web-identity", + "version": "3.799.0", + "description": "AWS credential provider that calls STS assumeRole for temporary AWS credentials", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-web-identity", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "browser": { + "./dist-cjs/fromTokenFile": false, + "./dist-es/fromTokenFile": false + }, + "react-native": { + "./dist-es/fromTokenFile": false, + "./dist-cjs/fromTokenFile": false + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/nested-clients": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-web-identity", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-web-identity" + } +} diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/LICENSE b/node_modules/@aws-sdk/middleware-bucket-endpoint/LICENSE new file mode 100644 index 00000000..dd65ae06 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
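For context before the middleware files that follow: the vendored @aws-sdk/credential-provider-web-identity package above implements the standard web-identity flow. fromTokenFile reads an OIDC token from disk (falling back to the AWS_WEB_IDENTITY_TOKEN_FILE, AWS_ROLE_ARN and AWS_ROLE_SESSION_NAME environment variables) and delegates to fromWebToken, which exchanges the token for temporary credentials via sts:AssumeRoleWithWebIdentity. A minimal usage sketch, not part of the diff itself: the region, token path, role ARN and bucket name are illustrative placeholders, and it assumes @aws-sdk/client-s3 is installed alongside these vendored packages.

import { S3Client, ListObjectsV2Command } from "@aws-sdk/client-s3";
import { fromTokenFile } from "@aws-sdk/credential-provider-web-identity";

// Sketch only: wire the vendored web-identity provider into an S3 client.
const s3 = new S3Client({
  region: "us-east-1", // placeholder region
  credentials: fromTokenFile({
    // Both fields fall back to AWS_WEB_IDENTITY_TOKEN_FILE / AWS_ROLE_ARN
    // when omitted, per the fromTokenFile.js shown above.
    webIdentityTokenFile: "/var/run/secrets/tokens/oidc-token", // hypothetical path
    roleArn: "arn:aws:iam::123456789012:role/example-role", // hypothetical role
  }),
});

// roleSessionName is optional; fromWebToken defaults it to `aws-sdk-js-session-${Date.now()}`.
await s3.send(new ListObjectsV2Command({ Bucket: "example-bucket" })); // hypothetical bucket

Note that, per the fromTokenFile.js above, the token file is re-read each time the provider runs, so a rotated token is picked up on the next credential refresh without restarting the process.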
diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/README.md b/node_modules/@aws-sdk/middleware-bucket-endpoint/README.md new file mode 100644 index 00000000..99cd3c5f --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/middleware-bucket-endpoint + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-bucket-endpoint/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-bucket-endpoint) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-bucket-endpoint.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-bucket-endpoint) diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-cjs/index.js b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-cjs/index.js new file mode 100644 index 00000000..76986ba8 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-cjs/index.js @@ -0,0 +1,497 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + NODE_DISABLE_MULTIREGION_ACCESS_POINT_CONFIG_OPTIONS: () => NODE_DISABLE_MULTIREGION_ACCESS_POINT_CONFIG_OPTIONS, + NODE_DISABLE_MULTIREGION_ACCESS_POINT_ENV_NAME: () => NODE_DISABLE_MULTIREGION_ACCESS_POINT_ENV_NAME, + NODE_DISABLE_MULTIREGION_ACCESS_POINT_INI_NAME: () => NODE_DISABLE_MULTIREGION_ACCESS_POINT_INI_NAME, + NODE_USE_ARN_REGION_CONFIG_OPTIONS: () => NODE_USE_ARN_REGION_CONFIG_OPTIONS, + NODE_USE_ARN_REGION_ENV_NAME: () => NODE_USE_ARN_REGION_ENV_NAME, + NODE_USE_ARN_REGION_INI_NAME: () => NODE_USE_ARN_REGION_INI_NAME, + bucketEndpointMiddleware: () => bucketEndpointMiddleware, + bucketEndpointMiddlewareOptions: () => bucketEndpointMiddlewareOptions, + bucketHostname: () => bucketHostname, + getArnResources: () => getArnResources, + getBucketEndpointPlugin: () => getBucketEndpointPlugin, + getSuffixForArnEndpoint: () => getSuffixForArnEndpoint, + resolveBucketEndpointConfig: () => resolveBucketEndpointConfig, + validateAccountId: () => validateAccountId, + validateDNSHostLabel: () => validateDNSHostLabel, + validateNoDualstack: () => validateNoDualstack, + validateNoFIPS: () => validateNoFIPS, + validateOutpostService: () => validateOutpostService, + validatePartition: () => validatePartition, + validateRegion: () => validateRegion +}); +module.exports = __toCommonJS(index_exports); + +// src/NodeDisableMultiregionAccessPointConfigOptions.ts +var import_util_config_provider = require("@smithy/util-config-provider"); +var NODE_DISABLE_MULTIREGION_ACCESS_POINT_ENV_NAME = "AWS_S3_DISABLE_MULTIREGION_ACCESS_POINTS"; +var NODE_DISABLE_MULTIREGION_ACCESS_POINT_INI_NAME = "s3_disable_multiregion_access_points"; +var 
NODE_DISABLE_MULTIREGION_ACCESS_POINT_CONFIG_OPTIONS = { + environmentVariableSelector: /* @__PURE__ */ __name((env) => (0, import_util_config_provider.booleanSelector)(env, NODE_DISABLE_MULTIREGION_ACCESS_POINT_ENV_NAME, import_util_config_provider.SelectorType.ENV), "environmentVariableSelector"), + configFileSelector: /* @__PURE__ */ __name((profile) => (0, import_util_config_provider.booleanSelector)(profile, NODE_DISABLE_MULTIREGION_ACCESS_POINT_INI_NAME, import_util_config_provider.SelectorType.CONFIG), "configFileSelector"), + default: false +}; + +// src/NodeUseArnRegionConfigOptions.ts + +var NODE_USE_ARN_REGION_ENV_NAME = "AWS_S3_USE_ARN_REGION"; +var NODE_USE_ARN_REGION_INI_NAME = "s3_use_arn_region"; +var NODE_USE_ARN_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: /* @__PURE__ */ __name((env) => (0, import_util_config_provider.booleanSelector)(env, NODE_USE_ARN_REGION_ENV_NAME, import_util_config_provider.SelectorType.ENV), "environmentVariableSelector"), + configFileSelector: /* @__PURE__ */ __name((profile) => (0, import_util_config_provider.booleanSelector)(profile, NODE_USE_ARN_REGION_INI_NAME, import_util_config_provider.SelectorType.CONFIG), "configFileSelector"), + default: false +}; + +// src/bucketEndpointMiddleware.ts +var import_util_arn_parser = require("@aws-sdk/util-arn-parser"); +var import_protocol_http = require("@smithy/protocol-http"); + +// src/bucketHostnameUtils.ts +var DOMAIN_PATTERN = /^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$/; +var IP_ADDRESS_PATTERN = /(\d+\.){3}\d+/; +var DOTS_PATTERN = /\.\./; +var DOT_PATTERN = /\./; +var S3_HOSTNAME_PATTERN = /^(.+\.)?s3(-fips)?(\.dualstack)?[.-]([a-z0-9-]+)\./; +var S3_US_EAST_1_ALTNAME_PATTERN = /^s3(-external-1)?\.amazonaws\.com$/; +var AWS_PARTITION_SUFFIX = "amazonaws.com"; +var isBucketNameOptions = /* @__PURE__ */ __name((options) => typeof options.bucketName === "string", "isBucketNameOptions"); +var isDnsCompatibleBucketName = /* @__PURE__ */ __name((bucketName) => DOMAIN_PATTERN.test(bucketName) && !IP_ADDRESS_PATTERN.test(bucketName) && !DOTS_PATTERN.test(bucketName), "isDnsCompatibleBucketName"); +var getRegionalSuffix = /* @__PURE__ */ __name((hostname) => { + const parts = hostname.match(S3_HOSTNAME_PATTERN); + return [parts[4], hostname.replace(new RegExp(`^${parts[0]}`), "")]; +}, "getRegionalSuffix"); +var getSuffix = /* @__PURE__ */ __name((hostname) => S3_US_EAST_1_ALTNAME_PATTERN.test(hostname) ? ["us-east-1", AWS_PARTITION_SUFFIX] : getRegionalSuffix(hostname), "getSuffix"); +var getSuffixForArnEndpoint = /* @__PURE__ */ __name((hostname) => S3_US_EAST_1_ALTNAME_PATTERN.test(hostname) ? 
[hostname.replace(`.${AWS_PARTITION_SUFFIX}`, ""), AWS_PARTITION_SUFFIX] : getRegionalSuffix(hostname), "getSuffixForArnEndpoint"); +var validateArnEndpointOptions = /* @__PURE__ */ __name((options) => { + if (options.pathStyleEndpoint) { + throw new Error("Path-style S3 endpoint is not supported when bucket is an ARN"); + } + if (options.accelerateEndpoint) { + throw new Error("Accelerate endpoint is not supported when bucket is an ARN"); + } + if (!options.tlsCompatible) { + throw new Error("HTTPS is required when bucket is an ARN"); + } +}, "validateArnEndpointOptions"); +var validateService = /* @__PURE__ */ __name((service) => { + if (service !== "s3" && service !== "s3-outposts" && service !== "s3-object-lambda") { + throw new Error("Expect 's3' or 's3-outposts' or 's3-object-lambda' in ARN service component"); + } +}, "validateService"); +var validateS3Service = /* @__PURE__ */ __name((service) => { + if (service !== "s3") { + throw new Error("Expect 's3' in Accesspoint ARN service component"); + } +}, "validateS3Service"); +var validateOutpostService = /* @__PURE__ */ __name((service) => { + if (service !== "s3-outposts") { + throw new Error("Expect 's3-posts' in Outpost ARN service component"); + } +}, "validateOutpostService"); +var validatePartition = /* @__PURE__ */ __name((partition, options) => { + if (partition !== options.clientPartition) { + throw new Error(`Partition in ARN is incompatible, got "${partition}" but expected "${options.clientPartition}"`); + } +}, "validatePartition"); +var validateRegion = /* @__PURE__ */ __name((region, options) => { + if (region === "") { + throw new Error("ARN region is empty"); + } + if (options.useFipsEndpoint) { + if (!options.allowFipsRegion) { + throw new Error("FIPS region is not supported"); + } else if (!isEqualRegions(region, options.clientRegion)) { + throw new Error(`Client FIPS region ${options.clientRegion} doesn't match region ${region} in ARN`); + } + } + if (!options.useArnRegion && !isEqualRegions(region, options.clientRegion || "") && !isEqualRegions(region, options.clientSigningRegion || "")) { + throw new Error(`Region in ARN is incompatible, got ${region} but expected ${options.clientRegion}`); + } +}, "validateRegion"); +var validateRegionalClient = /* @__PURE__ */ __name((region) => { + if (["s3-external-1", "aws-global"].includes(region)) { + throw new Error(`Client region ${region} is not regional`); + } +}, "validateRegionalClient"); +var isEqualRegions = /* @__PURE__ */ __name((regionA, regionB) => regionA === regionB, "isEqualRegions"); +var validateAccountId = /* @__PURE__ */ __name((accountId) => { + if (!/[0-9]{12}/.exec(accountId)) { + throw new Error("Access point ARN accountID does not match regex '[0-9]{12}'"); + } +}, "validateAccountId"); +var validateDNSHostLabel = /* @__PURE__ */ __name((label, options = { tlsCompatible: true }) => { + if (label.length >= 64 || !/^[a-z0-9][a-z0-9.-]*[a-z0-9]$/.test(label) || /(\d+\.){3}\d+/.test(label) || /[.-]{2}/.test(label) || options?.tlsCompatible && DOT_PATTERN.test(label)) { + throw new Error(`Invalid DNS label ${label}`); + } +}, "validateDNSHostLabel"); +var validateCustomEndpoint = /* @__PURE__ */ __name((options) => { + if (options.isCustomEndpoint) { + if (options.dualstackEndpoint) throw new Error("Dualstack endpoint is not supported with custom endpoint"); + if (options.accelerateEndpoint) throw new Error("Accelerate endpoint is not supported with custom endpoint"); + } +}, "validateCustomEndpoint"); +var getArnResources = /* @__PURE__ */ 
__name((resource) => { + const delimiter = resource.includes(":") ? ":" : "/"; + const [resourceType, ...rest] = resource.split(delimiter); + if (resourceType === "accesspoint") { + if (rest.length !== 1 || rest[0] === "") { + throw new Error(`Access Point ARN should have one resource accesspoint${delimiter}{accesspointname}`); + } + return { accesspointName: rest[0] }; + } else if (resourceType === "outpost") { + if (!rest[0] || rest[1] !== "accesspoint" || !rest[2] || rest.length !== 3) { + throw new Error( + `Outpost ARN should have resource outpost${delimiter}{outpostId}${delimiter}accesspoint${delimiter}{accesspointName}` + ); + } + const [outpostId, _, accesspointName] = rest; + return { outpostId, accesspointName }; + } else { + throw new Error(`ARN resource should begin with 'accesspoint${delimiter}' or 'outpost${delimiter}'`); + } +}, "getArnResources"); +var validateNoDualstack = /* @__PURE__ */ __name((dualstackEndpoint) => { + if (dualstackEndpoint) + throw new Error("Dualstack endpoint is not supported with Outpost or Multi-region Access Point ARN."); +}, "validateNoDualstack"); +var validateNoFIPS = /* @__PURE__ */ __name((useFipsEndpoint) => { + if (useFipsEndpoint) throw new Error(`FIPS region is not supported with Outpost.`); +}, "validateNoFIPS"); +var validateMrapAlias = /* @__PURE__ */ __name((name) => { + try { + name.split(".").forEach((label) => { + validateDNSHostLabel(label); + }); + } catch (e) { + throw new Error(`"${name}" is not a DNS compatible name.`); + } +}, "validateMrapAlias"); + +// src/bucketHostname.ts +var bucketHostname = /* @__PURE__ */ __name((options) => { + validateCustomEndpoint(options); + return isBucketNameOptions(options) ? ( + // Construct endpoint when bucketName is a string referring to a bucket name + getEndpointFromBucketName(options) + ) : ( + // Construct endpoint when bucketName is an ARN referring to an S3 resource like Access Point + getEndpointFromArn(options) + ); +}, "bucketHostname"); +var getEndpointFromBucketName = /* @__PURE__ */ __name(({ + accelerateEndpoint = false, + clientRegion: region, + baseHostname, + bucketName, + dualstackEndpoint = false, + fipsEndpoint = false, + pathStyleEndpoint = false, + tlsCompatible = true, + isCustomEndpoint = false +}) => { + const [clientRegion, hostnameSuffix] = isCustomEndpoint ? [region, baseHostname] : getSuffix(baseHostname); + if (pathStyleEndpoint || !isDnsCompatibleBucketName(bucketName) || tlsCompatible && DOT_PATTERN.test(bucketName)) { + return { + bucketEndpoint: false, + hostname: dualstackEndpoint ? `s3.dualstack.${clientRegion}.${hostnameSuffix}` : baseHostname + }; + } + if (accelerateEndpoint) { + baseHostname = `s3-accelerate${dualstackEndpoint ? ".dualstack" : ""}.${hostnameSuffix}`; + } else if (dualstackEndpoint) { + baseHostname = `s3.dualstack.${clientRegion}.${hostnameSuffix}`; + } + return { + bucketEndpoint: true, + hostname: `${bucketName}.${baseHostname}` + }; +}, "getEndpointFromBucketName"); +var getEndpointFromArn = /* @__PURE__ */ __name((options) => { + const { isCustomEndpoint, baseHostname, clientRegion } = options; + const hostnameSuffix = isCustomEndpoint ? 
baseHostname : getSuffixForArnEndpoint(baseHostname)[1]; + const { + pathStyleEndpoint, + accelerateEndpoint = false, + fipsEndpoint = false, + tlsCompatible = true, + bucketName, + clientPartition = "aws" + } = options; + validateArnEndpointOptions({ pathStyleEndpoint, accelerateEndpoint, tlsCompatible }); + const { service, partition, accountId, region, resource } = bucketName; + validateService(service); + validatePartition(partition, { clientPartition }); + validateAccountId(accountId); + const { accesspointName, outpostId } = getArnResources(resource); + if (service === "s3-object-lambda") { + return getEndpointFromObjectLambdaArn({ ...options, tlsCompatible, bucketName, accesspointName, hostnameSuffix }); + } + if (region === "") { + return getEndpointFromMRAPArn({ ...options, clientRegion, mrapAlias: accesspointName, hostnameSuffix }); + } + if (outpostId) { + return getEndpointFromOutpostArn({ ...options, clientRegion, outpostId, accesspointName, hostnameSuffix }); + } + return getEndpointFromAccessPointArn({ ...options, clientRegion, accesspointName, hostnameSuffix }); +}, "getEndpointFromArn"); +var getEndpointFromObjectLambdaArn = /* @__PURE__ */ __name(({ + dualstackEndpoint = false, + fipsEndpoint = false, + tlsCompatible = true, + useArnRegion, + clientRegion, + clientSigningRegion = clientRegion, + accesspointName, + bucketName, + hostnameSuffix +}) => { + const { accountId, region, service } = bucketName; + validateRegionalClient(clientRegion); + validateRegion(region, { + useArnRegion, + clientRegion, + clientSigningRegion, + allowFipsRegion: true, + useFipsEndpoint: fipsEndpoint + }); + validateNoDualstack(dualstackEndpoint); + const DNSHostLabel = `${accesspointName}-${accountId}`; + validateDNSHostLabel(DNSHostLabel, { tlsCompatible }); + const endpointRegion = useArnRegion ? region : clientRegion; + const signingRegion = useArnRegion ? region : clientSigningRegion; + return { + bucketEndpoint: true, + hostname: `${DNSHostLabel}.${service}${fipsEndpoint ? "-fips" : ""}.${endpointRegion}.${hostnameSuffix}`, + signingRegion, + signingService: service + }; +}, "getEndpointFromObjectLambdaArn"); +var getEndpointFromMRAPArn = /* @__PURE__ */ __name(({ + disableMultiregionAccessPoints, + dualstackEndpoint = false, + isCustomEndpoint, + mrapAlias, + hostnameSuffix +}) => { + if (disableMultiregionAccessPoints === true) { + throw new Error("SDK is attempting to use a MRAP ARN. Please enable to feature."); + } + validateMrapAlias(mrapAlias); + validateNoDualstack(dualstackEndpoint); + return { + bucketEndpoint: true, + hostname: `${mrapAlias}${isCustomEndpoint ? "" : `.accesspoint.s3-global`}.${hostnameSuffix}`, + signingRegion: "*" + }; +}, "getEndpointFromMRAPArn"); +var getEndpointFromOutpostArn = /* @__PURE__ */ __name(({ + useArnRegion, + clientRegion, + clientSigningRegion = clientRegion, + bucketName, + outpostId, + dualstackEndpoint = false, + fipsEndpoint = false, + tlsCompatible = true, + accesspointName, + isCustomEndpoint, + hostnameSuffix +}) => { + validateRegionalClient(clientRegion); + validateRegion(bucketName.region, { useArnRegion, clientRegion, clientSigningRegion, useFipsEndpoint: fipsEndpoint }); + const DNSHostLabel = `${accesspointName}-${bucketName.accountId}`; + validateDNSHostLabel(DNSHostLabel, { tlsCompatible }); + const endpointRegion = useArnRegion ? bucketName.region : clientRegion; + const signingRegion = useArnRegion ? 
bucketName.region : clientSigningRegion; + validateOutpostService(bucketName.service); + validateDNSHostLabel(outpostId, { tlsCompatible }); + validateNoDualstack(dualstackEndpoint); + validateNoFIPS(fipsEndpoint); + const hostnamePrefix = `${DNSHostLabel}.${outpostId}`; + return { + bucketEndpoint: true, + hostname: `${hostnamePrefix}${isCustomEndpoint ? "" : `.s3-outposts.${endpointRegion}`}.${hostnameSuffix}`, + signingRegion, + signingService: "s3-outposts" + }; +}, "getEndpointFromOutpostArn"); +var getEndpointFromAccessPointArn = /* @__PURE__ */ __name(({ + useArnRegion, + clientRegion, + clientSigningRegion = clientRegion, + bucketName, + dualstackEndpoint = false, + fipsEndpoint = false, + tlsCompatible = true, + accesspointName, + isCustomEndpoint, + hostnameSuffix +}) => { + validateRegionalClient(clientRegion); + validateRegion(bucketName.region, { + useArnRegion, + clientRegion, + clientSigningRegion, + allowFipsRegion: true, + useFipsEndpoint: fipsEndpoint + }); + const hostnamePrefix = `${accesspointName}-${bucketName.accountId}`; + validateDNSHostLabel(hostnamePrefix, { tlsCompatible }); + const endpointRegion = useArnRegion ? bucketName.region : clientRegion; + const signingRegion = useArnRegion ? bucketName.region : clientSigningRegion; + validateS3Service(bucketName.service); + return { + bucketEndpoint: true, + hostname: `${hostnamePrefix}${isCustomEndpoint ? "" : `.s3-accesspoint${fipsEndpoint ? "-fips" : ""}${dualstackEndpoint ? ".dualstack" : ""}.${endpointRegion}`}.${hostnameSuffix}`, + signingRegion + }; +}, "getEndpointFromAccessPointArn"); + +// src/bucketEndpointMiddleware.ts +var bucketEndpointMiddleware = /* @__PURE__ */ __name((options) => (next, context) => async (args) => { + const { Bucket: bucketName } = args.input; + let replaceBucketInPath = options.bucketEndpoint; + const request = args.request; + if (import_protocol_http.HttpRequest.isInstance(request)) { + if (options.bucketEndpoint) { + request.hostname = bucketName; + } else if ((0, import_util_arn_parser.validate)(bucketName)) { + const bucketArn = (0, import_util_arn_parser.parse)(bucketName); + const clientRegion = await options.region(); + const useDualstackEndpoint = await options.useDualstackEndpoint(); + const useFipsEndpoint = await options.useFipsEndpoint(); + const { partition, signingRegion = clientRegion } = await options.regionInfoProvider(clientRegion, { useDualstackEndpoint, useFipsEndpoint }) || {}; + const useArnRegion = await options.useArnRegion(); + const { + hostname, + bucketEndpoint, + signingRegion: modifiedSigningRegion, + signingService + } = bucketHostname({ + bucketName: bucketArn, + baseHostname: request.hostname, + accelerateEndpoint: options.useAccelerateEndpoint, + dualstackEndpoint: useDualstackEndpoint, + fipsEndpoint: useFipsEndpoint, + pathStyleEndpoint: options.forcePathStyle, + tlsCompatible: request.protocol === "https:", + useArnRegion, + clientPartition: partition, + clientSigningRegion: signingRegion, + clientRegion, + isCustomEndpoint: options.isCustomEndpoint, + disableMultiregionAccessPoints: await options.disableMultiregionAccessPoints() + }); + if (modifiedSigningRegion && modifiedSigningRegion !== signingRegion) { + context["signing_region"] = modifiedSigningRegion; + } + if (signingService && signingService !== "s3") { + context["signing_service"] = signingService; + } + request.hostname = hostname; + replaceBucketInPath = bucketEndpoint; + } else { + const clientRegion = await options.region(); + const dualstackEndpoint = await 
options.useDualstackEndpoint(); + const fipsEndpoint = await options.useFipsEndpoint(); + const { hostname, bucketEndpoint } = bucketHostname({ + bucketName, + clientRegion, + baseHostname: request.hostname, + accelerateEndpoint: options.useAccelerateEndpoint, + dualstackEndpoint, + fipsEndpoint, + pathStyleEndpoint: options.forcePathStyle, + tlsCompatible: request.protocol === "https:", + isCustomEndpoint: options.isCustomEndpoint + }); + request.hostname = hostname; + replaceBucketInPath = bucketEndpoint; + } + if (replaceBucketInPath) { + request.path = request.path.replace(/^(\/)?[^\/]+/, ""); + if (request.path === "") { + request.path = "/"; + } + } + } + return next({ ...args, request }); +}, "bucketEndpointMiddleware"); +var bucketEndpointMiddlewareOptions = { + tags: ["BUCKET_ENDPOINT"], + name: "bucketEndpointMiddleware", + relation: "before", + toMiddleware: "hostHeaderMiddleware", + override: true +}; +var getBucketEndpointPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.addRelativeTo(bucketEndpointMiddleware(options), bucketEndpointMiddlewareOptions); + }, "applyToStack") +}), "getBucketEndpointPlugin"); + +// src/configurations.ts +function resolveBucketEndpointConfig(input) { + const { + bucketEndpoint = false, + forcePathStyle = false, + useAccelerateEndpoint = false, + useArnRegion = false, + disableMultiregionAccessPoints = false + } = input; + return Object.assign(input, { + bucketEndpoint, + forcePathStyle, + useAccelerateEndpoint, + useArnRegion: typeof useArnRegion === "function" ? useArnRegion : () => Promise.resolve(useArnRegion), + disableMultiregionAccessPoints: typeof disableMultiregionAccessPoints === "function" ? disableMultiregionAccessPoints : () => Promise.resolve(disableMultiregionAccessPoints) + }); +} +__name(resolveBucketEndpointConfig, "resolveBucketEndpointConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getArnResources, + getSuffixForArnEndpoint, + validateOutpostService, + validatePartition, + validateAccountId, + validateRegion, + validateDNSHostLabel, + validateNoDualstack, + validateNoFIPS, + NODE_DISABLE_MULTIREGION_ACCESS_POINT_ENV_NAME, + NODE_DISABLE_MULTIREGION_ACCESS_POINT_INI_NAME, + NODE_DISABLE_MULTIREGION_ACCESS_POINT_CONFIG_OPTIONS, + NODE_USE_ARN_REGION_ENV_NAME, + NODE_USE_ARN_REGION_INI_NAME, + NODE_USE_ARN_REGION_CONFIG_OPTIONS, + bucketEndpointMiddleware, + bucketEndpointMiddlewareOptions, + getBucketEndpointPlugin, + bucketHostname, + resolveBucketEndpointConfig +}); + diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/NodeDisableMultiregionAccessPointConfigOptions.js b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/NodeDisableMultiregionAccessPointConfigOptions.js new file mode 100644 index 00000000..ebcf87f7 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/NodeDisableMultiregionAccessPointConfigOptions.js @@ -0,0 +1,8 @@ +import { booleanSelector, SelectorType } from "@smithy/util-config-provider"; +export const NODE_DISABLE_MULTIREGION_ACCESS_POINT_ENV_NAME = "AWS_S3_DISABLE_MULTIREGION_ACCESS_POINTS"; +export const NODE_DISABLE_MULTIREGION_ACCESS_POINT_INI_NAME = "s3_disable_multiregion_access_points"; +export const NODE_DISABLE_MULTIREGION_ACCESS_POINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => booleanSelector(env, NODE_DISABLE_MULTIREGION_ACCESS_POINT_ENV_NAME, SelectorType.ENV), + configFileSelector: (profile) => 
booleanSelector(profile, NODE_DISABLE_MULTIREGION_ACCESS_POINT_INI_NAME, SelectorType.CONFIG), + default: false, +}; diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/NodeUseArnRegionConfigOptions.js b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/NodeUseArnRegionConfigOptions.js new file mode 100644 index 00000000..2b71fc8b --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/NodeUseArnRegionConfigOptions.js @@ -0,0 +1,8 @@ +import { booleanSelector, SelectorType } from "@smithy/util-config-provider"; +export const NODE_USE_ARN_REGION_ENV_NAME = "AWS_S3_USE_ARN_REGION"; +export const NODE_USE_ARN_REGION_INI_NAME = "s3_use_arn_region"; +export const NODE_USE_ARN_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => booleanSelector(env, NODE_USE_ARN_REGION_ENV_NAME, SelectorType.ENV), + configFileSelector: (profile) => booleanSelector(profile, NODE_USE_ARN_REGION_INI_NAME, SelectorType.CONFIG), + default: false, +}; diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/bucketEndpointMiddleware.js b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/bucketEndpointMiddleware.js new file mode 100644 index 00000000..6e0f8f8a --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/bucketEndpointMiddleware.js @@ -0,0 +1,81 @@ +import { parse as parseArn, validate as validateArn } from "@aws-sdk/util-arn-parser"; +import { HttpRequest } from "@smithy/protocol-http"; +import { bucketHostname } from "./bucketHostname"; +export const bucketEndpointMiddleware = (options) => (next, context) => async (args) => { + const { Bucket: bucketName } = args.input; + let replaceBucketInPath = options.bucketEndpoint; + const request = args.request; + if (HttpRequest.isInstance(request)) { + if (options.bucketEndpoint) { + request.hostname = bucketName; + } + else if (validateArn(bucketName)) { + const bucketArn = parseArn(bucketName); + const clientRegion = await options.region(); + const useDualstackEndpoint = await options.useDualstackEndpoint(); + const useFipsEndpoint = await options.useFipsEndpoint(); + const { partition, signingRegion = clientRegion } = (await options.regionInfoProvider(clientRegion, { useDualstackEndpoint, useFipsEndpoint })) || {}; + const useArnRegion = await options.useArnRegion(); + const { hostname, bucketEndpoint, signingRegion: modifiedSigningRegion, signingService, } = bucketHostname({ + bucketName: bucketArn, + baseHostname: request.hostname, + accelerateEndpoint: options.useAccelerateEndpoint, + dualstackEndpoint: useDualstackEndpoint, + fipsEndpoint: useFipsEndpoint, + pathStyleEndpoint: options.forcePathStyle, + tlsCompatible: request.protocol === "https:", + useArnRegion, + clientPartition: partition, + clientSigningRegion: signingRegion, + clientRegion: clientRegion, + isCustomEndpoint: options.isCustomEndpoint, + disableMultiregionAccessPoints: await options.disableMultiregionAccessPoints(), + }); + if (modifiedSigningRegion && modifiedSigningRegion !== signingRegion) { + context["signing_region"] = modifiedSigningRegion; + } + if (signingService && signingService !== "s3") { + context["signing_service"] = signingService; + } + request.hostname = hostname; + replaceBucketInPath = bucketEndpoint; + } + else { + const clientRegion = await options.region(); + const dualstackEndpoint = await options.useDualstackEndpoint(); + const fipsEndpoint = await options.useFipsEndpoint(); + const { hostname, bucketEndpoint } = bucketHostname({ + bucketName, + clientRegion, + 
baseHostname: request.hostname, + accelerateEndpoint: options.useAccelerateEndpoint, + dualstackEndpoint, + fipsEndpoint, + pathStyleEndpoint: options.forcePathStyle, + tlsCompatible: request.protocol === "https:", + isCustomEndpoint: options.isCustomEndpoint, + }); + request.hostname = hostname; + replaceBucketInPath = bucketEndpoint; + } + if (replaceBucketInPath) { + request.path = request.path.replace(/^(\/)?[^\/]+/, ""); + if (request.path === "") { + request.path = "/"; + } + } + } + return next({ ...args, request }); +}; +export const bucketEndpointMiddlewareOptions = { + tags: ["BUCKET_ENDPOINT"], + name: "bucketEndpointMiddleware", + relation: "before", + toMiddleware: "hostHeaderMiddleware", + override: true, +}; +export const getBucketEndpointPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(bucketEndpointMiddleware(options), bucketEndpointMiddlewareOptions); + }, +}); diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/bucketHostname.js b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/bucketHostname.js new file mode 100644 index 00000000..57af576e --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/bucketHostname.js @@ -0,0 +1,124 @@ +import { DOT_PATTERN, getArnResources, getSuffix, getSuffixForArnEndpoint, isBucketNameOptions, isDnsCompatibleBucketName, validateAccountId, validateArnEndpointOptions, validateCustomEndpoint, validateDNSHostLabel, validateMrapAlias, validateNoDualstack, validateNoFIPS, validateOutpostService, validatePartition, validateRegion, validateRegionalClient, validateS3Service, validateService, } from "./bucketHostnameUtils"; +export const bucketHostname = (options) => { + validateCustomEndpoint(options); + return isBucketNameOptions(options) + ? + getEndpointFromBucketName(options) + : + getEndpointFromArn(options); +}; +const getEndpointFromBucketName = ({ accelerateEndpoint = false, clientRegion: region, baseHostname, bucketName, dualstackEndpoint = false, fipsEndpoint = false, pathStyleEndpoint = false, tlsCompatible = true, isCustomEndpoint = false, }) => { + const [clientRegion, hostnameSuffix] = isCustomEndpoint ? [region, baseHostname] : getSuffix(baseHostname); + if (pathStyleEndpoint || !isDnsCompatibleBucketName(bucketName) || (tlsCompatible && DOT_PATTERN.test(bucketName))) { + return { + bucketEndpoint: false, + hostname: dualstackEndpoint ? `s3.dualstack.${clientRegion}.${hostnameSuffix}` : baseHostname, + }; + } + if (accelerateEndpoint) { + baseHostname = `s3-accelerate${dualstackEndpoint ? ".dualstack" : ""}.${hostnameSuffix}`; + } + else if (dualstackEndpoint) { + baseHostname = `s3.dualstack.${clientRegion}.${hostnameSuffix}`; + } + return { + bucketEndpoint: true, + hostname: `${bucketName}.${baseHostname}`, + }; +}; +const getEndpointFromArn = (options) => { + const { isCustomEndpoint, baseHostname, clientRegion } = options; + const hostnameSuffix = isCustomEndpoint ? 
baseHostname : getSuffixForArnEndpoint(baseHostname)[1]; + const { pathStyleEndpoint, accelerateEndpoint = false, fipsEndpoint = false, tlsCompatible = true, bucketName, clientPartition = "aws", } = options; + validateArnEndpointOptions({ pathStyleEndpoint, accelerateEndpoint, tlsCompatible }); + const { service, partition, accountId, region, resource } = bucketName; + validateService(service); + validatePartition(partition, { clientPartition }); + validateAccountId(accountId); + const { accesspointName, outpostId } = getArnResources(resource); + if (service === "s3-object-lambda") { + return getEndpointFromObjectLambdaArn({ ...options, tlsCompatible, bucketName, accesspointName, hostnameSuffix }); + } + if (region === "") { + return getEndpointFromMRAPArn({ ...options, clientRegion, mrapAlias: accesspointName, hostnameSuffix }); + } + if (outpostId) { + return getEndpointFromOutpostArn({ ...options, clientRegion, outpostId, accesspointName, hostnameSuffix }); + } + return getEndpointFromAccessPointArn({ ...options, clientRegion, accesspointName, hostnameSuffix }); +}; +const getEndpointFromObjectLambdaArn = ({ dualstackEndpoint = false, fipsEndpoint = false, tlsCompatible = true, useArnRegion, clientRegion, clientSigningRegion = clientRegion, accesspointName, bucketName, hostnameSuffix, }) => { + const { accountId, region, service } = bucketName; + validateRegionalClient(clientRegion); + validateRegion(region, { + useArnRegion, + clientRegion, + clientSigningRegion, + allowFipsRegion: true, + useFipsEndpoint: fipsEndpoint, + }); + validateNoDualstack(dualstackEndpoint); + const DNSHostLabel = `${accesspointName}-${accountId}`; + validateDNSHostLabel(DNSHostLabel, { tlsCompatible }); + const endpointRegion = useArnRegion ? region : clientRegion; + const signingRegion = useArnRegion ? region : clientSigningRegion; + return { + bucketEndpoint: true, + hostname: `${DNSHostLabel}.${service}${fipsEndpoint ? "-fips" : ""}.${endpointRegion}.${hostnameSuffix}`, + signingRegion, + signingService: service, + }; +}; +const getEndpointFromMRAPArn = ({ disableMultiregionAccessPoints, dualstackEndpoint = false, isCustomEndpoint, mrapAlias, hostnameSuffix, }) => { + if (disableMultiregionAccessPoints === true) { + throw new Error("SDK is attempting to use a MRAP ARN. Please enable to feature."); + } + validateMrapAlias(mrapAlias); + validateNoDualstack(dualstackEndpoint); + return { + bucketEndpoint: true, + hostname: `${mrapAlias}${isCustomEndpoint ? "" : `.accesspoint.s3-global`}.${hostnameSuffix}`, + signingRegion: "*", + }; +}; +const getEndpointFromOutpostArn = ({ useArnRegion, clientRegion, clientSigningRegion = clientRegion, bucketName, outpostId, dualstackEndpoint = false, fipsEndpoint = false, tlsCompatible = true, accesspointName, isCustomEndpoint, hostnameSuffix, }) => { + validateRegionalClient(clientRegion); + validateRegion(bucketName.region, { useArnRegion, clientRegion, clientSigningRegion, useFipsEndpoint: fipsEndpoint }); + const DNSHostLabel = `${accesspointName}-${bucketName.accountId}`; + validateDNSHostLabel(DNSHostLabel, { tlsCompatible }); + const endpointRegion = useArnRegion ? bucketName.region : clientRegion; + const signingRegion = useArnRegion ? 
bucketName.region : clientSigningRegion; + validateOutpostService(bucketName.service); + validateDNSHostLabel(outpostId, { tlsCompatible }); + validateNoDualstack(dualstackEndpoint); + validateNoFIPS(fipsEndpoint); + const hostnamePrefix = `${DNSHostLabel}.${outpostId}`; + return { + bucketEndpoint: true, + hostname: `${hostnamePrefix}${isCustomEndpoint ? "" : `.s3-outposts.${endpointRegion}`}.${hostnameSuffix}`, + signingRegion, + signingService: "s3-outposts", + }; +}; +const getEndpointFromAccessPointArn = ({ useArnRegion, clientRegion, clientSigningRegion = clientRegion, bucketName, dualstackEndpoint = false, fipsEndpoint = false, tlsCompatible = true, accesspointName, isCustomEndpoint, hostnameSuffix, }) => { + validateRegionalClient(clientRegion); + validateRegion(bucketName.region, { + useArnRegion, + clientRegion, + clientSigningRegion, + allowFipsRegion: true, + useFipsEndpoint: fipsEndpoint, + }); + const hostnamePrefix = `${accesspointName}-${bucketName.accountId}`; + validateDNSHostLabel(hostnamePrefix, { tlsCompatible }); + const endpointRegion = useArnRegion ? bucketName.region : clientRegion; + const signingRegion = useArnRegion ? bucketName.region : clientSigningRegion; + validateS3Service(bucketName.service); + return { + bucketEndpoint: true, + hostname: `${hostnamePrefix}${isCustomEndpoint + ? "" + : `.s3-accesspoint${fipsEndpoint ? "-fips" : ""}${dualstackEndpoint ? ".dualstack" : ""}.${endpointRegion}`}.${hostnameSuffix}`, + signingRegion, + }; +}; diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/bucketHostnameUtils.js b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/bucketHostnameUtils.js new file mode 100644 index 00000000..69c41dcb --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/bucketHostnameUtils.js @@ -0,0 +1,132 @@ +const DOMAIN_PATTERN = /^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$/; +const IP_ADDRESS_PATTERN = /(\d+\.){3}\d+/; +const DOTS_PATTERN = /\.\./; +export const DOT_PATTERN = /\./; +export const S3_HOSTNAME_PATTERN = /^(.+\.)?s3(-fips)?(\.dualstack)?[.-]([a-z0-9-]+)\./; +const S3_US_EAST_1_ALTNAME_PATTERN = /^s3(-external-1)?\.amazonaws\.com$/; +const AWS_PARTITION_SUFFIX = "amazonaws.com"; +export const isBucketNameOptions = (options) => typeof options.bucketName === "string"; +export const isDnsCompatibleBucketName = (bucketName) => DOMAIN_PATTERN.test(bucketName) && !IP_ADDRESS_PATTERN.test(bucketName) && !DOTS_PATTERN.test(bucketName); +const getRegionalSuffix = (hostname) => { + const parts = hostname.match(S3_HOSTNAME_PATTERN); + return [parts[4], hostname.replace(new RegExp(`^${parts[0]}`), "")]; +}; +export const getSuffix = (hostname) => S3_US_EAST_1_ALTNAME_PATTERN.test(hostname) ? ["us-east-1", AWS_PARTITION_SUFFIX] : getRegionalSuffix(hostname); +export const getSuffixForArnEndpoint = (hostname) => S3_US_EAST_1_ALTNAME_PATTERN.test(hostname) + ? 
[hostname.replace(`.${AWS_PARTITION_SUFFIX}`, ""), AWS_PARTITION_SUFFIX] + : getRegionalSuffix(hostname); +export const validateArnEndpointOptions = (options) => { + if (options.pathStyleEndpoint) { + throw new Error("Path-style S3 endpoint is not supported when bucket is an ARN"); + } + if (options.accelerateEndpoint) { + throw new Error("Accelerate endpoint is not supported when bucket is an ARN"); + } + if (!options.tlsCompatible) { + throw new Error("HTTPS is required when bucket is an ARN"); + } +}; +export const validateService = (service) => { + if (service !== "s3" && service !== "s3-outposts" && service !== "s3-object-lambda") { + throw new Error("Expect 's3' or 's3-outposts' or 's3-object-lambda' in ARN service component"); + } +}; +export const validateS3Service = (service) => { + if (service !== "s3") { + throw new Error("Expect 's3' in Accesspoint ARN service component"); + } +}; +export const validateOutpostService = (service) => { + if (service !== "s3-outposts") { + throw new Error("Expect 's3-outposts' in Outpost ARN service component"); + } +}; +export const validatePartition = (partition, options) => { + if (partition !== options.clientPartition) { + throw new Error(`Partition in ARN is incompatible, got "${partition}" but expected "${options.clientPartition}"`); + } +}; +export const validateRegion = (region, options) => { + if (region === "") { + throw new Error("ARN region is empty"); + } + if (options.useFipsEndpoint) { + if (!options.allowFipsRegion) { + throw new Error("FIPS region is not supported"); + } + else if (!isEqualRegions(region, options.clientRegion)) { + throw new Error(`Client FIPS region ${options.clientRegion} doesn't match region ${region} in ARN`); + } + } + if (!options.useArnRegion && + !isEqualRegions(region, options.clientRegion || "") && + !isEqualRegions(region, options.clientSigningRegion || "")) { + throw new Error(`Region in ARN is incompatible, got ${region} but expected ${options.clientRegion}`); + } +}; +export const validateRegionalClient = (region) => { + if (["s3-external-1", "aws-global"].includes(region)) { + throw new Error(`Client region ${region} is not regional`); + } +}; +const isEqualRegions = (regionA, regionB) => regionA === regionB; +export const validateAccountId = (accountId) => { + if (!/[0-9]{12}/.exec(accountId)) { + throw new Error("Access point ARN accountID does not match regex '[0-9]{12}'"); + } +}; +export const validateDNSHostLabel = (label, options = { tlsCompatible: true }) => { + if (label.length >= 64 || + !/^[a-z0-9][a-z0-9.-]*[a-z0-9]$/.test(label) || + /(\d+\.){3}\d+/.test(label) || + /[.-]{2}/.test(label) || + (options?.tlsCompatible && DOT_PATTERN.test(label))) { + throw new Error(`Invalid DNS label ${label}`); + } +}; +export const validateCustomEndpoint = (options) => { + if (options.isCustomEndpoint) { + if (options.dualstackEndpoint) + throw new Error("Dualstack endpoint is not supported with custom endpoint"); + if (options.accelerateEndpoint) + throw new Error("Accelerate endpoint is not supported with custom endpoint"); + } +}; +export const getArnResources = (resource) => { + const delimiter = resource.includes(":") ?
":" : "/"; + const [resourceType, ...rest] = resource.split(delimiter); + if (resourceType === "accesspoint") { + if (rest.length !== 1 || rest[0] === "") { + throw new Error(`Access Point ARN should have one resource accesspoint${delimiter}{accesspointname}`); + } + return { accesspointName: rest[0] }; + } + else if (resourceType === "outpost") { + if (!rest[0] || rest[1] !== "accesspoint" || !rest[2] || rest.length !== 3) { + throw new Error(`Outpost ARN should have resource outpost${delimiter}{outpostId}${delimiter}accesspoint${delimiter}{accesspointName}`); + } + const [outpostId, _, accesspointName] = rest; + return { outpostId, accesspointName }; + } + else { + throw new Error(`ARN resource should begin with 'accesspoint${delimiter}' or 'outpost${delimiter}'`); + } +}; +export const validateNoDualstack = (dualstackEndpoint) => { + if (dualstackEndpoint) + throw new Error("Dualstack endpoint is not supported with Outpost or Multi-region Access Point ARN."); +}; +export const validateNoFIPS = (useFipsEndpoint) => { + if (useFipsEndpoint) + throw new Error(`FIPS region is not supported with Outpost.`); +}; +export const validateMrapAlias = (name) => { + try { + name.split(".").forEach((label) => { + validateDNSHostLabel(label); + }); + } + catch (e) { + throw new Error(`"${name}" is not a DNS compatible name.`); + } +}; diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/configurations.js b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/configurations.js new file mode 100644 index 00000000..189c0323 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/configurations.js @@ -0,0 +1,12 @@ +export function resolveBucketEndpointConfig(input) { + const { bucketEndpoint = false, forcePathStyle = false, useAccelerateEndpoint = false, useArnRegion = false, disableMultiregionAccessPoints = false, } = input; + return Object.assign(input, { + bucketEndpoint, + forcePathStyle, + useAccelerateEndpoint, + useArnRegion: typeof useArnRegion === "function" ? useArnRegion : () => Promise.resolve(useArnRegion), + disableMultiregionAccessPoints: typeof disableMultiregionAccessPoints === "function" + ? 
disableMultiregionAccessPoints + : () => Promise.resolve(disableMultiregionAccessPoints), + }); +} diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/index.js b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/index.js new file mode 100644 index 00000000..c8583c2b --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-es/index.js @@ -0,0 +1,6 @@ +export * from "./NodeDisableMultiregionAccessPointConfigOptions"; +export * from "./NodeUseArnRegionConfigOptions"; +export * from "./bucketEndpointMiddleware"; +export * from "./bucketHostname"; +export * from "./configurations"; +export { getArnResources, getSuffixForArnEndpoint, validateOutpostService, validatePartition, validateAccountId, validateRegion, validateDNSHostLabel, validateNoDualstack, validateNoFIPS, } from "./bucketHostnameUtils"; diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/NodeDisableMultiregionAccessPointConfigOptions.d.ts b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/NodeDisableMultiregionAccessPointConfigOptions.d.ts new file mode 100644 index 00000000..c6e0db55 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/NodeDisableMultiregionAccessPointConfigOptions.d.ts @@ -0,0 +1,4 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const NODE_DISABLE_MULTIREGION_ACCESS_POINT_ENV_NAME = "AWS_S3_DISABLE_MULTIREGION_ACCESS_POINTS"; +export declare const NODE_DISABLE_MULTIREGION_ACCESS_POINT_INI_NAME = "s3_disable_multiregion_access_points"; +export declare const NODE_DISABLE_MULTIREGION_ACCESS_POINT_CONFIG_OPTIONS: LoadedConfigSelectors<boolean>; diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/NodeUseArnRegionConfigOptions.d.ts b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/NodeUseArnRegionConfigOptions.d.ts new file mode 100644 index 00000000..4c27b55b --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/NodeUseArnRegionConfigOptions.d.ts @@ -0,0 +1,9 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const NODE_USE_ARN_REGION_ENV_NAME = "AWS_S3_USE_ARN_REGION"; +export declare const NODE_USE_ARN_REGION_INI_NAME = "s3_use_arn_region"; +/** + * Config to load useArnRegion from environment variables and shared INI files + * + * @api private + */ +export declare const NODE_USE_ARN_REGION_CONFIG_OPTIONS: LoadedConfigSelectors<boolean>; diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/bucketEndpointMiddleware.d.ts b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/bucketEndpointMiddleware.d.ts new file mode 100644 index 00000000..71fc8e77 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/bucketEndpointMiddleware.d.ts @@ -0,0 +1,5 @@ +import { BuildMiddleware, Pluggable, RelativeMiddlewareOptions } from "@smithy/types"; +import { BucketEndpointResolvedConfig } from "./configurations"; +export declare const bucketEndpointMiddleware: (options: BucketEndpointResolvedConfig) => BuildMiddleware<any, any>; +export declare const bucketEndpointMiddlewareOptions: RelativeMiddlewareOptions; +export declare const getBucketEndpointPlugin: (options: BucketEndpointResolvedConfig) => Pluggable<any, any>; diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/bucketHostname.d.ts b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/bucketHostname.d.ts new file mode 100644 index 00000000..71ee2652 --- /dev/null +++ 
b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/bucketHostname.d.ts @@ -0,0 +1,8 @@ +import { ArnHostnameParams, BucketHostnameParams } from "./bucketHostnameUtils"; +export interface BucketHostname { + hostname: string; + bucketEndpoint: boolean; + signingRegion?: string; + signingService?: string; +} +export declare const bucketHostname: (options: BucketHostnameParams | ArnHostnameParams) => BucketHostname; diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/bucketHostnameUtils.d.ts b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/bucketHostnameUtils.d.ts new file mode 100644 index 00000000..41db55e6 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/bucketHostnameUtils.d.ts @@ -0,0 +1,118 @@ +import { ARN } from "@aws-sdk/util-arn-parser"; +export declare const DOT_PATTERN: RegExp; +export declare const S3_HOSTNAME_PATTERN: RegExp; +export interface AccessPointArn extends ARN { + accessPointName: string; +} +export interface BucketHostnameParams { + isCustomEndpoint?: boolean; + baseHostname: string; + bucketName: string; + clientRegion: string; + accelerateEndpoint?: boolean; + dualstackEndpoint?: boolean; + fipsEndpoint?: boolean; + pathStyleEndpoint?: boolean; + tlsCompatible?: boolean; +} +export interface ArnHostnameParams extends Omit<BucketHostnameParams, "bucketName"> { + bucketName: ARN; + clientSigningRegion?: string; + clientPartition?: string; + useArnRegion?: boolean; + disableMultiregionAccessPoints?: boolean; +} +export declare const isBucketNameOptions: (options: BucketHostnameParams | ArnHostnameParams) => options is BucketHostnameParams; +/** + * Determines whether a given string is DNS compliant per the rules outlined by + * S3. Length, capitalization, and leading dot restrictions are enforced by the + * DOMAIN_PATTERN regular expression. + * @internal + * + * @see https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html + */ +export declare const isDnsCompatibleBucketName: (bucketName: string) => boolean; +export declare const getSuffix: (hostname: string) => [string, string]; +/** + * Infer region and hostname suffix from a complete hostname + * @internal + * @param hostname - Hostname + * @returns [Region, Hostname suffix] + */ +export declare const getSuffixForArnEndpoint: (hostname: string) => [string, string]; +export declare const validateArnEndpointOptions: (options: { + accelerateEndpoint?: boolean; + tlsCompatible?: boolean; + pathStyleEndpoint?: boolean; +}) => void; +export declare const validateService: (service: string) => void; +export declare const validateS3Service: (service: string) => void; +export declare const validateOutpostService: (service: string) => void; +/** + * Validate partition inferred from ARN is the same as `options.clientPartition`. + * @internal + */ +export declare const validatePartition: (partition: string, options: { + clientPartition: string; +}) => void; +/** + * Validate region value inferred from ARN. If `options.useArnRegion` is set, it validates the region is not a FIPS + * region. If `options.useArnRegion` is unset, it validates the region is equal to `options.clientRegion` or + * `options.clientSigningRegion`. 
+ * @internal + */ +export declare const validateRegion: (region: string, options: { + useArnRegion?: boolean; + allowFipsRegion?: boolean; + clientRegion: string; + clientSigningRegion: string; + useFipsEndpoint: boolean; +}) => void; +/** + * + * @param region + */ +export declare const validateRegionalClient: (region: string) => void; +/** + * Validate an account ID + * @internal + */ +export declare const validateAccountId: (accountId: string) => void; +/** + * Validate a host label according to https://tools.ietf.org/html/rfc3986#section-3.2.2 + * @internal + */ +export declare const validateDNSHostLabel: (label: string, options?: { + tlsCompatible?: boolean; +}) => void; +export declare const validateCustomEndpoint: (options: { + isCustomEndpoint?: boolean; + dualstackEndpoint?: boolean; + accelerateEndpoint?: boolean; +}) => void; +/** + * Validate and parse an Access Point ARN or Outposts ARN + * @internal + * + * @param resource - The resource section of an ARN + * @returns Access Point Name and optional Outpost ID. + */ +export declare const getArnResources: (resource: string) => { + accesspointName: string; + outpostId?: string; +}; +/** + * Throw if dual stack configuration is set to true. + * @internal + */ +export declare const validateNoDualstack: (dualstackEndpoint?: boolean) => void; +/** + * Validate fips endpoint is not set up. + * @internal + */ +export declare const validateNoFIPS: (useFipsEndpoint?: boolean) => void; +/** + * Validate the multi-region access point alias. + * @internal + */ +export declare const validateMrapAlias: (name: string) => void; diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/configurations.d.ts b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/configurations.d.ts new file mode 100644 index 00000000..bd262737 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/configurations.d.ts @@ -0,0 +1,83 @@ +import { Provider, RegionInfoProvider } from "@smithy/types"; +export interface BucketEndpointInputConfig { + /** + * Whether to use the bucket name as the endpoint for this request. The bucket + * name must be a domain name with a CNAME record alias to an appropriate virtual + * hosted-style S3 hostname, e.g. a bucket of `images.johnsmith.net` and a DNS + * record of: + * + * ``` + * images.johnsmith.net CNAME images.johnsmith.net.s3.amazonaws.com. + * ``` + * + * @see https://docs.aws.amazon.com/AmazonS3/latest/userguide/VirtualHosting.html#VirtualHostingCustomURLs + */ + bucketEndpoint?: boolean; + /** + * Whether to force path style URLs for S3 objects (e.g., https://s3.amazonaws.com/<bucketName>/<key> instead of https://<bucketName>.s3.amazonaws.com/<key>) + */ + forcePathStyle?: boolean; + /** + * Whether to use the S3 Transfer Acceleration endpoint by default + */ + useAccelerateEndpoint?: boolean; + /** + * Whether to override the request region with the region inferred from requested resource's ARN. Defaults to false + */ + useArnRegion?: boolean | Provider<boolean>; + /** + * Whether to prevent SDK from making cross-region request when supplied bucket is a multi-region access point ARN. + * Defaults to false + */ + disableMultiregionAccessPoints?: boolean | Provider<boolean>; +} +interface PreviouslyResolved { + isCustomEndpoint?: boolean; + region: Provider<string>; + regionInfoProvider: RegionInfoProvider; + useFipsEndpoint: Provider<boolean>; + useDualstackEndpoint: Provider<boolean>; +} +export interface BucketEndpointResolvedConfig { + /** + * Whether the endpoint is specified by caller. 
+ * @internal + */ + isCustomEndpoint?: boolean; + /** + * Resolved value for input config {@link BucketEndpointInputConfig.bucketEndpoint} + */ + bucketEndpoint: boolean; + /** + * Resolved value for input config {@link BucketEndpointInputConfig.forcePathStyle} + */ + forcePathStyle: boolean; + /** + * Resolved value for input config {@link BucketEndpointInputConfig.useAccelerateEndpoint} + */ + useAccelerateEndpoint: boolean; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint: Provider<boolean>; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint: Provider<boolean>; + /** + * Resolved value for input config {@link BucketEndpointInputConfig.useArnRegion} + */ + useArnRegion: Provider<boolean>; + /** + * Resolved value for input config {@link RegionInputConfig.region} + */ + region: Provider<string>; + /** + * Fetch related hostname, signing name or signing region with given region. + * @internal + */ + regionInfoProvider: RegionInfoProvider; + disableMultiregionAccessPoints: Provider<boolean>; +} +export declare function resolveBucketEndpointConfig<T>(input: T & PreviouslyResolved & BucketEndpointInputConfig): T & BucketEndpointResolvedConfig; +export {}; diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/index.d.ts b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/index.d.ts new file mode 100644 index 00000000..c8583c2b --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/index.d.ts @@ -0,0 +1,6 @@ +export * from "./NodeDisableMultiregionAccessPointConfigOptions"; +export * from "./NodeUseArnRegionConfigOptions"; +export * from "./bucketEndpointMiddleware"; +export * from "./bucketHostname"; +export * from "./configurations"; +export { getArnResources, getSuffixForArnEndpoint, validateOutpostService, validatePartition, validateAccountId, validateRegion, validateDNSHostLabel, validateNoDualstack, validateNoFIPS, } from "./bucketHostnameUtils"; diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/NodeDisableMultiregionAccessPointConfigOptions.d.ts b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/NodeDisableMultiregionAccessPointConfigOptions.d.ts new file mode 100644 index 00000000..2275039d --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/NodeDisableMultiregionAccessPointConfigOptions.d.ts @@ -0,0 +1,6 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const NODE_DISABLE_MULTIREGION_ACCESS_POINT_ENV_NAME = + "AWS_S3_DISABLE_MULTIREGION_ACCESS_POINTS"; +export declare const NODE_DISABLE_MULTIREGION_ACCESS_POINT_INI_NAME = + "s3_disable_multiregion_access_points"; +export declare const NODE_DISABLE_MULTIREGION_ACCESS_POINT_CONFIG_OPTIONS: LoadedConfigSelectors<boolean>; diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/NodeUseArnRegionConfigOptions.d.ts b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/NodeUseArnRegionConfigOptions.d.ts new file mode 100644 index 00000000..c289cb95 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/NodeUseArnRegionConfigOptions.d.ts @@ -0,0 +1,4 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const NODE_USE_ARN_REGION_ENV_NAME = "AWS_S3_USE_ARN_REGION"; +export declare const NODE_USE_ARN_REGION_INI_NAME = "s3_use_arn_region"; +export declare const NODE_USE_ARN_REGION_CONFIG_OPTIONS: LoadedConfigSelectors<boolean>; diff --git 
a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/bucketEndpointMiddleware.d.ts b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/bucketEndpointMiddleware.d.ts new file mode 100644 index 00000000..e54851b7 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/bucketEndpointMiddleware.d.ts @@ -0,0 +1,13 @@ +import { + BuildMiddleware, + Pluggable, + RelativeMiddlewareOptions, +} from "@smithy/types"; +import { BucketEndpointResolvedConfig } from "./configurations"; +export declare const bucketEndpointMiddleware: ( + options: BucketEndpointResolvedConfig +) => BuildMiddleware<any, any>; +export declare const bucketEndpointMiddlewareOptions: RelativeMiddlewareOptions; +export declare const getBucketEndpointPlugin: ( + options: BucketEndpointResolvedConfig +) => Pluggable<any, any>; diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/bucketHostname.d.ts b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/bucketHostname.d.ts new file mode 100644 index 00000000..1d2b948b --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/bucketHostname.d.ts @@ -0,0 +1,10 @@ +import { ArnHostnameParams, BucketHostnameParams } from "./bucketHostnameUtils"; +export interface BucketHostname { + hostname: string; + bucketEndpoint: boolean; + signingRegion?: string; + signingService?: string; +} +export declare const bucketHostname: ( + options: BucketHostnameParams | ArnHostnameParams +) => BucketHostname; diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/bucketHostnameUtils.d.ts b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/bucketHostnameUtils.d.ts new file mode 100644 index 00000000..30091f7c --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/bucketHostnameUtils.d.ts @@ -0,0 +1,80 @@ +import { ARN } from "@aws-sdk/util-arn-parser"; +export declare const DOT_PATTERN: RegExp; +export declare const S3_HOSTNAME_PATTERN: RegExp; +export interface AccessPointArn extends ARN { + accessPointName: string; +} +export interface BucketHostnameParams { + isCustomEndpoint?: boolean; + baseHostname: string; + bucketName: string; + clientRegion: string; + accelerateEndpoint?: boolean; + dualstackEndpoint?: boolean; + fipsEndpoint?: boolean; + pathStyleEndpoint?: boolean; + tlsCompatible?: boolean; +} +export interface ArnHostnameParams + extends Pick< + BucketHostnameParams, + Exclude<keyof BucketHostnameParams, "bucketName"> + > { + bucketName: ARN; + clientSigningRegion?: string; + clientPartition?: string; + useArnRegion?: boolean; + disableMultiregionAccessPoints?: boolean; +} +export declare const isBucketNameOptions: ( + options: BucketHostnameParams | ArnHostnameParams +) => options is BucketHostnameParams; +export declare const isDnsCompatibleBucketName: (bucketName: string) => boolean; +export declare const getSuffix: (hostname: string) => [string, string]; +export declare const getSuffixForArnEndpoint: ( + hostname: string +) => [string, string]; +export declare const validateArnEndpointOptions: (options: { + accelerateEndpoint?: boolean; + tlsCompatible?: boolean; + pathStyleEndpoint?: boolean; +}) => void; +export declare const validateService: (service: string) => void; +export declare const validateS3Service: (service: string) => void; +export declare const validateOutpostService: (service: string) => void; +export declare const validatePartition: ( + partition: string, + options: { + clientPartition: string; + } +) => void; +export declare const 
validateRegion: ( + region: string, + options: { + useArnRegion?: boolean; + allowFipsRegion?: boolean; + clientRegion: string; + clientSigningRegion: string; + useFipsEndpoint: boolean; + } +) => void; +export declare const validateRegionalClient: (region: string) => void; +export declare const validateAccountId: (accountId: string) => void; +export declare const validateDNSHostLabel: ( + label: string, + options?: { + tlsCompatible?: boolean; + } +) => void; +export declare const validateCustomEndpoint: (options: { + isCustomEndpoint?: boolean; + dualstackEndpoint?: boolean; + accelerateEndpoint?: boolean; +}) => void; +export declare const getArnResources: (resource: string) => { + accesspointName: string; + outpostId?: string; +}; +export declare const validateNoDualstack: (dualstackEndpoint?: boolean) => void; +export declare const validateNoFIPS: (useFipsEndpoint?: boolean) => void; +export declare const validateMrapAlias: (name: string) => void; diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/configurations.d.ts b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/configurations.d.ts new file mode 100644 index 00000000..f53adf3e --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/configurations.d.ts @@ -0,0 +1,31 @@ +import { Provider, RegionInfoProvider } from "@smithy/types"; +export interface BucketEndpointInputConfig { + bucketEndpoint?: boolean; + forcePathStyle?: boolean; + useAccelerateEndpoint?: boolean; + useArnRegion?: boolean | Provider<boolean>; + disableMultiregionAccessPoints?: boolean | Provider<boolean>; +} +interface PreviouslyResolved { + isCustomEndpoint?: boolean; + region: Provider<string>; + regionInfoProvider: RegionInfoProvider; + useFipsEndpoint: Provider<boolean>; + useDualstackEndpoint: Provider<boolean>; +} +export interface BucketEndpointResolvedConfig { + isCustomEndpoint?: boolean; + bucketEndpoint: boolean; + forcePathStyle: boolean; + useAccelerateEndpoint: boolean; + useFipsEndpoint: Provider<boolean>; + useDualstackEndpoint: Provider<boolean>; + useArnRegion: Provider<boolean>; + region: Provider<string>; + regionInfoProvider: RegionInfoProvider; + disableMultiregionAccessPoints: Provider<boolean>; +} +export declare function resolveBucketEndpointConfig<T>( + input: T & PreviouslyResolved & BucketEndpointInputConfig +): T & BucketEndpointResolvedConfig; +export {}; diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..9687a3ea --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/dist-types/ts3.4/index.d.ts @@ -0,0 +1,16 @@ +export * from "./NodeDisableMultiregionAccessPointConfigOptions"; +export * from "./NodeUseArnRegionConfigOptions"; +export * from "./bucketEndpointMiddleware"; +export * from "./bucketHostname"; +export * from "./configurations"; +export { + getArnResources, + getSuffixForArnEndpoint, + validateOutpostService, + validatePartition, + validateAccountId, + validateRegion, + validateDNSHostLabel, + validateNoDualstack, + validateNoFIPS, +} from "./bucketHostnameUtils";
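Taken together, the modules above resolve the hostname a request should target from either a plain bucket name or an access-point ARN. A minimal sketch of the bucket-name path, using only the `bucketHostname` export added in this diff (the bucket name, region, and base hostname below are hypothetical):

import { bucketHostname } from "@aws-sdk/middleware-bucket-endpoint";

// DNS-compatible names get virtual-hosted-style addressing: the bucket
// becomes a host label in front of the regional S3 hostname.
const { bucketEndpoint, hostname } = bucketHostname({
  bucketName: "my-bucket", // hypothetical
  baseHostname: "s3.us-east-1.amazonaws.com",
  clientRegion: "us-east-1",
});
// bucketEndpoint === true
// hostname === "my-bucket.s3.us-east-1.amazonaws.com"

// Names that fail isDnsCompatibleBucketName (uppercase, "..", IP-like)
// keep the base hostname, so the bucket stays in the request path.
const fallback = bucketHostname({
  bucketName: "My_Bucket", // hypothetical, not DNS-compatible
  baseHostname: "s3.us-east-1.amazonaws.com",
  clientRegion: "us-east-1",
});
// fallback.bucketEndpoint === false

When `bucketEndpoint` is true, `bucketEndpointMiddleware` strips the leading bucket segment from `request.path`, which is the path rewrite visible at the top of this section of the diff.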
diff --git a/node_modules/@aws-sdk/middleware-bucket-endpoint/package.json b/node_modules/@aws-sdk/middleware-bucket-endpoint/package.json new file mode 100644 index 00000000..27fac9d5 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-bucket-endpoint/package.json @@ -0,0 +1,59 @@ +{ + "name": "@aws-sdk/middleware-bucket-endpoint", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-bucket-endpoint", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-arn-parser": "3.723.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-bucket-endpoint", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-bucket-endpoint" + } +} diff --git a/node_modules/@aws-sdk/middleware-expect-continue/LICENSE b/node_modules/@aws-sdk/middleware-expect-continue/LICENSE new file mode 100644 index 00000000..dd65ae06 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-expect-continue/LICENSE @@ -0,0 +1,201 @@ + [Apache License, Version 2.0; standard 201-line license text omitted as boilerplate. Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. Full text: http://www.apache.org/licenses/LICENSE-2.0]
diff --git a/node_modules/@aws-sdk/middleware-expect-continue/README.md b/node_modules/@aws-sdk/middleware-expect-continue/README.md new file mode 100644 index 00000000..e19bd83b --- /dev/null +++ b/node_modules/@aws-sdk/middleware-expect-continue/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/middleware-expect-continue + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-expect-continue/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-expect-continue) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-expect-continue.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-expect-continue) diff --git a/node_modules/@aws-sdk/middleware-expect-continue/dist-cjs/index.js b/node_modules/@aws-sdk/middleware-expect-continue/dist-cjs/index.js new file mode 100644 index 00000000..4502e5b9 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-expect-continue/dist-cjs/index.js @@ -0,0 +1,66 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + addExpectContinueMiddleware: () => addExpectContinueMiddleware, + addExpectContinueMiddlewareOptions: () => addExpectContinueMiddlewareOptions, + getAddExpectContinuePlugin: () => getAddExpectContinuePlugin +}); +module.exports = __toCommonJS(index_exports); +var import_protocol_http = require("@smithy/protocol-http"); +function addExpectContinueMiddleware(options) { + return (next) => async (args) => { + const { request } = args; + if (import_protocol_http.HttpRequest.isInstance(request) && request.body && options.runtime === "node") { + if (options.requestHandler?.constructor?.name !== "FetchHttpHandler") { + request.headers = { + ...request.headers, + Expect: "100-continue" + }; + } + } + return next({ + ...args, + request + }); + }; +} +__name(addExpectContinueMiddleware, "addExpectContinueMiddleware"); +var addExpectContinueMiddlewareOptions = { + step: "build", + tags: ["SET_EXPECT_HEADER", "EXPECT_HEADER"], + name: "addExpectContinueMiddleware", + override: true +}; +var getAddExpectContinuePlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(addExpectContinueMiddleware(options), addExpectContinueMiddlewareOptions); + }, "applyToStack") +}), "getAddExpectContinuePlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + addExpectContinueMiddleware, + addExpectContinueMiddlewareOptions, + getAddExpectContinuePlugin +}); + diff --git a/node_modules/@aws-sdk/middleware-expect-continue/dist-es/index.js b/node_modules/@aws-sdk/middleware-expect-continue/dist-es/index.js new file mode 100644 index 00000000..a2a92a28 --- 
/dev/null +++ b/node_modules/@aws-sdk/middleware-expect-continue/dist-es/index.js @@ -0,0 +1,29 @@ +import { HttpRequest } from "@smithy/protocol-http"; +export function addExpectContinueMiddleware(options) { + return (next) => async (args) => { + const { request } = args; + if (HttpRequest.isInstance(request) && request.body && options.runtime === "node") { + if (options.requestHandler?.constructor?.name !== "FetchHttpHandler") { + request.headers = { + ...request.headers, + Expect: "100-continue", + }; + } + } + return next({ + ...args, + request, + }); + }; +} +export const addExpectContinueMiddlewareOptions = { + step: "build", + tags: ["SET_EXPECT_HEADER", "EXPECT_HEADER"], + name: "addExpectContinueMiddleware", + override: true, +}; +export const getAddExpectContinuePlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(addExpectContinueMiddleware(options), addExpectContinueMiddlewareOptions); + }, +}); diff --git a/node_modules/@aws-sdk/middleware-expect-continue/dist-types/index.d.ts b/node_modules/@aws-sdk/middleware-expect-continue/dist-types/index.d.ts new file mode 100644 index 00000000..db0e0a5a --- /dev/null +++ b/node_modules/@aws-sdk/middleware-expect-continue/dist-types/index.d.ts @@ -0,0 +1,10 @@ +import { HttpHandler } from "@smithy/protocol-http"; +import { BuildHandlerOptions, BuildMiddleware, Pluggable, RequestHandler } from "@smithy/types"; +interface PreviouslyResolved { + runtime: string; + requestHandler?: RequestHandler<any, any> | HttpHandler; +} +export declare function addExpectContinueMiddleware(options: PreviouslyResolved): BuildMiddleware<any, any>; +export declare const addExpectContinueMiddlewareOptions: BuildHandlerOptions; +export declare const getAddExpectContinuePlugin: (options: PreviouslyResolved) => Pluggable<any, any>; +export {}; diff --git a/node_modules/@aws-sdk/middleware-expect-continue/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/middleware-expect-continue/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..5125f491 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-expect-continue/dist-types/ts3.4/index.d.ts @@ -0,0 +1,19 @@ +import { HttpHandler } from "@smithy/protocol-http"; +import { + BuildHandlerOptions, + BuildMiddleware, + Pluggable, + RequestHandler, +} from "@smithy/types"; +interface PreviouslyResolved { + runtime: string; + requestHandler?: RequestHandler<any, any> | HttpHandler; +} +export declare function addExpectContinueMiddleware( + options: PreviouslyResolved +): BuildMiddleware<any, any>; +export declare const addExpectContinueMiddlewareOptions: BuildHandlerOptions; +export declare const getAddExpectContinuePlugin: ( + options: PreviouslyResolved +) => Pluggable<any, any>; +export {};
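As a sketch of what this middleware does in isolation on Node.js (the request values and the stubbed `next` handler below are hypothetical; a real client wires the middleware into its stack via `getAddExpectContinuePlugin`):

import { addExpectContinueMiddleware } from "@aws-sdk/middleware-expect-continue";
import { HttpRequest } from "@smithy/protocol-http";

// Stub downstream handler that just echoes the middleware's arguments.
const next = async (args: any) => args;
const mw: any = addExpectContinueMiddleware({ runtime: "node" });

// (Top-level await: assumes an ES module context.)
const result = await mw(next)({
  input: {},
  request: new HttpRequest({
    method: "PUT",
    protocol: "https:",
    hostname: "example-bucket.s3.amazonaws.com", // hypothetical
    path: "/example.txt",
    headers: {},
    body: "hello",
  }),
});
// Requests with a body gain the header that lets the server reject an
// upload before the body is transmitted:
// result.request.headers.Expect === "100-continue"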
run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-expect-continue", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-expect-continue" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + } +} diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/LICENSE b/node_modules/@aws-sdk/middleware-flexible-checksums/LICENSE new file mode 100644 index 00000000..8efcd8d5 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/README.md b/node_modules/@aws-sdk/middleware-flexible-checksums/README.md new file mode 100644 index 00000000..ac7b4d35 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/README.md @@ -0,0 +1,7 @@ +# @aws-sdk/middleware-flexible-checksums + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-flexible-checksums/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-flexible-checksums) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-flexible-checksums.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-flexible-checksums) + +This package provides AWS SDK for JavaScript middleware that applies a checksum +of the request body as a header. 
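The README above describes the middleware this PR vendors as part of the NextCloud-to-MinIO migration: it computes a checksum of each request body and attaches it as a header (or, for streams, a trailer). A minimal usage sketch follows, assuming an S3-compatible MinIO endpoint and an @aws-sdk/client-s3 release recent enough to bundle this package version; the endpoint, bucket, and key names are illustrative placeholders, not values from this diff.

import { GetObjectCommand, PutObjectCommand, S3Client } from "@aws-sdk/client-s3";

const s3 = new S3Client({
  region: "us-east-1",
  endpoint: process.env.MINIO_ENDPOINT, // e.g. "http://localhost:9000" (assumed env var)
  forcePathStyle: true, // MinIO is usually addressed path-style
  // Both settings default to WHEN_SUPPORTED (see resolveFlexibleChecksumsConfig below).
  // WHEN_REQUIRED limits checksum work to operations that mandate it, which can help
  // with S3-compatible stores that reject the newer checksum headers or trailers.
  requestChecksumCalculation: "WHEN_REQUIRED",
  responseChecksumValidation: "WHEN_REQUIRED",
});

async function roundTrip(): Promise<void> {
  // ChecksumAlgorithm is the modeled `requestAlgorithmMember`; it must be one of
  // CLIENT_SUPPORTED_ALGORITHMS or getChecksumAlgorithmForRequest throws.
  await s3.send(
    new PutObjectCommand({
      Bucket: "storage",
      Key: "contacts/example.vcf",
      Body: "BEGIN:VCARD\nEND:VCARD\n",
      ChecksumAlgorithm: "SHA256",
    })
  );

  // ChecksumMode is the modeled `requestValidationModeMember`; setting it to
  // "ENABLED" makes flexibleChecksumsResponseMiddleware validate the response
  // body against the x-amz-checksum-* header returned by the service.
  await s3.send(
    new GetObjectCommand({
      Bucket: "storage",
      Key: "contacts/example.vcf",
      ChecksumMode: "ENABLED",
    })
  );
}

roundTrip().catch(console.error);

For streaming request bodies the middleware instead switches to aws-chunked transfer encoding and announces the checksum via the x-amz-trailer header (see flexibleChecksumsMiddleware below), which is worth knowing if an S3-compatible store or proxy rejects STREAMING-UNSIGNED-PAYLOAD-TRAILER requests.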
diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-cjs/getCrc32ChecksumAlgorithmFunction.browser.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-cjs/getCrc32ChecksumAlgorithmFunction.browser.js new file mode 100644 index 00000000..3fc25764 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-cjs/getCrc32ChecksumAlgorithmFunction.browser.js @@ -0,0 +1,6 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getCrc32ChecksumAlgorithmFunction = void 0; +const crc32_1 = require("@aws-crypto/crc32"); +const getCrc32ChecksumAlgorithmFunction = () => crc32_1.AwsCrc32; +exports.getCrc32ChecksumAlgorithmFunction = getCrc32ChecksumAlgorithmFunction; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-cjs/getCrc32ChecksumAlgorithmFunction.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-cjs/getCrc32ChecksumAlgorithmFunction.js new file mode 100644 index 00000000..a2a0dff5 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-cjs/getCrc32ChecksumAlgorithmFunction.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getCrc32ChecksumAlgorithmFunction = void 0; +const tslib_1 = require("tslib"); +const crc32_1 = require("@aws-crypto/crc32"); +const util_1 = require("@aws-crypto/util"); +const zlib = tslib_1.__importStar(require("zlib")); +class NodeCrc32 { + checksum = 0; + update(data) { + this.checksum = zlib.crc32(data, this.checksum); + } + async digest() { + return (0, util_1.numToUint8)(this.checksum); + } + reset() { + this.checksum = 0; + } +} +const getCrc32ChecksumAlgorithmFunction = () => { + if (typeof zlib.crc32 === "undefined") { + return crc32_1.AwsCrc32; + } + return NodeCrc32; +}; +exports.getCrc32ChecksumAlgorithmFunction = getCrc32ChecksumAlgorithmFunction; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-cjs/index.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-cjs/index.js new file mode 100644 index 00000000..55420b92 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-cjs/index.js @@ -0,0 +1,525 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + CONFIG_REQUEST_CHECKSUM_CALCULATION: () => CONFIG_REQUEST_CHECKSUM_CALCULATION, + CONFIG_RESPONSE_CHECKSUM_VALIDATION: () => CONFIG_RESPONSE_CHECKSUM_VALIDATION, + ChecksumAlgorithm: () => ChecksumAlgorithm, + ChecksumLocation: () => ChecksumLocation, + DEFAULT_CHECKSUM_ALGORITHM: () => DEFAULT_CHECKSUM_ALGORITHM, + DEFAULT_REQUEST_CHECKSUM_CALCULATION: () => 
DEFAULT_REQUEST_CHECKSUM_CALCULATION, + DEFAULT_RESPONSE_CHECKSUM_VALIDATION: () => DEFAULT_RESPONSE_CHECKSUM_VALIDATION, + ENV_REQUEST_CHECKSUM_CALCULATION: () => ENV_REQUEST_CHECKSUM_CALCULATION, + ENV_RESPONSE_CHECKSUM_VALIDATION: () => ENV_RESPONSE_CHECKSUM_VALIDATION, + NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS: () => NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS, + NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS: () => NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS, + RequestChecksumCalculation: () => RequestChecksumCalculation, + ResponseChecksumValidation: () => ResponseChecksumValidation, + crc64NvmeCrtContainer: () => crc64NvmeCrtContainer, + flexibleChecksumsMiddleware: () => flexibleChecksumsMiddleware, + flexibleChecksumsMiddlewareOptions: () => flexibleChecksumsMiddlewareOptions, + getFlexibleChecksumsPlugin: () => getFlexibleChecksumsPlugin, + resolveFlexibleChecksumsConfig: () => resolveFlexibleChecksumsConfig +}); +module.exports = __toCommonJS(index_exports); + +// src/constants.ts +var RequestChecksumCalculation = { + /** + * When set, a checksum will be calculated for all request payloads of operations + * modeled with the {@link httpChecksum} trait where `requestChecksumRequired` is `true` + * AND/OR a `requestAlgorithmMember` is modeled. + * {@link https://smithy.io/2.0/aws/aws-core.html#aws-protocols-httpchecksum-trait httpChecksum} + */ + WHEN_SUPPORTED: "WHEN_SUPPORTED", + /** + * When set, a checksum will only be calculated for request payloads of operations + * modeled with the {@link httpChecksum} trait where `requestChecksumRequired` is `true` + * OR where a `requestAlgorithmMember` is modeled and the user sets it. + * {@link https://smithy.io/2.0/aws/aws-core.html#aws-protocols-httpchecksum-trait httpChecksum} + */ + WHEN_REQUIRED: "WHEN_REQUIRED" +}; +var DEFAULT_REQUEST_CHECKSUM_CALCULATION = RequestChecksumCalculation.WHEN_SUPPORTED; +var ResponseChecksumValidation = { + /** + * When set, checksum validation MUST be performed on all response payloads of operations + * modeled with the {@link httpChecksum} trait where `responseAlgorithms` is modeled, + * except when no modeled checksum algorithms are supported by an SDK. + * {@link https://smithy.io/2.0/aws/aws-core.html#aws-protocols-httpchecksum-trait httpChecksum} + */ + WHEN_SUPPORTED: "WHEN_SUPPORTED", + /** + * When set, checksum validation MUST NOT be performed on response payloads of operations UNLESS + * the SDK supports the modeled checksum algorithms AND the user has set the `requestValidationModeMember` to `ENABLED`. + * It is currently impossible to model an operation as requiring a response checksum, + * but this setting leaves the door open for future updates. 
+ */ + WHEN_REQUIRED: "WHEN_REQUIRED" +}; +var DEFAULT_RESPONSE_CHECKSUM_VALIDATION = RequestChecksumCalculation.WHEN_SUPPORTED; +var ChecksumAlgorithm = /* @__PURE__ */ ((ChecksumAlgorithm3) => { + ChecksumAlgorithm3["MD5"] = "MD5"; + ChecksumAlgorithm3["CRC32"] = "CRC32"; + ChecksumAlgorithm3["CRC32C"] = "CRC32C"; + ChecksumAlgorithm3["CRC64NVME"] = "CRC64NVME"; + ChecksumAlgorithm3["SHA1"] = "SHA1"; + ChecksumAlgorithm3["SHA256"] = "SHA256"; + return ChecksumAlgorithm3; +})(ChecksumAlgorithm || {}); +var ChecksumLocation = /* @__PURE__ */ ((ChecksumLocation2) => { + ChecksumLocation2["HEADER"] = "header"; + ChecksumLocation2["TRAILER"] = "trailer"; + return ChecksumLocation2; +})(ChecksumLocation || {}); +var DEFAULT_CHECKSUM_ALGORITHM = "CRC32" /* CRC32 */; + +// src/stringUnionSelector.ts +var stringUnionSelector = /* @__PURE__ */ __name((obj, key, union, type) => { + if (!(key in obj)) return void 0; + const value = obj[key].toUpperCase(); + if (!Object.values(union).includes(value)) { + throw new TypeError(`Cannot load ${type} '${key}'. Expected one of ${Object.values(union)}, got '${obj[key]}'.`); + } + return value; +}, "stringUnionSelector"); + +// src/NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS.ts +var ENV_REQUEST_CHECKSUM_CALCULATION = "AWS_REQUEST_CHECKSUM_CALCULATION"; +var CONFIG_REQUEST_CHECKSUM_CALCULATION = "request_checksum_calculation"; +var NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS = { + environmentVariableSelector: /* @__PURE__ */ __name((env) => stringUnionSelector(env, ENV_REQUEST_CHECKSUM_CALCULATION, RequestChecksumCalculation, "env" /* ENV */), "environmentVariableSelector"), + configFileSelector: /* @__PURE__ */ __name((profile) => stringUnionSelector(profile, CONFIG_REQUEST_CHECKSUM_CALCULATION, RequestChecksumCalculation, "shared config entry" /* CONFIG */), "configFileSelector"), + default: DEFAULT_REQUEST_CHECKSUM_CALCULATION +}; + +// src/NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS.ts +var ENV_RESPONSE_CHECKSUM_VALIDATION = "AWS_RESPONSE_CHECKSUM_VALIDATION"; +var CONFIG_RESPONSE_CHECKSUM_VALIDATION = "response_checksum_validation"; +var NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS = { + environmentVariableSelector: /* @__PURE__ */ __name((env) => stringUnionSelector(env, ENV_RESPONSE_CHECKSUM_VALIDATION, ResponseChecksumValidation, "env" /* ENV */), "environmentVariableSelector"), + configFileSelector: /* @__PURE__ */ __name((profile) => stringUnionSelector(profile, CONFIG_RESPONSE_CHECKSUM_VALIDATION, ResponseChecksumValidation, "shared config entry" /* CONFIG */), "configFileSelector"), + default: DEFAULT_RESPONSE_CHECKSUM_VALIDATION +}; + +// src/crc64-nvme-crt-container.ts +var crc64NvmeCrtContainer = { + CrtCrc64Nvme: null +}; + +// src/flexibleChecksumsMiddleware.ts +var import_core = require("@aws-sdk/core"); +var import_protocol_http = require("@smithy/protocol-http"); +var import_util_stream = require("@smithy/util-stream"); + +// src/types.ts +var CLIENT_SUPPORTED_ALGORITHMS = [ + "CRC32" /* CRC32 */, + "CRC32C" /* CRC32C */, + "CRC64NVME" /* CRC64NVME */, + "SHA1" /* SHA1 */, + "SHA256" /* SHA256 */ +]; +var PRIORITY_ORDER_ALGORITHMS = [ + "SHA256" /* SHA256 */, + "SHA1" /* SHA1 */, + "CRC32" /* CRC32 */, + "CRC32C" /* CRC32C */, + "CRC64NVME" /* CRC64NVME */ +]; + +// src/getChecksumAlgorithmForRequest.ts +var getChecksumAlgorithmForRequest = /* @__PURE__ */ __name((input, { requestChecksumRequired, requestAlgorithmMember, requestChecksumCalculation }) => { + if (!requestAlgorithmMember) { + return 
requestChecksumCalculation === RequestChecksumCalculation.WHEN_SUPPORTED || requestChecksumRequired ? DEFAULT_CHECKSUM_ALGORITHM : void 0; + } + if (!input[requestAlgorithmMember]) { + return void 0; + } + const checksumAlgorithm = input[requestAlgorithmMember]; + if (!CLIENT_SUPPORTED_ALGORITHMS.includes(checksumAlgorithm)) { + throw new Error( + `The checksum algorithm "${checksumAlgorithm}" is not supported by the client. Select one of ${CLIENT_SUPPORTED_ALGORITHMS}.` + ); + } + return checksumAlgorithm; +}, "getChecksumAlgorithmForRequest"); + +// src/getChecksumLocationName.ts +var getChecksumLocationName = /* @__PURE__ */ __name((algorithm) => algorithm === "MD5" /* MD5 */ ? "content-md5" : `x-amz-checksum-${algorithm.toLowerCase()}`, "getChecksumLocationName"); + +// src/hasHeader.ts +var hasHeader = /* @__PURE__ */ __name((header, headers) => { + const soughtHeader = header.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return true; + } + } + return false; +}, "hasHeader"); + +// src/hasHeaderWithPrefix.ts +var hasHeaderWithPrefix = /* @__PURE__ */ __name((headerPrefix, headers) => { + const soughtHeaderPrefix = headerPrefix.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (headerName.toLowerCase().startsWith(soughtHeaderPrefix)) { + return true; + } + } + return false; +}, "hasHeaderWithPrefix"); + +// src/isStreaming.ts +var import_is_array_buffer = require("@smithy/is-array-buffer"); +var isStreaming = /* @__PURE__ */ __name((body) => body !== void 0 && typeof body !== "string" && !ArrayBuffer.isView(body) && !(0, import_is_array_buffer.isArrayBuffer)(body), "isStreaming"); + +// src/selectChecksumAlgorithmFunction.ts +var import_crc32c = require("@aws-crypto/crc32c"); +var import_getCrc32ChecksumAlgorithmFunction = require("././getCrc32ChecksumAlgorithmFunction"); +var selectChecksumAlgorithmFunction = /* @__PURE__ */ __name((checksumAlgorithm, config) => { + switch (checksumAlgorithm) { + case "MD5" /* MD5 */: + return config.md5; + case "CRC32" /* CRC32 */: + return (0, import_getCrc32ChecksumAlgorithmFunction.getCrc32ChecksumAlgorithmFunction)(); + case "CRC32C" /* CRC32C */: + return import_crc32c.AwsCrc32c; + case "CRC64NVME" /* CRC64NVME */: + if (typeof crc64NvmeCrtContainer.CrtCrc64Nvme !== "function") { + throw new Error( + `Please check whether you have installed the "@aws-sdk/crc64-nvme-crt" package explicitly. +You must also register the package by calling [require("@aws-sdk/crc64-nvme-crt");] or an ESM equivalent such as [import "@aws-sdk/crc64-nvme-crt";]. 
+For more information please go to https://github.com/aws/aws-sdk-js-v3#functionality-requiring-aws-common-runtime-crt` + ); + } + return crc64NvmeCrtContainer.CrtCrc64Nvme; + case "SHA1" /* SHA1 */: + return config.sha1; + case "SHA256" /* SHA256 */: + return config.sha256; + default: + throw new Error(`Unsupported checksum algorithm: ${checksumAlgorithm}`); + } +}, "selectChecksumAlgorithmFunction"); + +// src/stringHasher.ts +var import_util_utf8 = require("@smithy/util-utf8"); +var stringHasher = /* @__PURE__ */ __name((checksumAlgorithmFn, body) => { + const hash = new checksumAlgorithmFn(); + hash.update((0, import_util_utf8.toUint8Array)(body || "")); + return hash.digest(); +}, "stringHasher"); + +// src/flexibleChecksumsMiddleware.ts +var flexibleChecksumsMiddlewareOptions = { + name: "flexibleChecksumsMiddleware", + step: "build", + tags: ["BODY_CHECKSUM"], + override: true +}; +var flexibleChecksumsMiddleware = /* @__PURE__ */ __name((config, middlewareConfig) => (next, context) => async (args) => { + if (!import_protocol_http.HttpRequest.isInstance(args.request)) { + return next(args); + } + if (hasHeaderWithPrefix("x-amz-checksum-", args.request.headers)) { + return next(args); + } + const { request, input } = args; + const { body: requestBody, headers } = request; + const { base64Encoder, streamHasher } = config; + const { requestChecksumRequired, requestAlgorithmMember } = middlewareConfig; + const requestChecksumCalculation = await config.requestChecksumCalculation(); + const requestAlgorithmMemberName = requestAlgorithmMember?.name; + const requestAlgorithmMemberHttpHeader = requestAlgorithmMember?.httpHeader; + if (requestAlgorithmMemberName && !input[requestAlgorithmMemberName]) { + if (requestChecksumCalculation === RequestChecksumCalculation.WHEN_SUPPORTED || requestChecksumRequired) { + input[requestAlgorithmMemberName] = DEFAULT_CHECKSUM_ALGORITHM; + if (requestAlgorithmMemberHttpHeader) { + headers[requestAlgorithmMemberHttpHeader] = DEFAULT_CHECKSUM_ALGORITHM; + } + } + } + const checksumAlgorithm = getChecksumAlgorithmForRequest(input, { + requestChecksumRequired, + requestAlgorithmMember: requestAlgorithmMember?.name, + requestChecksumCalculation + }); + let updatedBody = requestBody; + let updatedHeaders = headers; + if (checksumAlgorithm) { + switch (checksumAlgorithm) { + case "CRC32" /* CRC32 */: + (0, import_core.setFeature)(context, "FLEXIBLE_CHECKSUMS_REQ_CRC32", "U"); + break; + case "CRC32C" /* CRC32C */: + (0, import_core.setFeature)(context, "FLEXIBLE_CHECKSUMS_REQ_CRC32C", "V"); + break; + case "CRC64NVME" /* CRC64NVME */: + (0, import_core.setFeature)(context, "FLEXIBLE_CHECKSUMS_REQ_CRC64", "W"); + break; + case "SHA1" /* SHA1 */: + (0, import_core.setFeature)(context, "FLEXIBLE_CHECKSUMS_REQ_SHA1", "X"); + break; + case "SHA256" /* SHA256 */: + (0, import_core.setFeature)(context, "FLEXIBLE_CHECKSUMS_REQ_SHA256", "Y"); + break; + } + const checksumLocationName = getChecksumLocationName(checksumAlgorithm); + const checksumAlgorithmFn = selectChecksumAlgorithmFunction(checksumAlgorithm, config); + if (isStreaming(requestBody)) { + const { getAwsChunkedEncodingStream, bodyLengthChecker } = config; + updatedBody = getAwsChunkedEncodingStream( + typeof config.requestStreamBufferSize === "number" && config.requestStreamBufferSize >= 8 * 1024 ? 
(0, import_util_stream.createBufferedReadable)(requestBody, config.requestStreamBufferSize, context.logger) : requestBody, + { + base64Encoder, + bodyLengthChecker, + checksumLocationName, + checksumAlgorithmFn, + streamHasher + } + ); + updatedHeaders = { + ...headers, + "content-encoding": headers["content-encoding"] ? `${headers["content-encoding"]},aws-chunked` : "aws-chunked", + "transfer-encoding": "chunked", + "x-amz-decoded-content-length": headers["content-length"], + "x-amz-content-sha256": "STREAMING-UNSIGNED-PAYLOAD-TRAILER", + "x-amz-trailer": checksumLocationName + }; + delete updatedHeaders["content-length"]; + } else if (!hasHeader(checksumLocationName, headers)) { + const rawChecksum = await stringHasher(checksumAlgorithmFn, requestBody); + updatedHeaders = { + ...headers, + [checksumLocationName]: base64Encoder(rawChecksum) + }; + } + } + const result = await next({ + ...args, + request: { + ...request, + headers: updatedHeaders, + body: updatedBody + } + }); + return result; +}, "flexibleChecksumsMiddleware"); + +// src/flexibleChecksumsInputMiddleware.ts + +var flexibleChecksumsInputMiddlewareOptions = { + name: "flexibleChecksumsInputMiddleware", + toMiddleware: "serializerMiddleware", + relation: "before", + tags: ["BODY_CHECKSUM"], + override: true +}; +var flexibleChecksumsInputMiddleware = /* @__PURE__ */ __name((config, middlewareConfig) => (next, context) => async (args) => { + const input = args.input; + const { requestValidationModeMember } = middlewareConfig; + const requestChecksumCalculation = await config.requestChecksumCalculation(); + const responseChecksumValidation = await config.responseChecksumValidation(); + switch (requestChecksumCalculation) { + case RequestChecksumCalculation.WHEN_REQUIRED: + (0, import_core.setFeature)(context, "FLEXIBLE_CHECKSUMS_REQ_WHEN_REQUIRED", "a"); + break; + case RequestChecksumCalculation.WHEN_SUPPORTED: + (0, import_core.setFeature)(context, "FLEXIBLE_CHECKSUMS_REQ_WHEN_SUPPORTED", "Z"); + break; + } + switch (responseChecksumValidation) { + case ResponseChecksumValidation.WHEN_REQUIRED: + (0, import_core.setFeature)(context, "FLEXIBLE_CHECKSUMS_RES_WHEN_REQUIRED", "c"); + break; + case ResponseChecksumValidation.WHEN_SUPPORTED: + (0, import_core.setFeature)(context, "FLEXIBLE_CHECKSUMS_RES_WHEN_SUPPORTED", "b"); + break; + } + if (requestValidationModeMember && !input[requestValidationModeMember]) { + if (responseChecksumValidation === ResponseChecksumValidation.WHEN_SUPPORTED) { + input[requestValidationModeMember] = "ENABLED"; + } + } + return next(args); +}, "flexibleChecksumsInputMiddleware"); + +// src/flexibleChecksumsResponseMiddleware.ts + + +// src/getChecksumAlgorithmListForResponse.ts +var getChecksumAlgorithmListForResponse = /* @__PURE__ */ __name((responseAlgorithms = []) => { + const validChecksumAlgorithms = []; + for (const algorithm of PRIORITY_ORDER_ALGORITHMS) { + if (!responseAlgorithms.includes(algorithm) || !CLIENT_SUPPORTED_ALGORITHMS.includes(algorithm)) { + continue; + } + validChecksumAlgorithms.push(algorithm); + } + return validChecksumAlgorithms; +}, "getChecksumAlgorithmListForResponse"); + +// src/isChecksumWithPartNumber.ts +var isChecksumWithPartNumber = /* @__PURE__ */ __name((checksum) => { + const lastHyphenIndex = checksum.lastIndexOf("-"); + if (lastHyphenIndex !== -1) { + const numberPart = checksum.slice(lastHyphenIndex + 1); + if (!numberPart.startsWith("0")) { + const number = parseInt(numberPart, 10); + if (!isNaN(number) && number >= 1 && number <= 1e4) { + return true; + } 
+ } + } + return false; +}, "isChecksumWithPartNumber"); + +// src/validateChecksumFromResponse.ts + + +// src/getChecksum.ts +var getChecksum = /* @__PURE__ */ __name(async (body, { checksumAlgorithmFn, base64Encoder }) => base64Encoder(await stringHasher(checksumAlgorithmFn, body)), "getChecksum"); + +// src/validateChecksumFromResponse.ts +var validateChecksumFromResponse = /* @__PURE__ */ __name(async (response, { config, responseAlgorithms, logger }) => { + const checksumAlgorithms = getChecksumAlgorithmListForResponse(responseAlgorithms); + const { body: responseBody, headers: responseHeaders } = response; + for (const algorithm of checksumAlgorithms) { + const responseHeader = getChecksumLocationName(algorithm); + const checksumFromResponse = responseHeaders[responseHeader]; + if (checksumFromResponse) { + let checksumAlgorithmFn; + try { + checksumAlgorithmFn = selectChecksumAlgorithmFunction(algorithm, config); + } catch (error) { + if (algorithm === "CRC64NVME" /* CRC64NVME */) { + logger?.warn(`Skipping ${"CRC64NVME" /* CRC64NVME */} checksum validation: ${error.message}`); + continue; + } + throw error; + } + const { base64Encoder } = config; + if (isStreaming(responseBody)) { + response.body = (0, import_util_stream.createChecksumStream)({ + expectedChecksum: checksumFromResponse, + checksumSourceLocation: responseHeader, + checksum: new checksumAlgorithmFn(), + source: responseBody, + base64Encoder + }); + return; + } + const checksum = await getChecksum(responseBody, { checksumAlgorithmFn, base64Encoder }); + if (checksum === checksumFromResponse) { + break; + } + throw new Error( + `Checksum mismatch: expected "${checksum}" but received "${checksumFromResponse}" in response header "${responseHeader}".` + ); + } + } +}, "validateChecksumFromResponse"); + +// src/flexibleChecksumsResponseMiddleware.ts +var flexibleChecksumsResponseMiddlewareOptions = { + name: "flexibleChecksumsResponseMiddleware", + toMiddleware: "deserializerMiddleware", + relation: "after", + tags: ["BODY_CHECKSUM"], + override: true +}; +var flexibleChecksumsResponseMiddleware = /* @__PURE__ */ __name((config, middlewareConfig) => (next, context) => async (args) => { + if (!import_protocol_http.HttpRequest.isInstance(args.request)) { + return next(args); + } + const input = args.input; + const result = await next(args); + const response = result.response; + const { requestValidationModeMember, responseAlgorithms } = middlewareConfig; + if (requestValidationModeMember && input[requestValidationModeMember] === "ENABLED") { + const { clientName, commandName } = context; + const isS3WholeObjectMultipartGetResponseChecksum = clientName === "S3Client" && commandName === "GetObjectCommand" && getChecksumAlgorithmListForResponse(responseAlgorithms).every((algorithm) => { + const responseHeader = getChecksumLocationName(algorithm); + const checksumFromResponse = response.headers[responseHeader]; + return !checksumFromResponse || isChecksumWithPartNumber(checksumFromResponse); + }); + if (isS3WholeObjectMultipartGetResponseChecksum) { + return result; + } + await validateChecksumFromResponse(response, { + config, + responseAlgorithms, + logger: context.logger + }); + } + return result; +}, "flexibleChecksumsResponseMiddleware"); + +// src/getFlexibleChecksumsPlugin.ts +var getFlexibleChecksumsPlugin = /* @__PURE__ */ __name((config, middlewareConfig) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(flexibleChecksumsMiddleware(config, middlewareConfig), 
flexibleChecksumsMiddlewareOptions); + clientStack.addRelativeTo( + flexibleChecksumsInputMiddleware(config, middlewareConfig), + flexibleChecksumsInputMiddlewareOptions + ); + clientStack.addRelativeTo( + flexibleChecksumsResponseMiddleware(config, middlewareConfig), + flexibleChecksumsResponseMiddlewareOptions + ); + }, "applyToStack") +}), "getFlexibleChecksumsPlugin"); + +// src/resolveFlexibleChecksumsConfig.ts +var import_util_middleware = require("@smithy/util-middleware"); +var resolveFlexibleChecksumsConfig = /* @__PURE__ */ __name((input) => { + const { requestChecksumCalculation, responseChecksumValidation, requestStreamBufferSize } = input; + return Object.assign(input, { + requestChecksumCalculation: (0, import_util_middleware.normalizeProvider)(requestChecksumCalculation ?? DEFAULT_REQUEST_CHECKSUM_CALCULATION), + responseChecksumValidation: (0, import_util_middleware.normalizeProvider)(responseChecksumValidation ?? DEFAULT_RESPONSE_CHECKSUM_VALIDATION), + requestStreamBufferSize: Number(requestStreamBufferSize ?? 0) + }); +}, "resolveFlexibleChecksumsConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + ENV_REQUEST_CHECKSUM_CALCULATION, + CONFIG_REQUEST_CHECKSUM_CALCULATION, + NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS, + ENV_RESPONSE_CHECKSUM_VALIDATION, + CONFIG_RESPONSE_CHECKSUM_VALIDATION, + NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS, + RequestChecksumCalculation, + DEFAULT_REQUEST_CHECKSUM_CALCULATION, + ResponseChecksumValidation, + DEFAULT_RESPONSE_CHECKSUM_VALIDATION, + ChecksumAlgorithm, + ChecksumLocation, + DEFAULT_CHECKSUM_ALGORITHM, + crc64NvmeCrtContainer, + flexibleChecksumsMiddlewareOptions, + flexibleChecksumsMiddleware, + getFlexibleChecksumsPlugin, + resolveFlexibleChecksumsConfig +}); + diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS.js new file mode 100644 index 00000000..b46f9334 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS.js @@ -0,0 +1,9 @@ +import { DEFAULT_REQUEST_CHECKSUM_CALCULATION, RequestChecksumCalculation } from "./constants"; +import { SelectorType, stringUnionSelector } from "./stringUnionSelector"; +export const ENV_REQUEST_CHECKSUM_CALCULATION = "AWS_REQUEST_CHECKSUM_CALCULATION"; +export const CONFIG_REQUEST_CHECKSUM_CALCULATION = "request_checksum_calculation"; +export const NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => stringUnionSelector(env, ENV_REQUEST_CHECKSUM_CALCULATION, RequestChecksumCalculation, SelectorType.ENV), + configFileSelector: (profile) => stringUnionSelector(profile, CONFIG_REQUEST_CHECKSUM_CALCULATION, RequestChecksumCalculation, SelectorType.CONFIG), + default: DEFAULT_REQUEST_CHECKSUM_CALCULATION, +}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS.js new file mode 100644 index 00000000..36619f78 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS.js @@ -0,0 +1,9 @@ +import { DEFAULT_RESPONSE_CHECKSUM_VALIDATION, ResponseChecksumValidation } from "./constants"; +import { 
SelectorType, stringUnionSelector } from "./stringUnionSelector"; +export const ENV_RESPONSE_CHECKSUM_VALIDATION = "AWS_RESPONSE_CHECKSUM_VALIDATION"; +export const CONFIG_RESPONSE_CHECKSUM_VALIDATION = "response_checksum_validation"; +export const NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => stringUnionSelector(env, ENV_RESPONSE_CHECKSUM_VALIDATION, ResponseChecksumValidation, SelectorType.ENV), + configFileSelector: (profile) => stringUnionSelector(profile, CONFIG_RESPONSE_CHECKSUM_VALIDATION, ResponseChecksumValidation, SelectorType.CONFIG), + default: DEFAULT_RESPONSE_CHECKSUM_VALIDATION, +}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/configuration.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/configuration.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/configuration.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/constants.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/constants.js new file mode 100644 index 00000000..a817c534 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/constants.js @@ -0,0 +1,25 @@ +export const RequestChecksumCalculation = { + WHEN_SUPPORTED: "WHEN_SUPPORTED", + WHEN_REQUIRED: "WHEN_REQUIRED", +}; +export const DEFAULT_REQUEST_CHECKSUM_CALCULATION = RequestChecksumCalculation.WHEN_SUPPORTED; +export const ResponseChecksumValidation = { + WHEN_SUPPORTED: "WHEN_SUPPORTED", + WHEN_REQUIRED: "WHEN_REQUIRED", +}; +export const DEFAULT_RESPONSE_CHECKSUM_VALIDATION = RequestChecksumCalculation.WHEN_SUPPORTED; +export var ChecksumAlgorithm; +(function (ChecksumAlgorithm) { + ChecksumAlgorithm["MD5"] = "MD5"; + ChecksumAlgorithm["CRC32"] = "CRC32"; + ChecksumAlgorithm["CRC32C"] = "CRC32C"; + ChecksumAlgorithm["CRC64NVME"] = "CRC64NVME"; + ChecksumAlgorithm["SHA1"] = "SHA1"; + ChecksumAlgorithm["SHA256"] = "SHA256"; +})(ChecksumAlgorithm || (ChecksumAlgorithm = {})); +export var ChecksumLocation; +(function (ChecksumLocation) { + ChecksumLocation["HEADER"] = "header"; + ChecksumLocation["TRAILER"] = "trailer"; +})(ChecksumLocation || (ChecksumLocation = {})); +export const DEFAULT_CHECKSUM_ALGORITHM = ChecksumAlgorithm.CRC32; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/crc64-nvme-crt-container.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/crc64-nvme-crt-container.js new file mode 100644 index 00000000..6cc799b1 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/crc64-nvme-crt-container.js @@ -0,0 +1,3 @@ +export const crc64NvmeCrtContainer = { + CrtCrc64Nvme: null, +}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/flexibleChecksumsInputMiddleware.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/flexibleChecksumsInputMiddleware.js new file mode 100644 index 00000000..93e12d46 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/flexibleChecksumsInputMiddleware.js @@ -0,0 +1,37 @@ +import { setFeature } from "@aws-sdk/core"; +import { RequestChecksumCalculation, ResponseChecksumValidation } from "./constants"; +export const flexibleChecksumsInputMiddlewareOptions = { + name: "flexibleChecksumsInputMiddleware", + toMiddleware: "serializerMiddleware", + relation: "before", + tags: ["BODY_CHECKSUM"], + override: true, +}; +export const 
flexibleChecksumsInputMiddleware = (config, middlewareConfig) => (next, context) => async (args) => { + const input = args.input; + const { requestValidationModeMember } = middlewareConfig; + const requestChecksumCalculation = await config.requestChecksumCalculation(); + const responseChecksumValidation = await config.responseChecksumValidation(); + switch (requestChecksumCalculation) { + case RequestChecksumCalculation.WHEN_REQUIRED: + setFeature(context, "FLEXIBLE_CHECKSUMS_REQ_WHEN_REQUIRED", "a"); + break; + case RequestChecksumCalculation.WHEN_SUPPORTED: + setFeature(context, "FLEXIBLE_CHECKSUMS_REQ_WHEN_SUPPORTED", "Z"); + break; + } + switch (responseChecksumValidation) { + case ResponseChecksumValidation.WHEN_REQUIRED: + setFeature(context, "FLEXIBLE_CHECKSUMS_RES_WHEN_REQUIRED", "c"); + break; + case ResponseChecksumValidation.WHEN_SUPPORTED: + setFeature(context, "FLEXIBLE_CHECKSUMS_RES_WHEN_SUPPORTED", "b"); + break; + } + if (requestValidationModeMember && !input[requestValidationModeMember]) { + if (responseChecksumValidation === ResponseChecksumValidation.WHEN_SUPPORTED) { + input[requestValidationModeMember] = "ENABLED"; + } + } + return next(args); +}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/flexibleChecksumsMiddleware.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/flexibleChecksumsMiddleware.js new file mode 100644 index 00000000..83e0fd7b --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/flexibleChecksumsMiddleware.js @@ -0,0 +1,107 @@ +import { setFeature } from "@aws-sdk/core"; +import { HttpRequest } from "@smithy/protocol-http"; +import { createBufferedReadable } from "@smithy/util-stream"; +import { ChecksumAlgorithm, DEFAULT_CHECKSUM_ALGORITHM, RequestChecksumCalculation } from "./constants"; +import { getChecksumAlgorithmForRequest } from "./getChecksumAlgorithmForRequest"; +import { getChecksumLocationName } from "./getChecksumLocationName"; +import { hasHeader } from "./hasHeader"; +import { hasHeaderWithPrefix } from "./hasHeaderWithPrefix"; +import { isStreaming } from "./isStreaming"; +import { selectChecksumAlgorithmFunction } from "./selectChecksumAlgorithmFunction"; +import { stringHasher } from "./stringHasher"; +export const flexibleChecksumsMiddlewareOptions = { + name: "flexibleChecksumsMiddleware", + step: "build", + tags: ["BODY_CHECKSUM"], + override: true, +}; +export const flexibleChecksumsMiddleware = (config, middlewareConfig) => (next, context) => async (args) => { + if (!HttpRequest.isInstance(args.request)) { + return next(args); + } + if (hasHeaderWithPrefix("x-amz-checksum-", args.request.headers)) { + return next(args); + } + const { request, input } = args; + const { body: requestBody, headers } = request; + const { base64Encoder, streamHasher } = config; + const { requestChecksumRequired, requestAlgorithmMember } = middlewareConfig; + const requestChecksumCalculation = await config.requestChecksumCalculation(); + const requestAlgorithmMemberName = requestAlgorithmMember?.name; + const requestAlgorithmMemberHttpHeader = requestAlgorithmMember?.httpHeader; + if (requestAlgorithmMemberName && !input[requestAlgorithmMemberName]) { + if (requestChecksumCalculation === RequestChecksumCalculation.WHEN_SUPPORTED || requestChecksumRequired) { + input[requestAlgorithmMemberName] = DEFAULT_CHECKSUM_ALGORITHM; + if (requestAlgorithmMemberHttpHeader) { + headers[requestAlgorithmMemberHttpHeader] = DEFAULT_CHECKSUM_ALGORITHM; + } + } + } + const checksumAlgorithm = 
getChecksumAlgorithmForRequest(input, { + requestChecksumRequired, + requestAlgorithmMember: requestAlgorithmMember?.name, + requestChecksumCalculation, + }); + let updatedBody = requestBody; + let updatedHeaders = headers; + if (checksumAlgorithm) { + switch (checksumAlgorithm) { + case ChecksumAlgorithm.CRC32: + setFeature(context, "FLEXIBLE_CHECKSUMS_REQ_CRC32", "U"); + break; + case ChecksumAlgorithm.CRC32C: + setFeature(context, "FLEXIBLE_CHECKSUMS_REQ_CRC32C", "V"); + break; + case ChecksumAlgorithm.CRC64NVME: + setFeature(context, "FLEXIBLE_CHECKSUMS_REQ_CRC64", "W"); + break; + case ChecksumAlgorithm.SHA1: + setFeature(context, "FLEXIBLE_CHECKSUMS_REQ_SHA1", "X"); + break; + case ChecksumAlgorithm.SHA256: + setFeature(context, "FLEXIBLE_CHECKSUMS_REQ_SHA256", "Y"); + break; + } + const checksumLocationName = getChecksumLocationName(checksumAlgorithm); + const checksumAlgorithmFn = selectChecksumAlgorithmFunction(checksumAlgorithm, config); + if (isStreaming(requestBody)) { + const { getAwsChunkedEncodingStream, bodyLengthChecker } = config; + updatedBody = getAwsChunkedEncodingStream(typeof config.requestStreamBufferSize === "number" && config.requestStreamBufferSize >= 8 * 1024 + ? createBufferedReadable(requestBody, config.requestStreamBufferSize, context.logger) + : requestBody, { + base64Encoder, + bodyLengthChecker, + checksumLocationName, + checksumAlgorithmFn, + streamHasher, + }); + updatedHeaders = { + ...headers, + "content-encoding": headers["content-encoding"] + ? `${headers["content-encoding"]},aws-chunked` + : "aws-chunked", + "transfer-encoding": "chunked", + "x-amz-decoded-content-length": headers["content-length"], + "x-amz-content-sha256": "STREAMING-UNSIGNED-PAYLOAD-TRAILER", + "x-amz-trailer": checksumLocationName, + }; + delete updatedHeaders["content-length"]; + } + else if (!hasHeader(checksumLocationName, headers)) { + const rawChecksum = await stringHasher(checksumAlgorithmFn, requestBody); + updatedHeaders = { + ...headers, + [checksumLocationName]: base64Encoder(rawChecksum), + }; + } + } + const result = await next({ + ...args, + request: { + ...request, + headers: updatedHeaders, + body: updatedBody, + }, + }); + return result; +}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/flexibleChecksumsResponseMiddleware.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/flexibleChecksumsResponseMiddleware.js new file mode 100644 index 00000000..44101f19 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/flexibleChecksumsResponseMiddleware.js @@ -0,0 +1,40 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { getChecksumAlgorithmListForResponse } from "./getChecksumAlgorithmListForResponse"; +import { getChecksumLocationName } from "./getChecksumLocationName"; +import { isChecksumWithPartNumber } from "./isChecksumWithPartNumber"; +import { validateChecksumFromResponse } from "./validateChecksumFromResponse"; +export const flexibleChecksumsResponseMiddlewareOptions = { + name: "flexibleChecksumsResponseMiddleware", + toMiddleware: "deserializerMiddleware", + relation: "after", + tags: ["BODY_CHECKSUM"], + override: true, +}; +export const flexibleChecksumsResponseMiddleware = (config, middlewareConfig) => (next, context) => async (args) => { + if (!HttpRequest.isInstance(args.request)) { + return next(args); + } + const input = args.input; + const result = await next(args); + const response = result.response; + const { requestValidationModeMember, responseAlgorithms } = 
middlewareConfig; + if (requestValidationModeMember && input[requestValidationModeMember] === "ENABLED") { + const { clientName, commandName } = context; + const isS3WholeObjectMultipartGetResponseChecksum = clientName === "S3Client" && + commandName === "GetObjectCommand" && + getChecksumAlgorithmListForResponse(responseAlgorithms).every((algorithm) => { + const responseHeader = getChecksumLocationName(algorithm); + const checksumFromResponse = response.headers[responseHeader]; + return !checksumFromResponse || isChecksumWithPartNumber(checksumFromResponse); + }); + if (isS3WholeObjectMultipartGetResponseChecksum) { + return result; + } + await validateChecksumFromResponse(response, { + config, + responseAlgorithms, + logger: context.logger, + }); + } + return result; +}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getChecksum.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getChecksum.js new file mode 100644 index 00000000..886d669b --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getChecksum.js @@ -0,0 +1,2 @@ +import { stringHasher } from "./stringHasher"; +export const getChecksum = async (body, { checksumAlgorithmFn, base64Encoder }) => base64Encoder(await stringHasher(checksumAlgorithmFn, body)); diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getChecksumAlgorithmForRequest.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getChecksumAlgorithmForRequest.js new file mode 100644 index 00000000..d1ba813b --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getChecksumAlgorithmForRequest.js @@ -0,0 +1,18 @@ +import { DEFAULT_CHECKSUM_ALGORITHM, RequestChecksumCalculation } from "./constants"; +import { CLIENT_SUPPORTED_ALGORITHMS } from "./types"; +export const getChecksumAlgorithmForRequest = (input, { requestChecksumRequired, requestAlgorithmMember, requestChecksumCalculation }) => { + if (!requestAlgorithmMember) { + return requestChecksumCalculation === RequestChecksumCalculation.WHEN_SUPPORTED || requestChecksumRequired + ? 
DEFAULT_CHECKSUM_ALGORITHM + : undefined; + } + if (!input[requestAlgorithmMember]) { + return undefined; + } + const checksumAlgorithm = input[requestAlgorithmMember]; + if (!CLIENT_SUPPORTED_ALGORITHMS.includes(checksumAlgorithm)) { + throw new Error(`The checksum algorithm "${checksumAlgorithm}" is not supported by the client.` + + ` Select one of ${CLIENT_SUPPORTED_ALGORITHMS}.`); + } + return checksumAlgorithm; +}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getChecksumAlgorithmListForResponse.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getChecksumAlgorithmListForResponse.js new file mode 100644 index 00000000..4dfb6d72 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getChecksumAlgorithmListForResponse.js @@ -0,0 +1,11 @@ +import { CLIENT_SUPPORTED_ALGORITHMS, PRIORITY_ORDER_ALGORITHMS } from "./types"; +export const getChecksumAlgorithmListForResponse = (responseAlgorithms = []) => { + const validChecksumAlgorithms = []; + for (const algorithm of PRIORITY_ORDER_ALGORITHMS) { + if (!responseAlgorithms.includes(algorithm) || !CLIENT_SUPPORTED_ALGORITHMS.includes(algorithm)) { + continue; + } + validChecksumAlgorithms.push(algorithm); + } + return validChecksumAlgorithms; +}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getChecksumLocationName.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getChecksumLocationName.js new file mode 100644 index 00000000..0e2d21e9 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getChecksumLocationName.js @@ -0,0 +1,2 @@ +import { ChecksumAlgorithm } from "./constants"; +export const getChecksumLocationName = (algorithm) => algorithm === ChecksumAlgorithm.MD5 ? 
"content-md5" : `x-amz-checksum-${algorithm.toLowerCase()}`; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getCrc32ChecksumAlgorithmFunction.browser.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getCrc32ChecksumAlgorithmFunction.browser.js new file mode 100644 index 00000000..904c4e05 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getCrc32ChecksumAlgorithmFunction.browser.js @@ -0,0 +1,2 @@ +import { AwsCrc32 } from "@aws-crypto/crc32"; +export const getCrc32ChecksumAlgorithmFunction = () => AwsCrc32; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getCrc32ChecksumAlgorithmFunction.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getCrc32ChecksumAlgorithmFunction.js new file mode 100644 index 00000000..1a7f370c --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getCrc32ChecksumAlgorithmFunction.js @@ -0,0 +1,21 @@ +import { AwsCrc32 } from "@aws-crypto/crc32"; +import { numToUint8 } from "@aws-crypto/util"; +import * as zlib from "zlib"; +class NodeCrc32 { + checksum = 0; + update(data) { + this.checksum = zlib.crc32(data, this.checksum); + } + async digest() { + return numToUint8(this.checksum); + } + reset() { + this.checksum = 0; + } +} +export const getCrc32ChecksumAlgorithmFunction = () => { + if (typeof zlib.crc32 === "undefined") { + return AwsCrc32; + } + return NodeCrc32; +}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getFlexibleChecksumsPlugin.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getFlexibleChecksumsPlugin.js new file mode 100644 index 00000000..97dc1d6f --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/getFlexibleChecksumsPlugin.js @@ -0,0 +1,10 @@ +import { flexibleChecksumsInputMiddleware, flexibleChecksumsInputMiddlewareOptions, } from "./flexibleChecksumsInputMiddleware"; +import { flexibleChecksumsMiddleware, flexibleChecksumsMiddlewareOptions, } from "./flexibleChecksumsMiddleware"; +import { flexibleChecksumsResponseMiddleware, flexibleChecksumsResponseMiddlewareOptions, } from "./flexibleChecksumsResponseMiddleware"; +export const getFlexibleChecksumsPlugin = (config, middlewareConfig) => ({ + applyToStack: (clientStack) => { + clientStack.add(flexibleChecksumsMiddleware(config, middlewareConfig), flexibleChecksumsMiddlewareOptions); + clientStack.addRelativeTo(flexibleChecksumsInputMiddleware(config, middlewareConfig), flexibleChecksumsInputMiddlewareOptions); + clientStack.addRelativeTo(flexibleChecksumsResponseMiddleware(config, middlewareConfig), flexibleChecksumsResponseMiddlewareOptions); + }, +}); diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/hasHeader.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/hasHeader.js new file mode 100644 index 00000000..8455075e --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/hasHeader.js @@ -0,0 +1,9 @@ +export const hasHeader = (header, headers) => { + const soughtHeader = header.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return true; + } + } + return false; +}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/hasHeaderWithPrefix.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/hasHeaderWithPrefix.js new file mode 100644 index 00000000..cf9bdfec --- /dev/null +++ 
b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/hasHeaderWithPrefix.js @@ -0,0 +1,9 @@ +export const hasHeaderWithPrefix = (headerPrefix, headers) => { + const soughtHeaderPrefix = headerPrefix.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (headerName.toLowerCase().startsWith(soughtHeaderPrefix)) { + return true; + } + } + return false; +}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/index.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/index.js new file mode 100644 index 00000000..32223508 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/index.js @@ -0,0 +1,7 @@ +export * from "./NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS"; +export * from "./NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS"; +export * from "./constants"; +export * from "./crc64-nvme-crt-container"; +export * from "./flexibleChecksumsMiddleware"; +export * from "./getFlexibleChecksumsPlugin"; +export * from "./resolveFlexibleChecksumsConfig"; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/isChecksumWithPartNumber.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/isChecksumWithPartNumber.js new file mode 100644 index 00000000..aa1d840c --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/isChecksumWithPartNumber.js @@ -0,0 +1,13 @@ +export const isChecksumWithPartNumber = (checksum) => { + const lastHyphenIndex = checksum.lastIndexOf("-"); + if (lastHyphenIndex !== -1) { + const numberPart = checksum.slice(lastHyphenIndex + 1); + if (!numberPart.startsWith("0")) { + const number = parseInt(numberPart, 10); + if (!isNaN(number) && number >= 1 && number <= 10000) { + return true; + } + } + } + return false; +}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/isStreaming.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/isStreaming.js new file mode 100644 index 00000000..e9fcd7e4 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/isStreaming.js @@ -0,0 +1,2 @@ +import { isArrayBuffer } from "@smithy/is-array-buffer"; +export const isStreaming = (body) => body !== undefined && typeof body !== "string" && !ArrayBuffer.isView(body) && !isArrayBuffer(body); diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/resolveFlexibleChecksumsConfig.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/resolveFlexibleChecksumsConfig.js new file mode 100644 index 00000000..5e21271a --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/resolveFlexibleChecksumsConfig.js @@ -0,0 +1,10 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +import { DEFAULT_REQUEST_CHECKSUM_CALCULATION, DEFAULT_RESPONSE_CHECKSUM_VALIDATION, } from "./constants"; +export const resolveFlexibleChecksumsConfig = (input) => { + const { requestChecksumCalculation, responseChecksumValidation, requestStreamBufferSize } = input; + return Object.assign(input, { + requestChecksumCalculation: normalizeProvider(requestChecksumCalculation ?? DEFAULT_REQUEST_CHECKSUM_CALCULATION), + responseChecksumValidation: normalizeProvider(responseChecksumValidation ?? DEFAULT_RESPONSE_CHECKSUM_VALIDATION), + requestStreamBufferSize: Number(requestStreamBufferSize ?? 
0), + }); +}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/selectChecksumAlgorithmFunction.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/selectChecksumAlgorithmFunction.js new file mode 100644 index 00000000..a2bc0102 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/selectChecksumAlgorithmFunction.js @@ -0,0 +1,29 @@ +import { AwsCrc32c } from "@aws-crypto/crc32c"; +import { ChecksumAlgorithm } from "./constants"; +import { crc64NvmeCrtContainer } from "./crc64-nvme-crt-container"; +import { getCrc32ChecksumAlgorithmFunction } from "./getCrc32ChecksumAlgorithmFunction"; +export const selectChecksumAlgorithmFunction = (checksumAlgorithm, config) => { + switch (checksumAlgorithm) { + case ChecksumAlgorithm.MD5: + return config.md5; + case ChecksumAlgorithm.CRC32: + return getCrc32ChecksumAlgorithmFunction(); + case ChecksumAlgorithm.CRC32C: + return AwsCrc32c; + case ChecksumAlgorithm.CRC64NVME: + if (typeof crc64NvmeCrtContainer.CrtCrc64Nvme !== "function") { + throw new Error(`Please check whether you have installed the "@aws-sdk/crc64-nvme-crt" package explicitly. \n` + + `You must also register the package by calling [require("@aws-sdk/crc64-nvme-crt");] ` + + `or an ESM equivalent such as [import "@aws-sdk/crc64-nvme-crt";]. \n` + + "For more information please go to " + + "https://github.com/aws/aws-sdk-js-v3#functionality-requiring-aws-common-runtime-crt"); + } + return crc64NvmeCrtContainer.CrtCrc64Nvme; + case ChecksumAlgorithm.SHA1: + return config.sha1; + case ChecksumAlgorithm.SHA256: + return config.sha256; + default: + throw new Error(`Unsupported checksum algorithm: ${checksumAlgorithm}`); + } +}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/stringHasher.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/stringHasher.js new file mode 100644 index 00000000..642325da --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/stringHasher.js @@ -0,0 +1,6 @@ +import { toUint8Array } from "@smithy/util-utf8"; +export const stringHasher = (checksumAlgorithmFn, body) => { + const hash = new checksumAlgorithmFn(); + hash.update(toUint8Array(body || "")); + return hash.digest(); +}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/stringUnionSelector.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/stringUnionSelector.js new file mode 100644 index 00000000..5f5bfc8d --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/stringUnionSelector.js @@ -0,0 +1,14 @@ +export var SelectorType; +(function (SelectorType) { + SelectorType["ENV"] = "env"; + SelectorType["CONFIG"] = "shared config entry"; +})(SelectorType || (SelectorType = {})); +export const stringUnionSelector = (obj, key, union, type) => { + if (!(key in obj)) + return undefined; + const value = obj[key].toUpperCase(); + if (!Object.values(union).includes(value)) { + throw new TypeError(`Cannot load ${type} '${key}'. 
Expected one of ${Object.values(union)}, got '${obj[key]}'.`); + } + return value; +}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/types.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/types.js new file mode 100644 index 00000000..751d4c5b --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/types.js @@ -0,0 +1,15 @@ +import { ChecksumAlgorithm } from "./constants"; +export const CLIENT_SUPPORTED_ALGORITHMS = [ + ChecksumAlgorithm.CRC32, + ChecksumAlgorithm.CRC32C, + ChecksumAlgorithm.CRC64NVME, + ChecksumAlgorithm.SHA1, + ChecksumAlgorithm.SHA256, +]; +export const PRIORITY_ORDER_ALGORITHMS = [ + ChecksumAlgorithm.SHA256, + ChecksumAlgorithm.SHA1, + ChecksumAlgorithm.CRC32, + ChecksumAlgorithm.CRC32C, + ChecksumAlgorithm.CRC64NVME, +]; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/validateChecksumFromResponse.js b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/validateChecksumFromResponse.js new file mode 100644 index 00000000..1ce8d748 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-es/validateChecksumFromResponse.js @@ -0,0 +1,45 @@ +import { createChecksumStream } from "@smithy/util-stream"; +import { ChecksumAlgorithm } from "./constants"; +import { getChecksum } from "./getChecksum"; +import { getChecksumAlgorithmListForResponse } from "./getChecksumAlgorithmListForResponse"; +import { getChecksumLocationName } from "./getChecksumLocationName"; +import { isStreaming } from "./isStreaming"; +import { selectChecksumAlgorithmFunction } from "./selectChecksumAlgorithmFunction"; +export const validateChecksumFromResponse = async (response, { config, responseAlgorithms, logger }) => { + const checksumAlgorithms = getChecksumAlgorithmListForResponse(responseAlgorithms); + const { body: responseBody, headers: responseHeaders } = response; + for (const algorithm of checksumAlgorithms) { + const responseHeader = getChecksumLocationName(algorithm); + const checksumFromResponse = responseHeaders[responseHeader]; + if (checksumFromResponse) { + let checksumAlgorithmFn; + try { + checksumAlgorithmFn = selectChecksumAlgorithmFunction(algorithm, config); + } + catch (error) { + if (algorithm === ChecksumAlgorithm.CRC64NVME) { + logger?.warn(`Skipping ${ChecksumAlgorithm.CRC64NVME} checksum validation: ${error.message}`); + continue; + } + throw error; + } + const { base64Encoder } = config; + if (isStreaming(responseBody)) { + response.body = createChecksumStream({ + expectedChecksum: checksumFromResponse, + checksumSourceLocation: responseHeader, + checksum: new checksumAlgorithmFn(), + source: responseBody, + base64Encoder, + }); + return; + } + const checksum = await getChecksum(responseBody, { checksumAlgorithmFn, base64Encoder }); + if (checksum === checksumFromResponse) { + break; + } + throw new Error(`Checksum mismatch: expected "${checksum}" but received "${checksumFromResponse}"` + + ` in response header "${responseHeader}".`); + } + } +}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS.d.ts new file mode 100644 index 00000000..a1ec3496 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS.d.ts @@ -0,0 +1,14 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; 
+import { RequestChecksumCalculation } from "./constants"; +/** + * @internal + */ +export declare const ENV_REQUEST_CHECKSUM_CALCULATION = "AWS_REQUEST_CHECKSUM_CALCULATION"; +/** + * @internal + */ +export declare const CONFIG_REQUEST_CHECKSUM_CALCULATION = "request_checksum_calculation"; +/** + * @internal + */ +export declare const NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS.d.ts new file mode 100644 index 00000000..62d6f3ec --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS.d.ts @@ -0,0 +1,14 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { ResponseChecksumValidation } from "./constants"; +/** + * @internal + */ +export declare const ENV_RESPONSE_CHECKSUM_VALIDATION = "AWS_RESPONSE_CHECKSUM_VALIDATION"; +/** + * @internal + */ +export declare const CONFIG_RESPONSE_CHECKSUM_VALIDATION = "response_checksum_validation"; +/** + * @internal + */ +export declare const NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/configuration.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/configuration.d.ts new file mode 100644 index 00000000..eea5b5a1 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/configuration.d.ts @@ -0,0 +1,56 @@ +import { BodyLengthCalculator, ChecksumConstructor, Encoder, GetAwsChunkedEncodingStream, HashConstructor, Provider, StreamCollector, StreamHasher } from "@smithy/types"; +import { RequestChecksumCalculation, ResponseChecksumValidation } from "./constants"; +/** + * @internal + */ +export interface PreviouslyResolved { + /** + * The function that will be used to convert binary data to a base64-encoded string. + * @internal + */ + base64Encoder: Encoder; + /** + * A function that can calculate the length of a body. + */ + bodyLengthChecker: BodyLengthCalculator; + /** + * A function that returns Readable Stream which follows aws-chunked encoding stream. + */ + getAwsChunkedEncodingStream: GetAwsChunkedEncodingStream; + /** + * A constructor for a class implementing the {@link Hash} interface that computes MD5 hashes. + * @internal + */ + md5: ChecksumConstructor | HashConstructor; + /** + * Determines when a checksum will be calculated for request payloads + */ + requestChecksumCalculation: Provider; + /** + * Determines when a checksum will be calculated for response payloads + */ + responseChecksumValidation: Provider; + /** + * A constructor for a class implementing the {@link Hash} interface that computes SHA1 hashes. + * @internal + */ + sha1: ChecksumConstructor | HashConstructor; + /** + * A constructor for a class implementing the {@link Hash} interface that computes SHA256 hashes. + * @internal + */ + sha256: ChecksumConstructor | HashConstructor; + /** + * A function that, given a hash constructor and a stream, calculates the hash of the streamed value. + * @internal + */ + streamHasher: StreamHasher; + /** + * Collects streams into buffers. + */ + streamCollector: StreamCollector; + /** + * Minimum bytes from a stream to buffer into a chunk before passing to chunked encoding. 
+ */ + requestStreamBufferSize: number; +} diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/constants.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/constants.d.ts new file mode 100644 index 00000000..5da3f005 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/constants.d.ts @@ -0,0 +1,83 @@ +/** + * Determines when a checksum will be calculated for request payloads. + * @public + */ +export declare const RequestChecksumCalculation: { + /** + * When set, a checksum will be calculated for all request payloads of operations + * modeled with the {@link httpChecksum} trait where `requestChecksumRequired` is `true` + * AND/OR a `requestAlgorithmMember` is modeled. + * {@link https://smithy.io/2.0/aws/aws-core.html#aws-protocols-httpchecksum-trait httpChecksum} + */ + readonly WHEN_SUPPORTED: "WHEN_SUPPORTED"; + /** + * When set, a checksum will only be calculated for request payloads of operations + * modeled with the {@link httpChecksum} trait where `requestChecksumRequired` is `true` + * OR where a `requestAlgorithmMember` is modeled and the user sets it. + * {@link https://smithy.io/2.0/aws/aws-core.html#aws-protocols-httpchecksum-trait httpChecksum} + */ + readonly WHEN_REQUIRED: "WHEN_REQUIRED"; +}; +/** + * @public + */ +export type RequestChecksumCalculation = (typeof RequestChecksumCalculation)[keyof typeof RequestChecksumCalculation]; +/** + * @internal + */ +export declare const DEFAULT_REQUEST_CHECKSUM_CALCULATION: "WHEN_SUPPORTED"; +/** + * Determines when checksum validation will be performed on response payloads. + * @public + */ +export declare const ResponseChecksumValidation: { + /** + * When set, checksum validation MUST be performed on all response payloads of operations + * modeled with the {@link httpChecksum} trait where `responseAlgorithms` is modeled, + * except when no modeled checksum algorithms are supported by an SDK. + * {@link https://smithy.io/2.0/aws/aws-core.html#aws-protocols-httpchecksum-trait httpChecksum} + */ + readonly WHEN_SUPPORTED: "WHEN_SUPPORTED"; + /** + * When set, checksum validation MUST NOT be performed on response payloads of operations UNLESS + * the SDK supports the modeled checksum algorithms AND the user has set the `requestValidationModeMember` to `ENABLED`. + * It is currently impossible to model an operation as requiring a response checksum, + * but this setting leaves the door open for future updates. + */ + readonly WHEN_REQUIRED: "WHEN_REQUIRED"; +}; +/** + * @public + */ +export type ResponseChecksumValidation = (typeof ResponseChecksumValidation)[keyof typeof ResponseChecksumValidation]; +/** + * @internal + */ +export declare const DEFAULT_RESPONSE_CHECKSUM_VALIDATION: "WHEN_SUPPORTED"; +/** + * Checksum Algorithms supported by the SDK. + * @public + */ +export declare enum ChecksumAlgorithm { + /** + * @deprecated Use {@link ChecksumAlgorithm.CRC32} instead. + */ + MD5 = "MD5", + CRC32 = "CRC32", + CRC32C = "CRC32C", + CRC64NVME = "CRC64NVME", + SHA1 = "SHA1", + SHA256 = "SHA256" +} +/** + * Location when the checksum is stored in the request body. 
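The WHEN_SUPPORTED / WHEN_REQUIRED unions declared above are the values an application hands to its S3 client. A minimal sketch of overriding both defaults, assuming @aws-sdk/client-s3 at a matching 3.7xx release and hypothetical MINIO_* environment variables for the MinIO backend this migration targets:

import { S3Client } from "@aws-sdk/client-s3";

// Sketch only: the endpoint/credential variable names are assumptions, not taken
// from this diff. WHEN_SUPPORTED is the SDK default for both settings (see
// DEFAULT_REQUEST_CHECKSUM_CALCULATION / DEFAULT_RESPONSE_CHECKSUM_VALIDATION
// above); WHEN_REQUIRED restricts checksum work to operations that demand it.
const s3 = new S3Client({
  endpoint: process.env.MINIO_ENDPOINT!,
  region: "us-east-1",
  forcePathStyle: true, // MinIO is typically addressed path-style
  credentials: {
    accessKeyId: process.env.MINIO_ACCESS_KEY!,
    secretAccessKey: process.env.MINIO_SECRET_KEY!,
  },
  requestChecksumCalculation: "WHEN_REQUIRED",
  responseChecksumValidation: "WHEN_REQUIRED",
});
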
+ * @internal + */ +export declare enum ChecksumLocation { + HEADER = "header", + TRAILER = "trailer" +} +/** + * @internal + */ +export declare const DEFAULT_CHECKSUM_ALGORITHM = ChecksumAlgorithm.CRC32; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/crc64-nvme-crt-container.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/crc64-nvme-crt-container.d.ts new file mode 100644 index 00000000..ccce2e89 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/crc64-nvme-crt-container.d.ts @@ -0,0 +1,13 @@ +import { ChecksumConstructor } from "@smithy/types"; +/** + * @internal + * + * \@aws-sdk/crc64-nvme-crt will install the constructor in this + * container if it is installed. + * + * This avoids a runtime-require being interpreted statically by bundlers. + * + */ +export declare const crc64NvmeCrtContainer: { + CrtCrc64Nvme: null | ChecksumConstructor; +}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/flexibleChecksumsInputMiddleware.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/flexibleChecksumsInputMiddleware.d.ts new file mode 100644 index 00000000..9f8fd0eb --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/flexibleChecksumsInputMiddleware.d.ts @@ -0,0 +1,22 @@ +import { RelativeMiddlewareOptions, SerializeMiddleware } from "@smithy/types"; +import { PreviouslyResolved } from "./configuration"; +/** + * @internal + */ +export interface FlexibleChecksumsInputMiddlewareConfig { + /** + * Defines a top-level operation input member used to opt-in to best-effort validation + * of a checksum returned in the HTTP response of the operation. + */ + requestValidationModeMember?: string; +} +/** + * @internal + */ +export declare const flexibleChecksumsInputMiddlewareOptions: RelativeMiddlewareOptions; +/** + * @internal + * + * The input counterpart to the flexibleChecksumsMiddleware. + */ +export declare const flexibleChecksumsInputMiddleware: (config: PreviouslyResolved, middlewareConfig: FlexibleChecksumsInputMiddlewareConfig) => SerializeMiddleware<any, any>; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/flexibleChecksumsMiddleware.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/flexibleChecksumsMiddleware.d.ts new file mode 100644 index 00000000..3f533969 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/flexibleChecksumsMiddleware.d.ts @@ -0,0 +1,33 @@ +import { BuildHandlerOptions, BuildMiddleware } from "@smithy/types"; +import { PreviouslyResolved } from "./configuration"; +/** + * @internal + */ +export interface FlexibleChecksumsRequestMiddlewareConfig { + /** + * Indicates an operation requires a checksum in its HTTP request. + */ + requestChecksumRequired: boolean; + /** + * Member that is used to configure request checksum behavior. + */ + requestAlgorithmMember?: { + /** + * Defines a top-level operation input member that is used to configure request checksum behavior. + */ + name: string; + /** + * The {@link httpHeader} value, if present.
+ * {@link https://smithy.io/2.0/spec/http-bindings.html#httpheader-trait httpHeader} + */ + httpHeader?: string; + }; +} +/** + * @internal + */ +export declare const flexibleChecksumsMiddlewareOptions: BuildHandlerOptions; +/** + * @internal + */ +export declare const flexibleChecksumsMiddleware: (config: PreviouslyResolved, middlewareConfig: FlexibleChecksumsRequestMiddlewareConfig) => BuildMiddleware<any, Object>; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/flexibleChecksumsResponseMiddleware.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/flexibleChecksumsResponseMiddleware.d.ts new file mode 100644 index 00000000..b4fac9a0 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/flexibleChecksumsResponseMiddleware.d.ts @@ -0,0 +1,27 @@ +import { DeserializeMiddleware, RelativeMiddlewareOptions } from "@smithy/types"; +import { PreviouslyResolved } from "./configuration"; +/** + * @internal + */ +export interface FlexibleChecksumsResponseMiddlewareConfig { + /** + * Defines a top-level operation input member used to opt-in to best-effort validation + * of a checksum returned in the HTTP response of the operation. + */ + requestValidationModeMember?: string; + /** + * Defines the checksum algorithms clients SHOULD look for when validating checksums + * returned in the HTTP response. + */ + responseAlgorithms?: string[]; +} +/** + * @internal + */ +export declare const flexibleChecksumsResponseMiddlewareOptions: RelativeMiddlewareOptions; +/** + * @internal + * + * The validation counterpart to the flexibleChecksumsMiddleware. + */ +export declare const flexibleChecksumsResponseMiddleware: (config: PreviouslyResolved, middlewareConfig: FlexibleChecksumsResponseMiddlewareConfig) => DeserializeMiddleware<any, any>; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getChecksum.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getChecksum.d.ts new file mode 100644 index 00000000..1fd70447 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getChecksum.d.ts @@ -0,0 +1,6 @@ +import { ChecksumConstructor, Encoder, HashConstructor } from "@smithy/types"; +export interface GetChecksumDigestOptions { + checksumAlgorithmFn: ChecksumConstructor | HashConstructor; + base64Encoder: Encoder; +} +export declare const getChecksum: (body: unknown, { checksumAlgorithmFn, base64Encoder }: GetChecksumDigestOptions) => Promise<string>; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getChecksumAlgorithmForRequest.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getChecksumAlgorithmForRequest.d.ts new file mode 100644 index 00000000..3d41d09c --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getChecksumAlgorithmForRequest.d.ts @@ -0,0 +1,21 @@ +import { ChecksumAlgorithm, RequestChecksumCalculation } from "./constants"; +export interface GetChecksumAlgorithmForRequestOptions { + /** + * Indicates an operation requires a checksum in its HTTP request. + */ + requestChecksumRequired: boolean; + /** + * Defines a top-level operation input member that is used to configure request checksum behavior.
+ */ + requestAlgorithmMember?: string; + /** + * Determines when a checksum will be calculated for request payloads + */ + requestChecksumCalculation: RequestChecksumCalculation; +} +/** + * Returns the checksum algorithm to use for the request, along with + * the priority array of location to use to populate checksum and names + * to be used as a key at the location. + */ +export declare const getChecksumAlgorithmForRequest: (input: any, { requestChecksumRequired, requestAlgorithmMember, requestChecksumCalculation }: GetChecksumAlgorithmForRequestOptions) => ChecksumAlgorithm | undefined; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getChecksumAlgorithmListForResponse.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getChecksumAlgorithmListForResponse.d.ts new file mode 100644 index 00000000..32a31bc2 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getChecksumAlgorithmListForResponse.d.ts @@ -0,0 +1,6 @@ +import { ChecksumAlgorithm } from "./constants"; +/** + * Returns the priority array of algorithm to use to verify checksum and names + * to be used as a key in the response header. + */ +export declare const getChecksumAlgorithmListForResponse: (responseAlgorithms?: string[]) => ChecksumAlgorithm[]; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getChecksumLocationName.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getChecksumLocationName.d.ts new file mode 100644 index 00000000..881781c8 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getChecksumLocationName.d.ts @@ -0,0 +1,5 @@ +import { ChecksumAlgorithm } from "./constants"; +/** + * Returns location (header/trailer) name to use to populate checksum in. 
+ */ +export declare const getChecksumLocationName: (algorithm: ChecksumAlgorithm) => string; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getCrc32ChecksumAlgorithmFunction.browser.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getCrc32ChecksumAlgorithmFunction.browser.d.ts new file mode 100644 index 00000000..889142fb --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getCrc32ChecksumAlgorithmFunction.browser.d.ts @@ -0,0 +1,2 @@ +import { AwsCrc32 } from "@aws-crypto/crc32"; +export declare const getCrc32ChecksumAlgorithmFunction: () => typeof AwsCrc32; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getCrc32ChecksumAlgorithmFunction.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getCrc32ChecksumAlgorithmFunction.d.ts new file mode 100644 index 00000000..d1eb9642 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getCrc32ChecksumAlgorithmFunction.d.ts @@ -0,0 +1,10 @@ +import { AwsCrc32 } from "@aws-crypto/crc32"; +import { Checksum } from "@smithy/types"; +declare class NodeCrc32 implements Checksum { + private checksum; + update(data: Uint8Array): void; + digest(): Promise<Uint8Array>; + reset(): void; +} +export declare const getCrc32ChecksumAlgorithmFunction: () => typeof NodeCrc32 | typeof AwsCrc32; +export {}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getFlexibleChecksumsPlugin.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getFlexibleChecksumsPlugin.d.ts new file mode 100644 index 00000000..73fde747 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/getFlexibleChecksumsPlugin.d.ts @@ -0,0 +1,14 @@ +import { Pluggable } from "@smithy/types"; +import { PreviouslyResolved } from "./configuration"; +import { FlexibleChecksumsInputMiddlewareConfig } from "./flexibleChecksumsInputMiddleware"; +import { FlexibleChecksumsRequestMiddlewareConfig } from "./flexibleChecksumsMiddleware"; +import { FlexibleChecksumsResponseMiddlewareConfig } from "./flexibleChecksumsResponseMiddleware"; +/** + * @internal + */ +export interface FlexibleChecksumsMiddlewareConfig extends FlexibleChecksumsRequestMiddlewareConfig, FlexibleChecksumsInputMiddlewareConfig, FlexibleChecksumsResponseMiddlewareConfig { +} +/** + * @internal + */ +export declare const getFlexibleChecksumsPlugin: (config: PreviouslyResolved, middlewareConfig: FlexibleChecksumsMiddlewareConfig) => Pluggable<any, any>; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/hasHeader.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/hasHeader.d.ts new file mode 100644 index 00000000..50a6b756 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/hasHeader.d.ts @@ -0,0 +1,6 @@ +import { HeaderBag } from "@smithy/types"; +/** + * Returns true if header is present in headers. + * Comparisons are case-insensitive.
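getFlexibleChecksumsPlugin above bundles the request, input, and response middlewares behind one Pluggable. A sketch of the shape a caller passes, assuming member names modeled after S3's PutObject/GetObject operations (illustrative, not read from this diff):

import { getFlexibleChecksumsPlugin } from "@aws-sdk/middleware-flexible-checksums";
import { S3Client } from "@aws-sdk/client-s3";

const client = new S3Client({});

// Generated commands normally attach this plugin themselves inside
// resolveMiddleware(); applications rarely wire it by hand. The cast reflects
// that the client's resolved config is assumed to carry the PreviouslyResolved
// members (hashers, encoders, checksum providers).
client.middlewareStack.use(
  getFlexibleChecksumsPlugin(client.config as any, {
    requestChecksumRequired: false,
    requestAlgorithmMember: { name: "ChecksumAlgorithm", httpHeader: "x-amz-sdk-checksum-algorithm" },
    requestValidationModeMember: "ChecksumMode",
    responseAlgorithms: ["CRC32", "CRC32C", "SHA1", "SHA256"],
  })
);
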
+ */ +export declare const hasHeader: (header: string, headers: HeaderBag) => boolean; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/hasHeaderWithPrefix.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/hasHeaderWithPrefix.d.ts new file mode 100644 index 00000000..a4d76107 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/hasHeaderWithPrefix.d.ts @@ -0,0 +1,6 @@ +import { HeaderBag } from "@smithy/types"; +/** + * Returns true if header with headerPrefix is present in headers. + * Comparisons are case-insensitive. + */ +export declare const hasHeaderWithPrefix: (headerPrefix: string, headers: HeaderBag) => boolean; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/index.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/index.d.ts new file mode 100644 index 00000000..32223508 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/index.d.ts @@ -0,0 +1,7 @@ +export * from "./NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS"; +export * from "./NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS"; +export * from "./constants"; +export * from "./crc64-nvme-crt-container"; +export * from "./flexibleChecksumsMiddleware"; +export * from "./getFlexibleChecksumsPlugin"; +export * from "./resolveFlexibleChecksumsConfig"; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/isChecksumWithPartNumber.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/isChecksumWithPartNumber.d.ts new file mode 100644 index 00000000..99f6c799 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/isChecksumWithPartNumber.d.ts @@ -0,0 +1 @@ +export declare const isChecksumWithPartNumber: (checksum: string) => boolean; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/isStreaming.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/isStreaming.d.ts new file mode 100644 index 00000000..8f38d140 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/isStreaming.d.ts @@ -0,0 +1,4 @@ +/** + * Returns true if the given value is a streaming response. + */ +export declare const isStreaming: (body: unknown) => boolean; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/resolveFlexibleChecksumsConfig.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/resolveFlexibleChecksumsConfig.d.ts new file mode 100644 index 00000000..5b8b65c9 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/resolveFlexibleChecksumsConfig.d.ts @@ -0,0 +1,43 @@ +import { Provider } from "@smithy/types"; +import { RequestChecksumCalculation, ResponseChecksumValidation } from "./constants"; +/** + * @public + */ +export interface FlexibleChecksumsInputConfig { + /** + * Determines when a checksum will be calculated for request payloads. + */ + requestChecksumCalculation?: RequestChecksumCalculation | Provider<RequestChecksumCalculation>; + /** + * Determines when checksum validation will be performed on response payloads. + */ + responseChecksumValidation?: ResponseChecksumValidation | Provider<ResponseChecksumValidation>; + /** + * Default 0 (off). + * + * When set to a value greater than or equal to 8192, sets the minimum number + * of bytes to buffer into a chunk when processing input streams + * with chunked encoding (that is, when request checksums are enabled). + * A minimum of 8kb = 8 * 1024 is required, and 64kb or higher is recommended.
+ * + * See https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-streaming.html. + * + * This has a slight performance penalty because it must wrap and buffer + * your input stream. + * You do not need to set this value if your stream already flows chunks + * of 8kb or greater. + */ + requestStreamBufferSize?: number | false; +} +/** + * @internal + */ +export interface FlexibleChecksumsResolvedConfig { + requestChecksumCalculation: Provider<RequestChecksumCalculation>; + responseChecksumValidation: Provider<ResponseChecksumValidation>; + requestStreamBufferSize: number; +} +/** + * @internal + */ +export declare const resolveFlexibleChecksumsConfig: <T>(input: T & FlexibleChecksumsInputConfig) => T & FlexibleChecksumsResolvedConfig; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/selectChecksumAlgorithmFunction.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/selectChecksumAlgorithmFunction.d.ts new file mode 100644 index 00000000..bdb6210b --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/selectChecksumAlgorithmFunction.d.ts @@ -0,0 +1,7 @@ +import { ChecksumConstructor, HashConstructor } from "@smithy/types"; +import { PreviouslyResolved } from "./configuration"; +import { ChecksumAlgorithm } from "./constants"; +/** + * Returns the function that will compute the checksum for the given {@link ChecksumAlgorithm}. + */ +export declare const selectChecksumAlgorithmFunction: (checksumAlgorithm: ChecksumAlgorithm, config: PreviouslyResolved) => ChecksumConstructor | HashConstructor; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/stringHasher.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/stringHasher.d.ts new file mode 100644 index 00000000..a208fda0 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/stringHasher.d.ts @@ -0,0 +1,5 @@ +import { ChecksumConstructor, HashConstructor } from "@smithy/types"; +/** + * A function that, given a hash constructor and a string, calculates the hash of the string. + */ +export declare const stringHasher: (checksumAlgorithmFn: ChecksumConstructor | HashConstructor, body: any) => Promise<Uint8Array>; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/stringUnionSelector.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/stringUnionSelector.d.ts new file mode 100644 index 00000000..446f09d6 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/stringUnionSelector.d.ts @@ -0,0 +1,12 @@ +export declare enum SelectorType { + ENV = "env", + CONFIG = "shared config entry" +} +/** + * Returns undefined, if obj[key] is not defined. + * Returns string value, if the string is defined in obj[key] and it's uppercase matches union value. + * Throws error for all other cases.
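The resolver declared above normalizes this input config. A small usage sketch; the values are assumptions, and the behavior description is inferred from the declared types rather than copied from this diff:

import { resolveFlexibleChecksumsConfig } from "@aws-sdk/middleware-flexible-checksums";

// Plain values are wrapped into async Providers; requestStreamBufferSize is
// coerced to a number, so false/undefined come out as 0 (buffering off).
const resolved = resolveFlexibleChecksumsConfig({
  requestChecksumCalculation: "WHEN_SUPPORTED",
  responseChecksumValidation: "WHEN_REQUIRED",
  requestStreamBufferSize: 64 * 1024, // >= 8 KiB required; 64 KiB or higher recommended
});

// Downstream middleware always awaits the provider form:
resolved.requestChecksumCalculation().then((mode) => console.log(mode)); // "WHEN_SUPPORTED"
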
+ * + * @internal + */ +export declare const stringUnionSelector: <U extends object, K extends keyof U>(obj: Record<string, string | undefined>, key: string, union: U, type: SelectorType) => U[K] | undefined; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS.d.ts new file mode 100644 index 00000000..4ce4dc59 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS.d.ts @@ -0,0 +1,7 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { RequestChecksumCalculation } from "./constants"; +export declare const ENV_REQUEST_CHECKSUM_CALCULATION = + "AWS_REQUEST_CHECKSUM_CALCULATION"; +export declare const CONFIG_REQUEST_CHECKSUM_CALCULATION = + "request_checksum_calculation"; +export declare const NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS: LoadedConfigSelectors<RequestChecksumCalculation>; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS.d.ts new file mode 100644 index 00000000..be8aa90c --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS.d.ts @@ -0,0 +1,7 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { ResponseChecksumValidation } from "./constants"; +export declare const ENV_RESPONSE_CHECKSUM_VALIDATION = + "AWS_RESPONSE_CHECKSUM_VALIDATION"; +export declare const CONFIG_RESPONSE_CHECKSUM_VALIDATION = + "response_checksum_validation"; +export declare const NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS: LoadedConfigSelectors<ResponseChecksumValidation>; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/configuration.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/configuration.d.ts new file mode 100644 index 00000000..3538fc60 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/configuration.d.ts @@ -0,0 +1,27 @@ +import { + BodyLengthCalculator, + ChecksumConstructor, + Encoder, + GetAwsChunkedEncodingStream, + HashConstructor, + Provider, + StreamCollector, + StreamHasher, +} from "@smithy/types"; +import { + RequestChecksumCalculation, + ResponseChecksumValidation, +} from "./constants"; +export interface PreviouslyResolved { + base64Encoder: Encoder; + bodyLengthChecker: BodyLengthCalculator; + getAwsChunkedEncodingStream: GetAwsChunkedEncodingStream; + md5: ChecksumConstructor | HashConstructor; + requestChecksumCalculation: Provider<RequestChecksumCalculation>; + responseChecksumValidation: Provider<ResponseChecksumValidation>; + sha1: ChecksumConstructor | HashConstructor; + sha256: ChecksumConstructor | HashConstructor; + streamHasher: StreamHasher<any>; + streamCollector: StreamCollector; + requestStreamBufferSize: number; +} diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/constants.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/constants.d.ts new file mode 100644 index 00000000..4f752723 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,27 @@ +export declare const RequestChecksumCalculation: { + readonly WHEN_SUPPORTED: "WHEN_SUPPORTED"; + readonly WHEN_REQUIRED: "WHEN_REQUIRED"; +}; +export type
RequestChecksumCalculation = + (typeof RequestChecksumCalculation)[keyof typeof RequestChecksumCalculation]; +export declare const DEFAULT_REQUEST_CHECKSUM_CALCULATION: "WHEN_SUPPORTED"; +export declare const ResponseChecksumValidation: { + readonly WHEN_SUPPORTED: "WHEN_SUPPORTED"; + readonly WHEN_REQUIRED: "WHEN_REQUIRED"; +}; +export type ResponseChecksumValidation = + (typeof ResponseChecksumValidation)[keyof typeof ResponseChecksumValidation]; +export declare const DEFAULT_RESPONSE_CHECKSUM_VALIDATION: "WHEN_SUPPORTED"; +export declare enum ChecksumAlgorithm { + MD5 = "MD5", + CRC32 = "CRC32", + CRC32C = "CRC32C", + CRC64NVME = "CRC64NVME", + SHA1 = "SHA1", + SHA256 = "SHA256", +} +export declare enum ChecksumLocation { + HEADER = "header", + TRAILER = "trailer", +} +export declare const DEFAULT_CHECKSUM_ALGORITHM = ChecksumAlgorithm.CRC32; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/crc64-nvme-crt-container.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/crc64-nvme-crt-container.d.ts new file mode 100644 index 00000000..4277ae53 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/crc64-nvme-crt-container.d.ts @@ -0,0 +1,4 @@ +import { ChecksumConstructor } from "@smithy/types"; +export declare const crc64NvmeCrtContainer: { + CrtCrc64Nvme: null | ChecksumConstructor; +}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/flexibleChecksumsInputMiddleware.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/flexibleChecksumsInputMiddleware.d.ts new file mode 100644 index 00000000..b8537215 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/flexibleChecksumsInputMiddleware.d.ts @@ -0,0 +1,10 @@ +import { RelativeMiddlewareOptions, SerializeMiddleware } from "@smithy/types"; +import { PreviouslyResolved } from "./configuration"; +export interface FlexibleChecksumsInputMiddlewareConfig { + requestValidationModeMember?: string; +} +export declare const flexibleChecksumsInputMiddlewareOptions: RelativeMiddlewareOptions; +export declare const flexibleChecksumsInputMiddleware: ( + config: PreviouslyResolved, + middlewareConfig: FlexibleChecksumsInputMiddlewareConfig +) => SerializeMiddleware<any, any>; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/flexibleChecksumsMiddleware.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/flexibleChecksumsMiddleware.d.ts new file mode 100644 index 00000000..14f45d62 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/flexibleChecksumsMiddleware.d.ts @@ -0,0 +1,14 @@ +import { BuildHandlerOptions, BuildMiddleware } from "@smithy/types"; +import { PreviouslyResolved } from "./configuration"; +export interface FlexibleChecksumsRequestMiddlewareConfig { + requestChecksumRequired: boolean; + requestAlgorithmMember?: { + name: string; + httpHeader?: string; + }; +} +export declare const flexibleChecksumsMiddlewareOptions: BuildHandlerOptions; +export declare const flexibleChecksumsMiddleware: ( + config: PreviouslyResolved, + middlewareConfig: FlexibleChecksumsRequestMiddlewareConfig +) => BuildMiddleware<any, Object>; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/flexibleChecksumsResponseMiddleware.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/flexibleChecksumsResponseMiddleware.d.ts new file mode 100644 index 00000000..6d39d5f3
--- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/flexibleChecksumsResponseMiddleware.d.ts @@ -0,0 +1,14 @@ +import { + DeserializeMiddleware, + RelativeMiddlewareOptions, +} from "@smithy/types"; +import { PreviouslyResolved } from "./configuration"; +export interface FlexibleChecksumsResponseMiddlewareConfig { + requestValidationModeMember?: string; + responseAlgorithms?: string[]; +} +export declare const flexibleChecksumsResponseMiddlewareOptions: RelativeMiddlewareOptions; +export declare const flexibleChecksumsResponseMiddleware: ( + config: PreviouslyResolved, + middlewareConfig: FlexibleChecksumsResponseMiddlewareConfig +) => DeserializeMiddleware<any, any>; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getChecksum.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getChecksum.d.ts new file mode 100644 index 00000000..ab46bb5f --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getChecksum.d.ts @@ -0,0 +1,9 @@ +import { ChecksumConstructor, Encoder, HashConstructor } from "@smithy/types"; +export interface GetChecksumDigestOptions { + checksumAlgorithmFn: ChecksumConstructor | HashConstructor; + base64Encoder: Encoder; +} +export declare const getChecksum: ( + body: unknown, + { checksumAlgorithmFn, base64Encoder }: GetChecksumDigestOptions +) => Promise<string>; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getChecksumAlgorithmForRequest.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getChecksumAlgorithmForRequest.d.ts new file mode 100644 index 00000000..e745111c --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getChecksumAlgorithmForRequest.d.ts @@ -0,0 +1,14 @@ +import { ChecksumAlgorithm, RequestChecksumCalculation } from "./constants"; +export interface GetChecksumAlgorithmForRequestOptions { + requestChecksumRequired: boolean; + requestAlgorithmMember?: string; + requestChecksumCalculation: RequestChecksumCalculation; +} +export declare const getChecksumAlgorithmForRequest: ( + input: any, + { + requestChecksumRequired, + requestAlgorithmMember, + requestChecksumCalculation, + }: GetChecksumAlgorithmForRequestOptions +) => ChecksumAlgorithm | undefined; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getChecksumAlgorithmListForResponse.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getChecksumAlgorithmListForResponse.d.ts new file mode 100644 index 00000000..39f2eb5e --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getChecksumAlgorithmListForResponse.d.ts @@ -0,0 +1,4 @@ +import { ChecksumAlgorithm } from "./constants"; +export declare const getChecksumAlgorithmListForResponse: ( + responseAlgorithms?: string[] +) => ChecksumAlgorithm[]; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getChecksumLocationName.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getChecksumLocationName.d.ts new file mode 100644 index 00000000..e76122a5 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getChecksumLocationName.d.ts @@ -0,0 +1,4 @@ +import { ChecksumAlgorithm } from "./constants"; +export declare const getChecksumLocationName: ( + algorithm: ChecksumAlgorithm +) => string; diff --git
a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getCrc32ChecksumAlgorithmFunction.browser.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getCrc32ChecksumAlgorithmFunction.browser.d.ts new file mode 100644 index 00000000..889142fb --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getCrc32ChecksumAlgorithmFunction.browser.d.ts @@ -0,0 +1,2 @@ +import { AwsCrc32 } from "@aws-crypto/crc32"; +export declare const getCrc32ChecksumAlgorithmFunction: () => typeof AwsCrc32; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getCrc32ChecksumAlgorithmFunction.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getCrc32ChecksumAlgorithmFunction.d.ts new file mode 100644 index 00000000..62f01dff --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getCrc32ChecksumAlgorithmFunction.d.ts @@ -0,0 +1,12 @@ +import { AwsCrc32 } from "@aws-crypto/crc32"; +import { Checksum } from "@smithy/types"; +declare class NodeCrc32 implements Checksum { + private checksum; + update(data: Uint8Array): void; + digest(): Promise<Uint8Array>; + reset(): void; +} +export declare const getCrc32ChecksumAlgorithmFunction: () => + | typeof NodeCrc32 + | typeof AwsCrc32; +export {}; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getFlexibleChecksumsPlugin.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getFlexibleChecksumsPlugin.d.ts new file mode 100644 index 00000000..2d4b0943 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/getFlexibleChecksumsPlugin.d.ts @@ -0,0 +1,13 @@ +import { Pluggable } from "@smithy/types"; +import { PreviouslyResolved } from "./configuration"; +import { FlexibleChecksumsInputMiddlewareConfig } from "./flexibleChecksumsInputMiddleware"; +import { FlexibleChecksumsRequestMiddlewareConfig } from "./flexibleChecksumsMiddleware"; +import { FlexibleChecksumsResponseMiddlewareConfig } from "./flexibleChecksumsResponseMiddleware"; +export interface FlexibleChecksumsMiddlewareConfig + extends FlexibleChecksumsRequestMiddlewareConfig, + FlexibleChecksumsInputMiddlewareConfig, + FlexibleChecksumsResponseMiddlewareConfig {} +export declare const getFlexibleChecksumsPlugin: ( + config: PreviouslyResolved, + middlewareConfig: FlexibleChecksumsMiddlewareConfig +) => Pluggable<any, any>; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/hasHeader.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/hasHeader.d.ts new file mode 100644 index 00000000..a3f38cd0 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/hasHeader.d.ts @@ -0,0 +1,2 @@ +import { HeaderBag } from "@smithy/types"; +export declare const hasHeader: (header: string, headers: HeaderBag) => boolean; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/hasHeaderWithPrefix.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/hasHeaderWithPrefix.d.ts new file mode 100644 index 00000000..3caf7a2c --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/hasHeaderWithPrefix.d.ts @@ -0,0 +1,5 @@ +import { HeaderBag } from "@smithy/types"; +export declare const hasHeaderWithPrefix: ( + headerPrefix: string, + headers: HeaderBag +) => boolean; diff --git
a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..32223508 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/index.d.ts @@ -0,0 +1,7 @@ +export * from "./NODE_REQUEST_CHECKSUM_CALCULATION_CONFIG_OPTIONS"; +export * from "./NODE_RESPONSE_CHECKSUM_VALIDATION_CONFIG_OPTIONS"; +export * from "./constants"; +export * from "./crc64-nvme-crt-container"; +export * from "./flexibleChecksumsMiddleware"; +export * from "./getFlexibleChecksumsPlugin"; +export * from "./resolveFlexibleChecksumsConfig"; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/isChecksumWithPartNumber.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/isChecksumWithPartNumber.d.ts new file mode 100644 index 00000000..99f6c799 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/isChecksumWithPartNumber.d.ts @@ -0,0 +1 @@ +export declare const isChecksumWithPartNumber: (checksum: string) => boolean; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/isStreaming.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/isStreaming.d.ts new file mode 100644 index 00000000..0ee946ee --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/isStreaming.d.ts @@ -0,0 +1 @@ +export declare const isStreaming: (body: unknown) => boolean; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/resolveFlexibleChecksumsConfig.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/resolveFlexibleChecksumsConfig.d.ts new file mode 100644 index 00000000..b4742390 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/resolveFlexibleChecksumsConfig.d.ts @@ -0,0 +1,22 @@ +import { Provider } from "@smithy/types"; +import { + RequestChecksumCalculation, + ResponseChecksumValidation, +} from "./constants"; +export interface FlexibleChecksumsInputConfig { + requestChecksumCalculation?: + | RequestChecksumCalculation + | Provider<RequestChecksumCalculation>; + responseChecksumValidation?: + | ResponseChecksumValidation + | Provider<ResponseChecksumValidation>; + requestStreamBufferSize?: number | false; +} +export interface FlexibleChecksumsResolvedConfig { + requestChecksumCalculation: Provider<RequestChecksumCalculation>; + responseChecksumValidation: Provider<ResponseChecksumValidation>; + requestStreamBufferSize: number; +} +export declare const resolveFlexibleChecksumsConfig: <T>( + input: T & FlexibleChecksumsInputConfig ) => T & FlexibleChecksumsResolvedConfig; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/selectChecksumAlgorithmFunction.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/selectChecksumAlgorithmFunction.d.ts new file mode 100644 index 00000000..9d47380b --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/selectChecksumAlgorithmFunction.d.ts @@ -0,0 +1,7 @@ +import { ChecksumConstructor, HashConstructor } from "@smithy/types"; +import { PreviouslyResolved } from "./configuration"; +import { ChecksumAlgorithm } from "./constants"; +export declare const selectChecksumAlgorithmFunction: ( + checksumAlgorithm: ChecksumAlgorithm, + config: PreviouslyResolved +) => ChecksumConstructor | HashConstructor; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/stringHasher.d.ts
b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/stringHasher.d.ts new file mode 100644 index 00000000..fbccb534 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/stringHasher.d.ts @@ -0,0 +1,5 @@ +import { ChecksumConstructor, HashConstructor } from "@smithy/types"; +export declare const stringHasher: ( + checksumAlgorithmFn: ChecksumConstructor | HashConstructor, + body: any +) => Promise<Uint8Array>; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/stringUnionSelector.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/stringUnionSelector.d.ts new file mode 100644 index 00000000..88f28758 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/stringUnionSelector.d.ts @@ -0,0 +1,10 @@ +export declare enum SelectorType { + ENV = "env", + CONFIG = "shared config entry", +} +export declare const stringUnionSelector: <U extends object, K extends keyof U>( + obj: Record<string, string | undefined>, + key: string, + union: U, + type: SelectorType +) => U[K] | undefined; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/types.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/types.d.ts new file mode 100644 index 00000000..00065416 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/types.d.ts @@ -0,0 +1,3 @@ +import { ChecksumAlgorithm } from "./constants"; +export declare const CLIENT_SUPPORTED_ALGORITHMS: ChecksumAlgorithm[]; +export declare const PRIORITY_ORDER_ALGORITHMS: ChecksumAlgorithm[]; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/validateChecksumFromResponse.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/validateChecksumFromResponse.d.ts new file mode 100644 index 00000000..0fc2fd0f --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/ts3.4/validateChecksumFromResponse.d.ts @@ -0,0 +1,12 @@ +import { HttpResponse } from "@smithy/protocol-http"; +import { Logger } from "@smithy/types"; +import { PreviouslyResolved } from "./configuration"; +export interface ValidateChecksumFromResponseOptions { + config: PreviouslyResolved; + responseAlgorithms?: string[]; + logger?: Logger; +} +export declare const validateChecksumFromResponse: ( + response: HttpResponse, + { config, responseAlgorithms, logger }: ValidateChecksumFromResponseOptions ) => Promise<void>; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/types.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/types.d.ts new file mode 100644 index 00000000..518b84e9 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/types.d.ts @@ -0,0 +1,9 @@ +import { ChecksumAlgorithm } from "./constants"; +/** + * List of algorithms supported by client. + */ +export declare const CLIENT_SUPPORTED_ALGORITHMS: ChecksumAlgorithm[]; +/** + * Priority order for validating checksum algorithm. A faster algorithm has higher priority.
+ */ +export declare const PRIORITY_ORDER_ALGORITHMS: ChecksumAlgorithm[]; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/validateChecksumFromResponse.d.ts b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/validateChecksumFromResponse.d.ts new file mode 100644 index 00000000..caff36a4 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/dist-types/validateChecksumFromResponse.d.ts @@ -0,0 +1,13 @@ +import { HttpResponse } from "@smithy/protocol-http"; +import { Logger } from "@smithy/types"; +import { PreviouslyResolved } from "./configuration"; +export interface ValidateChecksumFromResponseOptions { + config: PreviouslyResolved; + /** + * Defines the checksum algorithms clients SHOULD look for when validating checksums + * returned in the HTTP response. + */ + responseAlgorithms?: string[]; + logger?: Logger; +} +export declare const validateChecksumFromResponse: (response: HttpResponse, { config, responseAlgorithms, logger }: ValidateChecksumFromResponseOptions) => Promise<void>; diff --git a/node_modules/@aws-sdk/middleware-flexible-checksums/package.json b/node_modules/@aws-sdk/middleware-flexible-checksums/package.json new file mode 100644 index 00000000..10c712f1 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-flexible-checksums/package.json @@ -0,0 +1,76 @@ +{ + "name": "@aws-sdk/middleware-flexible-checksums", + "version": "3.799.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-flexible-checksums", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts", + "test:e2e": "yarn g:vitest run -c vitest.config.e2e.ts", + "test:e2e:watch": "yarn g:vitest watch -c vitest.config.e2e.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "browser": { + "./dist-es/getCrc32ChecksumAlgorithmFunction": "./dist-es/getCrc32ChecksumAlgorithmFunction.browser" + }, + "react-native": { + "./dist-es/getCrc32ChecksumAlgorithmFunction": "./dist-es/getCrc32ChecksumAlgorithmFunction.browser", + "./dist-cjs/getCrc32ChecksumAlgorithmFunction": "./dist-cjs/getCrc32ChecksumAlgorithmFunction.browser" + }, + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/crc32": "5.2.0", + "@aws-crypto/crc32c": "5.2.0", + "@aws-crypto/util": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/is-array-buffer": "^4.0.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@smithy/node-http-handler": "^4.0.4", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node":
">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-flexible-checksums", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-flexible-checksums" + } +} diff --git a/node_modules/@aws-sdk/middleware-host-header/LICENSE b/node_modules/@aws-sdk/middleware-host-header/LICENSE new file mode 100644 index 00000000..e907b586 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-host-header/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@aws-sdk/middleware-host-header/README.md b/node_modules/@aws-sdk/middleware-host-header/README.md new file mode 100644 index 00000000..123940e6 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-host-header/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/middleware-host-header + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-host-header/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-host-header) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-host-header.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-host-header) diff --git a/node_modules/@aws-sdk/middleware-host-header/dist-cjs/index.js b/node_modules/@aws-sdk/middleware-host-header/dist-cjs/index.js new file mode 100644 index 00000000..bdfe2a5a --- /dev/null +++ b/node_modules/@aws-sdk/middleware-host-header/dist-cjs/index.js @@ -0,0 +1,69 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + getHostHeaderPlugin: () => getHostHeaderPlugin, + hostHeaderMiddleware: () => hostHeaderMiddleware, + hostHeaderMiddlewareOptions: () => hostHeaderMiddlewareOptions, + resolveHostHeaderConfig: () => resolveHostHeaderConfig +}); +module.exports = __toCommonJS(index_exports); +var import_protocol_http = require("@smithy/protocol-http"); +function resolveHostHeaderConfig(input) { + return input; +} +__name(resolveHostHeaderConfig, "resolveHostHeaderConfig"); +var hostHeaderMiddleware = /* @__PURE__ 
*/ __name((options) => (next) => async (args) => { + if (!import_protocol_http.HttpRequest.isInstance(args.request)) return next(args); + const { request } = args; + const { handlerProtocol = "" } = options.requestHandler.metadata || {}; + if (handlerProtocol.indexOf("h2") >= 0 && !request.headers[":authority"]) { + delete request.headers["host"]; + request.headers[":authority"] = request.hostname + (request.port ? ":" + request.port : ""); + } else if (!request.headers["host"]) { + let host = request.hostname; + if (request.port != null) host += `:${request.port}`; + request.headers["host"] = host; + } + return next(args); +}, "hostHeaderMiddleware"); +var hostHeaderMiddlewareOptions = { + name: "hostHeaderMiddleware", + step: "build", + priority: "low", + tags: ["HOST"], + override: true +}; +var getHostHeaderPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(hostHeaderMiddleware(options), hostHeaderMiddlewareOptions); + }, "applyToStack") +}), "getHostHeaderPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + resolveHostHeaderConfig, + hostHeaderMiddleware, + hostHeaderMiddlewareOptions, + getHostHeaderPlugin +}); + diff --git a/node_modules/@aws-sdk/middleware-host-header/dist-es/index.js b/node_modules/@aws-sdk/middleware-host-header/dist-es/index.js new file mode 100644 index 00000000..2e2fb62e --- /dev/null +++ b/node_modules/@aws-sdk/middleware-host-header/dist-es/index.js @@ -0,0 +1,33 @@ +import { HttpRequest } from "@smithy/protocol-http"; +export function resolveHostHeaderConfig(input) { + return input; +} +export const hostHeaderMiddleware = (options) => (next) => async (args) => { + if (!HttpRequest.isInstance(args.request)) + return next(args); + const { request } = args; + const { handlerProtocol = "" } = options.requestHandler.metadata || {}; + if (handlerProtocol.indexOf("h2") >= 0 && !request.headers[":authority"]) { + delete request.headers["host"]; + request.headers[":authority"] = request.hostname + (request.port ? ":" + request.port : ""); + } + else if (!request.headers["host"]) { + let host = request.hostname; + if (request.port != null) + host += `:${request.port}`; + request.headers["host"] = host; + } + return next(args); +}; +export const hostHeaderMiddlewareOptions = { + name: "hostHeaderMiddleware", + step: "build", + priority: "low", + tags: ["HOST"], + override: true, +}; +export const getHostHeaderPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(hostHeaderMiddleware(options), hostHeaderMiddlewareOptions); + }, +}); diff --git a/node_modules/@aws-sdk/middleware-host-header/dist-types/index.d.ts b/node_modules/@aws-sdk/middleware-host-header/dist-types/index.d.ts new file mode 100644 index 00000000..752bb00b --- /dev/null +++ b/node_modules/@aws-sdk/middleware-host-header/dist-types/index.d.ts @@ -0,0 +1,35 @@ +import { AbsoluteLocation, BuildHandlerOptions, BuildMiddleware, Pluggable, RequestHandler } from "@smithy/types"; +/** + * @public + */ +export interface HostHeaderInputConfig { +} +interface PreviouslyResolved { + requestHandler: RequestHandler; +} +/** + * @internal + */ +export interface HostHeaderResolvedConfig { + /** + * The HTTP handler to use. Fetch in browser and Https in Nodejs. 
+ */ + requestHandler: RequestHandler; +} +/** + * @internal + */ +export declare function resolveHostHeaderConfig(input: T & PreviouslyResolved & HostHeaderInputConfig): T & HostHeaderResolvedConfig; +/** + * @internal + */ +export declare const hostHeaderMiddleware: (options: HostHeaderResolvedConfig) => BuildMiddleware; +/** + * @internal + */ +export declare const hostHeaderMiddlewareOptions: BuildHandlerOptions & AbsoluteLocation; +/** + * @internal + */ +export declare const getHostHeaderPlugin: (options: HostHeaderResolvedConfig) => Pluggable; +export {}; diff --git a/node_modules/@aws-sdk/middleware-host-header/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/middleware-host-header/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..3ca5561e --- /dev/null +++ b/node_modules/@aws-sdk/middleware-host-header/dist-types/ts3.4/index.d.ts @@ -0,0 +1,29 @@ +import { + AbsoluteLocation, + BuildHandlerOptions, + BuildMiddleware, + Pluggable, + RequestHandler, +} from "@smithy/types"; +export interface HostHeaderInputConfig {} +interface PreviouslyResolved { + requestHandler: RequestHandler; +} +export interface HostHeaderResolvedConfig { + requestHandler: RequestHandler; +} +export declare function resolveHostHeaderConfig( + input: T & PreviouslyResolved & HostHeaderInputConfig +): T & HostHeaderResolvedConfig; +export declare const hostHeaderMiddleware: < + Input extends object, + Output extends object +>( + options: HostHeaderResolvedConfig +) => BuildMiddleware; +export declare const hostHeaderMiddlewareOptions: BuildHandlerOptions & + AbsoluteLocation; +export declare const getHostHeaderPlugin: ( + options: HostHeaderResolvedConfig +) => Pluggable; +export {}; diff --git a/node_modules/@aws-sdk/middleware-host-header/package.json b/node_modules/@aws-sdk/middleware-host-header/package.json new file mode 100644 index 00000000..523f8a11 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-host-header/package.json @@ -0,0 +1,58 @@ +{ + "name": "@aws-sdk/middleware-host-header", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-host-header", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-host-header", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": 
"packages/middleware-host-header" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + } +} diff --git a/node_modules/@aws-sdk/middleware-location-constraint/LICENSE b/node_modules/@aws-sdk/middleware-location-constraint/LICENSE new file mode 100644 index 00000000..dd65ae06 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-location-constraint/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@aws-sdk/middleware-location-constraint/README.md b/node_modules/@aws-sdk/middleware-location-constraint/README.md new file mode 100644 index 00000000..2a1b4f05 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-location-constraint/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/middleware-location-constraint + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-location-constraint/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-location-constraint) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-location-constraint.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-location-constraint) diff --git a/node_modules/@aws-sdk/middleware-location-constraint/dist-cjs/index.js b/node_modules/@aws-sdk/middleware-location-constraint/dist-cjs/index.js new file mode 100644 index 00000000..d400bab6 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-location-constraint/dist-cjs/index.js @@ -0,0 +1,64 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + getLocationConstraintPlugin: () => getLocationConstraintPlugin, + locationConstraintMiddleware: () => locationConstraintMiddleware, + locationConstraintMiddlewareOptions: () => locationConstraintMiddlewareOptions +}); +module.exports = __toCommonJS(index_exports); +function locationConstraintMiddleware(options) { + return (next) => async (args) => { + const { CreateBucketConfiguration } = args.input; + const region = await options.region(); + if (!CreateBucketConfiguration?.LocationConstraint && !CreateBucketConfiguration?.Location) { + args = { + ...args, + input: { + ...args.input, + CreateBucketConfiguration: region === "us-east-1" ? 
void 0 : { LocationConstraint: region } + } + }; + } + return next(args); + }; +} +__name(locationConstraintMiddleware, "locationConstraintMiddleware"); +var locationConstraintMiddlewareOptions = { + step: "initialize", + tags: ["LOCATION_CONSTRAINT", "CREATE_BUCKET_CONFIGURATION"], + name: "locationConstraintMiddleware", + override: true +}; +var getLocationConstraintPlugin = /* @__PURE__ */ __name((config) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(locationConstraintMiddleware(config), locationConstraintMiddlewareOptions); + }, "applyToStack") +}), "getLocationConstraintPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + locationConstraintMiddleware, + locationConstraintMiddlewareOptions, + getLocationConstraintPlugin +}); + diff --git a/node_modules/@aws-sdk/middleware-location-constraint/dist-es/configuration.js b/node_modules/@aws-sdk/middleware-location-constraint/dist-es/configuration.js new file mode 100644 index 00000000..40dfb353 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-location-constraint/dist-es/configuration.js @@ -0,0 +1,3 @@ +export function resolveLocationConstraintConfig(input) { + return input; +} diff --git a/node_modules/@aws-sdk/middleware-location-constraint/dist-es/index.js b/node_modules/@aws-sdk/middleware-location-constraint/dist-es/index.js new file mode 100644 index 00000000..dd53b74c --- /dev/null +++ b/node_modules/@aws-sdk/middleware-location-constraint/dist-es/index.js @@ -0,0 +1,27 @@ +export function locationConstraintMiddleware(options) { + return (next) => async (args) => { + const { CreateBucketConfiguration } = args.input; + const region = await options.region(); + if (!CreateBucketConfiguration?.LocationConstraint && !CreateBucketConfiguration?.Location) { + args = { + ...args, + input: { + ...args.input, + CreateBucketConfiguration: region === "us-east-1" ? 
undefined : { LocationConstraint: region }, + }, + }; + } + return next(args); + }; +} +export const locationConstraintMiddlewareOptions = { + step: "initialize", + tags: ["LOCATION_CONSTRAINT", "CREATE_BUCKET_CONFIGURATION"], + name: "locationConstraintMiddleware", + override: true, +}; +export const getLocationConstraintPlugin = (config) => ({ + applyToStack: (clientStack) => { + clientStack.add(locationConstraintMiddleware(config), locationConstraintMiddlewareOptions); + }, +}); diff --git a/node_modules/@aws-sdk/middleware-location-constraint/dist-types/configuration.d.ts b/node_modules/@aws-sdk/middleware-location-constraint/dist-types/configuration.d.ts new file mode 100644 index 00000000..c825bf5c --- /dev/null +++ b/node_modules/@aws-sdk/middleware-location-constraint/dist-types/configuration.d.ts @@ -0,0 +1,17 @@ +import { Provider } from "@smithy/types"; +/** + * @public + */ +export interface LocationConstraintInputConfig { +} +interface PreviouslyResolved { + region: Provider; +} +export interface LocationConstraintResolvedConfig { + /** + * Resolved value for input config {@link RegionInputConfig.region} + */ + region: Provider; +} +export declare function resolveLocationConstraintConfig(input: T & LocationConstraintInputConfig & PreviouslyResolved): T & LocationConstraintResolvedConfig; +export {}; diff --git a/node_modules/@aws-sdk/middleware-location-constraint/dist-types/index.d.ts b/node_modules/@aws-sdk/middleware-location-constraint/dist-types/index.d.ts new file mode 100644 index 00000000..8a57aa3b --- /dev/null +++ b/node_modules/@aws-sdk/middleware-location-constraint/dist-types/index.d.ts @@ -0,0 +1,10 @@ +import { InitializeHandlerOptions, InitializeMiddleware, Pluggable } from "@smithy/types"; +import { LocationConstraintResolvedConfig } from "./configuration"; +/** + * This middleware modifies the input on S3 CreateBucket requests. If the LocationConstraint has not been set, this + * middleware will set a LocationConstraint to match the configured region. The CreateBucketConfiguration will be + * removed entirely on requests to the us-east-1 region. 
+ */ +export declare function locationConstraintMiddleware(options: LocationConstraintResolvedConfig): InitializeMiddleware; +export declare const locationConstraintMiddlewareOptions: InitializeHandlerOptions; +export declare const getLocationConstraintPlugin: (config: LocationConstraintResolvedConfig) => Pluggable; diff --git a/node_modules/@aws-sdk/middleware-location-constraint/dist-types/ts3.4/configuration.d.ts b/node_modules/@aws-sdk/middleware-location-constraint/dist-types/ts3.4/configuration.d.ts new file mode 100644 index 00000000..05fd779b --- /dev/null +++ b/node_modules/@aws-sdk/middleware-location-constraint/dist-types/ts3.4/configuration.d.ts @@ -0,0 +1,12 @@ +import { Provider } from "@smithy/types"; +export interface LocationConstraintInputConfig {} +interface PreviouslyResolved { + region: Provider; +} +export interface LocationConstraintResolvedConfig { + region: Provider; +} +export declare function resolveLocationConstraintConfig( + input: T & LocationConstraintInputConfig & PreviouslyResolved +): T & LocationConstraintResolvedConfig; +export {}; diff --git a/node_modules/@aws-sdk/middleware-location-constraint/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/middleware-location-constraint/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..5e115134 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-location-constraint/dist-types/ts3.4/index.d.ts @@ -0,0 +1,13 @@ +import { + InitializeHandlerOptions, + InitializeMiddleware, + Pluggable, +} from "@smithy/types"; +import { LocationConstraintResolvedConfig } from "./configuration"; +export declare function locationConstraintMiddleware( + options: LocationConstraintResolvedConfig +): InitializeMiddleware; +export declare const locationConstraintMiddlewareOptions: InitializeHandlerOptions; +export declare const getLocationConstraintPlugin: ( + config: LocationConstraintResolvedConfig +) => Pluggable; diff --git a/node_modules/@aws-sdk/middleware-location-constraint/package.json b/node_modules/@aws-sdk/middleware-location-constraint/package.json new file mode 100644 index 00000000..08d0b1de --- /dev/null +++ b/node_modules/@aws-sdk/middleware-location-constraint/package.json @@ -0,0 +1,56 @@ +{ + "name": "@aws-sdk/middleware-location-constraint", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-location-constraint", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": 
"https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-location-constraint", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-location-constraint" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + } +} diff --git a/node_modules/@aws-sdk/middleware-logger/LICENSE b/node_modules/@aws-sdk/middleware-logger/LICENSE new file mode 100644 index 00000000..74d4e5c3 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-logger/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@aws-sdk/middleware-logger/README.md b/node_modules/@aws-sdk/middleware-logger/README.md new file mode 100644 index 00000000..861fa43f --- /dev/null +++ b/node_modules/@aws-sdk/middleware-logger/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/middleware-logger + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-logger/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-logger) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-logger.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-logger) diff --git a/node_modules/@aws-sdk/middleware-logger/dist-cjs/index.js b/node_modules/@aws-sdk/middleware-logger/dist-cjs/index.js new file mode 100644 index 00000000..b1db3083 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-logger/dist-cjs/index.js @@ -0,0 +1,79 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + getLoggerPlugin: () => getLoggerPlugin, + loggerMiddleware: () => loggerMiddleware, + loggerMiddlewareOptions: () => loggerMiddlewareOptions +}); +module.exports = __toCommonJS(index_exports); + +// src/loggerMiddleware.ts +var loggerMiddleware = /* @__PURE__ */ __name(() => (next, context) => async (args) => { + try { + const response = await next(args); + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog, overrideOutputFilterSensitiveLog } = 
dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog ?? context.inputFilterSensitiveLog; + const outputFilterSensitiveLog = overrideOutputFilterSensitiveLog ?? context.outputFilterSensitiveLog; + const { $metadata, ...outputWithoutMetadata } = response.output; + logger?.info?.({ + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + output: outputFilterSensitiveLog(outputWithoutMetadata), + metadata: $metadata + }); + return response; + } catch (error) { + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog ?? context.inputFilterSensitiveLog; + logger?.error?.({ + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + error, + metadata: error.$metadata + }); + throw error; + } +}, "loggerMiddleware"); +var loggerMiddlewareOptions = { + name: "loggerMiddleware", + tags: ["LOGGER"], + step: "initialize", + override: true +}; +var getLoggerPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(loggerMiddleware(), loggerMiddlewareOptions); + }, "applyToStack") +}), "getLoggerPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + loggerMiddleware, + loggerMiddlewareOptions, + getLoggerPlugin +}); + diff --git a/node_modules/@aws-sdk/middleware-logger/dist-es/index.js b/node_modules/@aws-sdk/middleware-logger/dist-es/index.js new file mode 100644 index 00000000..171e3bc5 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-logger/dist-es/index.js @@ -0,0 +1 @@ +export * from "./loggerMiddleware"; diff --git a/node_modules/@aws-sdk/middleware-logger/dist-es/loggerMiddleware.js b/node_modules/@aws-sdk/middleware-logger/dist-es/loggerMiddleware.js new file mode 100644 index 00000000..50da4cca --- /dev/null +++ b/node_modules/@aws-sdk/middleware-logger/dist-es/loggerMiddleware.js @@ -0,0 +1,42 @@ +export const loggerMiddleware = () => (next, context) => async (args) => { + try { + const response = await next(args); + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog, overrideOutputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog ?? context.inputFilterSensitiveLog; + const outputFilterSensitiveLog = overrideOutputFilterSensitiveLog ?? context.outputFilterSensitiveLog; + const { $metadata, ...outputWithoutMetadata } = response.output; + logger?.info?.({ + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + output: outputFilterSensitiveLog(outputWithoutMetadata), + metadata: $metadata, + }); + return response; + } + catch (error) { + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog ?? 
context.inputFilterSensitiveLog; + logger?.error?.({ + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + error, + metadata: error.$metadata, + }); + throw error; + } +}; +export const loggerMiddlewareOptions = { + name: "loggerMiddleware", + tags: ["LOGGER"], + step: "initialize", + override: true, +}; +export const getLoggerPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(loggerMiddleware(), loggerMiddlewareOptions); + }, +}); diff --git a/node_modules/@aws-sdk/middleware-logger/dist-types/index.d.ts b/node_modules/@aws-sdk/middleware-logger/dist-types/index.d.ts new file mode 100644 index 00000000..171e3bc5 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-logger/dist-types/index.d.ts @@ -0,0 +1 @@ +export * from "./loggerMiddleware"; diff --git a/node_modules/@aws-sdk/middleware-logger/dist-types/loggerMiddleware.d.ts b/node_modules/@aws-sdk/middleware-logger/dist-types/loggerMiddleware.d.ts new file mode 100644 index 00000000..5712017d --- /dev/null +++ b/node_modules/@aws-sdk/middleware-logger/dist-types/loggerMiddleware.d.ts @@ -0,0 +1,4 @@ +import { AbsoluteLocation, HandlerExecutionContext, InitializeHandler, InitializeHandlerOptions, MetadataBearer, Pluggable } from "@smithy/types"; +export declare const loggerMiddleware: () => (next: InitializeHandler, context: HandlerExecutionContext) => InitializeHandler; +export declare const loggerMiddlewareOptions: InitializeHandlerOptions & AbsoluteLocation; +export declare const getLoggerPlugin: (options: any) => Pluggable; diff --git a/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..171e3bc5 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./loggerMiddleware"; diff --git a/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/loggerMiddleware.d.ts b/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/loggerMiddleware.d.ts new file mode 100644 index 00000000..10ded9e2 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/loggerMiddleware.d.ts @@ -0,0 +1,17 @@ +import { + AbsoluteLocation, + HandlerExecutionContext, + InitializeHandler, + InitializeHandlerOptions, + MetadataBearer, + Pluggable, +} from "@smithy/types"; +export declare const loggerMiddleware: () => < + Output extends MetadataBearer = MetadataBearer +>( + next: InitializeHandler, + context: HandlerExecutionContext +) => InitializeHandler; +export declare const loggerMiddlewareOptions: InitializeHandlerOptions & + AbsoluteLocation; +export declare const getLoggerPlugin: (options: any) => Pluggable; diff --git a/node_modules/@aws-sdk/middleware-logger/package.json b/node_modules/@aws-sdk/middleware-logger/package.json new file mode 100644 index 00000000..7187da95 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-logger/package.json @@ -0,0 +1,58 @@ +{ + "name": "@aws-sdk/middleware-logger", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-logger", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn 
g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "email": "", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-logger", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-logger" + } +} diff --git a/node_modules/@aws-sdk/middleware-recursion-detection/LICENSE b/node_modules/@aws-sdk/middleware-recursion-detection/LICENSE new file mode 100644 index 00000000..e907b586 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-recursion-detection/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
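Rounding out the middleware-logger package above: loggerMiddleware sits at the "initialize" step, awaits the next handler, and then reports the command's input and output through `context.logger`, applying the context's sensitive-log filters and splitting `$metadata` out of the output (on failure it logs through `logger.error` instead and rethrows). A minimal sketch with a hand-built context; the client/command names, identity filter functions, and stub handler stand in for what a real client would supply:

import { loggerMiddleware } from "@aws-sdk/middleware-logger";

async function demo() {
  const context = {
    clientName: "S3Client",          // illustrative names
    commandName: "GetObjectCommand",
    logger: { info: console.log, error: console.error },
    // Real clients install filters that redact sensitive fields; identity
    // functions keep the sketch short.
    inputFilterSensitiveLog: (input: unknown) => input,
    outputFilterSensitiveLog: (output: unknown) => output,
  } as any;

  // Stub terminal handler returning a metadata-bearing output.
  const handler = loggerMiddleware()(
    async () => ({
      output: { $metadata: { httpStatusCode: 200 }, Body: "..." },
      response: {},
    } as any),
    context
  );

  // Logs { clientName, commandName, input, output (minus $metadata), metadata }.
  await handler({ input: { Bucket: "demo-bucket", Key: "file.txt" } } as any);
}

demo();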
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@aws-sdk/middleware-recursion-detection/README.md b/node_modules/@aws-sdk/middleware-recursion-detection/README.md new file mode 100644 index 00000000..2d5437e0 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-recursion-detection/README.md @@ -0,0 +1,10 @@ +# @aws-sdk/middleware-recursion-detection + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-recursion-detection/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-recursion-detection) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-recursion-detection.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-recursion-detection) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/node_modules/@aws-sdk/middleware-recursion-detection/dist-cjs/index.js b/node_modules/@aws-sdk/middleware-recursion-detection/dist-cjs/index.js new file mode 100644 index 00000000..a3876874 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-recursion-detection/dist-cjs/index.js @@ -0,0 +1,72 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + addRecursionDetectionMiddlewareOptions: () => addRecursionDetectionMiddlewareOptions, + getRecursionDetectionPlugin: () => getRecursionDetectionPlugin, + recursionDetectionMiddleware: () => recursionDetectionMiddleware +}); +module.exports = __toCommonJS(index_exports); +var import_protocol_http = require("@smithy/protocol-http"); +var TRACE_ID_HEADER_NAME = "X-Amzn-Trace-Id"; +var ENV_LAMBDA_FUNCTION_NAME = "AWS_LAMBDA_FUNCTION_NAME"; +var ENV_TRACE_ID = "_X_AMZN_TRACE_ID"; +var recursionDetectionMiddleware = /* @__PURE__ */ __name((options) => (next) => async (args) => { + const { request } = args; + if (!import_protocol_http.HttpRequest.isInstance(request) || options.runtime !== "node") { + return next(args); + } + const traceIdHeader = Object.keys(request.headers ?? {}).find((h) => h.toLowerCase() === TRACE_ID_HEADER_NAME.toLowerCase()) ?? 
TRACE_ID_HEADER_NAME; + if (request.headers.hasOwnProperty(traceIdHeader)) { + return next(args); + } + const functionName = process.env[ENV_LAMBDA_FUNCTION_NAME]; + const traceId = process.env[ENV_TRACE_ID]; + const nonEmptyString = /* @__PURE__ */ __name((str) => typeof str === "string" && str.length > 0, "nonEmptyString"); + if (nonEmptyString(functionName) && nonEmptyString(traceId)) { + request.headers[TRACE_ID_HEADER_NAME] = traceId; + } + return next({ + ...args, + request + }); +}, "recursionDetectionMiddleware"); +var addRecursionDetectionMiddlewareOptions = { + step: "build", + tags: ["RECURSION_DETECTION"], + name: "recursionDetectionMiddleware", + override: true, + priority: "low" +}; +var getRecursionDetectionPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(recursionDetectionMiddleware(options), addRecursionDetectionMiddlewareOptions); + }, "applyToStack") +}), "getRecursionDetectionPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + recursionDetectionMiddleware, + addRecursionDetectionMiddlewareOptions, + getRecursionDetectionPlugin +}); + diff --git a/node_modules/@aws-sdk/middleware-recursion-detection/dist-es/index.js b/node_modules/@aws-sdk/middleware-recursion-detection/dist-es/index.js new file mode 100644 index 00000000..8ac47480 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-recursion-detection/dist-es/index.js @@ -0,0 +1,37 @@ +import { HttpRequest } from "@smithy/protocol-http"; +const TRACE_ID_HEADER_NAME = "X-Amzn-Trace-Id"; +const ENV_LAMBDA_FUNCTION_NAME = "AWS_LAMBDA_FUNCTION_NAME"; +const ENV_TRACE_ID = "_X_AMZN_TRACE_ID"; +export const recursionDetectionMiddleware = (options) => (next) => async (args) => { + const { request } = args; + if (!HttpRequest.isInstance(request) || options.runtime !== "node") { + return next(args); + } + const traceIdHeader = Object.keys(request.headers ?? {}).find((h) => h.toLowerCase() === TRACE_ID_HEADER_NAME.toLowerCase()) ?? + TRACE_ID_HEADER_NAME; + if (request.headers.hasOwnProperty(traceIdHeader)) { + return next(args); + } + const functionName = process.env[ENV_LAMBDA_FUNCTION_NAME]; + const traceId = process.env[ENV_TRACE_ID]; + const nonEmptyString = (str) => typeof str === "string" && str.length > 0; + if (nonEmptyString(functionName) && nonEmptyString(traceId)) { + request.headers[TRACE_ID_HEADER_NAME] = traceId; + } + return next({ + ...args, + request, + }); +}; +export const addRecursionDetectionMiddlewareOptions = { + step: "build", + tags: ["RECURSION_DETECTION"], + name: "recursionDetectionMiddleware", + override: true, + priority: "low", +}; +export const getRecursionDetectionPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(recursionDetectionMiddleware(options), addRecursionDetectionMiddlewareOptions); + }, +}); diff --git a/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/index.d.ts b/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/index.d.ts new file mode 100644 index 00000000..9f929840 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/index.d.ts @@ -0,0 +1,18 @@ +import { AbsoluteLocation, BuildHandlerOptions, BuildMiddleware, Pluggable } from "@smithy/types"; +interface PreviouslyResolved { + runtime: string; +} +/** + * Inject to trace ID to request header to detect recursion invocation in Lambda. 
+ * @internal + */ +export declare const recursionDetectionMiddleware: (options: PreviouslyResolved) => BuildMiddleware; +/** + * @internal + */ +export declare const addRecursionDetectionMiddlewareOptions: BuildHandlerOptions & AbsoluteLocation; +/** + * @internal + */ +export declare const getRecursionDetectionPlugin: (options: PreviouslyResolved) => Pluggable; +export {}; diff --git a/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..8d1658be --- /dev/null +++ b/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/ts3.4/index.d.ts @@ -0,0 +1,18 @@ +import { + AbsoluteLocation, + BuildHandlerOptions, + BuildMiddleware, + Pluggable, +} from "@smithy/types"; +interface PreviouslyResolved { + runtime: string; +} +export declare const recursionDetectionMiddleware: ( + options: PreviouslyResolved +) => BuildMiddleware; +export declare const addRecursionDetectionMiddlewareOptions: BuildHandlerOptions & + AbsoluteLocation; +export declare const getRecursionDetectionPlugin: ( + options: PreviouslyResolved +) => Pluggable; +export {}; diff --git a/node_modules/@aws-sdk/middleware-recursion-detection/package.json b/node_modules/@aws-sdk/middleware-recursion-detection/package.json new file mode 100644 index 00000000..7c831f90 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-recursion-detection/package.json @@ -0,0 +1,57 @@ +{ + "name": "@aws-sdk/middleware-recursion-detection", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-recursion-detection", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-recursion-detection", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-recursion-detection" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + } +} diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/LICENSE b/node_modules/@aws-sdk/middleware-sdk-s3/LICENSE new file mode 100644 index 00000000..e907b586 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-sdk-s3/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + 
http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/README.md b/node_modules/@aws-sdk/middleware-sdk-s3/README.md new file mode 100644 index 00000000..81639449 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-sdk-s3/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/middleware-sdk-s3 + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-sdk-s3/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-sdk-s3) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-sdk-s3.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-sdk-s3) diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-cjs/index.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-cjs/index.js new file mode 100644 index 00000000..5debf5fa --- /dev/null +++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-cjs/index.js @@ -0,0 +1,696 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_OPTIONS: () => NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_OPTIONS, + S3ExpressIdentityCache: () => S3ExpressIdentityCache, + S3ExpressIdentityCacheEntry: () => S3ExpressIdentityCacheEntry, + S3ExpressIdentityProviderImpl: () => S3ExpressIdentityProviderImpl, + SignatureV4S3Express: () => SignatureV4S3Express, + checkContentLengthHeader: () => checkContentLengthHeader, + checkContentLengthHeaderMiddlewareOptions: () => checkContentLengthHeaderMiddlewareOptions, + getCheckContentLengthHeaderPlugin: () => getCheckContentLengthHeaderPlugin, + getRegionRedirectMiddlewarePlugin: () => getRegionRedirectMiddlewarePlugin, + getS3ExpiresMiddlewarePlugin: () => getS3ExpiresMiddlewarePlugin, + getS3ExpressHttpSigningPlugin: () => getS3ExpressHttpSigningPlugin, + getS3ExpressPlugin: () => getS3ExpressPlugin, + getThrow200ExceptionsPlugin: () => getThrow200ExceptionsPlugin, + getValidateBucketNamePlugin: () => getValidateBucketNamePlugin, + regionRedirectEndpointMiddleware: () => regionRedirectEndpointMiddleware, + regionRedirectEndpointMiddlewareOptions: () => regionRedirectEndpointMiddlewareOptions, + regionRedirectMiddleware: () => regionRedirectMiddleware, + regionRedirectMiddlewareOptions: () => regionRedirectMiddlewareOptions, + resolveS3Config: () => resolveS3Config, + s3ExpiresMiddleware: () => 
s3ExpiresMiddleware, + s3ExpiresMiddlewareOptions: () => s3ExpiresMiddlewareOptions, + s3ExpressHttpSigningMiddleware: () => s3ExpressHttpSigningMiddleware, + s3ExpressHttpSigningMiddlewareOptions: () => s3ExpressHttpSigningMiddlewareOptions, + s3ExpressMiddleware: () => s3ExpressMiddleware, + s3ExpressMiddlewareOptions: () => s3ExpressMiddlewareOptions, + throw200ExceptionsMiddleware: () => throw200ExceptionsMiddleware, + throw200ExceptionsMiddlewareOptions: () => throw200ExceptionsMiddlewareOptions, + validateBucketNameMiddleware: () => validateBucketNameMiddleware, + validateBucketNameMiddlewareOptions: () => validateBucketNameMiddlewareOptions +}); +module.exports = __toCommonJS(index_exports); + +// src/check-content-length-header.ts +var import_protocol_http = require("@smithy/protocol-http"); +var import_smithy_client = require("@smithy/smithy-client"); +var CONTENT_LENGTH_HEADER = "content-length"; +var DECODED_CONTENT_LENGTH_HEADER = "x-amz-decoded-content-length"; +function checkContentLengthHeader() { + return (next, context) => async (args) => { + const { request } = args; + if (import_protocol_http.HttpRequest.isInstance(request)) { + if (!(CONTENT_LENGTH_HEADER in request.headers) && !(DECODED_CONTENT_LENGTH_HEADER in request.headers)) { + const message = `Are you using a Stream of unknown length as the Body of a PutObject request? Consider using Upload instead from @aws-sdk/lib-storage.`; + if (typeof context?.logger?.warn === "function" && !(context.logger instanceof import_smithy_client.NoOpLogger)) { + context.logger.warn(message); + } else { + console.warn(message); + } + } + } + return next({ ...args }); + }; +} +__name(checkContentLengthHeader, "checkContentLengthHeader"); +var checkContentLengthHeaderMiddlewareOptions = { + step: "finalizeRequest", + tags: ["CHECK_CONTENT_LENGTH_HEADER"], + name: "getCheckContentLengthHeaderPlugin", + override: true +}; +var getCheckContentLengthHeaderPlugin = /* @__PURE__ */ __name((unused) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(checkContentLengthHeader(), checkContentLengthHeaderMiddlewareOptions); + }, "applyToStack") +}), "getCheckContentLengthHeaderPlugin"); + +// src/region-redirect-endpoint-middleware.ts +var regionRedirectEndpointMiddleware = /* @__PURE__ */ __name((config) => { + return (next, context) => async (args) => { + const originalRegion = await config.region(); + const regionProviderRef = config.region; + let unlock = /* @__PURE__ */ __name(() => { + }, "unlock"); + if (context.__s3RegionRedirect) { + Object.defineProperty(config, "region", { + writable: false, + value: /* @__PURE__ */ __name(async () => { + return context.__s3RegionRedirect; + }, "value") + }); + unlock = /* @__PURE__ */ __name(() => Object.defineProperty(config, "region", { + writable: true, + value: regionProviderRef + }), "unlock"); + } + try { + const result = await next(args); + if (context.__s3RegionRedirect) { + unlock(); + const region = await config.region(); + if (originalRegion !== region) { + throw new Error("Region was not restored following S3 region redirect."); + } + } + return result; + } catch (e) { + unlock(); + throw e; + } + }; +}, "regionRedirectEndpointMiddleware"); +var regionRedirectEndpointMiddlewareOptions = { + tags: ["REGION_REDIRECT", "S3"], + name: "regionRedirectEndpointMiddleware", + override: true, + relation: "before", + toMiddleware: "endpointV2Middleware" +}; + +// src/region-redirect-middleware.ts +function regionRedirectMiddleware(clientConfig) { + return (next, 
context) => async (args) => { + try { + return await next(args); + } catch (err) { + if (clientConfig.followRegionRedirects) { + if (err?.$metadata?.httpStatusCode === 301 || // err.name === "PermanentRedirect" && --> removing the error name check, as that allows for HEAD operations (which have the 301 status code, but not the same error name) to be covered for region redirection as well + err?.$metadata?.httpStatusCode === 400 && err?.name === "IllegalLocationConstraintException") { + try { + const actualRegion = err.$response.headers["x-amz-bucket-region"]; + context.logger?.debug(`Redirecting from ${await clientConfig.region()} to ${actualRegion}`); + context.__s3RegionRedirect = actualRegion; + } catch (e) { + throw new Error("Region redirect failed: " + e); + } + return next(args); + } + } + throw err; + } + }; +} +__name(regionRedirectMiddleware, "regionRedirectMiddleware"); +var regionRedirectMiddlewareOptions = { + step: "initialize", + tags: ["REGION_REDIRECT", "S3"], + name: "regionRedirectMiddleware", + override: true +}; +var getRegionRedirectMiddlewarePlugin = /* @__PURE__ */ __name((clientConfig) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(regionRedirectMiddleware(clientConfig), regionRedirectMiddlewareOptions); + clientStack.addRelativeTo(regionRedirectEndpointMiddleware(clientConfig), regionRedirectEndpointMiddlewareOptions); + }, "applyToStack") +}), "getRegionRedirectMiddlewarePlugin"); + +// src/s3-expires-middleware.ts + + +var s3ExpiresMiddleware = /* @__PURE__ */ __name((config) => { + return (next, context) => async (args) => { + const result = await next(args); + const { response } = result; + if (import_protocol_http.HttpResponse.isInstance(response)) { + if (response.headers.expires) { + response.headers.expiresstring = response.headers.expires; + try { + (0, import_smithy_client.parseRfc7231DateTime)(response.headers.expires); + } catch (e) { + context.logger?.warn( + `AWS SDK Warning for ${context.clientName}::${context.commandName} response parsing (${response.headers.expires}): ${e}` + ); + delete response.headers.expires; + } + } + } + return result; + }; +}, "s3ExpiresMiddleware"); +var s3ExpiresMiddlewareOptions = { + tags: ["S3"], + name: "s3ExpiresMiddleware", + override: true, + relation: "after", + toMiddleware: "deserializerMiddleware" +}; +var getS3ExpiresMiddlewarePlugin = /* @__PURE__ */ __name((clientConfig) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.addRelativeTo(s3ExpiresMiddleware(clientConfig), s3ExpiresMiddlewareOptions); + }, "applyToStack") +}), "getS3ExpiresMiddlewarePlugin"); + +// src/s3-express/classes/S3ExpressIdentityCache.ts +var S3ExpressIdentityCache = class _S3ExpressIdentityCache { + constructor(data = {}) { + this.data = data; + } + static { + __name(this, "S3ExpressIdentityCache"); + } + lastPurgeTime = Date.now(); + static EXPIRED_CREDENTIAL_PURGE_INTERVAL_MS = 3e4; + get(key) { + const entry = this.data[key]; + if (!entry) { + return; + } + return entry; + } + set(key, entry) { + this.data[key] = entry; + return entry; + } + delete(key) { + delete this.data[key]; + } + async purgeExpired() { + const now = Date.now(); + if (this.lastPurgeTime + _S3ExpressIdentityCache.EXPIRED_CREDENTIAL_PURGE_INTERVAL_MS > now) { + return; + } + for (const key in this.data) { + const entry = this.data[key]; + if (!entry.isRefreshing) { + const credential = await entry.identity; + if (credential.expiration) { + if (credential.expiration.getTime() < now) { + delete 
this.data[key]; + } + } + } + } + } +}; + +// src/s3-express/classes/S3ExpressIdentityCacheEntry.ts +var S3ExpressIdentityCacheEntry = class { + /** + * @param identity - stored identity. + * @param accessed - timestamp of last access in epoch ms. + * @param isRefreshing - this key is currently in the process of being refreshed (background). + */ + constructor(_identity, isRefreshing = false, accessed = Date.now()) { + this._identity = _identity; + this.isRefreshing = isRefreshing; + this.accessed = accessed; + } + static { + __name(this, "S3ExpressIdentityCacheEntry"); + } + get identity() { + this.accessed = Date.now(); + return this._identity; + } +}; + +// src/s3-express/classes/S3ExpressIdentityProviderImpl.ts +var S3ExpressIdentityProviderImpl = class _S3ExpressIdentityProviderImpl { + constructor(createSessionFn, cache = new S3ExpressIdentityCache()) { + this.createSessionFn = createSessionFn; + this.cache = cache; + } + static { + __name(this, "S3ExpressIdentityProviderImpl"); + } + static REFRESH_WINDOW_MS = 6e4; + async getS3ExpressIdentity(awsIdentity, identityProperties) { + const key = identityProperties.Bucket; + const { cache } = this; + const entry = cache.get(key); + if (entry) { + return entry.identity.then((identity) => { + const isExpired = (identity.expiration?.getTime() ?? 0) < Date.now(); + if (isExpired) { + return cache.set(key, new S3ExpressIdentityCacheEntry(this.getIdentity(key))).identity; + } + const isExpiringSoon = (identity.expiration?.getTime() ?? 0) < Date.now() + _S3ExpressIdentityProviderImpl.REFRESH_WINDOW_MS; + if (isExpiringSoon && !entry.isRefreshing) { + entry.isRefreshing = true; + this.getIdentity(key).then((id) => { + cache.set(key, new S3ExpressIdentityCacheEntry(Promise.resolve(id))); + }); + } + return identity; + }); + } + return cache.set(key, new S3ExpressIdentityCacheEntry(this.getIdentity(key))).identity; + } + async getIdentity(key) { + await this.cache.purgeExpired().catch((error) => { + console.warn("Error while clearing expired entries in S3ExpressIdentityCache: \n" + error); + }); + const session = await this.createSessionFn(key); + if (!session.Credentials?.AccessKeyId || !session.Credentials?.SecretAccessKey) { + throw new Error("s3#createSession response credential missing AccessKeyId or SecretAccessKey."); + } + const identity = { + accessKeyId: session.Credentials.AccessKeyId, + secretAccessKey: session.Credentials.SecretAccessKey, + sessionToken: session.Credentials.SessionToken, + expiration: session.Credentials.Expiration ? 
new Date(session.Credentials.Expiration) : void 0 + }; + return identity; + } +}; + +// src/s3-express/classes/SignatureV4S3Express.ts +var import_signature_v4 = require("@smithy/signature-v4"); + +// src/s3-express/constants.ts +var import_util_config_provider = require("@smithy/util-config-provider"); +var S3_EXPRESS_BUCKET_TYPE = "Directory"; +var S3_EXPRESS_BACKEND = "S3Express"; +var S3_EXPRESS_AUTH_SCHEME = "sigv4-s3express"; +var SESSION_TOKEN_QUERY_PARAM = "X-Amz-S3session-Token"; +var SESSION_TOKEN_HEADER = SESSION_TOKEN_QUERY_PARAM.toLowerCase(); +var NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_ENV_NAME = "AWS_S3_DISABLE_EXPRESS_SESSION_AUTH"; +var NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_INI_NAME = "s3_disable_express_session_auth"; +var NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_OPTIONS = { + environmentVariableSelector: /* @__PURE__ */ __name((env) => (0, import_util_config_provider.booleanSelector)(env, NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_ENV_NAME, import_util_config_provider.SelectorType.ENV), "environmentVariableSelector"), + configFileSelector: /* @__PURE__ */ __name((profile) => (0, import_util_config_provider.booleanSelector)(profile, NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_INI_NAME, import_util_config_provider.SelectorType.CONFIG), "configFileSelector"), + default: false +}; + +// src/s3-express/classes/SignatureV4S3Express.ts +var SignatureV4S3Express = class extends import_signature_v4.SignatureV4 { + static { + __name(this, "SignatureV4S3Express"); + } + /** + * Signs with alternate provided credentials instead of those provided in the + * constructor. + * + * Additionally omits the credential sessionToken and assigns it to the + * alternate header field for S3 Express. + */ + async signWithCredentials(requestToSign, credentials, options) { + const credentialsWithoutSessionToken = getCredentialsWithoutSessionToken(credentials); + requestToSign.headers[SESSION_TOKEN_HEADER] = credentials.sessionToken; + const privateAccess = this; + setSingleOverride(privateAccess, credentialsWithoutSessionToken); + return privateAccess.signRequest(requestToSign, options ?? {}); + } + /** + * Similar to {@link SignatureV4S3Express#signWithCredentials} but for presigning. + */ + async presignWithCredentials(requestToSign, credentials, options) { + const credentialsWithoutSessionToken = getCredentialsWithoutSessionToken(credentials); + delete requestToSign.headers[SESSION_TOKEN_HEADER]; + requestToSign.headers[SESSION_TOKEN_QUERY_PARAM] = credentials.sessionToken; + requestToSign.query = requestToSign.query ?? 
{}; + requestToSign.query[SESSION_TOKEN_QUERY_PARAM] = credentials.sessionToken; + const privateAccess = this; + setSingleOverride(privateAccess, credentialsWithoutSessionToken); + return this.presign(requestToSign, options); + } +}; +function getCredentialsWithoutSessionToken(credentials) { + const credentialsWithoutSessionToken = { + accessKeyId: credentials.accessKeyId, + secretAccessKey: credentials.secretAccessKey, + expiration: credentials.expiration + }; + return credentialsWithoutSessionToken; +} +__name(getCredentialsWithoutSessionToken, "getCredentialsWithoutSessionToken"); +function setSingleOverride(privateAccess, credentialsWithoutSessionToken) { + const id = setTimeout(() => { + throw new Error("SignatureV4S3Express credential override was created but not called."); + }, 10); + const currentCredentialProvider = privateAccess.credentialProvider; + const overrideCredentialsProviderOnce = /* @__PURE__ */ __name(() => { + clearTimeout(id); + privateAccess.credentialProvider = currentCredentialProvider; + return Promise.resolve(credentialsWithoutSessionToken); + }, "overrideCredentialsProviderOnce"); + privateAccess.credentialProvider = overrideCredentialsProviderOnce; +} +__name(setSingleOverride, "setSingleOverride"); + +// src/s3-express/functions/s3ExpressMiddleware.ts +var import_core = require("@aws-sdk/core"); + +var s3ExpressMiddleware = /* @__PURE__ */ __name((options) => { + return (next, context) => async (args) => { + if (context.endpointV2) { + const endpoint = context.endpointV2; + const isS3ExpressAuth = endpoint.properties?.authSchemes?.[0]?.name === S3_EXPRESS_AUTH_SCHEME; + const isS3ExpressBucket = endpoint.properties?.backend === S3_EXPRESS_BACKEND || endpoint.properties?.bucketType === S3_EXPRESS_BUCKET_TYPE; + if (isS3ExpressBucket) { + (0, import_core.setFeature)(context, "S3_EXPRESS_BUCKET", "J"); + context.isS3ExpressBucket = true; + } + if (isS3ExpressAuth) { + const requestBucket = args.input.Bucket; + if (requestBucket) { + const s3ExpressIdentity = await options.s3ExpressIdentityProvider.getS3ExpressIdentity( + await options.credentials(), + { + Bucket: requestBucket + } + ); + context.s3ExpressIdentity = s3ExpressIdentity; + if (import_protocol_http.HttpRequest.isInstance(args.request) && s3ExpressIdentity.sessionToken) { + args.request.headers[SESSION_TOKEN_HEADER] = s3ExpressIdentity.sessionToken; + } + } + } + } + return next(args); + }; +}, "s3ExpressMiddleware"); +var s3ExpressMiddlewareOptions = { + name: "s3ExpressMiddleware", + step: "build", + tags: ["S3", "S3_EXPRESS"], + override: true +}; +var getS3ExpressPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(s3ExpressMiddleware(options), s3ExpressMiddlewareOptions); + }, "applyToStack") +}), "getS3ExpressPlugin"); + +// src/s3-express/functions/s3ExpressHttpSigningMiddleware.ts +var import_core2 = require("@smithy/core"); + +var import_util_middleware = require("@smithy/util-middleware"); + +// src/s3-express/functions/signS3Express.ts +var signS3Express = /* @__PURE__ */ __name(async (s3ExpressIdentity, signingOptions, request, sigV4MultiRegionSigner) => { + const signedRequest = await sigV4MultiRegionSigner.signWithCredentials(request, s3ExpressIdentity, {}); + if (signedRequest.headers["X-Amz-Security-Token"] || signedRequest.headers["x-amz-security-token"]) { + throw new Error("X-Amz-Security-Token must not be set for s3-express requests."); + } + return signedRequest; +}, "signS3Express"); + +// 
src/s3-express/functions/s3ExpressHttpSigningMiddleware.ts +var defaultErrorHandler = /* @__PURE__ */ __name((signingProperties) => (error) => { + throw error; +}, "defaultErrorHandler"); +var defaultSuccessHandler = /* @__PURE__ */ __name((httpResponse, signingProperties) => { +}, "defaultSuccessHandler"); +var s3ExpressHttpSigningMiddlewareOptions = import_core2.httpSigningMiddlewareOptions; +var s3ExpressHttpSigningMiddleware = /* @__PURE__ */ __name((config) => (next, context) => async (args) => { + if (!import_protocol_http.HttpRequest.isInstance(args.request)) { + return next(args); + } + const smithyContext = (0, import_util_middleware.getSmithyContext)(context); + const scheme = smithyContext.selectedHttpAuthScheme; + if (!scheme) { + throw new Error(`No HttpAuthScheme was selected: unable to sign request`); + } + const { + httpAuthOption: { signingProperties = {} }, + identity, + signer + } = scheme; + let request; + if (context.s3ExpressIdentity) { + request = await signS3Express( + context.s3ExpressIdentity, + signingProperties, + args.request, + await config.signer() + ); + } else { + request = await signer.sign(args.request, identity, signingProperties); + } + const output = await next({ + ...args, + request + }).catch((signer.errorHandler || defaultErrorHandler)(signingProperties)); + (signer.successHandler || defaultSuccessHandler)(output.response, signingProperties); + return output; +}, "s3ExpressHttpSigningMiddleware"); +var getS3ExpressHttpSigningPlugin = /* @__PURE__ */ __name((config) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.addRelativeTo( + s3ExpressHttpSigningMiddleware(config), + import_core2.httpSigningMiddlewareOptions + ); + }, "applyToStack") +}), "getS3ExpressHttpSigningPlugin"); + +// src/s3Configuration.ts +var resolveS3Config = /* @__PURE__ */ __name((input, { + session +}) => { + const [s3ClientProvider, CreateSessionCommandCtor] = session; + const { + forcePathStyle, + useAccelerateEndpoint, + disableMultiregionAccessPoints, + followRegionRedirects, + s3ExpressIdentityProvider, + bucketEndpoint + } = input; + return Object.assign(input, { + forcePathStyle: forcePathStyle ?? false, + useAccelerateEndpoint: useAccelerateEndpoint ?? false, + disableMultiregionAccessPoints: disableMultiregionAccessPoints ?? false, + followRegionRedirects: followRegionRedirects ?? false, + s3ExpressIdentityProvider: s3ExpressIdentityProvider ?? new S3ExpressIdentityProviderImpl( + async (key) => s3ClientProvider().send( + new CreateSessionCommandCtor({ + Bucket: key + }) + ) + ), + bucketEndpoint: bucketEndpoint ?? 
false + }); +}, "resolveS3Config"); + +// src/throw-200-exceptions.ts + +var import_util_stream = require("@smithy/util-stream"); +var THROW_IF_EMPTY_BODY = { + CopyObjectCommand: true, + UploadPartCopyCommand: true, + CompleteMultipartUploadCommand: true +}; +var MAX_BYTES_TO_INSPECT = 3e3; +var throw200ExceptionsMiddleware = /* @__PURE__ */ __name((config) => (next, context) => async (args) => { + const result = await next(args); + const { response } = result; + if (!import_protocol_http.HttpResponse.isInstance(response)) { + return result; + } + const { statusCode, body: sourceBody } = response; + if (statusCode < 200 || statusCode >= 300) { + return result; + } + const isSplittableStream = typeof sourceBody?.stream === "function" || typeof sourceBody?.pipe === "function" || typeof sourceBody?.tee === "function"; + if (!isSplittableStream) { + return result; + } + let bodyCopy = sourceBody; + let body = sourceBody; + if (sourceBody && typeof sourceBody === "object" && !(sourceBody instanceof Uint8Array)) { + [bodyCopy, body] = await (0, import_util_stream.splitStream)(sourceBody); + } + response.body = body; + const bodyBytes = await collectBody(bodyCopy, { + streamCollector: /* @__PURE__ */ __name(async (stream) => { + return (0, import_util_stream.headStream)(stream, MAX_BYTES_TO_INSPECT); + }, "streamCollector") + }); + if (typeof bodyCopy?.destroy === "function") { + bodyCopy.destroy(); + } + const bodyStringTail = config.utf8Encoder(bodyBytes.subarray(bodyBytes.length - 16)); + if (bodyBytes.length === 0 && THROW_IF_EMPTY_BODY[context.commandName]) { + const err = new Error("S3 aborted request"); + err.name = "InternalError"; + throw err; + } + if (bodyStringTail && bodyStringTail.endsWith("</Error>")) { + response.statusCode = 400; + } + return result; +}, "throw200ExceptionsMiddleware"); +var collectBody = /* @__PURE__ */ __name((streamBody = new Uint8Array(), context) => { + if (streamBody instanceof Uint8Array) { + return Promise.resolve(streamBody); + } + return context.streamCollector(streamBody) || Promise.resolve(new Uint8Array()); +}, "collectBody"); +var throw200ExceptionsMiddlewareOptions = { + relation: "after", + toMiddleware: "deserializerMiddleware", + tags: ["THROW_200_EXCEPTIONS", "S3"], + name: "throw200ExceptionsMiddleware", + override: true +}; +var getThrow200ExceptionsPlugin = /* @__PURE__ */ __name((config) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.addRelativeTo(throw200ExceptionsMiddleware(config), throw200ExceptionsMiddlewareOptions); + }, "applyToStack") +}), "getThrow200ExceptionsPlugin"); + +// src/validate-bucket-name.ts +var import_util_arn_parser = require("@aws-sdk/util-arn-parser"); + +// src/bucket-endpoint-middleware.ts +function bucketEndpointMiddleware(options) { + return (next, context) => async (args) => { + if (options.bucketEndpoint) { + const endpoint = context.endpointV2; + if (endpoint) { + const bucket = args.input.Bucket; + if (typeof bucket === "string") { + try { + const bucketEndpointUrl = new URL(bucket); + context.endpointV2 = { + ...endpoint, + url: bucketEndpointUrl + }; + } catch (e) { + const warning = `@aws-sdk/middleware-sdk-s3: bucketEndpoint=true was set but Bucket=${bucket} could not be parsed as URL.`; + if (context.logger?.constructor?.name === "NoOpLogger") { + console.warn(warning); + } else { + context.logger?.warn?.(warning); + } + throw e; + } + } + } + } + return next(args); + }; +} +__name(bucketEndpointMiddleware, "bucketEndpointMiddleware"); +var bucketEndpointMiddlewareOptions = { + 
name: "bucketEndpointMiddleware", + override: true, + relation: "after", + toMiddleware: "endpointV2Middleware" +}; + +// src/validate-bucket-name.ts +function validateBucketNameMiddleware({ bucketEndpoint }) { + return (next) => async (args) => { + const { + input: { Bucket } + } = args; + if (!bucketEndpoint && typeof Bucket === "string" && !(0, import_util_arn_parser.validate)(Bucket) && Bucket.indexOf("/") >= 0) { + const err = new Error(`Bucket name shouldn't contain '/', received '${Bucket}'`); + err.name = "InvalidBucketName"; + throw err; + } + return next({ ...args }); + }; +} +__name(validateBucketNameMiddleware, "validateBucketNameMiddleware"); +var validateBucketNameMiddlewareOptions = { + step: "initialize", + tags: ["VALIDATE_BUCKET_NAME"], + name: "validateBucketNameMiddleware", + override: true +}; +var getValidateBucketNamePlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(validateBucketNameMiddleware(options), validateBucketNameMiddlewareOptions); + clientStack.addRelativeTo(bucketEndpointMiddleware(options), bucketEndpointMiddlewareOptions); + }, "applyToStack") +}), "getValidateBucketNamePlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + checkContentLengthHeader, + checkContentLengthHeaderMiddlewareOptions, + getCheckContentLengthHeaderPlugin, + regionRedirectEndpointMiddleware, + regionRedirectEndpointMiddlewareOptions, + regionRedirectMiddleware, + regionRedirectMiddlewareOptions, + getRegionRedirectMiddlewarePlugin, + s3ExpiresMiddleware, + s3ExpiresMiddlewareOptions, + getS3ExpiresMiddlewarePlugin, + S3ExpressIdentityCache, + S3ExpressIdentityCacheEntry, + S3ExpressIdentityProviderImpl, + SignatureV4S3Express, + NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_OPTIONS, + getS3ExpressPlugin, + s3ExpressMiddleware, + s3ExpressMiddlewareOptions, + getS3ExpressHttpSigningPlugin, + s3ExpressHttpSigningMiddleware, + s3ExpressHttpSigningMiddlewareOptions, + resolveS3Config, + throw200ExceptionsMiddleware, + throw200ExceptionsMiddlewareOptions, + getThrow200ExceptionsPlugin, + validateBucketNameMiddleware, + validateBucketNameMiddlewareOptions, + getValidateBucketNamePlugin +}); + diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/bucket-endpoint-middleware.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/bucket-endpoint-middleware.js new file mode 100644 index 00000000..1902fafe --- /dev/null +++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/bucket-endpoint-middleware.js @@ -0,0 +1,36 @@ +export function bucketEndpointMiddleware(options) { + return (next, context) => async (args) => { + if (options.bucketEndpoint) { + const endpoint = context.endpointV2; + if (endpoint) { + const bucket = args.input.Bucket; + if (typeof bucket === "string") { + try { + const bucketEndpointUrl = new URL(bucket); + context.endpointV2 = { + ...endpoint, + url: bucketEndpointUrl, + }; + } + catch (e) { + const warning = `@aws-sdk/middleware-sdk-s3: bucketEndpoint=true was set but Bucket=${bucket} could not be parsed as URL.`; + if (context.logger?.constructor?.name === "NoOpLogger") { + console.warn(warning); + } + else { + context.logger?.warn?.(warning); + } + throw e; + } + } + } + } + return next(args); + }; +} +export const bucketEndpointMiddlewareOptions = { + name: "bucketEndpointMiddleware", + override: true, + relation: "after", + toMiddleware: "endpointV2Middleware", +}; diff --git 
a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/check-content-length-header.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/check-content-length-header.js new file mode 100644 index 00000000..87498fe0 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/check-content-length-header.js @@ -0,0 +1,32 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { NoOpLogger } from "@smithy/smithy-client"; +const CONTENT_LENGTH_HEADER = "content-length"; +const DECODED_CONTENT_LENGTH_HEADER = "x-amz-decoded-content-length"; +export function checkContentLengthHeader() { + return (next, context) => async (args) => { + const { request } = args; + if (HttpRequest.isInstance(request)) { + if (!(CONTENT_LENGTH_HEADER in request.headers) && !(DECODED_CONTENT_LENGTH_HEADER in request.headers)) { + const message = `Are you using a Stream of unknown length as the Body of a PutObject request? Consider using Upload instead from @aws-sdk/lib-storage.`; + if (typeof context?.logger?.warn === "function" && !(context.logger instanceof NoOpLogger)) { + context.logger.warn(message); + } + else { + console.warn(message); + } + } + } + return next({ ...args }); + }; +} +export const checkContentLengthHeaderMiddlewareOptions = { + step: "finalizeRequest", + tags: ["CHECK_CONTENT_LENGTH_HEADER"], + name: "getCheckContentLengthHeaderPlugin", + override: true, +}; +export const getCheckContentLengthHeaderPlugin = (unused) => ({ + applyToStack: (clientStack) => { + clientStack.add(checkContentLengthHeader(), checkContentLengthHeaderMiddlewareOptions); + }, +}); diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/index.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/index.js new file mode 100644 index 00000000..34f756c0 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/index.js @@ -0,0 +1,8 @@ +export * from "./check-content-length-header"; +export * from "./region-redirect-endpoint-middleware"; +export * from "./region-redirect-middleware"; +export * from "./s3-expires-middleware"; +export * from "./s3-express/index"; +export * from "./s3Configuration"; +export * from "./throw-200-exceptions"; +export * from "./validate-bucket-name"; diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/region-redirect-endpoint-middleware.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/region-redirect-endpoint-middleware.js new file mode 100644 index 00000000..cd727616 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/region-redirect-endpoint-middleware.js @@ -0,0 +1,41 @@ +export const regionRedirectEndpointMiddleware = (config) => { + return (next, context) => async (args) => { + const originalRegion = await config.region(); + const regionProviderRef = config.region; + let unlock = () => { }; + if (context.__s3RegionRedirect) { + Object.defineProperty(config, "region", { + writable: false, + value: async () => { + return context.__s3RegionRedirect; + }, + }); + unlock = () => Object.defineProperty(config, "region", { + writable: true, + value: regionProviderRef, + }); + } + try { + const result = await next(args); + if (context.__s3RegionRedirect) { + unlock(); + const region = await config.region(); + if (originalRegion !== region) { + throw new Error("Region was not restored following S3 region redirect."); + } + } + return result; + } + catch (e) { + unlock(); + throw e; + } + }; +}; +export const regionRedirectEndpointMiddlewareOptions = { + tags: ["REGION_REDIRECT", "S3"], + name: "regionRedirectEndpointMiddleware", + override: true, + 
relation: "before", + toMiddleware: "endpointV2Middleware", +}; diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/region-redirect-middleware.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/region-redirect-middleware.js new file mode 100644 index 00000000..60427873 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/region-redirect-middleware.js @@ -0,0 +1,37 @@ +import { regionRedirectEndpointMiddleware, regionRedirectEndpointMiddlewareOptions, } from "./region-redirect-endpoint-middleware"; +export function regionRedirectMiddleware(clientConfig) { + return (next, context) => async (args) => { + try { + return await next(args); + } + catch (err) { + if (clientConfig.followRegionRedirects) { + if (err?.$metadata?.httpStatusCode === 301 || + (err?.$metadata?.httpStatusCode === 400 && err?.name === "IllegalLocationConstraintException")) { + try { + const actualRegion = err.$response.headers["x-amz-bucket-region"]; + context.logger?.debug(`Redirecting from ${await clientConfig.region()} to ${actualRegion}`); + context.__s3RegionRedirect = actualRegion; + } + catch (e) { + throw new Error("Region redirect failed: " + e); + } + return next(args); + } + } + throw err; + } + }; +} +export const regionRedirectMiddlewareOptions = { + step: "initialize", + tags: ["REGION_REDIRECT", "S3"], + name: "regionRedirectMiddleware", + override: true, +}; +export const getRegionRedirectMiddlewarePlugin = (clientConfig) => ({ + applyToStack: (clientStack) => { + clientStack.add(regionRedirectMiddleware(clientConfig), regionRedirectMiddlewareOptions); + clientStack.addRelativeTo(regionRedirectEndpointMiddleware(clientConfig), regionRedirectEndpointMiddlewareOptions); + }, +}); diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-expires-middleware.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-expires-middleware.js new file mode 100644 index 00000000..1256487a --- /dev/null +++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-expires-middleware.js @@ -0,0 +1,33 @@ +import { HttpResponse } from "@smithy/protocol-http"; +import { parseRfc7231DateTime } from "@smithy/smithy-client"; +export const s3ExpiresMiddleware = (config) => { + return (next, context) => async (args) => { + const result = await next(args); + const { response } = result; + if (HttpResponse.isInstance(response)) { + if (response.headers.expires) { + response.headers.expiresstring = response.headers.expires; + try { + parseRfc7231DateTime(response.headers.expires); + } + catch (e) { + context.logger?.warn(`AWS SDK Warning for ${context.clientName}::${context.commandName} response parsing (${response.headers.expires}): ${e}`); + delete response.headers.expires; + } + } + } + return result; + }; +}; +export const s3ExpiresMiddlewareOptions = { + tags: ["S3"], + name: "s3ExpiresMiddleware", + override: true, + relation: "after", + toMiddleware: "deserializerMiddleware", +}; +export const getS3ExpiresMiddlewarePlugin = (clientConfig) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(s3ExpiresMiddleware(clientConfig), s3ExpiresMiddlewareOptions); + }, +}); diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/classes/S3ExpressIdentityCache.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/classes/S3ExpressIdentityCache.js new file mode 100644 index 00000000..30724a1d --- /dev/null +++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/classes/S3ExpressIdentityCache.js @@ -0,0 +1,39 @@ +export class S3ExpressIdentityCache { + data; + 
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/classes/S3ExpressIdentityCache.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/classes/S3ExpressIdentityCache.js
new file mode 100644
index 00000000..30724a1d
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/classes/S3ExpressIdentityCache.js
@@ -0,0 +1,39 @@
+export class S3ExpressIdentityCache {
+    data;
+    lastPurgeTime = Date.now();
+    static EXPIRED_CREDENTIAL_PURGE_INTERVAL_MS = 30000;
+    constructor(data = {}) {
+        this.data = data;
+    }
+    get(key) {
+        const entry = this.data[key];
+        if (!entry) {
+            return;
+        }
+        return entry;
+    }
+    set(key, entry) {
+        this.data[key] = entry;
+        return entry;
+    }
+    delete(key) {
+        delete this.data[key];
+    }
+    async purgeExpired() {
+        const now = Date.now();
+        if (this.lastPurgeTime + S3ExpressIdentityCache.EXPIRED_CREDENTIAL_PURGE_INTERVAL_MS > now) {
+            return;
+        }
+        for (const key in this.data) {
+            const entry = this.data[key];
+            if (!entry.isRefreshing) {
+                const credential = await entry.identity;
+                if (credential.expiration) {
+                    if (credential.expiration.getTime() < now) {
+                        delete this.data[key];
+                    }
+                }
+            }
+        }
+    }
+}
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/classes/S3ExpressIdentityCacheEntry.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/classes/S3ExpressIdentityCacheEntry.js
new file mode 100644
index 00000000..30b284e5
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/classes/S3ExpressIdentityCacheEntry.js
@@ -0,0 +1,14 @@
+export class S3ExpressIdentityCacheEntry {
+    _identity;
+    isRefreshing;
+    accessed;
+    constructor(_identity, isRefreshing = false, accessed = Date.now()) {
+        this._identity = _identity;
+        this.isRefreshing = isRefreshing;
+        this.accessed = accessed;
+    }
+    get identity() {
+        this.accessed = Date.now();
+        return this._identity;
+    }
+}
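Note: S3ExpressIdentityCache and S3ExpressIdentityCacheEntry are exported from the package index, so their interaction can be sketched directly; the directory-bucket name and credential values below are hypothetical:

import {
  S3ExpressIdentityCache,
  S3ExpressIdentityCacheEntry,
} from "@aws-sdk/middleware-sdk-s3";

// Entries wrap a Promise of credentials; reading .identity refreshes the
// accessed timestamp, and purgeExpired() runs at most once per 30 seconds.
const cache = new S3ExpressIdentityCache();
cache.set(
  "my-bucket--use1-az4--x-s3",
  new S3ExpressIdentityCacheEntry(
    Promise.resolve({
      accessKeyId: "AKIDEXAMPLE",
      secretAccessKey: "secretExample",
      expiration: new Date(Date.now() + 5 * 60_000),
    })
  )
);
const entry = cache.get("my-bucket--use1-az4--x-s3");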
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/classes/S3ExpressIdentityProviderImpl.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/classes/S3ExpressIdentityProviderImpl.js
new file mode 100644
index 00000000..481c1891
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/classes/S3ExpressIdentityProviderImpl.js
@@ -0,0 +1,49 @@
+import { S3ExpressIdentityCache } from "./S3ExpressIdentityCache";
+import { S3ExpressIdentityCacheEntry } from "./S3ExpressIdentityCacheEntry";
+export class S3ExpressIdentityProviderImpl {
+    createSessionFn;
+    cache;
+    static REFRESH_WINDOW_MS = 60000;
+    constructor(createSessionFn, cache = new S3ExpressIdentityCache()) {
+        this.createSessionFn = createSessionFn;
+        this.cache = cache;
+    }
+    async getS3ExpressIdentity(awsIdentity, identityProperties) {
+        const key = identityProperties.Bucket;
+        const { cache } = this;
+        const entry = cache.get(key);
+        if (entry) {
+            return entry.identity.then((identity) => {
+                const isExpired = (identity.expiration?.getTime() ?? 0) < Date.now();
+                if (isExpired) {
+                    return cache.set(key, new S3ExpressIdentityCacheEntry(this.getIdentity(key))).identity;
+                }
+                const isExpiringSoon = (identity.expiration?.getTime() ?? 0) < Date.now() + S3ExpressIdentityProviderImpl.REFRESH_WINDOW_MS;
+                if (isExpiringSoon && !entry.isRefreshing) {
+                    entry.isRefreshing = true;
+                    this.getIdentity(key).then((id) => {
+                        cache.set(key, new S3ExpressIdentityCacheEntry(Promise.resolve(id)));
+                    });
+                }
+                return identity;
+            });
+        }
+        return cache.set(key, new S3ExpressIdentityCacheEntry(this.getIdentity(key))).identity;
+    }
+    async getIdentity(key) {
+        await this.cache.purgeExpired().catch((error) => {
+            console.warn("Error while clearing expired entries in S3ExpressIdentityCache: \n" + error);
+        });
+        const session = await this.createSessionFn(key);
+        if (!session.Credentials?.AccessKeyId || !session.Credentials?.SecretAccessKey) {
+            throw new Error("s3#createSession response credential missing AccessKeyId or SecretAccessKey.");
+        }
+        const identity = {
+            accessKeyId: session.Credentials.AccessKeyId,
+            secretAccessKey: session.Credentials.SecretAccessKey,
+            sessionToken: session.Credentials.SessionToken,
+            expiration: session.Credentials.Expiration ? new Date(session.Credentials.Expiration) : undefined,
+        };
+        return identity;
+    }
+}
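Note: resolveS3Config (later in this diff) constructs this provider automatically from the client's own CreateSession operation; building one by hand, as sketched below with a hypothetical client, mainly illustrates the createSessionFn contract:

import { CreateSessionCommand, S3Client } from "@aws-sdk/client-s3";
import { S3ExpressIdentityProviderImpl } from "@aws-sdk/middleware-sdk-s3";

// createSessionFn receives the bucket name as the cache key and must return
// the CreateSession output, whose Credentials feed the S3 Express identity.
const s3 = new S3Client({ region: "us-east-1" });
const provider = new S3ExpressIdentityProviderImpl((bucket) =>
  s3.send(new CreateSessionCommand({ Bucket: bucket }))
);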
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/classes/SignatureV4S3Express.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/classes/SignatureV4S3Express.js
new file mode 100644
index 00000000..af66190b
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/classes/SignatureV4S3Express.js
@@ -0,0 +1,41 @@
+import { SignatureV4 } from "@smithy/signature-v4";
+import { SESSION_TOKEN_HEADER, SESSION_TOKEN_QUERY_PARAM } from "../constants";
+export class SignatureV4S3Express extends SignatureV4 {
+    async signWithCredentials(requestToSign, credentials, options) {
+        const credentialsWithoutSessionToken = getCredentialsWithoutSessionToken(credentials);
+        requestToSign.headers[SESSION_TOKEN_HEADER] = credentials.sessionToken;
+        const privateAccess = this;
+        setSingleOverride(privateAccess, credentialsWithoutSessionToken);
+        return privateAccess.signRequest(requestToSign, options ?? {});
+    }
+    async presignWithCredentials(requestToSign, credentials, options) {
+        const credentialsWithoutSessionToken = getCredentialsWithoutSessionToken(credentials);
+        delete requestToSign.headers[SESSION_TOKEN_HEADER];
+        requestToSign.headers[SESSION_TOKEN_QUERY_PARAM] = credentials.sessionToken;
+        requestToSign.query = requestToSign.query ?? {};
+        requestToSign.query[SESSION_TOKEN_QUERY_PARAM] = credentials.sessionToken;
+        const privateAccess = this;
+        setSingleOverride(privateAccess, credentialsWithoutSessionToken);
+        return this.presign(requestToSign, options);
+    }
+}
+function getCredentialsWithoutSessionToken(credentials) {
+    const credentialsWithoutSessionToken = {
+        accessKeyId: credentials.accessKeyId,
+        secretAccessKey: credentials.secretAccessKey,
+        expiration: credentials.expiration,
+    };
+    return credentialsWithoutSessionToken;
+}
+function setSingleOverride(privateAccess, credentialsWithoutSessionToken) {
+    const id = setTimeout(() => {
+        throw new Error("SignatureV4S3Express credential override was created but not called.");
+    }, 10);
+    const currentCredentialProvider = privateAccess.credentialProvider;
+    const overrideCredentialsProviderOnce = () => {
+        clearTimeout(id);
+        privateAccess.credentialProvider = currentCredentialProvider;
+        return Promise.resolve(credentialsWithoutSessionToken);
+    };
+    privateAccess.credentialProvider = overrideCredentialsProviderOnce;
+}
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/constants.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/constants.js
new file mode 100644
index 00000000..8ede71e7
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/constants.js
@@ -0,0 +1,13 @@
+import { booleanSelector, SelectorType } from "@smithy/util-config-provider";
+export const S3_EXPRESS_BUCKET_TYPE = "Directory";
+export const S3_EXPRESS_BACKEND = "S3Express";
+export const S3_EXPRESS_AUTH_SCHEME = "sigv4-s3express";
+export const SESSION_TOKEN_QUERY_PARAM = "X-Amz-S3session-Token";
+export const SESSION_TOKEN_HEADER = SESSION_TOKEN_QUERY_PARAM.toLowerCase();
+export const NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_ENV_NAME = "AWS_S3_DISABLE_EXPRESS_SESSION_AUTH";
+export const NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_INI_NAME = "s3_disable_express_session_auth";
+export const NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_OPTIONS = {
+    environmentVariableSelector: (env) => booleanSelector(env, NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_ENV_NAME, SelectorType.ENV),
+    configFileSelector: (profile) => booleanSelector(profile, NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_INI_NAME, SelectorType.CONFIG),
+    default: false,
+};
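Note on usage: the selectors above mean session auth for S3 Express can be switched off without code changes; either of the following is read at client start-up (profile name hypothetical):

// Environment variable, e.g. when launching the process:
//   AWS_S3_DISABLE_EXPRESS_SESSION_AUTH=true node app.js
// Or in the shared config file (~/.aws/config):
//   [profile default]
//   s3_disable_express_session_auth = true
process.env.AWS_S3_DISABLE_EXPRESS_SESSION_AUTH = "true"; // programmatic equivalent, set before client creation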
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/functions/s3ExpressHttpSigningMiddleware.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/functions/s3ExpressHttpSigningMiddleware.js
new file mode 100644
index 00000000..93255c58
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/functions/s3ExpressHttpSigningMiddleware.js
@@ -0,0 +1,39 @@
+import { httpSigningMiddlewareOptions } from "@smithy/core";
+import { HttpRequest } from "@smithy/protocol-http";
+import { SMITHY_CONTEXT_KEY, } from "@smithy/types";
+import { getSmithyContext } from "@smithy/util-middleware";
+import { signS3Express } from "./signS3Express";
+const defaultErrorHandler = (signingProperties) => (error) => {
+    throw error;
+};
+const defaultSuccessHandler = (httpResponse, signingProperties) => { };
+export const s3ExpressHttpSigningMiddlewareOptions = httpSigningMiddlewareOptions;
+export const s3ExpressHttpSigningMiddleware = (config) => (next, context) => async (args) => {
+    if (!HttpRequest.isInstance(args.request)) {
+        return next(args);
+    }
+    const smithyContext = getSmithyContext(context);
+    const scheme = smithyContext.selectedHttpAuthScheme;
+    if (!scheme) {
+        throw new Error(`No HttpAuthScheme was selected: unable to sign request`);
+    }
+    const { httpAuthOption: { signingProperties = {} }, identity, signer, } = scheme;
+    let request;
+    if (context.s3ExpressIdentity) {
+        request = await signS3Express(context.s3ExpressIdentity, signingProperties, args.request, await config.signer());
+    }
+    else {
+        request = await signer.sign(args.request, identity, signingProperties);
+    }
+    const output = await next({
+        ...args,
+        request,
+    }).catch((signer.errorHandler || defaultErrorHandler)(signingProperties));
+    (signer.successHandler || defaultSuccessHandler)(output.response, signingProperties);
+    return output;
+};
+export const getS3ExpressHttpSigningPlugin = (config) => ({
+    applyToStack: (clientStack) => {
+        clientStack.addRelativeTo(s3ExpressHttpSigningMiddleware(config), httpSigningMiddlewareOptions);
+    },
+});
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/functions/s3ExpressMiddleware.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/functions/s3ExpressMiddleware.js
new file mode 100644
index 00000000..b64c77de
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/functions/s3ExpressMiddleware.js
@@ -0,0 +1,41 @@
+import { setFeature } from "@aws-sdk/core";
+import { HttpRequest } from "@smithy/protocol-http";
+import { S3_EXPRESS_AUTH_SCHEME, S3_EXPRESS_BACKEND, S3_EXPRESS_BUCKET_TYPE, SESSION_TOKEN_HEADER } from "../constants";
+export const s3ExpressMiddleware = (options) => {
+    return (next, context) => async (args) => {
+        if (context.endpointV2) {
+            const endpoint = context.endpointV2;
+            const isS3ExpressAuth = endpoint.properties?.authSchemes?.[0]?.name === S3_EXPRESS_AUTH_SCHEME;
+            const isS3ExpressBucket = endpoint.properties?.backend === S3_EXPRESS_BACKEND ||
+                endpoint.properties?.bucketType === S3_EXPRESS_BUCKET_TYPE;
+            if (isS3ExpressBucket) {
+                setFeature(context, "S3_EXPRESS_BUCKET", "J");
+                context.isS3ExpressBucket = true;
+            }
+            if (isS3ExpressAuth) {
+                const requestBucket = args.input.Bucket;
+                if (requestBucket) {
+                    const s3ExpressIdentity = await options.s3ExpressIdentityProvider.getS3ExpressIdentity(await options.credentials(), {
+                        Bucket: requestBucket,
+                    });
+                    context.s3ExpressIdentity = s3ExpressIdentity;
+                    if (HttpRequest.isInstance(args.request) && s3ExpressIdentity.sessionToken) {
+                        args.request.headers[SESSION_TOKEN_HEADER] = s3ExpressIdentity.sessionToken;
+                    }
+                }
+            }
+        }
+        return next(args);
+    };
+};
+export const s3ExpressMiddlewareOptions = {
+    name: "s3ExpressMiddleware",
+    step: "build",
+    tags: ["S3", "S3_EXPRESS"],
+    override: true,
+};
+export const getS3ExpressPlugin = (options) => ({
+    applyToStack: (clientStack) => {
+        clientStack.add(s3ExpressMiddleware(options), s3ExpressMiddlewareOptions);
+    },
+});
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/functions/signS3Express.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/functions/signS3Express.js
new file mode 100644
index 00000000..b12c9ec1
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/functions/signS3Express.js
@@ -0,0 +1,7 @@
+export const signS3Express = async (s3ExpressIdentity, signingOptions, request, sigV4MultiRegionSigner) => {
+    const signedRequest = await sigV4MultiRegionSigner.signWithCredentials(request, s3ExpressIdentity, {});
+    if (signedRequest.headers["X-Amz-Security-Token"] || signedRequest.headers["x-amz-security-token"]) {
+        throw new Error("X-Amz-Security-Token must not be set for s3-express requests.");
+    }
+    return signedRequest;
+};
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/index.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/index.js
new file mode 100644
index 00000000..e6c1da76
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/index.js
@@ -0,0 +1,7 @@
+export { S3ExpressIdentityCache } from "./classes/S3ExpressIdentityCache";
+export { S3ExpressIdentityCacheEntry } from "./classes/S3ExpressIdentityCacheEntry";
+export { S3ExpressIdentityProviderImpl } from "./classes/S3ExpressIdentityProviderImpl";
+export { SignatureV4S3Express } from "./classes/SignatureV4S3Express";
+export { NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_OPTIONS } from "./constants";
+export { getS3ExpressPlugin, s3ExpressMiddleware, s3ExpressMiddlewareOptions } from "./functions/s3ExpressMiddleware";
+export { getS3ExpressHttpSigningPlugin, s3ExpressHttpSigningMiddleware, s3ExpressHttpSigningMiddlewareOptions, } from "./functions/s3ExpressHttpSigningMiddleware";
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/interfaces/S3ExpressIdentity.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/interfaces/S3ExpressIdentity.js
new file mode 100644
index 00000000..cb0ff5c3
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/interfaces/S3ExpressIdentity.js
@@ -0,0 +1 @@
+export {};
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/interfaces/S3ExpressIdentityProvider.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/interfaces/S3ExpressIdentityProvider.js
new file mode 100644
index 00000000..cb0ff5c3
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3-express/interfaces/S3ExpressIdentityProvider.js
@@ -0,0 +1 @@
+export {};
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3Configuration.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3Configuration.js
new file mode 100644
index 00000000..73db429e
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/s3Configuration.js
@@ -0,0 +1,16 @@
+import { S3ExpressIdentityProviderImpl } from "./s3-express";
+export const resolveS3Config = (input, { session, }) => {
+    const [s3ClientProvider, CreateSessionCommandCtor] = session;
+    const { forcePathStyle, useAccelerateEndpoint, disableMultiregionAccessPoints, followRegionRedirects, s3ExpressIdentityProvider, bucketEndpoint, } = input;
+    return Object.assign(input, {
+        forcePathStyle: forcePathStyle ?? false,
+        useAccelerateEndpoint: useAccelerateEndpoint ?? false,
+        disableMultiregionAccessPoints: disableMultiregionAccessPoints ?? false,
+        followRegionRedirects: followRegionRedirects ?? false,
+        s3ExpressIdentityProvider: s3ExpressIdentityProvider ??
+            new S3ExpressIdentityProviderImpl(async (key) => s3ClientProvider().send(new CreateSessionCommandCtor({
+                Bucket: key,
+            }))),
+        bucketEndpoint: bucketEndpoint ?? false,
+    });
+};
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/throw-200-exceptions.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/throw-200-exceptions.js
new file mode 100644
index 00000000..f0f5ca37
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/throw-200-exceptions.js
@@ -0,0 +1,67 @@
+import { HttpResponse } from "@smithy/protocol-http";
+import { headStream, splitStream } from "@smithy/util-stream";
+const THROW_IF_EMPTY_BODY = {
+    CopyObjectCommand: true,
+    UploadPartCopyCommand: true,
+    CompleteMultipartUploadCommand: true,
+};
+const MAX_BYTES_TO_INSPECT = 3000;
+export const throw200ExceptionsMiddleware = (config) => (next, context) => async (args) => {
+    const result = await next(args);
+    const { response } = result;
+    if (!HttpResponse.isInstance(response)) {
+        return result;
+    }
+    const { statusCode, body: sourceBody } = response;
+    if (statusCode < 200 || statusCode >= 300) {
+        return result;
+    }
+    const isSplittableStream = typeof sourceBody?.stream === "function" ||
+        typeof sourceBody?.pipe === "function" ||
+        typeof sourceBody?.tee === "function";
+    if (!isSplittableStream) {
+        return result;
+    }
+    let bodyCopy = sourceBody;
+    let body = sourceBody;
+    if (sourceBody && typeof sourceBody === "object" && !(sourceBody instanceof Uint8Array)) {
+        [bodyCopy, body] = await splitStream(sourceBody);
+    }
+    response.body = body;
+    const bodyBytes = await collectBody(bodyCopy, {
+        streamCollector: async (stream) => {
+            return headStream(stream, MAX_BYTES_TO_INSPECT);
+        },
+    });
+    if (typeof bodyCopy?.destroy === "function") {
+        bodyCopy.destroy();
+    }
+    const bodyStringTail = config.utf8Encoder(bodyBytes.subarray(bodyBytes.length - 16));
+    if (bodyBytes.length === 0 && THROW_IF_EMPTY_BODY[context.commandName]) {
+        const err = new Error("S3 aborted request");
+        err.name = "InternalError";
+        throw err;
+    }
+    if (bodyStringTail && bodyStringTail.endsWith("</Error>")) {
+        response.statusCode = 400;
+    }
+    return result;
+};
+const collectBody = (streamBody = new Uint8Array(), context) => {
+    if (streamBody instanceof Uint8Array) {
+        return Promise.resolve(streamBody);
+    }
+    return context.streamCollector(streamBody) || Promise.resolve(new Uint8Array());
+};
+export const throw200ExceptionsMiddlewareOptions = {
+    relation: "after",
+    toMiddleware: "deserializerMiddleware",
+    tags: ["THROW_200_EXCEPTIONS", "S3"],
+    name: "throw200ExceptionsMiddleware",
+    override: true,
+};
+export const getThrow200ExceptionsPlugin = (config) => ({
+    applyToStack: (clientStack) => {
+        clientStack.addRelativeTo(throw200ExceptionsMiddleware(config), throw200ExceptionsMiddlewareOptions);
+    },
+});
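Note: the "</Error>" tail check above exists because S3 can answer CopyObject, UploadPartCopy, and CompleteMultipartUpload with HTTP 200 and an error document in the body. A sketch of the payload shape being detected; the XML contents are illustrative:

// The middleware inspects up to 3000 bytes of the body copy; when the last
// 16 bytes end with "</Error>", it downgrades the status to 400 so the
// normal error deserializer takes over.
const sample200ErrorBody = `<?xml version="1.0" encoding="UTF-8"?>
<Error>
  <Code>InternalError</Code>
  <Message>We encountered an internal error. Please try again.</Message>
</Error>`;
console.log(sample200ErrorBody.endsWith("</Error>")); // true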
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/validate-bucket-name.js b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/validate-bucket-name.js
new file mode 100644
index 00000000..9636cbec
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-es/validate-bucket-name.js
@@ -0,0 +1,25 @@
+import { validate as validateArn } from "@aws-sdk/util-arn-parser";
+import { bucketEndpointMiddleware, bucketEndpointMiddlewareOptions } from "./bucket-endpoint-middleware";
+export function validateBucketNameMiddleware({ bucketEndpoint }) {
+    return (next) => async (args) => {
+        const { input: { Bucket }, } = args;
+        if (!bucketEndpoint && typeof Bucket === "string" && !validateArn(Bucket) && Bucket.indexOf("/") >= 0) {
+            const err = new Error(`Bucket name shouldn't contain '/', received '${Bucket}'`);
+            err.name = "InvalidBucketName";
+            throw err;
+        }
+        return next({ ...args });
+    };
+}
+export const validateBucketNameMiddlewareOptions = {
+    step: "initialize",
+    tags: ["VALIDATE_BUCKET_NAME"],
+    name: "validateBucketNameMiddleware",
+    override: true,
+};
+export const getValidateBucketNamePlugin = (options) => ({
+    applyToStack: (clientStack) => {
+        clientStack.add(validateBucketNameMiddleware(options), validateBucketNameMiddlewareOptions);
+        clientStack.addRelativeTo(bucketEndpointMiddleware(options), bucketEndpointMiddlewareOptions);
+    },
+});
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/bucket-endpoint-middleware.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/bucket-endpoint-middleware.d.ts
new file mode 100644
index 00000000..31633fe6
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/bucket-endpoint-middleware.d.ts
@@ -0,0 +1,13 @@
+import { RelativeMiddlewareOptions, SerializeMiddleware } from "@smithy/types";
+interface PreviouslyResolved {
+    bucketEndpoint?: boolean;
+}
+/**
+ * @internal
+ */
+export declare function bucketEndpointMiddleware(options: PreviouslyResolved): SerializeMiddleware<any, any>;
+/**
+ * @internal
+ */
+export declare const bucketEndpointMiddlewareOptions: RelativeMiddlewareOptions;
+export {};
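Note on behavior: validateBucketNameMiddleware, shown above, rejects a "/" in Bucket unless the value is an ARN or bucketEndpoint is enabled. A sketch of the failure mode; bucket and key are hypothetical:

import { GetObjectCommand, S3Client } from "@aws-sdk/client-s3";

const client = new S3Client({});
try {
  // Wrong: the key prefix belongs in Key, not Bucket.
  await client.send(new GetObjectCommand({ Bucket: "my-bucket/photos", Key: "a.png" }));
} catch (e) {
  console.error((e as Error).name); // "InvalidBucketName"
}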
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/check-content-length-header.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/check-content-length-header.d.ts
new file mode 100644
index 00000000..5588e9af
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/check-content-length-header.d.ts
@@ -0,0 +1,16 @@
+import { FinalizeRequestHandlerOptions, FinalizeRequestMiddleware, Pluggable } from "@smithy/types";
+/**
+ * @internal
+ *
+ * Log a warning if the input to PutObject is detected to be a Stream of unknown ContentLength and
+ * recommend the usage of the @aws-sdk/lib-storage Upload class.
+ */
+export declare function checkContentLengthHeader(): FinalizeRequestMiddleware<any, any>;
+/**
+ * @internal
+ */
+export declare const checkContentLengthHeaderMiddlewareOptions: FinalizeRequestHandlerOptions;
+/**
+ * @internal
+ */
+export declare const getCheckContentLengthHeaderPlugin: (unused: any) => Pluggable<any, any>;
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/index.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/index.d.ts
new file mode 100644
index 00000000..34f756c0
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/index.d.ts
@@ -0,0 +1,8 @@
+export * from "./check-content-length-header";
+export * from "./region-redirect-endpoint-middleware";
+export * from "./region-redirect-middleware";
+export * from "./s3-expires-middleware";
+export * from "./s3-express/index";
+export * from "./s3Configuration";
+export * from "./throw-200-exceptions";
+export * from "./validate-bucket-name";
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/region-redirect-endpoint-middleware.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/region-redirect-endpoint-middleware.d.ts
new file mode 100644
index 00000000..9f9d3ffe
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/region-redirect-endpoint-middleware.d.ts
@@ -0,0 +1,10 @@
+import { RelativeMiddlewareOptions, SerializeMiddleware } from "@smithy/types";
+import { PreviouslyResolved } from "./region-redirect-middleware";
+/**
+ * @internal
+ */
+export declare const regionRedirectEndpointMiddleware: (config: PreviouslyResolved) => SerializeMiddleware<any, any>;
+/**
+ * @internal
+ */
+export declare const regionRedirectEndpointMiddlewareOptions: RelativeMiddlewareOptions;
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/region-redirect-middleware.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/region-redirect-middleware.d.ts
new file mode 100644
index 00000000..9c58e526
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/region-redirect-middleware.d.ts
@@ -0,0 +1,20 @@
+import { InitializeHandlerOptions, InitializeMiddleware, Pluggable, Provider } from "@smithy/types";
+/**
+ * @internal
+ */
+export interface PreviouslyResolved {
+    region: Provider<string>;
+    followRegionRedirects: boolean;
+}
+/**
+ * @internal
+ */
+export declare function regionRedirectMiddleware(clientConfig: PreviouslyResolved): InitializeMiddleware<any, any>;
+/**
+ * @internal
+ */
+export declare const regionRedirectMiddlewareOptions: InitializeHandlerOptions;
+/**
+ * @internal
+ */
+export declare const getRegionRedirectMiddlewarePlugin: (clientConfig: PreviouslyResolved) => Pluggable<any, any>;
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-expires-middleware.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-expires-middleware.d.ts
new file mode 100644
index 00000000..6637d09c
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-expires-middleware.d.ts
@@ -0,0 +1,26 @@
+import { DeserializeMiddleware, Pluggable, RelativeMiddlewareOptions } from "@smithy/types";
+/**
+ * @internal
+ */
+interface PreviouslyResolved {
+}
+/**
+ * @internal
+ *
+ * From the S3 Expires compatibility spec.
+ * A model transform will ensure S3#Expires remains a timestamp shape, though
+ * it is deprecated.
+ * If a particular object has a non-date string set as the Expires value,
+ * the SDK will have the raw string as "ExpiresString" on the response.
+ *
+ */
+export declare const s3ExpiresMiddleware: (config: PreviouslyResolved) => DeserializeMiddleware<any, any>;
+/**
+ * @internal
+ */
+export declare const s3ExpiresMiddlewareOptions: RelativeMiddlewareOptions;
+/**
+ * @internal
+ */
+export declare const getS3ExpiresMiddlewarePlugin: (clientConfig: PreviouslyResolved) => Pluggable<any, any>;
+export {};
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/classes/S3ExpressIdentityCache.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/classes/S3ExpressIdentityCache.d.ts
new file mode 100644
index 00000000..797edf17
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/classes/S3ExpressIdentityCache.d.ts
@@ -0,0 +1,16 @@
+import { S3ExpressIdentityCacheEntry } from "./S3ExpressIdentityCacheEntry";
+/**
+ * @internal
+ *
+ * Stores identities by key.
+ */
+export declare class S3ExpressIdentityCache {
+    private data;
+    private lastPurgeTime;
+    static EXPIRED_CREDENTIAL_PURGE_INTERVAL_MS: number;
+    constructor(data?: Record<string, S3ExpressIdentityCacheEntry>);
+    get(key: string): undefined | S3ExpressIdentityCacheEntry;
+    set(key: string, entry: S3ExpressIdentityCacheEntry): S3ExpressIdentityCacheEntry;
+    delete(key: string): void;
+    purgeExpired(): Promise<void>;
+}
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/classes/S3ExpressIdentityCacheEntry.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/classes/S3ExpressIdentityCacheEntry.d.ts
new file mode 100644
index 00000000..5e17d052
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/classes/S3ExpressIdentityCacheEntry.d.ts
@@ -0,0 +1,16 @@
+import { S3ExpressIdentity } from "../interfaces/S3ExpressIdentity";
+/**
+ * @internal
+ */
+export declare class S3ExpressIdentityCacheEntry {
+    private _identity;
+    isRefreshing: boolean;
+    accessed: number;
+    /**
+     * @param identity - stored identity.
+     * @param accessed - timestamp of last access in epoch ms.
+     * @param isRefreshing - this key is currently in the process of being refreshed (background).
+     */
+    constructor(_identity: Promise<S3ExpressIdentity>, isRefreshing?: boolean, accessed?: number);
+    get identity(): Promise<S3ExpressIdentity>;
+}
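Note on usage: per the compatibility note above, Expires stays a (deprecated) Date while the raw header survives as ExpiresString. A sketch of reading both from a GetObject response; bucket and key are hypothetical:

import { GetObjectCommand, S3Client } from "@aws-sdk/client-s3";

const out = await new S3Client({}).send(
  new GetObjectCommand({ Bucket: "my-bucket", Key: "page.html" })
);
console.log(out.Expires);       // Date | undefined; dropped when unparseable
console.log(out.ExpiresString); // raw Expires header, passed through as-is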
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/classes/S3ExpressIdentityProviderImpl.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/classes/S3ExpressIdentityProviderImpl.d.ts
new file mode 100644
index 00000000..acef6460
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/classes/S3ExpressIdentityProviderImpl.d.ts
@@ -0,0 +1,32 @@
+import { AwsCredentialIdentity } from "@aws-sdk/types";
+import { S3ExpressIdentity } from "../interfaces/S3ExpressIdentity";
+import { S3ExpressIdentityProvider } from "../interfaces/S3ExpressIdentityProvider";
+import { S3ExpressIdentityCache } from "./S3ExpressIdentityCache";
+/**
+ * @internal
+ *
+ * This should match S3::CreateSessionCommandOutput::SessionCredentials
+ * but it is not imported since that would create a circular dependency.
+ */
+type Credentials = {
+    AccessKeyId: string | undefined;
+    SecretAccessKey: string | undefined;
+    SessionToken: string | undefined;
+    Expiration: Date | undefined;
+};
+/**
+ * @internal
+ */
+export declare class S3ExpressIdentityProviderImpl implements S3ExpressIdentityProvider {
+    private createSessionFn;
+    private cache;
+    static REFRESH_WINDOW_MS: number;
+    constructor(createSessionFn: (key: string) => Promise<{
+        Credentials: Credentials;
+    }>, cache?: S3ExpressIdentityCache);
+    getS3ExpressIdentity(awsIdentity: AwsCredentialIdentity, identityProperties: {
+        Bucket: string;
+    } & Record<string, string>): Promise<S3ExpressIdentity>;
+    private getIdentity;
+}
+export {};
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/classes/SignatureV4S3Express.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/classes/SignatureV4S3Express.d.ts
new file mode 100644
index 00000000..c6f7da6c
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/classes/SignatureV4S3Express.d.ts
@@ -0,0 +1,17 @@
+import { AwsCredentialIdentity } from "@aws-sdk/types";
+import { SignatureV4 } from "@smithy/signature-v4";
+import { HttpRequest as IHttpRequest, RequestPresigningArguments, RequestSigningArguments } from "@smithy/types";
+export declare class SignatureV4S3Express extends SignatureV4 {
+    /**
+     * Signs with alternate provided credentials instead of those provided in the
+     * constructor.
+     *
+     * Additionally omits the credential sessionToken and assigns it to the
+     * alternate header field for S3 Express.
+     */
+    signWithCredentials(requestToSign: IHttpRequest, credentials: AwsCredentialIdentity, options?: RequestSigningArguments): Promise<IHttpRequest>;
+    /**
+     * Similar to {@link SignatureV4S3Express#signWithCredentials} but for presigning.
+     */
+    presignWithCredentials(requestToSign: IHttpRequest, credentials: AwsCredentialIdentity, options?: RequestPresigningArguments): Promise<IHttpRequest>;
+}
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/constants.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/constants.d.ts
new file mode 100644
index 00000000..328e5040
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/constants.d.ts
@@ -0,0 +1,37 @@
+import type { LoadedConfigSelectors } from "@smithy/node-config-provider";
+/**
+ * @internal
+ *
+ * @deprecated will be replaced by backend.
+ *
+ * TODO(s3-express): non-beta value, backend == S3Express.
+ */
+export declare const S3_EXPRESS_BUCKET_TYPE = "Directory";
+/**
+ * @internal
+ */
+export declare const S3_EXPRESS_BACKEND = "S3Express";
+/**
+ * @internal
+ */
+export declare const S3_EXPRESS_AUTH_SCHEME = "sigv4-s3express";
+/**
+ * @internal
+ */
+export declare const SESSION_TOKEN_QUERY_PARAM = "X-Amz-S3session-Token";
+/**
+ * @internal
+ */
+export declare const SESSION_TOKEN_HEADER: string;
+/**
+ * @internal
+ */
+export declare const NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_ENV_NAME = "AWS_S3_DISABLE_EXPRESS_SESSION_AUTH";
+/**
+ * @internal
+ */
+export declare const NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_INI_NAME = "s3_disable_express_session_auth";
+/**
+ * @internal
+ */
+export declare const NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_OPTIONS: LoadedConfigSelectors<boolean>;
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/functions/s3ExpressHttpSigningMiddleware.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/functions/s3ExpressHttpSigningMiddleware.d.ts
new file mode 100644
index 00000000..c775791a
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/functions/s3ExpressHttpSigningMiddleware.d.ts
@@ -0,0 +1,27 @@
+import { IHttpRequest } from "@smithy/protocol-http";
+import { AuthScheme, AwsCredentialIdentity, FinalizeRequestMiddleware, Pluggable, RequestSigner } from "@smithy/types";
+interface SigningProperties {
+    signingRegion: string;
+    signingDate: Date;
+    signingService: string;
+}
+interface PreviouslyResolved {
+    signer: (authScheme?: AuthScheme | undefined) => Promise<RequestSigner & {
+        signWithCredentials(req: IHttpRequest, identity: AwsCredentialIdentity, opts?: Partial<SigningProperties>): Promise<IHttpRequest>;
+    }>;
+}
+/**
+ * @internal
+ */
+export declare const s3ExpressHttpSigningMiddlewareOptions: import("@smithy/types").FinalizeRequestHandlerOptions & import("@smithy/types").RelativeLocation & Omit<import("@smithy/types").HandlerOptions, "step">;
+/**
+ * @internal
+ */
+export declare const s3ExpressHttpSigningMiddleware: <Input extends object, Output extends object>(config: PreviouslyResolved) => FinalizeRequestMiddleware<Input, Output>;
+/**
+ * @internal
+ */
+export declare const getS3ExpressHttpSigningPlugin: (config: {
+    signer: (authScheme?: AuthScheme | undefined) => Promise<RequestSigner>;
+}) => Pluggable<any, any>;
+export {};
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/functions/s3ExpressMiddleware.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/functions/s3ExpressMiddleware.d.ts
new file mode 100644
index 00000000..a264835e
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/functions/s3ExpressMiddleware.d.ts
@@ -0,0 +1,32 @@
+import { AwsCredentialIdentity } from "@aws-sdk/types";
+import { BuildHandlerOptions, BuildMiddleware, Logger, MemoizedProvider, Pluggable } from "@smithy/types";
+import { S3ExpressIdentity } from "../interfaces/S3ExpressIdentity";
+import { S3ExpressIdentityProvider } from "../interfaces/S3ExpressIdentityProvider";
+declare module "@smithy/types" {
+    interface HandlerExecutionContext {
+        /**
+         * Reserved key, only when using S3.
+         */
+        s3ExpressIdentity?: S3ExpressIdentity;
+    }
+}
+/**
+ * @internal
+ */
+export interface S3ExpressResolvedConfig {
+    logger?: Logger;
+    s3ExpressIdentityProvider: S3ExpressIdentityProvider;
+    credentials: MemoizedProvider<AwsCredentialIdentity>;
+}
+/**
+ * @internal
+ */
+export declare const s3ExpressMiddleware: (options: S3ExpressResolvedConfig) => BuildMiddleware<any, any>;
+/**
+ * @internal
+ */
+export declare const s3ExpressMiddlewareOptions: BuildHandlerOptions;
+/**
+ * @internal
+ */
+export declare const getS3ExpressPlugin: (options: S3ExpressResolvedConfig) => Pluggable<any, any>;
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/functions/signS3Express.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/functions/signS3Express.d.ts
new file mode 100644
index 00000000..78cd1a77
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/functions/signS3Express.d.ts
@@ -0,0 +1,13 @@
+import type { AwsCredentialIdentity, HttpRequest as IHttpRequest } from "@smithy/types";
+import { S3ExpressIdentity } from "../interfaces/S3ExpressIdentity";
+export declare const signS3Express: (s3ExpressIdentity: S3ExpressIdentity, signingOptions: {
+    signingDate: Date;
+    signingRegion: string;
+    signingService: string;
+}, request: IHttpRequest, sigV4MultiRegionSigner: {
+    signWithCredentials(req: IHttpRequest, identity: AwsCredentialIdentity, opts?: Partial<{
+        signingDate: Date;
+        signingRegion: string;
+        signingService: string;
+    }> | undefined): Promise<IHttpRequest>;
+}) => Promise<IHttpRequest>;
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/index.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/index.d.ts
new file mode 100644
index 00000000..a8a239ae
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/index.d.ts
@@ -0,0 +1,9 @@
+export { S3ExpressIdentityCache } from "./classes/S3ExpressIdentityCache";
+export { S3ExpressIdentityCacheEntry } from "./classes/S3ExpressIdentityCacheEntry";
+export { S3ExpressIdentityProviderImpl } from "./classes/S3ExpressIdentityProviderImpl";
+export { SignatureV4S3Express } from "./classes/SignatureV4S3Express";
+export { NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_OPTIONS } from "./constants";
+export { getS3ExpressPlugin, s3ExpressMiddleware, s3ExpressMiddlewareOptions } from "./functions/s3ExpressMiddleware";
+export { getS3ExpressHttpSigningPlugin, s3ExpressHttpSigningMiddleware, s3ExpressHttpSigningMiddlewareOptions, } from "./functions/s3ExpressHttpSigningMiddleware";
+export { S3ExpressIdentity } from "./interfaces/S3ExpressIdentity";
+export { S3ExpressIdentityProvider } from "./interfaces/S3ExpressIdentityProvider";
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/interfaces/S3ExpressIdentity.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/interfaces/S3ExpressIdentity.d.ts
new file mode 100644
index 00000000..22a18b9c
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/interfaces/S3ExpressIdentity.d.ts
@@ -0,0 +1,6 @@
+import { AwsCredentialIdentity } from "@aws-sdk/types";
+/**
+ * @public
+ */
+export interface S3ExpressIdentity extends AwsCredentialIdentity {
+}
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/interfaces/S3ExpressIdentityProvider.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/interfaces/S3ExpressIdentityProvider.d.ts
new file mode 100644
index 00000000..d0625924
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3-express/interfaces/S3ExpressIdentityProvider.d.ts
@@ -0,0 +1,12 @@
+import { AwsCredentialIdentity } from "@aws-sdk/types";
+import { S3ExpressIdentity } from "./S3ExpressIdentity";
+/**
+ * @public
+ */
+export interface S3ExpressIdentityProvider {
+    /**
+     * @param awsIdentity - pre-existing credentials.
+     * @param identityProperties - unknown.
+     */
+    getS3ExpressIdentity(awsIdentity: AwsCredentialIdentity, identityProperties: Record<string, string>): Promise<S3ExpressIdentity>;
+}
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3Configuration.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3Configuration.d.ts
new file mode 100644
index 00000000..9a7a6ff3
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/s3Configuration.d.ts
@@ -0,0 +1,62 @@
+import type { Client, Command } from "@smithy/types";
+import { S3ExpressIdentityProvider } from "./s3-express";
+/**
+ * All endpoint parameters with built-in bindings of AWS::S3::*
+ * @public
+ */
+export interface S3InputConfig {
+    /**
+     * Whether to force path style URLs for S3 objects
+     * (e.g., https://s3.amazonaws.com/<bucketName>/<key> instead of https://<bucketName>.s3.amazonaws.com/<key>)
+     */
+    forcePathStyle?: boolean;
+    /**
+     * Whether to use the S3 Transfer Acceleration endpoint by default
+     */
+    useAccelerateEndpoint?: boolean;
+    /**
+     * Whether multi-region access points (MRAP) should be disabled.
+     */
+    disableMultiregionAccessPoints?: boolean;
+    /**
+     * This feature was previously called the S3 Global Client.
+     * This can result in additional latency as failed requests are retried
+     * with a corrected region when receiving a permanent redirect error with status 301.
+     * This feature should only be used as a last resort if you do not know the region of your bucket(s) ahead of time.
+     */
+    followRegionRedirects?: boolean;
+    /**
+     * Identity provider for an S3 feature.
+     */
+    s3ExpressIdentityProvider?: S3ExpressIdentityProvider;
+    /**
+     * Whether to use the bucket name as the endpoint for this client.
+     */
+    bucketEndpoint?: boolean;
+}
+/**
+ * This is a placeholder for the actual
+ * S3Client type from \@aws-sdk/client-s3. It is not explicitly
+ * imported to avoid a circular dependency.
+ * @internal
+ */
+type PlaceholderS3Client = Client<any, any, any> & any;
+/**
+ * Placeholder for the constructor for CreateSessionCommand.
+ * @internal
+ */
+type PlaceholderCreateSessionCommandCtor = {
+    new (args: any): Command<any, any, any, any, any>;
+};
+export interface S3ResolvedConfig {
+    forcePathStyle: boolean;
+    useAccelerateEndpoint: boolean;
+    disableMultiregionAccessPoints: boolean;
+    followRegionRedirects: boolean;
+    s3ExpressIdentityProvider: S3ExpressIdentityProvider;
+    bucketEndpoint: boolean;
+}
+export declare const resolveS3Config: <T>(input: T & S3InputConfig, { session, }: {
+    session: [() => PlaceholderS3Client, PlaceholderCreateSessionCommandCtor];
+}) => T & S3ResolvedConfig;
+export {};
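Note on usage: every S3InputConfig member above is a plain S3Client constructor option. A sketch of a client aimed at an S3-compatible server, where forcePathStyle matters because such servers often lack virtual-hosted-style bucket DNS; the endpoint is hypothetical:

import { S3Client } from "@aws-sdk/client-s3";

const client = new S3Client({
  region: "us-east-1",
  endpoint: "http://localhost:9000", // hypothetical S3-compatible endpoint
  forcePathStyle: true,              // request URLs become /<bucket>/<key>
  useAccelerateEndpoint: false,
  disableMultiregionAccessPoints: false,
  followRegionRedirects: false,
});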
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/throw-200-exceptions.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/throw-200-exceptions.d.ts
new file mode 100644
index 00000000..07d82bd0
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/throw-200-exceptions.d.ts
@@ -0,0 +1,20 @@
+import { DeserializeMiddleware, Encoder, Pluggable, RelativeMiddlewareOptions } from "@smithy/types";
+type PreviouslyResolved = {
+    utf8Encoder: Encoder;
+};
+/**
+ * In case of an internal error/terminated connection, S3 operations may return 200 errors. CopyObject, UploadPartCopy,
+ * CompleteMultipartUpload may return empty payload or payload with only xml Preamble.
+ * @internal
+ */
+export declare const throw200ExceptionsMiddleware: (config: PreviouslyResolved) => DeserializeMiddleware<any, any>;
+/**
+ * @internal
+ */
+export declare const throw200ExceptionsMiddlewareOptions: RelativeMiddlewareOptions;
+/**
+ *
+ * @internal
+ */
+export declare const getThrow200ExceptionsPlugin: (config: PreviouslyResolved) => Pluggable<any, any>;
+export {};
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/bucket-endpoint-middleware.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/bucket-endpoint-middleware.d.ts
new file mode 100644
index 00000000..1b4e2a6b
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/bucket-endpoint-middleware.d.ts
@@ -0,0 +1,9 @@
+import { RelativeMiddlewareOptions, SerializeMiddleware } from "@smithy/types";
+interface PreviouslyResolved {
+  bucketEndpoint?: boolean;
+}
+export declare function bucketEndpointMiddleware(
+  options: PreviouslyResolved
+): SerializeMiddleware<any, any>;
+export declare const bucketEndpointMiddlewareOptions: RelativeMiddlewareOptions;
+export {};
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/check-content-length-header.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/check-content-length-header.d.ts
new file mode 100644
index 00000000..09db7f0e
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/check-content-length-header.d.ts
@@ -0,0 +1,13 @@
+import {
+  FinalizeRequestHandlerOptions,
+  FinalizeRequestMiddleware,
+  Pluggable,
+} from "@smithy/types";
+export declare function checkContentLengthHeader(): FinalizeRequestMiddleware<
+  any,
+  any
+>;
+export declare const checkContentLengthHeaderMiddlewareOptions: FinalizeRequestHandlerOptions;
+export declare const getCheckContentLengthHeaderPlugin: (
+  unused: any
+) => Pluggable<any, any>;
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/index.d.ts
new file mode 100644
index 00000000..34f756c0
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/index.d.ts
@@ -0,0 +1,8 @@
+export * from "./check-content-length-header";
+export * from "./region-redirect-endpoint-middleware";
+export * from "./region-redirect-middleware";
+export * from "./s3-expires-middleware";
+export * from "./s3-express/index";
+export * from "./s3Configuration";
+export * from "./throw-200-exceptions";
+export * from "./validate-bucket-name";
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/region-redirect-endpoint-middleware.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/region-redirect-endpoint-middleware.d.ts
new file mode 100644
index 00000000..bbe5b128
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/region-redirect-endpoint-middleware.d.ts
@@ -0,0 +1,6 @@
+import { RelativeMiddlewareOptions, SerializeMiddleware } from "@smithy/types";
+import { PreviouslyResolved } from "./region-redirect-middleware";
+export declare const regionRedirectEndpointMiddleware: (
+  config: PreviouslyResolved
+) => SerializeMiddleware<any, any>;
+export declare const regionRedirectEndpointMiddlewareOptions: RelativeMiddlewareOptions;
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/region-redirect-middleware.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/region-redirect-middleware.d.ts
new file mode 100644
index 00000000..7be451a2
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/region-redirect-middleware.d.ts
@@ -0,0 +1,17 @@
+import {
+  InitializeHandlerOptions,
+  InitializeMiddleware,
+  Pluggable,
+  Provider,
+} from "@smithy/types";
+export interface PreviouslyResolved {
+  region: Provider<string>;
+  followRegionRedirects: boolean;
+}
+export declare function regionRedirectMiddleware(
+  clientConfig: PreviouslyResolved
+): InitializeMiddleware<any, any>;
+export declare const regionRedirectMiddlewareOptions: InitializeHandlerOptions;
+export declare const getRegionRedirectMiddlewarePlugin: (
+  clientConfig: PreviouslyResolved
+) => Pluggable<any, any>;
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-expires-middleware.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-expires-middleware.d.ts
new file mode 100644
index 00000000..652274f3
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-expires-middleware.d.ts
@@ -0,0 +1,14 @@
+import {
+  DeserializeMiddleware,
+  Pluggable,
+  RelativeMiddlewareOptions,
+} from "@smithy/types";
+interface PreviouslyResolved {}
+export declare const s3ExpiresMiddleware: (
+  config: PreviouslyResolved
+) => DeserializeMiddleware<any, any>;
+export declare const s3ExpiresMiddlewareOptions: RelativeMiddlewareOptions;
+export declare const getS3ExpiresMiddlewarePlugin: (
+  clientConfig: PreviouslyResolved
+) => Pluggable<any, any>;
+export {};
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/classes/S3ExpressIdentityCache.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/classes/S3ExpressIdentityCache.d.ts
new file mode 100644
index 00000000..7fc0c0e4
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/classes/S3ExpressIdentityCache.d.ts
@@ -0,0 +1,14 @@
+import { S3ExpressIdentityCacheEntry } from "./S3ExpressIdentityCacheEntry";
+export declare class S3ExpressIdentityCache {
+  private data;
+  private lastPurgeTime;
+  static EXPIRED_CREDENTIAL_PURGE_INTERVAL_MS: number;
+  constructor(data?: Record<string, S3ExpressIdentityCacheEntry>);
+  get(key: string): undefined | S3ExpressIdentityCacheEntry;
+  set(
+    key: string,
+    entry: S3ExpressIdentityCacheEntry
+  ): S3ExpressIdentityCacheEntry;
+  delete(key: string): void;
+  purgeExpired(): Promise<void>;
+}
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/classes/S3ExpressIdentityCacheEntry.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/classes/S3ExpressIdentityCacheEntry.d.ts
new file mode 100644
index 00000000..5c0ed4c5
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/classes/S3ExpressIdentityCacheEntry.d.ts
@@ -0,0 +1,12 @@
+import { S3ExpressIdentity } from "../interfaces/S3ExpressIdentity";
+export declare class S3ExpressIdentityCacheEntry {
+  private _identity;
+  isRefreshing: boolean;
+  accessed: number;
+  constructor(
+    _identity: Promise<S3ExpressIdentity>,
+    isRefreshing?: boolean,
+    accessed?: number
+  );
+  readonly identity: Promise<S3ExpressIdentity>;
+}
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/classes/S3ExpressIdentityProviderImpl.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/classes/S3ExpressIdentityProviderImpl.d.ts
new file mode 100644
index 00000000..3ee33c3d
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/classes/S3ExpressIdentityProviderImpl.d.ts
@@ -0,0 +1,31 @@
+import { AwsCredentialIdentity } from "@aws-sdk/types";
+import { S3ExpressIdentity } from "../interfaces/S3ExpressIdentity";
+import { S3ExpressIdentityProvider } from "../interfaces/S3ExpressIdentityProvider";
+import { S3ExpressIdentityCache } from "./S3ExpressIdentityCache";
+type Credentials = {
+  AccessKeyId: string | undefined;
+  SecretAccessKey: string | undefined;
+  SessionToken: string | undefined;
+  Expiration: Date | undefined;
+};
+export declare class S3ExpressIdentityProviderImpl
+  implements S3ExpressIdentityProvider
+{
+  private createSessionFn;
+  private cache;
+  static REFRESH_WINDOW_MS: number;
+  constructor(
+    createSessionFn: (key: string) => Promise<{
+      Credentials: Credentials;
+    }>,
+    cache?: S3ExpressIdentityCache
+  );
+  getS3ExpressIdentity(
+    awsIdentity: AwsCredentialIdentity,
+    identityProperties: {
+      Bucket: string;
+    } & Record<string, string>
+  ): Promise<S3ExpressIdentity>;
+  private getIdentity;
+}
+export {};
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/classes/SignatureV4S3Express.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/classes/SignatureV4S3Express.d.ts
new file mode 100644
index 00000000..effd0eb7
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/classes/SignatureV4S3Express.d.ts
@@ -0,0 +1,19 @@
+import { AwsCredentialIdentity } from "@aws-sdk/types";
+import { SignatureV4 } from "@smithy/signature-v4";
+import {
+  HttpRequest as IHttpRequest,
+  RequestPresigningArguments,
+  RequestSigningArguments,
+} from "@smithy/types";
+export declare class SignatureV4S3Express extends SignatureV4 {
+  signWithCredentials(
+    requestToSign: IHttpRequest,
+    credentials: AwsCredentialIdentity,
+    options?: RequestSigningArguments
+  ): Promise<IHttpRequest>;
+  presignWithCredentials(
+    requestToSign: IHttpRequest,
+    credentials: AwsCredentialIdentity,
+    options?: RequestPresigningArguments
+  ): Promise<IHttpRequest>;
+}
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/constants.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/constants.d.ts
new file mode 100644
index 00000000..58629b18
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/constants.d.ts
@@ -0,0 +1,11 @@
+import { LoadedConfigSelectors } from "@smithy/node-config-provider";
+export declare const S3_EXPRESS_BUCKET_TYPE = "Directory";
+export declare const S3_EXPRESS_BACKEND = "S3Express";
+export declare const S3_EXPRESS_AUTH_SCHEME = "sigv4-s3express";
+export declare const SESSION_TOKEN_QUERY_PARAM = "X-Amz-S3session-Token";
+export declare const SESSION_TOKEN_HEADER: string;
+export declare const NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_ENV_NAME =
+  "AWS_S3_DISABLE_EXPRESS_SESSION_AUTH";
+export declare const NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_INI_NAME =
+  "s3_disable_express_session_auth";
+export declare const NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_OPTIONS: LoadedConfigSelectors<boolean>;
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/functions/s3ExpressHttpSigningMiddleware.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/functions/s3ExpressHttpSigningMiddleware.d.ts
new file mode 100644
index 00000000..269ad83f
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/functions/s3ExpressHttpSigningMiddleware.d.ts
@@ -0,0 +1,40 @@
+import { IHttpRequest } from "@smithy/protocol-http";
+import {
+  AuthScheme,
+  AwsCredentialIdentity,
+  FinalizeRequestMiddleware,
+  Pluggable,
+  RequestSigner,
+} from "@smithy/types";
+interface SigningProperties {
+  signingRegion: string;
+  signingDate: Date;
+  signingService: string;
+}
+interface PreviouslyResolved {
+  signer: (authScheme?: AuthScheme | undefined) => Promise<
+    RequestSigner & {
+      signWithCredentials(
+        req: IHttpRequest,
+        identity: AwsCredentialIdentity,
+        opts?: Partial<SigningProperties>
+      ): Promise<IHttpRequest>;
+    }
+  >;
+}
+export declare const s3ExpressHttpSigningMiddlewareOptions: import("@smithy/types").FinalizeRequestHandlerOptions &
+  import("@smithy/types").RelativeLocation &
+  Pick<
+    import("@smithy/types").HandlerOptions,
+    Exclude<keyof import("@smithy/types").HandlerOptions, "step">
+  >;
+export declare const s3ExpressHttpSigningMiddleware: <
+  Input extends object,
+  Output extends object
+>(
+  config: PreviouslyResolved
+) => FinalizeRequestMiddleware<Input, Output>;
+export declare const getS3ExpressHttpSigningPlugin: (config: {
+  signer: (authScheme?: AuthScheme | undefined) => Promise<RequestSigner>;
+}) => Pluggable<any, any>;
+export {};
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/functions/s3ExpressMiddleware.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/functions/s3ExpressMiddleware.d.ts
new file mode 100644
index 00000000..a85634e4
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/functions/s3ExpressMiddleware.d.ts
@@ -0,0 +1,27 @@
+import { AwsCredentialIdentity } from "@aws-sdk/types";
+import {
+  BuildHandlerOptions,
+  BuildMiddleware,
+  Logger,
+  MemoizedProvider,
+  Pluggable,
+} from "@smithy/types";
+import { S3ExpressIdentity } from "../interfaces/S3ExpressIdentity";
+import { S3ExpressIdentityProvider } from "../interfaces/S3ExpressIdentityProvider";
+declare module "@smithy/types" {
+  interface HandlerExecutionContext {
+    s3ExpressIdentity?: S3ExpressIdentity;
+  }
+}
+export interface S3ExpressResolvedConfig {
+  logger?: Logger;
+  s3ExpressIdentityProvider: S3ExpressIdentityProvider;
+  credentials: MemoizedProvider<AwsCredentialIdentity>;
+}
+export declare const s3ExpressMiddleware: (
+  options: S3ExpressResolvedConfig
+) => BuildMiddleware<any, any>;
+export declare const s3ExpressMiddlewareOptions: BuildHandlerOptions;
+export declare const getS3ExpressPlugin: (
+  options: S3ExpressResolvedConfig
+) => Pluggable<any, any>;
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/functions/signS3Express.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/functions/signS3Express.d.ts
new file mode 100644
index 00000000..76bbba33
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/functions/signS3Express.d.ts
@@ -0,0 +1,27 @@
+import {
+  AwsCredentialIdentity,
+  HttpRequest as IHttpRequest,
+} from "@smithy/types";
+import { S3ExpressIdentity } from "../interfaces/S3ExpressIdentity";
+export declare const signS3Express: (
+  s3ExpressIdentity: S3ExpressIdentity,
+  signingOptions: {
+    signingDate: Date;
+    signingRegion: string;
+    signingService: string;
+  },
+  request: IHttpRequest,
+  sigV4MultiRegionSigner: {
+    signWithCredentials(
+      req: IHttpRequest,
+      identity: AwsCredentialIdentity,
+      opts?:
+        | Partial<{
+            signingDate: Date;
+            signingRegion: string;
+            signingService: string;
+          }>
+        | undefined
+    ): Promise<IHttpRequest>;
+  }
+) => Promise<IHttpRequest>;
diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/index.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/index.d.ts
new file mode 100644
index 00000000..4051937c
--- /dev/null
+++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/index.d.ts
@@ -0,0 +1,17 @@
+export { S3ExpressIdentityCache } from "./classes/S3ExpressIdentityCache";
"./classes/S3ExpressIdentityCache"; +export { S3ExpressIdentityCacheEntry } from "./classes/S3ExpressIdentityCacheEntry"; +export { S3ExpressIdentityProviderImpl } from "./classes/S3ExpressIdentityProviderImpl"; +export { SignatureV4S3Express } from "./classes/SignatureV4S3Express"; +export { NODE_DISABLE_S3_EXPRESS_SESSION_AUTH_OPTIONS } from "./constants"; +export { + getS3ExpressPlugin, + s3ExpressMiddleware, + s3ExpressMiddlewareOptions, +} from "./functions/s3ExpressMiddleware"; +export { + getS3ExpressHttpSigningPlugin, + s3ExpressHttpSigningMiddleware, + s3ExpressHttpSigningMiddlewareOptions, +} from "./functions/s3ExpressHttpSigningMiddleware"; +export { S3ExpressIdentity } from "./interfaces/S3ExpressIdentity"; +export { S3ExpressIdentityProvider } from "./interfaces/S3ExpressIdentityProvider"; diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/interfaces/S3ExpressIdentity.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/interfaces/S3ExpressIdentity.d.ts new file mode 100644 index 00000000..2ee15c92 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/interfaces/S3ExpressIdentity.d.ts @@ -0,0 +1,2 @@ +import { AwsCredentialIdentity } from "@aws-sdk/types"; +export interface S3ExpressIdentity extends AwsCredentialIdentity {} diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/interfaces/S3ExpressIdentityProvider.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/interfaces/S3ExpressIdentityProvider.d.ts new file mode 100644 index 00000000..2c1d36cf --- /dev/null +++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3-express/interfaces/S3ExpressIdentityProvider.d.ts @@ -0,0 +1,8 @@ +import { AwsCredentialIdentity } from "@aws-sdk/types"; +import { S3ExpressIdentity } from "./S3ExpressIdentity"; +export interface S3ExpressIdentityProvider { + getS3ExpressIdentity( + awsIdentity: AwsCredentialIdentity, + identityProperties: Record + ): Promise; +} diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3Configuration.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3Configuration.d.ts new file mode 100644 index 00000000..ca390750 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/s3Configuration.d.ts @@ -0,0 +1,31 @@ +import { Client, Command } from "@smithy/types"; +import { S3ExpressIdentityProvider } from "./s3-express"; +export interface S3InputConfig { + forcePathStyle?: boolean; + useAccelerateEndpoint?: boolean; + disableMultiregionAccessPoints?: boolean; + followRegionRedirects?: boolean; + s3ExpressIdentityProvider?: S3ExpressIdentityProvider; + bucketEndpoint?: boolean; +} +type PlaceholderS3Client = Client & any; +type PlaceholderCreateSessionCommandCtor = { + new (args: any): Command; +}; +export interface S3ResolvedConfig { + forcePathStyle: boolean; + useAccelerateEndpoint: boolean; + disableMultiregionAccessPoints: boolean; + followRegionRedirects: boolean; + s3ExpressIdentityProvider: S3ExpressIdentityProvider; + bucketEndpoint: boolean; +} +export declare const resolveS3Config: ( + input: T & S3InputConfig, + { + session, + }: { + session: [() => PlaceholderS3Client, PlaceholderCreateSessionCommandCtor]; + } +) => T & S3ResolvedConfig; +export {}; diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/throw-200-exceptions.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/throw-200-exceptions.d.ts new file mode 100644 index 
00000000..413b96a5 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/throw-200-exceptions.d.ts @@ -0,0 +1,17 @@ +import { + DeserializeMiddleware, + Encoder, + Pluggable, + RelativeMiddlewareOptions, +} from "@smithy/types"; +type PreviouslyResolved = { + utf8Encoder: Encoder; +}; +export declare const throw200ExceptionsMiddleware: ( + config: PreviouslyResolved +) => DeserializeMiddleware; +export declare const throw200ExceptionsMiddlewareOptions: RelativeMiddlewareOptions; +export declare const getThrow200ExceptionsPlugin: ( + config: PreviouslyResolved +) => Pluggable; +export {}; diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/validate-bucket-name.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/validate-bucket-name.d.ts new file mode 100644 index 00000000..1775731b --- /dev/null +++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/ts3.4/validate-bucket-name.d.ts @@ -0,0 +1,13 @@ +import { + InitializeHandlerOptions, + InitializeMiddleware, + Pluggable, +} from "@smithy/types"; +import { S3ResolvedConfig } from "./s3Configuration"; +export declare function validateBucketNameMiddleware({ + bucketEndpoint, +}: S3ResolvedConfig): InitializeMiddleware; +export declare const validateBucketNameMiddlewareOptions: InitializeHandlerOptions; +export declare const getValidateBucketNamePlugin: ( + options: S3ResolvedConfig +) => Pluggable; diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/validate-bucket-name.d.ts b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/validate-bucket-name.d.ts new file mode 100644 index 00000000..70e51a72 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-sdk-s3/dist-types/validate-bucket-name.d.ts @@ -0,0 +1,14 @@ +import { InitializeHandlerOptions, InitializeMiddleware, Pluggable } from "@smithy/types"; +import { S3ResolvedConfig } from "./s3Configuration"; +/** + * @internal + */ +export declare function validateBucketNameMiddleware({ bucketEndpoint }: S3ResolvedConfig): InitializeMiddleware; +/** + * @internal + */ +export declare const validateBucketNameMiddlewareOptions: InitializeHandlerOptions; +/** + * @internal + */ +export declare const getValidateBucketNamePlugin: (options: S3ResolvedConfig) => Pluggable; diff --git a/node_modules/@aws-sdk/middleware-sdk-s3/package.json b/node_modules/@aws-sdk/middleware-sdk-s3/package.json new file mode 100644 index 00000000..2fe807e4 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-sdk-s3/package.json @@ -0,0 +1,71 @@ +{ + "name": "@aws-sdk/middleware-sdk-s3", + "version": "3.799.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-sdk-s3", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:types": "tsc -p tsconfig.test.json", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts && yarn test:types", + "test:e2e": "yarn g:vitest run -c vitest.config.e2e.ts --mode development", + "extract:docs": "api-extractor run --local", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts", + "test:e2e:watch": "yarn g:vitest watch -c vitest.config.e2e.ts" + }, + "main": 
"./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-arn-parser": "3.723.0", + "@smithy/core": "^3.3.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-sdk-s3", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-sdk-s3" + } +} diff --git a/node_modules/@aws-sdk/middleware-ssec/LICENSE b/node_modules/@aws-sdk/middleware-ssec/LICENSE new file mode 100644 index 00000000..dd65ae06 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-ssec/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
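The @aws-sdk/middleware-ssec implementation vendored below normalizes customer-provided SSE-C encryption keys: ssecMiddleware base64-encodes the SSECustomerKey (leaving it untouched when it is already valid base64 of a 32-byte key, per isValidBase64EncodedSSECustomerKey) and fills in the matching SSECustomerKeyMD5 checksum before the request is signed. A minimal sketch of how a caller would lean on that behavior, assuming @aws-sdk/client-s3 is installed alongside these vendored packages; the region, bucket, and object key are illustrative:

import { GetObjectCommand, S3Client } from "@aws-sdk/client-s3";
import { randomBytes } from "node:crypto";

// Hypothetical example: a 256-bit SSE-C key supplied as base64. The vendored
// ssecMiddleware recognizes a valid base64-encoded 32-byte key, keeps it
// as-is, and derives SSECustomerKeyMD5 automatically before signing.
const sseCustomerKey = randomBytes(32).toString("base64");

const s3 = new S3Client({ region: "us-east-1" }); // region is an assumption

async function fetchEncryptedObject() {
  return s3.send(
    new GetObjectCommand({
      Bucket: "example-bucket", // hypothetical bucket
      Key: "files/example.vcf", // hypothetical object key
      SSECustomerAlgorithm: "AES256",
      SSECustomerKey: sseCustomerKey, // MD5 header is added by the middleware
    })
  );
}

Passing the key as a raw Uint8Array also works at runtime (the middleware base64-encodes it), but the base64 string form matches the generated client typings.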
diff --git a/node_modules/@aws-sdk/middleware-ssec/README.md b/node_modules/@aws-sdk/middleware-ssec/README.md new file mode 100644 index 00000000..a25f60bf --- /dev/null +++ b/node_modules/@aws-sdk/middleware-ssec/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/middleware-ssec + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-ssec/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-ssec) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-ssec.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-ssec) diff --git a/node_modules/@aws-sdk/middleware-ssec/dist-cjs/index.js b/node_modules/@aws-sdk/middleware-ssec/dist-cjs/index.js new file mode 100644 index 00000000..e8f6edda --- /dev/null +++ b/node_modules/@aws-sdk/middleware-ssec/dist-cjs/index.js @@ -0,0 +1,100 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + getSsecPlugin: () => getSsecPlugin, + isValidBase64EncodedSSECustomerKey: () => isValidBase64EncodedSSECustomerKey, + ssecMiddleware: () => ssecMiddleware, + ssecMiddlewareOptions: () => ssecMiddlewareOptions +}); +module.exports = __toCommonJS(index_exports); +function ssecMiddleware(options) { + return (next) => async (args) => { + const input = { ...args.input }; + const properties = [ + { + target: "SSECustomerKey", + hash: "SSECustomerKeyMD5" + }, + { + target: "CopySourceSSECustomerKey", + hash: "CopySourceSSECustomerKeyMD5" + } + ]; + for (const prop of properties) { + const value = input[prop.target]; + if (value) { + let valueForHash; + if (typeof value === "string") { + if (isValidBase64EncodedSSECustomerKey(value, options)) { + valueForHash = options.base64Decoder(value); + } else { + valueForHash = options.utf8Decoder(value); + input[prop.target] = options.base64Encoder(valueForHash); + } + } else { + valueForHash = ArrayBuffer.isView(value) ? 
new Uint8Array(value.buffer, value.byteOffset, value.byteLength) : new Uint8Array(value); + input[prop.target] = options.base64Encoder(valueForHash); + } + const hash = new options.md5(); + hash.update(valueForHash); + input[prop.hash] = options.base64Encoder(await hash.digest()); + } + } + return next({ + ...args, + input + }); + }; +} +__name(ssecMiddleware, "ssecMiddleware"); +var ssecMiddlewareOptions = { + name: "ssecMiddleware", + step: "initialize", + tags: ["SSE"], + override: true +}; +var getSsecPlugin = /* @__PURE__ */ __name((config) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(ssecMiddleware(config), ssecMiddlewareOptions); + }, "applyToStack") +}), "getSsecPlugin"); +function isValidBase64EncodedSSECustomerKey(str, options) { + const base64Regex = /^(?:[A-Za-z0-9+/]{4})*([A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/; + if (!base64Regex.test(str)) return false; + try { + const decodedBytes = options.base64Decoder(str); + return decodedBytes.length === 32; + } catch { + return false; + } +} +__name(isValidBase64EncodedSSECustomerKey, "isValidBase64EncodedSSECustomerKey"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + ssecMiddleware, + ssecMiddlewareOptions, + getSsecPlugin, + isValidBase64EncodedSSECustomerKey +}); + diff --git a/node_modules/@aws-sdk/middleware-ssec/dist-es/index.js b/node_modules/@aws-sdk/middleware-ssec/dist-es/index.js new file mode 100644 index 00000000..3abb4b65 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-ssec/dist-es/index.js @@ -0,0 +1,66 @@ +export function ssecMiddleware(options) { + return (next) => async (args) => { + const input = { ...args.input }; + const properties = [ + { + target: "SSECustomerKey", + hash: "SSECustomerKeyMD5", + }, + { + target: "CopySourceSSECustomerKey", + hash: "CopySourceSSECustomerKeyMD5", + }, + ]; + for (const prop of properties) { + const value = input[prop.target]; + if (value) { + let valueForHash; + if (typeof value === "string") { + if (isValidBase64EncodedSSECustomerKey(value, options)) { + valueForHash = options.base64Decoder(value); + } + else { + valueForHash = options.utf8Decoder(value); + input[prop.target] = options.base64Encoder(valueForHash); + } + } + else { + valueForHash = ArrayBuffer.isView(value) + ? 
new Uint8Array(value.buffer, value.byteOffset, value.byteLength) + : new Uint8Array(value); + input[prop.target] = options.base64Encoder(valueForHash); + } + const hash = new options.md5(); + hash.update(valueForHash); + input[prop.hash] = options.base64Encoder(await hash.digest()); + } + } + return next({ + ...args, + input, + }); + }; +} +export const ssecMiddlewareOptions = { + name: "ssecMiddleware", + step: "initialize", + tags: ["SSE"], + override: true, +}; +export const getSsecPlugin = (config) => ({ + applyToStack: (clientStack) => { + clientStack.add(ssecMiddleware(config), ssecMiddlewareOptions); + }, +}); +export function isValidBase64EncodedSSECustomerKey(str, options) { + const base64Regex = /^(?:[A-Za-z0-9+/]{4})*([A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/; + if (!base64Regex.test(str)) + return false; + try { + const decodedBytes = options.base64Decoder(str); + return decodedBytes.length === 32; + } + catch { + return false; + } +}
diff --git a/node_modules/@aws-sdk/middleware-ssec/dist-types/index.d.ts b/node_modules/@aws-sdk/middleware-ssec/dist-types/index.d.ts new file mode 100644 index 00000000..74e5834e --- /dev/null +++ b/node_modules/@aws-sdk/middleware-ssec/dist-types/index.d.ts @@ -0,0 +1,12 @@ +import { ChecksumConstructor, Decoder, Encoder, HashConstructor, InitializeHandlerOptions, InitializeMiddleware, Pluggable } from "@smithy/types"; +interface PreviouslyResolved { + base64Encoder: Encoder; + md5: ChecksumConstructor | HashConstructor; + utf8Decoder: Decoder; + base64Decoder: Decoder; +} +export declare function ssecMiddleware(options: PreviouslyResolved): InitializeMiddleware<any, any>; +export declare const ssecMiddlewareOptions: InitializeHandlerOptions; +export declare const getSsecPlugin: (config: PreviouslyResolved) => Pluggable<any, any>; +export declare function isValidBase64EncodedSSECustomerKey(str: string, options: PreviouslyResolved): boolean; +export {};
diff --git a/node_modules/@aws-sdk/middleware-ssec/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/middleware-ssec/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..cfffd594 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-ssec/dist-types/ts3.4/index.d.ts @@ -0,0 +1,27 @@ +import { + ChecksumConstructor, + Decoder, + Encoder, + HashConstructor, + InitializeHandlerOptions, + InitializeMiddleware, + Pluggable, +} from "@smithy/types"; +interface PreviouslyResolved { + base64Encoder: Encoder; + md5: ChecksumConstructor | HashConstructor; + utf8Decoder: Decoder; + base64Decoder: Decoder; +} +export declare function ssecMiddleware( + options: PreviouslyResolved +): InitializeMiddleware<any, any>; +export declare const ssecMiddlewareOptions: InitializeHandlerOptions; +export declare const getSsecPlugin: ( + config: PreviouslyResolved +) => Pluggable<any, any>; +export declare function isValidBase64EncodedSSECustomerKey( + str: string, + options: PreviouslyResolved +): boolean; +export {};
diff --git a/node_modules/@aws-sdk/middleware-ssec/package.json b/node_modules/@aws-sdk/middleware-ssec/package.json new file mode 100644 index 00000000..b59984dd --- /dev/null +++ b/node_modules/@aws-sdk/middleware-ssec/package.json @@ -0,0 +1,56 @@ +{ + "name": "@aws-sdk/middleware-ssec", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-ssec", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p
tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-ssec", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-ssec" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + } +} diff --git a/node_modules/@aws-sdk/middleware-user-agent/LICENSE b/node_modules/@aws-sdk/middleware-user-agent/LICENSE new file mode 100644 index 00000000..e907b586 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
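The @aws-sdk/middleware-user-agent implementation vendored below assembles the user-agent and x-amz-user-agent headers on every request: resolveUserAgentConfig normalizes customUserAgent and validates userAgentAppId (over-long values only produce a warning), and userAgentMiddleware appends the escaped sections to the SDK's default UA string. A minimal sketch of the two client options an application can set, assuming @aws-sdk/client-s3 at a version that accepts userAgentAppId; the names are illustrative:

import { S3Client } from "@aws-sdk/client-s3";

// Hypothetical example: customUserAgent is appended after the SDK's default
// UA sections, and userAgentAppId surfaces as an app/<id> section in the
// header built by the vendored userAgentMiddleware.
const s3 = new S3Client({
  region: "us-east-1", // assumption
  customUserAgent: "file-manager/1.2.3",
  userAgentAppId: "file-manager",
});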
diff --git a/node_modules/@aws-sdk/middleware-user-agent/README.md b/node_modules/@aws-sdk/middleware-user-agent/README.md new file mode 100644 index 00000000..a0bf1a92 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/middleware-user-agent + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-user-agent/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-user-agent) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-user-agent.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-user-agent) diff --git a/node_modules/@aws-sdk/middleware-user-agent/dist-cjs/index.js b/node_modules/@aws-sdk/middleware-user-agent/dist-cjs/index.js new file mode 100644 index 00000000..aaf267cb --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/dist-cjs/index.js @@ -0,0 +1,227 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + DEFAULT_UA_APP_ID: () => DEFAULT_UA_APP_ID, + getUserAgentMiddlewareOptions: () => getUserAgentMiddlewareOptions, + getUserAgentPlugin: () => getUserAgentPlugin, + resolveUserAgentConfig: () => resolveUserAgentConfig, + userAgentMiddleware: () => userAgentMiddleware +}); +module.exports = __toCommonJS(index_exports); + +// src/configurations.ts +var import_core = require("@smithy/core"); +var DEFAULT_UA_APP_ID = void 0; +function isValidUserAgentAppId(appId) { + if (appId === void 0) { + return true; + } + return typeof appId === "string" && appId.length <= 50; +} +__name(isValidUserAgentAppId, "isValidUserAgentAppId"); +function resolveUserAgentConfig(input) { + const normalizedAppIdProvider = (0, import_core.normalizeProvider)(input.userAgentAppId ?? DEFAULT_UA_APP_ID); + const { customUserAgent } = input; + return Object.assign(input, { + customUserAgent: typeof customUserAgent === "string" ? [[customUserAgent]] : customUserAgent, + userAgentAppId: /* @__PURE__ */ __name(async () => { + const appId = await normalizedAppIdProvider(); + if (!isValidUserAgentAppId(appId)) { + const logger = input.logger?.constructor?.name === "NoOpLogger" || !input.logger ? 
console : input.logger; + if (typeof appId !== "string") { + logger?.warn("userAgentAppId must be a string or undefined."); + } else if (appId.length > 50) { + logger?.warn("The provided userAgentAppId exceeds the maximum length of 50 characters."); + } + } + return appId; + }, "userAgentAppId") + }); +} +__name(resolveUserAgentConfig, "resolveUserAgentConfig"); + +// src/user-agent-middleware.ts +var import_util_endpoints = require("@aws-sdk/util-endpoints"); +var import_protocol_http = require("@smithy/protocol-http"); + +// src/check-features.ts +var import_core2 = require("@aws-sdk/core"); +var ACCOUNT_ID_ENDPOINT_REGEX = /\d{12}\.ddb/; +async function checkFeatures(context, config, args) { + const request = args.request; + if (request?.headers?.["smithy-protocol"] === "rpc-v2-cbor") { + (0, import_core2.setFeature)(context, "PROTOCOL_RPC_V2_CBOR", "M"); + } + if (typeof config.retryStrategy === "function") { + const retryStrategy = await config.retryStrategy(); + if (typeof retryStrategy.acquireInitialRetryToken === "function") { + if (retryStrategy.constructor?.name?.includes("Adaptive")) { + (0, import_core2.setFeature)(context, "RETRY_MODE_ADAPTIVE", "F"); + } else { + (0, import_core2.setFeature)(context, "RETRY_MODE_STANDARD", "E"); + } + } else { + (0, import_core2.setFeature)(context, "RETRY_MODE_LEGACY", "D"); + } + } + if (typeof config.accountIdEndpointMode === "function") { + const endpointV2 = context.endpointV2; + if (String(endpointV2?.url?.hostname).match(ACCOUNT_ID_ENDPOINT_REGEX)) { + (0, import_core2.setFeature)(context, "ACCOUNT_ID_ENDPOINT", "O"); + } + switch (await config.accountIdEndpointMode?.()) { + case "disabled": + (0, import_core2.setFeature)(context, "ACCOUNT_ID_MODE_DISABLED", "Q"); + break; + case "preferred": + (0, import_core2.setFeature)(context, "ACCOUNT_ID_MODE_PREFERRED", "P"); + break; + case "required": + (0, import_core2.setFeature)(context, "ACCOUNT_ID_MODE_REQUIRED", "R"); + break; + } + } + const identity = context.__smithy_context?.selectedHttpAuthScheme?.identity; + if (identity?.$source) { + const credentials = identity; + if (credentials.accountId) { + (0, import_core2.setFeature)(context, "RESOLVED_ACCOUNT_ID", "T"); + } + for (const [key, value] of Object.entries(credentials.$source ?? 
{})) { + (0, import_core2.setFeature)(context, key, value); + } + } +} +__name(checkFeatures, "checkFeatures"); + +// src/constants.ts +var USER_AGENT = "user-agent"; +var X_AMZ_USER_AGENT = "x-amz-user-agent"; +var SPACE = " "; +var UA_NAME_SEPARATOR = "/"; +var UA_NAME_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w]/g; +var UA_VALUE_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w\#]/g; +var UA_ESCAPE_CHAR = "-"; + +// src/encode-features.ts +var BYTE_LIMIT = 1024; +function encodeFeatures(features) { + let buffer = ""; + for (const key in features) { + const val = features[key]; + if (buffer.length + val.length + 1 <= BYTE_LIMIT) { + if (buffer.length) { + buffer += "," + val; + } else { + buffer += val; + } + continue; + } + break; + } + return buffer; +} +__name(encodeFeatures, "encodeFeatures"); + +// src/user-agent-middleware.ts +var userAgentMiddleware = /* @__PURE__ */ __name((options) => (next, context) => async (args) => { + const { request } = args; + if (!import_protocol_http.HttpRequest.isInstance(request)) { + return next(args); + } + const { headers } = request; + const userAgent = context?.userAgent?.map(escapeUserAgent) || []; + const defaultUserAgent = (await options.defaultUserAgentProvider()).map(escapeUserAgent); + await checkFeatures(context, options, args); + const awsContext = context; + defaultUserAgent.push( + `m/${encodeFeatures( + Object.assign({}, context.__smithy_context?.features, awsContext.__aws_sdk_context?.features) + )}` + ); + const customUserAgent = options?.customUserAgent?.map(escapeUserAgent) || []; + const appId = await options.userAgentAppId(); + if (appId) { + defaultUserAgent.push(escapeUserAgent([`app/${appId}`])); + } + const prefix = (0, import_util_endpoints.getUserAgentPrefix)(); + const sdkUserAgentValue = (prefix ? [prefix] : []).concat([...defaultUserAgent, ...userAgent, ...customUserAgent]).join(SPACE); + const normalUAValue = [ + ...defaultUserAgent.filter((section) => section.startsWith("aws-sdk-")), + ...customUserAgent + ].join(SPACE); + if (options.runtime !== "browser") { + if (normalUAValue) { + headers[X_AMZ_USER_AGENT] = headers[X_AMZ_USER_AGENT] ? 
`${headers[USER_AGENT]} ${normalUAValue}` : normalUAValue; + } + headers[USER_AGENT] = sdkUserAgentValue; + } else { + headers[X_AMZ_USER_AGENT] = sdkUserAgentValue; + } + return next({ + ...args, + request + }); +}, "userAgentMiddleware"); +var escapeUserAgent = /* @__PURE__ */ __name((userAgentPair) => { + const name = userAgentPair[0].split(UA_NAME_SEPARATOR).map((part) => part.replace(UA_NAME_ESCAPE_REGEX, UA_ESCAPE_CHAR)).join(UA_NAME_SEPARATOR); + const version = userAgentPair[1]?.replace(UA_VALUE_ESCAPE_REGEX, UA_ESCAPE_CHAR); + const prefixSeparatorIndex = name.indexOf(UA_NAME_SEPARATOR); + const prefix = name.substring(0, prefixSeparatorIndex); + let uaName = name.substring(prefixSeparatorIndex + 1); + if (prefix === "api") { + uaName = uaName.toLowerCase(); + } + return [prefix, uaName, version].filter((item) => item && item.length > 0).reduce((acc, item, index) => { + switch (index) { + case 0: + return item; + case 1: + return `${acc}/${item}`; + default: + return `${acc}#${item}`; + } + }, ""); +}, "escapeUserAgent"); +var getUserAgentMiddlewareOptions = { + name: "getUserAgentMiddleware", + step: "build", + priority: "low", + tags: ["SET_USER_AGENT", "USER_AGENT"], + override: true +}; +var getUserAgentPlugin = /* @__PURE__ */ __name((config) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(userAgentMiddleware(config), getUserAgentMiddlewareOptions); + }, "applyToStack") +}), "getUserAgentPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + DEFAULT_UA_APP_ID, + resolveUserAgentConfig, + userAgentMiddleware, + getUserAgentMiddlewareOptions, + getUserAgentPlugin +}); + diff --git a/node_modules/@aws-sdk/middleware-user-agent/dist-es/check-features.js b/node_modules/@aws-sdk/middleware-user-agent/dist-es/check-features.js new file mode 100644 index 00000000..1f115a74 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/dist-es/check-features.js @@ -0,0 +1,49 @@ +import { setFeature } from "@aws-sdk/core"; +const ACCOUNT_ID_ENDPOINT_REGEX = /\d{12}\.ddb/; +export async function checkFeatures(context, config, args) { + const request = args.request; + if (request?.headers?.["smithy-protocol"] === "rpc-v2-cbor") { + setFeature(context, "PROTOCOL_RPC_V2_CBOR", "M"); + } + if (typeof config.retryStrategy === "function") { + const retryStrategy = await config.retryStrategy(); + if (typeof retryStrategy.acquireInitialRetryToken === "function") { + if (retryStrategy.constructor?.name?.includes("Adaptive")) { + setFeature(context, "RETRY_MODE_ADAPTIVE", "F"); + } + else { + setFeature(context, "RETRY_MODE_STANDARD", "E"); + } + } + else { + setFeature(context, "RETRY_MODE_LEGACY", "D"); + } + } + if (typeof config.accountIdEndpointMode === "function") { + const endpointV2 = context.endpointV2; + if (String(endpointV2?.url?.hostname).match(ACCOUNT_ID_ENDPOINT_REGEX)) { + setFeature(context, "ACCOUNT_ID_ENDPOINT", "O"); + } + switch (await config.accountIdEndpointMode?.()) { + case "disabled": + setFeature(context, "ACCOUNT_ID_MODE_DISABLED", "Q"); + break; + case "preferred": + setFeature(context, "ACCOUNT_ID_MODE_PREFERRED", "P"); + break; + case "required": + setFeature(context, "ACCOUNT_ID_MODE_REQUIRED", "R"); + break; + } + } + const identity = context.__smithy_context?.selectedHttpAuthScheme?.identity; + if (identity?.$source) { + const credentials = identity; + if (credentials.accountId) { + setFeature(context, "RESOLVED_ACCOUNT_ID", "T"); + } + for (const [key, value] of 
Object.entries(credentials.$source ?? {})) { + setFeature(context, key, value); + } + } +} diff --git a/node_modules/@aws-sdk/middleware-user-agent/dist-es/configurations.js b/node_modules/@aws-sdk/middleware-user-agent/dist-es/configurations.js new file mode 100644 index 00000000..7fff0875 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/dist-es/configurations.js @@ -0,0 +1,28 @@ +import { normalizeProvider } from "@smithy/core"; +export const DEFAULT_UA_APP_ID = undefined; +function isValidUserAgentAppId(appId) { + if (appId === undefined) { + return true; + } + return typeof appId === "string" && appId.length <= 50; +} +export function resolveUserAgentConfig(input) { + const normalizedAppIdProvider = normalizeProvider(input.userAgentAppId ?? DEFAULT_UA_APP_ID); + const { customUserAgent } = input; + return Object.assign(input, { + customUserAgent: typeof customUserAgent === "string" ? [[customUserAgent]] : customUserAgent, + userAgentAppId: async () => { + const appId = await normalizedAppIdProvider(); + if (!isValidUserAgentAppId(appId)) { + const logger = input.logger?.constructor?.name === "NoOpLogger" || !input.logger ? console : input.logger; + if (typeof appId !== "string") { + logger?.warn("userAgentAppId must be a string or undefined."); + } + else if (appId.length > 50) { + logger?.warn("The provided userAgentAppId exceeds the maximum length of 50 characters."); + } + } + return appId; + }, + }); +} diff --git a/node_modules/@aws-sdk/middleware-user-agent/dist-es/constants.js b/node_modules/@aws-sdk/middleware-user-agent/dist-es/constants.js new file mode 100644 index 00000000..33e33916 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/dist-es/constants.js @@ -0,0 +1,7 @@ +export const USER_AGENT = "user-agent"; +export const X_AMZ_USER_AGENT = "x-amz-user-agent"; +export const SPACE = " "; +export const UA_NAME_SEPARATOR = "/"; +export const UA_NAME_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w]/g; +export const UA_VALUE_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w\#]/g; +export const UA_ESCAPE_CHAR = "-"; diff --git a/node_modules/@aws-sdk/middleware-user-agent/dist-es/encode-features.js b/node_modules/@aws-sdk/middleware-user-agent/dist-es/encode-features.js new file mode 100644 index 00000000..23002b6a --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/dist-es/encode-features.js @@ -0,0 +1,18 @@ +const BYTE_LIMIT = 1024; +export function encodeFeatures(features) { + let buffer = ""; + for (const key in features) { + const val = features[key]; + if (buffer.length + val.length + 1 <= BYTE_LIMIT) { + if (buffer.length) { + buffer += "," + val; + } + else { + buffer += val; + } + continue; + } + break; + } + return buffer; +} diff --git a/node_modules/@aws-sdk/middleware-user-agent/dist-es/index.js b/node_modules/@aws-sdk/middleware-user-agent/dist-es/index.js new file mode 100644 index 00000000..0456ec7b --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./configurations"; +export * from "./user-agent-middleware"; diff --git a/node_modules/@aws-sdk/middleware-user-agent/dist-es/user-agent-middleware.js b/node_modules/@aws-sdk/middleware-user-agent/dist-es/user-agent-middleware.js new file mode 100644 index 00000000..188bda0c --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/dist-es/user-agent-middleware.js @@ -0,0 +1,82 @@ +import { getUserAgentPrefix } from "@aws-sdk/util-endpoints"; +import { HttpRequest } from "@smithy/protocol-http"; +import { 
checkFeatures } from "./check-features"; +import { SPACE, UA_ESCAPE_CHAR, UA_NAME_ESCAPE_REGEX, UA_NAME_SEPARATOR, UA_VALUE_ESCAPE_REGEX, USER_AGENT, X_AMZ_USER_AGENT, } from "./constants"; +import { encodeFeatures } from "./encode-features"; +export const userAgentMiddleware = (options) => (next, context) => async (args) => { + const { request } = args; + if (!HttpRequest.isInstance(request)) { + return next(args); + } + const { headers } = request; + const userAgent = context?.userAgent?.map(escapeUserAgent) || []; + const defaultUserAgent = (await options.defaultUserAgentProvider()).map(escapeUserAgent); + await checkFeatures(context, options, args); + const awsContext = context; + defaultUserAgent.push(`m/${encodeFeatures(Object.assign({}, context.__smithy_context?.features, awsContext.__aws_sdk_context?.features))}`); + const customUserAgent = options?.customUserAgent?.map(escapeUserAgent) || []; + const appId = await options.userAgentAppId(); + if (appId) { + defaultUserAgent.push(escapeUserAgent([`app/${appId}`])); + } + const prefix = getUserAgentPrefix(); + const sdkUserAgentValue = (prefix ? [prefix] : []) + .concat([...defaultUserAgent, ...userAgent, ...customUserAgent]) + .join(SPACE); + const normalUAValue = [ + ...defaultUserAgent.filter((section) => section.startsWith("aws-sdk-")), + ...customUserAgent, + ].join(SPACE); + if (options.runtime !== "browser") { + if (normalUAValue) { + headers[X_AMZ_USER_AGENT] = headers[X_AMZ_USER_AGENT] + ? `${headers[USER_AGENT]} ${normalUAValue}` + : normalUAValue; + } + headers[USER_AGENT] = sdkUserAgentValue; + } + else { + headers[X_AMZ_USER_AGENT] = sdkUserAgentValue; + } + return next({ + ...args, + request, + }); +}; +const escapeUserAgent = (userAgentPair) => { + const name = userAgentPair[0] + .split(UA_NAME_SEPARATOR) + .map((part) => part.replace(UA_NAME_ESCAPE_REGEX, UA_ESCAPE_CHAR)) + .join(UA_NAME_SEPARATOR); + const version = userAgentPair[1]?.replace(UA_VALUE_ESCAPE_REGEX, UA_ESCAPE_CHAR); + const prefixSeparatorIndex = name.indexOf(UA_NAME_SEPARATOR); + const prefix = name.substring(0, prefixSeparatorIndex); + let uaName = name.substring(prefixSeparatorIndex + 1); + if (prefix === "api") { + uaName = uaName.toLowerCase(); + } + return [prefix, uaName, version] + .filter((item) => item && item.length > 0) + .reduce((acc, item, index) => { + switch (index) { + case 0: + return item; + case 1: + return `${acc}/${item}`; + default: + return `${acc}#${item}`; + } + }, ""); +}; +export const getUserAgentMiddlewareOptions = { + name: "getUserAgentMiddleware", + step: "build", + priority: "low", + tags: ["SET_USER_AGENT", "USER_AGENT"], + override: true, +}; +export const getUserAgentPlugin = (config) => ({ + applyToStack: (clientStack) => { + clientStack.add(userAgentMiddleware(config), getUserAgentMiddlewareOptions); + }, +}); diff --git a/node_modules/@aws-sdk/middleware-user-agent/dist-types/check-features.d.ts b/node_modules/@aws-sdk/middleware-user-agent/dist-types/check-features.d.ts new file mode 100644 index 00000000..a75d08b2 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/dist-types/check-features.d.ts @@ -0,0 +1,18 @@ +import type { AccountIdEndpointMode } from "@aws-sdk/core/account-id-endpoint"; +import type { AwsHandlerExecutionContext } from "@aws-sdk/types"; +import type { AwsCredentialIdentityProvider, BuildHandlerArguments, Provider, RetryStrategy, RetryStrategyV2 } from "@smithy/types"; +/** + * @internal + */ +type PreviouslyResolved = Partial<{ + credentials?: AwsCredentialIdentityProvider; + 
accountIdEndpointMode?: Provider<AccountIdEndpointMode>; + retryStrategy?: Provider<RetryStrategy | RetryStrategyV2>; +}>; +/** + * @internal + * Check for features that don't have a middleware activation site but + * may be detected on the context, client config, or request. + */ +export declare function checkFeatures(context: AwsHandlerExecutionContext, config: PreviouslyResolved, args: BuildHandlerArguments<any>): Promise<void>; +export {}; diff --git a/node_modules/@aws-sdk/middleware-user-agent/dist-types/configurations.d.ts b/node_modules/@aws-sdk/middleware-user-agent/dist-types/configurations.d.ts new file mode 100644 index 00000000..f8183f84 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/dist-types/configurations.d.ts @@ -0,0 +1,44 @@ +import { Logger, Provider, UserAgent } from "@smithy/types"; +/** + * @internal + */ +export declare const DEFAULT_UA_APP_ID: undefined; +/** + * @public + */ +export interface UserAgentInputConfig { + /** + * The custom user agent header that would be appended to default one + */ + customUserAgent?: string | UserAgent; + /** + * The application ID used to identify the application. + */ + userAgentAppId?: string | undefined | Provider<string | undefined>; +} +interface PreviouslyResolved { + defaultUserAgentProvider: Provider<UserAgent>; + runtime: string; + logger?: Logger; +} +export interface UserAgentResolvedConfig { + /** + * The provider populating default tracking information to be sent with `user-agent`, `x-amz-user-agent` header. + * @internal + */ + defaultUserAgentProvider: Provider<UserAgent>; + /** + * The custom user agent header that would be appended to default one + */ + customUserAgent?: UserAgent; + /** + * The runtime environment + */ + runtime: string; + /** + * Resolved value for input config {config.userAgentAppId} + */ + userAgentAppId: Provider<string | undefined>; +} +export declare function resolveUserAgentConfig<T>(input: T & PreviouslyResolved & UserAgentInputConfig): T & UserAgentResolvedConfig; +export {}; diff --git a/node_modules/@aws-sdk/middleware-user-agent/dist-types/constants.d.ts b/node_modules/@aws-sdk/middleware-user-agent/dist-types/constants.d.ts new file mode 100644 index 00000000..8c0dfc93 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/dist-types/constants.d.ts @@ -0,0 +1,7 @@ +export declare const USER_AGENT = "user-agent"; +export declare const X_AMZ_USER_AGENT = "x-amz-user-agent"; +export declare const SPACE = " "; +export declare const UA_NAME_SEPARATOR = "/"; +export declare const UA_NAME_ESCAPE_REGEX: RegExp; +export declare const UA_VALUE_ESCAPE_REGEX: RegExp; +export declare const UA_ESCAPE_CHAR = "-"; diff --git a/node_modules/@aws-sdk/middleware-user-agent/dist-types/encode-features.d.ts b/node_modules/@aws-sdk/middleware-user-agent/dist-types/encode-features.d.ts new file mode 100644 index 00000000..d6079ae2 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/dist-types/encode-features.d.ts @@ -0,0 +1,5 @@ +import type { AwsSdkFeatures } from "@aws-sdk/types"; +/** + * @internal + */ +export declare function encodeFeatures(features: AwsSdkFeatures): string; diff --git a/node_modules/@aws-sdk/middleware-user-agent/dist-types/index.d.ts b/node_modules/@aws-sdk/middleware-user-agent/dist-types/index.d.ts new file mode 100644 index 00000000..0456ec7b --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./configurations"; +export * from "./user-agent-middleware"; diff --git a/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/check-features.d.ts
b/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/check-features.d.ts new file mode 100644 index 00000000..d8fc2016 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/check-features.d.ts @@ -0,0 +1,20 @@ +import { AccountIdEndpointMode } from "@aws-sdk/core/account-id-endpoint"; +import { AwsHandlerExecutionContext } from "@aws-sdk/types"; +import { + AwsCredentialIdentityProvider, + BuildHandlerArguments, + Provider, + RetryStrategy, + RetryStrategyV2, +} from "@smithy/types"; +type PreviouslyResolved = Partial<{ + credentials?: AwsCredentialIdentityProvider; + accountIdEndpointMode?: Provider<AccountIdEndpointMode>; + retryStrategy?: Provider<RetryStrategy | RetryStrategyV2>; +}>; +export declare function checkFeatures( + context: AwsHandlerExecutionContext, + config: PreviouslyResolved, + args: BuildHandlerArguments<any> +): Promise<void>; +export {}; diff --git a/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/configurations.d.ts b/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/configurations.d.ts new file mode 100644 index 00000000..a4a1b108 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/configurations.d.ts @@ -0,0 +1,21 @@ +import { Logger, Provider, UserAgent } from "@smithy/types"; +export declare const DEFAULT_UA_APP_ID: undefined; +export interface UserAgentInputConfig { + customUserAgent?: string | UserAgent; + userAgentAppId?: string | undefined | Provider<string | undefined>; +} +interface PreviouslyResolved { + defaultUserAgentProvider: Provider<UserAgent>; + runtime: string; + logger?: Logger; +} +export interface UserAgentResolvedConfig { + defaultUserAgentProvider: Provider<UserAgent>; + customUserAgent?: UserAgent; + runtime: string; + userAgentAppId: Provider<string | undefined>; +} +export declare function resolveUserAgentConfig<T>( + input: T & PreviouslyResolved & UserAgentInputConfig +): T & UserAgentResolvedConfig; +export {}; diff --git a/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/constants.d.ts b/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/constants.d.ts new file mode 100644 index 00000000..8c0dfc93 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,7 @@ +export declare const USER_AGENT = "user-agent"; +export declare const X_AMZ_USER_AGENT = "x-amz-user-agent"; +export declare const SPACE = " "; +export declare const UA_NAME_SEPARATOR = "/"; +export declare const UA_NAME_ESCAPE_REGEX: RegExp; +export declare const UA_VALUE_ESCAPE_REGEX: RegExp; +export declare const UA_ESCAPE_CHAR = "-"; diff --git a/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/encode-features.d.ts b/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/encode-features.d.ts new file mode 100644 index 00000000..a7be5b71 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/encode-features.d.ts @@ -0,0 +1,2 @@ +import { AwsSdkFeatures } from "@aws-sdk/types"; +export declare function encodeFeatures(features: AwsSdkFeatures): string; diff --git a/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..0456ec7b --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./configurations"; +export * from "./user-agent-middleware"; diff --git a/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/user-agent-middleware.d.ts
b/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/user-agent-middleware.d.ts new file mode 100644 index 00000000..a4da01e8 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/user-agent-middleware.d.ts @@ -0,0 +1,21 @@ +import { AwsHandlerExecutionContext } from "@aws-sdk/types"; +import { + AbsoluteLocation, + BuildHandler, + BuildHandlerOptions, + HandlerExecutionContext, + MetadataBearer, + Pluggable, +} from "@smithy/types"; +import { UserAgentResolvedConfig } from "./configurations"; +export declare const userAgentMiddleware: ( + options: UserAgentResolvedConfig +) => <Output extends MetadataBearer>( + next: BuildHandler<any, Output>, + context: HandlerExecutionContext | AwsHandlerExecutionContext +) => BuildHandler<any, Output>; +export declare const getUserAgentMiddlewareOptions: BuildHandlerOptions & + AbsoluteLocation; +export declare const getUserAgentPlugin: ( + config: UserAgentResolvedConfig +) => Pluggable<any, any>; diff --git a/node_modules/@aws-sdk/middleware-user-agent/dist-types/user-agent-middleware.d.ts b/node_modules/@aws-sdk/middleware-user-agent/dist-types/user-agent-middleware.d.ts new file mode 100644 index 00000000..d36dee55 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/dist-types/user-agent-middleware.d.ts @@ -0,0 +1,18 @@ +import type { AwsHandlerExecutionContext } from "@aws-sdk/types"; +import { AbsoluteLocation, BuildHandler, BuildHandlerOptions, HandlerExecutionContext, MetadataBearer, Pluggable } from "@smithy/types"; +import { UserAgentResolvedConfig } from "./configurations"; +/** + * Build user agent header sections from: + * 1. runtime-specific default user agent provider; + * 2. custom user agent from `customUserAgent` client config; + * 3. handler execution context set by internal SDK components; + * The built user agent will be set to `x-amz-user-agent` header for ALL the + * runtimes. + * Please note that any override to the `user-agent` or `x-amz-user-agent` header + * in the HTTP request is discouraged. Please use `customUserAgent` client + * config or middleware setting the `userAgent` context to generate desired user + * agent.
+ */ +export declare const userAgentMiddleware: (options: UserAgentResolvedConfig) => <Output extends MetadataBearer>(next: BuildHandler<any, Output>, context: HandlerExecutionContext | AwsHandlerExecutionContext) => BuildHandler<any, Output>; +export declare const getUserAgentMiddlewareOptions: BuildHandlerOptions & AbsoluteLocation; +export declare const getUserAgentPlugin: (config: UserAgentResolvedConfig) => Pluggable<any, any>; diff --git a/node_modules/@aws-sdk/middleware-user-agent/package.json b/node_modules/@aws-sdk/middleware-user-agent/package.json new file mode 100644 index 00000000..34cff383 --- /dev/null +++ b/node_modules/@aws-sdk/middleware-user-agent/package.json @@ -0,0 +1,61 @@ +{ + "name": "@aws-sdk/middleware-user-agent", + "version": "3.799.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-user-agent", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "extract:docs": "api-extractor run --local", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@smithy/core": "^3.3.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-user-agent", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-user-agent" + } +} diff --git a/node_modules/@aws-sdk/nested-clients/README.md b/node_modules/@aws-sdk/nested-clients/README.md new file mode 100644 index 00000000..1182bbdc --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/README.md @@ -0,0 +1,13 @@ +# @aws-sdk/nested-clients + +## Description + +This is an internal package. Do not install this as a direct dependency. + +This package contains separate internal implementations of the STS and SSO-OIDC AWS SDK clients +to be used by the AWS SDK credential providers to break a cyclic dependency. + +### Bundlers + +This package may be marked as external if you do not use STS nor SSO-OIDC +in your credential resolution process.
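To make the bundler note in that README concrete, here is a minimal sketch of marking the package as external, assuming esbuild as the bundler; the entry point and output path are hypothetical and not taken from this diff:

```js
// build.mjs — hypothetical build script; esbuild is an assumption, and any
// bundler with an externals mechanism can express the same thing. If
// credential resolution never goes through STS or SSO-OIDC, the internal
// @aws-sdk/nested-clients package can be left out of the bundle.
import { build } from "esbuild";

await build({
  entryPoints: ["src/index.ts"], // hypothetical entry point
  bundle: true,
  platform: "node",
  outfile: "dist/index.js", // hypothetical output path
  // Do not inline the nested STS/SSO-OIDC clients into the bundle.
  external: ["@aws-sdk/nested-clients"],
});
```

Externalizing keeps the import as a plain runtime require rather than inlining it, so the package must still be resolvable at runtime if any code path actually reaches it.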
diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/index.js b/node_modules/@aws-sdk/nested-clients/dist-cjs/index.js new file mode 100644 index 00000000..c8ad2e54 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/auth/httpAuthSchemeProvider.js b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/auth/httpAuthSchemeProvider.js new file mode 100644 index 00000000..7a9f28a4 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/auth/httpAuthSchemeProvider.js @@ -0,0 +1,56 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveHttpAuthSchemeConfig = exports.defaultSSOOIDCHttpAuthSchemeProvider = exports.defaultSSOOIDCHttpAuthSchemeParametersProvider = void 0; +const core_1 = require("@aws-sdk/core"); +const util_middleware_1 = require("@smithy/util-middleware"); +const defaultSSOOIDCHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: (0, util_middleware_1.getSmithyContext)(context).operation, + region: (await (0, util_middleware_1.normalizeProvider)(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +exports.defaultSSOOIDCHttpAuthSchemeParametersProvider = defaultSSOOIDCHttpAuthSchemeParametersProvider; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "sso-oauth", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +const defaultSSOOIDCHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "CreateToken": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +exports.defaultSSOOIDCHttpAuthSchemeProvider = defaultSSOOIDCHttpAuthSchemeProvider; +const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = (0, core_1.resolveAwsSdkSigV4Config)(config); + return Object.assign(config_0, { + authSchemePreference: (0, util_middleware_1.normalizeProvider)(config.authSchemePreference ?? 
[]), + }); +}; +exports.resolveHttpAuthSchemeConfig = resolveHttpAuthSchemeConfig; diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/endpointResolver.js b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/endpointResolver.js new file mode 100644 index 00000000..7258a356 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/endpointResolver.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = require("@aws-sdk/util-endpoints"); +const util_endpoints_2 = require("@smithy/util-endpoints"); +const ruleset_1 = require("./ruleset"); +const cache = new util_endpoints_2.EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS"], +}); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => (0, util_endpoints_2.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; +util_endpoints_2.customEndpointFunctions.aws = util_endpoints_1.awsEndpointFunctions; diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/ruleset.js b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/ruleset.js new file mode 100644 index 00000000..72e0adce --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/ruleset.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ruleSet = void 0; +const u = "required", v = "fn", w = "argv", x = "ref"; +const a = true, b = "isSet", c = "booleanEquals", d = "error", e = "endpoint", f = "tree", g = "PartitionResult", h = "getAttr", i = { [u]: false, "type": "String" }, j = { [u]: true, "default": false, "type": "Boolean" }, k = { [x]: "Endpoint" }, l = { [v]: c, [w]: [{ [x]: "UseFIPS" }, true] }, m = { [v]: c, [w]: [{ [x]: "UseDualStack" }, true] }, n = {}, o = { [v]: h, [w]: [{ [x]: g }, "supportsFIPS"] }, p = { [x]: g }, q = { [v]: c, [w]: [true, { [v]: h, [w]: [p, "supportsDualStack"] }] }, r = [l], s = [m], t = [{ [x]: "Region" }]; +const _data = { version: "1.0", parameters: { Region: i, UseDualStack: j, UseFIPS: j, Endpoint: i }, rules: [{ conditions: [{ [v]: b, [w]: [k] }], rules: [{ conditions: r, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: d }, { conditions: s, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: d }, { endpoint: { url: k, properties: n, headers: n }, type: e }], type: f }, { conditions: [{ [v]: b, [w]: t }], rules: [{ conditions: [{ [v]: "aws.partition", [w]: t, assign: g }], rules: [{ conditions: [l, m], rules: [{ conditions: [{ [v]: c, [w]: [a, o] }, q], rules: [{ endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: d }], type: f }, { conditions: r, rules: [{ conditions: [{ [v]: c, [w]: [o, a] }], rules: [{ conditions: [{ [v]: "stringEquals", [w]: [{ [v]: h, [w]: [p, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://oidc.{Region}.amazonaws.com", properties: n, headers: n }, type: e }, { endpoint: { url: 
"https://oidc-fips.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS is enabled but this partition does not support FIPS", type: d }], type: f }, { conditions: s, rules: [{ conditions: [q], rules: [{ endpoint: { url: "https://oidc.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "DualStack is enabled but this partition does not support DualStack", type: d }], type: f }, { endpoint: { url: "https://oidc.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }], type: f }, { error: "Invalid Configuration: Missing Region", type: d }] }; +exports.ruleSet = _data; diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/index.js b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/index.js new file mode 100644 index 00000000..55f595a3 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/index.js @@ -0,0 +1,872 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/sso-oidc/index.ts +var index_exports = {}; +__export(index_exports, { + $Command: () => import_smithy_client6.Command, + AccessDeniedException: () => AccessDeniedException, + AuthorizationPendingException: () => AuthorizationPendingException, + CreateTokenCommand: () => CreateTokenCommand, + CreateTokenRequestFilterSensitiveLog: () => CreateTokenRequestFilterSensitiveLog, + CreateTokenResponseFilterSensitiveLog: () => CreateTokenResponseFilterSensitiveLog, + ExpiredTokenException: () => ExpiredTokenException, + InternalServerException: () => InternalServerException, + InvalidClientException: () => InvalidClientException, + InvalidGrantException: () => InvalidGrantException, + InvalidRequestException: () => InvalidRequestException, + InvalidScopeException: () => InvalidScopeException, + SSOOIDC: () => SSOOIDC, + SSOOIDCClient: () => SSOOIDCClient, + SSOOIDCServiceException: () => SSOOIDCServiceException, + SlowDownException: () => SlowDownException, + UnauthorizedClientException: () => UnauthorizedClientException, + UnsupportedGrantTypeException: () => UnsupportedGrantTypeException, + __Client: () => import_smithy_client2.Client +}); +module.exports = __toCommonJS(index_exports); + +// src/submodules/sso-oidc/SSOOIDCClient.ts +var import_middleware_host_header = require("@aws-sdk/middleware-host-header"); +var import_middleware_logger = require("@aws-sdk/middleware-logger"); +var import_middleware_recursion_detection = require("@aws-sdk/middleware-recursion-detection"); +var import_middleware_user_agent = require("@aws-sdk/middleware-user-agent"); +var import_config_resolver = 
require("@smithy/config-resolver"); +var import_core = require("@smithy/core"); +var import_middleware_content_length = require("@smithy/middleware-content-length"); +var import_middleware_endpoint = require("@smithy/middleware-endpoint"); +var import_middleware_retry = require("@smithy/middleware-retry"); +var import_smithy_client2 = require("@smithy/smithy-client"); +var import_httpAuthSchemeProvider = require("./auth/httpAuthSchemeProvider"); + +// src/submodules/sso-oidc/endpoint/EndpointParameters.ts +var resolveClientEndpointParameters = /* @__PURE__ */ __name((options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + defaultSigningName: "sso-oauth" + }); +}, "resolveClientEndpointParameters"); +var commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" } +}; + +// src/submodules/sso-oidc/SSOOIDCClient.ts +var import_runtimeConfig = require("./runtimeConfig"); + +// src/submodules/sso-oidc/runtimeExtensions.ts +var import_region_config_resolver = require("@aws-sdk/region-config-resolver"); +var import_protocol_http = require("@smithy/protocol-http"); +var import_smithy_client = require("@smithy/smithy-client"); + +// src/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.ts +var getHttpAuthExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + } + }; +}, "getHttpAuthExtensionConfiguration"); +var resolveHttpAuthRuntimeConfig = /* @__PURE__ */ __name((config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials() + }; +}, "resolveHttpAuthRuntimeConfig"); + +// src/submodules/sso-oidc/runtimeExtensions.ts +var resolveRuntimeExtensions = /* @__PURE__ */ __name((runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign( + (0, import_region_config_resolver.getAwsRegionExtensionConfiguration)(runtimeConfig), + (0, import_smithy_client.getDefaultExtensionConfiguration)(runtimeConfig), + (0, import_protocol_http.getHttpHandlerExtensionConfiguration)(runtimeConfig), + getHttpAuthExtensionConfiguration(runtimeConfig) + ); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign( + runtimeConfig, + (0, import_region_config_resolver.resolveAwsRegionExtensionConfiguration)(extensionConfiguration), + (0, import_smithy_client.resolveDefaultRuntimeConfig)(extensionConfiguration), + (0, 
import_protocol_http.resolveHttpHandlerRuntimeConfig)(extensionConfiguration), + resolveHttpAuthRuntimeConfig(extensionConfiguration) + ); +}, "resolveRuntimeExtensions"); + +// src/submodules/sso-oidc/SSOOIDCClient.ts +var SSOOIDCClient = class extends import_smithy_client2.Client { + static { + __name(this, "SSOOIDCClient"); + } + /** + * The resolved configuration of SSOOIDCClient class. This is resolved and normalized from the {@link SSOOIDCClientConfig | constructor configuration interface}. + */ + config; + constructor(...[configuration]) { + const _config_0 = (0, import_runtimeConfig.getRuntimeConfig)(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = (0, import_middleware_user_agent.resolveUserAgentConfig)(_config_1); + const _config_3 = (0, import_middleware_retry.resolveRetryConfig)(_config_2); + const _config_4 = (0, import_config_resolver.resolveRegionConfig)(_config_3); + const _config_5 = (0, import_middleware_host_header.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, import_middleware_endpoint.resolveEndpointConfig)(_config_5); + const _config_7 = (0, import_httpAuthSchemeProvider.resolveHttpAuthSchemeConfig)(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use((0, import_middleware_user_agent.getUserAgentPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_retry.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_content_length.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_host_header.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_logger.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_recursion_detection.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use( + (0, import_core.getHttpAuthSchemeEndpointRuleSetPlugin)(this.config, { + httpAuthSchemeParametersProvider: import_httpAuthSchemeProvider.defaultSSOOIDCHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: /* @__PURE__ */ __name(async (config) => new import_core.DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials + }), "identityProviderConfigProvider") + }) + ); + this.middlewareStack.use((0, import_core.getHttpSigningPlugin)(this.config)); + } + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. 
+ */ + destroy() { + super.destroy(); + } +}; + +// src/submodules/sso-oidc/SSOOIDC.ts +var import_smithy_client7 = require("@smithy/smithy-client"); + +// src/submodules/sso-oidc/commands/CreateTokenCommand.ts +var import_middleware_endpoint2 = require("@smithy/middleware-endpoint"); +var import_middleware_serde = require("@smithy/middleware-serde"); +var import_smithy_client6 = require("@smithy/smithy-client"); + +// src/submodules/sso-oidc/models/models_0.ts +var import_smithy_client4 = require("@smithy/smithy-client"); + +// src/submodules/sso-oidc/models/SSOOIDCServiceException.ts +var import_smithy_client3 = require("@smithy/smithy-client"); +var SSOOIDCServiceException = class _SSOOIDCServiceException extends import_smithy_client3.ServiceException { + static { + __name(this, "SSOOIDCServiceException"); + } + /** + * @internal + */ + constructor(options) { + super(options); + Object.setPrototypeOf(this, _SSOOIDCServiceException.prototype); + } +}; + +// src/submodules/sso-oidc/models/models_0.ts +var AccessDeniedException = class _AccessDeniedException extends SSOOIDCServiceException { + static { + __name(this, "AccessDeniedException"); + } + name = "AccessDeniedException"; + $fault = "client"; + /** + *
Single error code. For this exception the value will be access_denied.
+ * @public + */ + error; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "AccessDeniedException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _AccessDeniedException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var AuthorizationPendingException = class _AuthorizationPendingException extends SSOOIDCServiceException { + static { + __name(this, "AuthorizationPendingException"); + } + name = "AuthorizationPendingException"; + $fault = "client"; + /** + *
Single error code. For this exception the value will be + * authorization_pending.
+ * @public + */ + error; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "AuthorizationPendingException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _AuthorizationPendingException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var CreateTokenRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.clientSecret && { clientSecret: import_smithy_client4.SENSITIVE_STRING }, + ...obj.refreshToken && { refreshToken: import_smithy_client4.SENSITIVE_STRING }, + ...obj.codeVerifier && { codeVerifier: import_smithy_client4.SENSITIVE_STRING } +}), "CreateTokenRequestFilterSensitiveLog"); +var CreateTokenResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client4.SENSITIVE_STRING }, + ...obj.refreshToken && { refreshToken: import_smithy_client4.SENSITIVE_STRING }, + ...obj.idToken && { idToken: import_smithy_client4.SENSITIVE_STRING } +}), "CreateTokenResponseFilterSensitiveLog"); +var ExpiredTokenException = class _ExpiredTokenException extends SSOOIDCServiceException { + static { + __name(this, "ExpiredTokenException"); + } + name = "ExpiredTokenException"; + $fault = "client"; + /** + *
Single error code. For this exception the value will be expired_token.
+ * @public + */ + error; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ExpiredTokenException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InternalServerException = class _InternalServerException extends SSOOIDCServiceException { + static { + __name(this, "InternalServerException"); + } + name = "InternalServerException"; + $fault = "server"; + /** + *
Single error code. For this exception the value will be server_error.
+ * @public + */ + error; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InternalServerException", + $fault: "server", + ...opts + }); + Object.setPrototypeOf(this, _InternalServerException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InvalidClientException = class _InvalidClientException extends SSOOIDCServiceException { + static { + __name(this, "InvalidClientException"); + } + name = "InvalidClientException"; + $fault = "client"; + /** + *
Single error code. For this exception the value will be + * invalid_client.
+ * @public + */ + error; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidClientException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InvalidGrantException = class _InvalidGrantException extends SSOOIDCServiceException { + static { + __name(this, "InvalidGrantException"); + } + name = "InvalidGrantException"; + $fault = "client"; + /** + *
Single error code. For this exception the value will be invalid_grant.
+ * @public + */ + error; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidGrantException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidGrantException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InvalidRequestException = class _InvalidRequestException extends SSOOIDCServiceException { + static { + __name(this, "InvalidRequestException"); + } + name = "InvalidRequestException"; + $fault = "client"; + /** + *
Single error code. For this exception the value will be + * invalid_request.
+ * @public + */ + error; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidRequestException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InvalidScopeException = class _InvalidScopeException extends SSOOIDCServiceException { + static { + __name(this, "InvalidScopeException"); + } + name = "InvalidScopeException"; + $fault = "client"; + /** + *
Single error code. For this exception the value will be invalid_scope.
+ * @public + */ + error; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidScopeException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidScopeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var SlowDownException = class _SlowDownException extends SSOOIDCServiceException { + static { + __name(this, "SlowDownException"); + } + name = "SlowDownException"; + $fault = "client"; + /** + *
Single error code. For this exception the value will be slow_down.
+ * @public + */ + error; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "SlowDownException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _SlowDownException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var UnauthorizedClientException = class _UnauthorizedClientException extends SSOOIDCServiceException { + static { + __name(this, "UnauthorizedClientException"); + } + name = "UnauthorizedClientException"; + $fault = "client"; + /** + *
Single error code. For this exception the value will be + * unauthorized_client.
+ * @public + */ + error; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "UnauthorizedClientException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _UnauthorizedClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var UnsupportedGrantTypeException = class _UnsupportedGrantTypeException extends SSOOIDCServiceException { + static { + __name(this, "UnsupportedGrantTypeException"); + } + name = "UnsupportedGrantTypeException"; + $fault = "client"; + /** + *
Single error code. For this exception the value will be + * unsupported_grant_type.
+ * @public + */ + error; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "UnsupportedGrantTypeException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _UnsupportedGrantTypeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; + +// src/submodules/sso-oidc/protocols/Aws_restJson1.ts +var import_core2 = require("@aws-sdk/core"); +var import_core3 = require("@smithy/core"); +var import_smithy_client5 = require("@smithy/smithy-client"); +var se_CreateTokenCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core3.requestBuilder)(input, context); + const headers = { + "content-type": "application/json" + }; + b.bp("/token"); + let body; + body = JSON.stringify( + (0, import_smithy_client5.take)(input, { + clientId: [], + clientSecret: [], + code: [], + codeVerifier: [], + deviceCode: [], + grantType: [], + redirectUri: [], + refreshToken: [], + scope: /* @__PURE__ */ __name((_) => (0, import_smithy_client5._json)(_), "scope") + }) + ); + b.m("POST").h(headers).b(body); + return b.build(); +}, "se_CreateTokenCommand"); +var de_CreateTokenCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client5.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client5.expectNonNull)((0, import_smithy_client5.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client5.take)(data, { + accessToken: import_smithy_client5.expectString, + expiresIn: import_smithy_client5.expectInt32, + idToken: import_smithy_client5.expectString, + refreshToken: import_smithy_client5.expectString, + tokenType: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + return contents; +}, "de_CreateTokenCommand"); +var de_CommandError = /* @__PURE__ */ __name(async (output, context) => { + const parsedOutput = { + ...output, + body: await (0, import_core2.parseJsonErrorBody)(output.body, context) + }; + const errorCode = (0, import_core2.loadRestJsonErrorCode)(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.ssooidc#AccessDeniedException": + throw await de_AccessDeniedExceptionRes(parsedOutput, context); + case "AuthorizationPendingException": + case "com.amazonaws.ssooidc#AuthorizationPendingException": + throw await de_AuthorizationPendingExceptionRes(parsedOutput, context); + case "ExpiredTokenException": + case "com.amazonaws.ssooidc#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "InternalServerException": + case "com.amazonaws.ssooidc#InternalServerException": + throw await de_InternalServerExceptionRes(parsedOutput, context); + case "InvalidClientException": + case "com.amazonaws.ssooidc#InvalidClientException": + throw await de_InvalidClientExceptionRes(parsedOutput, context); + case "InvalidGrantException": + case "com.amazonaws.ssooidc#InvalidGrantException": + throw await de_InvalidGrantExceptionRes(parsedOutput, context); + case "InvalidRequestException": + case "com.amazonaws.ssooidc#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "InvalidScopeException": + case "com.amazonaws.ssooidc#InvalidScopeException": + throw await 
de_InvalidScopeExceptionRes(parsedOutput, context); + case "SlowDownException": + case "com.amazonaws.ssooidc#SlowDownException": + throw await de_SlowDownExceptionRes(parsedOutput, context); + case "UnauthorizedClientException": + case "com.amazonaws.ssooidc#UnauthorizedClientException": + throw await de_UnauthorizedClientExceptionRes(parsedOutput, context); + case "UnsupportedGrantTypeException": + case "com.amazonaws.ssooidc#UnsupportedGrantTypeException": + throw await de_UnsupportedGrantTypeExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode + }); + } +}, "de_CommandError"); +var throwDefaultError = (0, import_smithy_client5.withBaseException)(SSOOIDCServiceException); +var de_AccessDeniedExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new AccessDeniedException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_AccessDeniedExceptionRes"); +var de_AuthorizationPendingExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new AuthorizationPendingException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_AuthorizationPendingExceptionRes"); +var de_ExpiredTokenExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_ExpiredTokenExceptionRes"); +var de_InternalServerExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new InternalServerException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_InternalServerExceptionRes"); +var de_InvalidClientExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: 
import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidClientExceptionRes"); +var de_InvalidGrantExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidGrantException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidGrantExceptionRes"); +var de_InvalidRequestExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidRequestExceptionRes"); +var de_InvalidScopeExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidScopeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidScopeExceptionRes"); +var de_SlowDownExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new SlowDownException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_SlowDownExceptionRes"); +var de_UnauthorizedClientExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new UnauthorizedClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_UnauthorizedClientExceptionRes"); +var de_UnsupportedGrantTypeExceptionRes = /* @__PURE__ */ __name(async 
(parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new UnsupportedGrantTypeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_UnsupportedGrantTypeExceptionRes"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); + +// src/submodules/sso-oidc/commands/CreateTokenCommand.ts +var CreateTokenCommand = class extends import_smithy_client6.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint2.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSSOOIDCService", "CreateToken", {}).n("SSOOIDCClient", "CreateTokenCommand").f(CreateTokenRequestFilterSensitiveLog, CreateTokenResponseFilterSensitiveLog).ser(se_CreateTokenCommand).de(de_CreateTokenCommand).build() { + static { + __name(this, "CreateTokenCommand"); + } +}; + +// src/submodules/sso-oidc/SSOOIDC.ts +var commands = { + CreateTokenCommand +}; +var SSOOIDC = class extends SSOOIDCClient { + static { + __name(this, "SSOOIDC"); + } +}; +(0, import_smithy_client7.createAggregatedClient)(commands, SSOOIDC); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + $Command, + AccessDeniedException, + AuthorizationPendingException, + CreateTokenCommand, + CreateTokenRequestFilterSensitiveLog, + CreateTokenResponseFilterSensitiveLog, + ExpiredTokenException, + InternalServerException, + InvalidClientException, + InvalidGrantException, + InvalidRequestException, + InvalidScopeException, + SSOOIDC, + SSOOIDCClient, + SSOOIDCServiceException, + SlowDownException, + UnauthorizedClientException, + UnsupportedGrantTypeException, + __Client +}); diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.browser.js b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.browser.js new file mode 100644 index 00000000..6654024a --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.browser.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../../../package.json")); +const sha256_browser_1 = require("@aws-crypto/sha256-browser"); +const util_user_agent_browser_1 = require("@aws-sdk/util-user-agent-browser"); +const config_resolver_1 = require("@smithy/config-resolver"); +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const invalid_dependency_1 = require("@smithy/invalid-dependency"); +const util_body_length_browser_1 = require("@smithy/util-body-length-browser"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = 
require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_browser_1 = require("@smithy/util-defaults-mode-browser"); +const getRuntimeConfig = (config) => { + const defaultsMode = (0, util_defaults_mode_browser_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_browser_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_browser_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? util_retry_1.DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? (0, invalid_dependency_1.invalidProvider)("Region is missing"), + requestHandler: fetch_http_handler_1.FetchHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? sha256_browser_1.Sha256, + streamCollector: config?.streamCollector ?? fetch_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.js b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.js new file mode 100644 index 00000000..9cc237f5 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.js @@ -0,0 +1,51 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../../../package.json")); +const core_1 = require("@aws-sdk/core"); +const util_user_agent_node_1 = require("@aws-sdk/util-user-agent-node"); +const config_resolver_1 = require("@smithy/config-resolver"); +const hash_node_1 = require("@smithy/hash-node"); +const middleware_retry_1 = require("@smithy/middleware-retry"); +const node_config_provider_1 = require("@smithy/node-config-provider"); +const node_http_handler_1 = require("@smithy/node-http-handler"); +const util_body_length_node_1 = require("@smithy/util-body-length-node"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_node_1 = require("@smithy/util-defaults-mode-node"); +const smithy_client_2 = require("@smithy/smithy-client"); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + (0, 
core_1.emitWarningIfUnsupportedVersion)(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? (0, node_config_provider_1.loadConfig)(core_1.NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, { ...config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: node_http_handler_1.NodeHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? (0, node_config_provider_1.loadConfig)(util_user_agent_node_1.NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.native.js b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.native.js new file mode 100644 index 00000000..34c5f8ec --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.native.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const sha256_js_1 = require("@aws-crypto/sha256-js"); +const runtimeConfig_browser_1 = require("./runtimeConfig.browser"); +const getRuntimeConfig = (config) => { + const browserDefaults = (0, runtimeConfig_browser_1.getRuntimeConfig)(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
sha256_js_1.Sha256, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.shared.js b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.shared.js new file mode 100644 index 00000000..a305a1b9 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.shared.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const core_1 = require("@aws-sdk/core"); +const core_2 = require("@smithy/core"); +const smithy_client_1 = require("@smithy/smithy-client"); +const url_parser_1 = require("@smithy/url-parser"); +const util_base64_1 = require("@smithy/util-base64"); +const util_utf8_1 = require("@smithy/util-utf8"); +const httpAuthSchemeProvider_1 = require("./auth/httpAuthSchemeProvider"); +const endpointResolver_1 = require("./endpoint/endpointResolver"); +const getRuntimeConfig = (config) => { + return { + apiVersion: "2019-06-10", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? httpAuthSchemeProvider_1.defaultSSOOIDCHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new core_2.NoAuthSigner(), + }, + ], + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "SSO OIDC", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
util_utf8_1.toUtf8, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/STSClient.js b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/STSClient.js new file mode 100644 index 00000000..13c3c743 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/STSClient.js @@ -0,0 +1,52 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.STSClient = exports.__Client = void 0; +const middleware_host_header_1 = require("@aws-sdk/middleware-host-header"); +const middleware_logger_1 = require("@aws-sdk/middleware-logger"); +const middleware_recursion_detection_1 = require("@aws-sdk/middleware-recursion-detection"); +const middleware_user_agent_1 = require("@aws-sdk/middleware-user-agent"); +const config_resolver_1 = require("@smithy/config-resolver"); +const core_1 = require("@smithy/core"); +const middleware_content_length_1 = require("@smithy/middleware-content-length"); +const middleware_endpoint_1 = require("@smithy/middleware-endpoint"); +const middleware_retry_1 = require("@smithy/middleware-retry"); +const smithy_client_1 = require("@smithy/smithy-client"); +Object.defineProperty(exports, "__Client", { enumerable: true, get: function () { return smithy_client_1.Client; } }); +const httpAuthSchemeProvider_1 = require("./auth/httpAuthSchemeProvider"); +const EndpointParameters_1 = require("./endpoint/EndpointParameters"); +const runtimeConfig_1 = require("./runtimeConfig"); +const runtimeExtensions_1 = require("./runtimeExtensions"); +class STSClient extends smithy_client_1.Client { + config; + constructor(...[configuration]) { + const _config_0 = (0, runtimeConfig_1.getRuntimeConfig)(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = (0, EndpointParameters_1.resolveClientEndpointParameters)(_config_0); + const _config_2 = (0, middleware_user_agent_1.resolveUserAgentConfig)(_config_1); + const _config_3 = (0, middleware_retry_1.resolveRetryConfig)(_config_2); + const _config_4 = (0, config_resolver_1.resolveRegionConfig)(_config_3); + const _config_5 = (0, middleware_host_header_1.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, middleware_endpoint_1.resolveEndpointConfig)(_config_5); + const _config_7 = (0, httpAuthSchemeProvider_1.resolveHttpAuthSchemeConfig)(_config_6); + const _config_8 = (0, runtimeExtensions_1.resolveRuntimeExtensions)(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use((0, middleware_user_agent_1.getUserAgentPlugin)(this.config)); + this.middlewareStack.use((0, middleware_retry_1.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, middleware_content_length_1.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, middleware_host_header_1.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, middleware_logger_1.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, middleware_recursion_detection_1.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use((0, core_1.getHttpAuthSchemeEndpointRuleSetPlugin)(this.config, { + httpAuthSchemeParametersProvider: httpAuthSchemeProvider_1.defaultSTSHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new core_1.DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + }), + })); + this.middlewareStack.use((0, core_1.getHttpSigningPlugin)(this.config)); + } + destroy() { + 
super.destroy(); + } +} +exports.STSClient = STSClient; diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthExtensionConfiguration.js b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthExtensionConfiguration.js new file mode 100644 index 00000000..239095e0 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthExtensionConfiguration.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveHttpAuthRuntimeConfig = exports.getHttpAuthExtensionConfiguration = void 0; +const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + }, + }; +}; +exports.getHttpAuthExtensionConfiguration = getHttpAuthExtensionConfiguration; +const resolveHttpAuthRuntimeConfig = (config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; +exports.resolveHttpAuthRuntimeConfig = resolveHttpAuthRuntimeConfig; diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthSchemeProvider.js b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthSchemeProvider.js new file mode 100644 index 00000000..842241a7 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthSchemeProvider.js @@ -0,0 +1,62 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveHttpAuthSchemeConfig = exports.resolveStsAuthConfig = exports.defaultSTSHttpAuthSchemeProvider = exports.defaultSTSHttpAuthSchemeParametersProvider = void 0; +const core_1 = require("@aws-sdk/core"); +const util_middleware_1 = require("@smithy/util-middleware"); +const STSClient_1 = require("../STSClient"); +const defaultSTSHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: (0, util_middleware_1.getSmithyContext)(context).operation, + region: (await (0, util_middleware_1.normalizeProvider)(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +exports.defaultSTSHttpAuthSchemeParametersProvider = defaultSTSHttpAuthSchemeParametersProvider; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "sts", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +const defaultSTSHttpAuthSchemeProvider = 
(authParameters) => { + const options = []; + switch (authParameters.operation) { + case "AssumeRoleWithWebIdentity": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +exports.defaultSTSHttpAuthSchemeProvider = defaultSTSHttpAuthSchemeProvider; +const resolveStsAuthConfig = (input) => Object.assign(input, { + stsClientCtor: STSClient_1.STSClient, +}); +exports.resolveStsAuthConfig = resolveStsAuthConfig; +const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = (0, exports.resolveStsAuthConfig)(config); + const config_1 = (0, core_1.resolveAwsSdkSigV4Config)(config_0); + return Object.assign(config_1, { + authSchemePreference: (0, util_middleware_1.normalizeProvider)(config.authSchemePreference ?? []), + }); +}; +exports.resolveHttpAuthSchemeConfig = resolveHttpAuthSchemeConfig; diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/EndpointParameters.js b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/EndpointParameters.js new file mode 100644 index 00000000..3aec6a5e --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/EndpointParameters.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.commonParams = exports.resolveClientEndpointParameters = void 0; +const resolveClientEndpointParameters = (options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + useGlobalEndpoint: options.useGlobalEndpoint ?? false, + defaultSigningName: "sts", + }); +}; +exports.resolveClientEndpointParameters = resolveClientEndpointParameters; +exports.commonParams = { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/endpointResolver.js b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/endpointResolver.js new file mode 100644 index 00000000..6bfb6e90 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/endpointResolver.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = require("@aws-sdk/util-endpoints"); +const util_endpoints_2 = require("@smithy/util-endpoints"); +const ruleset_1 = require("./ruleset"); +const cache = new util_endpoints_2.EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS", "UseGlobalEndpoint"], +}); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => (0, util_endpoints_2.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; +util_endpoints_2.customEndpointFunctions.aws = util_endpoints_1.awsEndpointFunctions; diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/ruleset.js 
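The defaultSTSHttpAuthSchemeProvider completed above is the entire auth-selection policy for STS: AssumeRoleWithWebIdentity is offered unsigned (smithy.api#noAuth), since callers performing web-identity federation do not yet hold AWS credentials, and every other operation gets SigV4. The endpointResolver.js hunk in the same stretch memoizes resolved endpoints in an EndpointCache of size 50 keyed on the five endpoint parameters. A self-contained TypeScript sketch of the auth dispatch, with the option shape simplified (the real entries also carry signing properties and a properties extractor):

// Simplified sketch of the STS auth-scheme dispatch shown above.
type AuthOption = { schemeId: "aws.auth#sigv4" | "smithy.api#noAuth" };

const selectAuthSchemes = (operation: string): AuthOption[] => {
  switch (operation) {
    case "AssumeRoleWithWebIdentity":
      // Unsigned: the web-identity token in the request body is the
      // only credential the caller has at this point.
      return [{ schemeId: "smithy.api#noAuth" }];
    default:
      return [{ schemeId: "aws.auth#sigv4" }];
  }
};

console.log(selectAuthSchemes("AssumeRoleWithWebIdentity")); // noAuth
console.log(selectAuthSchemes("AssumeRole"));                // sigv4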
b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/ruleset.js new file mode 100644 index 00000000..74282593 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/ruleset.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ruleSet = void 0; +const F = "required", G = "type", H = "fn", I = "argv", J = "ref"; +const a = false, b = true, c = "booleanEquals", d = "stringEquals", e = "sigv4", f = "sts", g = "us-east-1", h = "endpoint", i = "https://sts.{Region}.{PartitionResult#dnsSuffix}", j = "tree", k = "error", l = "getAttr", m = { [F]: false, [G]: "String" }, n = { [F]: true, "default": false, [G]: "Boolean" }, o = { [J]: "Endpoint" }, p = { [H]: "isSet", [I]: [{ [J]: "Region" }] }, q = { [J]: "Region" }, r = { [H]: "aws.partition", [I]: [q], "assign": "PartitionResult" }, s = { [J]: "UseFIPS" }, t = { [J]: "UseDualStack" }, u = { "url": "https://sts.amazonaws.com", "properties": { "authSchemes": [{ "name": e, "signingName": f, "signingRegion": g }] }, "headers": {} }, v = {}, w = { "conditions": [{ [H]: d, [I]: [q, "aws-global"] }], [h]: u, [G]: h }, x = { [H]: c, [I]: [s, true] }, y = { [H]: c, [I]: [t, true] }, z = { [H]: l, [I]: [{ [J]: "PartitionResult" }, "supportsFIPS"] }, A = { [J]: "PartitionResult" }, B = { [H]: c, [I]: [true, { [H]: l, [I]: [A, "supportsDualStack"] }] }, C = [{ [H]: "isSet", [I]: [o] }], D = [x], E = [y]; +const _data = { version: "1.0", parameters: { Region: m, UseDualStack: n, UseFIPS: n, Endpoint: m, UseGlobalEndpoint: n }, rules: [{ conditions: [{ [H]: c, [I]: [{ [J]: "UseGlobalEndpoint" }, b] }, { [H]: "not", [I]: C }, p, r, { [H]: c, [I]: [s, a] }, { [H]: c, [I]: [t, a] }], rules: [{ conditions: [{ [H]: d, [I]: [q, "ap-northeast-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-south-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-southeast-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-southeast-2"] }], endpoint: u, [G]: h }, w, { conditions: [{ [H]: d, [I]: [q, "ca-central-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-central-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-north-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-2"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-3"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "sa-east-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, g] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-east-2"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-west-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-west-2"] }], endpoint: u, [G]: h }, { endpoint: { url: i, properties: { authSchemes: [{ name: e, signingName: f, signingRegion: "{Region}" }] }, headers: v }, [G]: h }], [G]: j }, { conditions: C, rules: [{ conditions: D, error: "Invalid Configuration: FIPS and custom endpoint are not supported", [G]: k }, { conditions: E, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", [G]: k }, { endpoint: { url: o, properties: v, headers: v }, [G]: h }], [G]: j }, { conditions: [p], rules: [{ conditions: [r], rules: [{ conditions: [x, y], rules: [{ conditions: [{ [H]: c, [I]: [b, z] }, B], rules: [{ endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", 
properties: v, headers: v }, [G]: h }], [G]: j }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", [G]: k }], [G]: j }, { conditions: D, rules: [{ conditions: [{ [H]: c, [I]: [z, b] }], rules: [{ conditions: [{ [H]: d, [I]: [{ [H]: l, [I]: [A, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://sts.{Region}.amazonaws.com", properties: v, headers: v }, [G]: h }, { endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "FIPS is enabled but this partition does not support FIPS", [G]: k }], [G]: j }, { conditions: E, rules: [{ conditions: [B], rules: [{ endpoint: { url: "https://sts.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "DualStack is enabled but this partition does not support DualStack", [G]: k }], [G]: j }, w, { endpoint: { url: i, properties: v, headers: v }, [G]: h }], [G]: j }], [G]: j }, { error: "Invalid Configuration: Missing Region", [G]: k }] }; +exports.ruleSet = _data; diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/index.js b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/index.js new file mode 100644 index 00000000..bb0c42a9 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/index.js @@ -0,0 +1,951 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/sts/index.ts +var index_exports = {}; +__export(index_exports, { + AssumeRoleCommand: () => AssumeRoleCommand, + AssumeRoleResponseFilterSensitiveLog: () => AssumeRoleResponseFilterSensitiveLog, + AssumeRoleWithWebIdentityCommand: () => AssumeRoleWithWebIdentityCommand, + AssumeRoleWithWebIdentityRequestFilterSensitiveLog: () => AssumeRoleWithWebIdentityRequestFilterSensitiveLog, + AssumeRoleWithWebIdentityResponseFilterSensitiveLog: () => AssumeRoleWithWebIdentityResponseFilterSensitiveLog, + ClientInputEndpointParameters: () => import_EndpointParameters3.ClientInputEndpointParameters, + CredentialsFilterSensitiveLog: () => CredentialsFilterSensitiveLog, + ExpiredTokenException: () => ExpiredTokenException, + IDPCommunicationErrorException: () => IDPCommunicationErrorException, + IDPRejectedClaimException: () => IDPRejectedClaimException, + InvalidIdentityTokenException: () => InvalidIdentityTokenException, + MalformedPolicyDocumentException: () => MalformedPolicyDocumentException, + PackedPolicyTooLargeException: () => PackedPolicyTooLargeException, + RegionDisabledException: () => 
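The ruleset.js hunk above ships the STS endpoint rules in a hand-compressed form: the capitals F, G, H, I, J alias the recurring keys "required", "type", "fn", "argv", and "ref", while the lowercase letters hold recurring values, which keeps the embedded ruleset small. Expanding one aliased rule makes the encoding readable; the rule bound to `w` routes the legacy "aws-global" pseudo-region to the us-east-1 global endpoint:

// The rule bound to `w` above, with the single-letter aliases expanded.
const globalRegionRule = {
  conditions: [{ fn: "stringEquals", argv: [{ ref: "Region" }, "aws-global"] }],
  endpoint: {
    url: "https://sts.amazonaws.com",
    properties: {
      authSchemes: [{ name: "sigv4", signingName: "sts", signingRegion: "us-east-1" }],
    },
    headers: {},
  },
  type: "endpoint",
};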
RegionDisabledException, + STS: () => STS, + STSServiceException: () => STSServiceException, + decorateDefaultCredentialProvider: () => decorateDefaultCredentialProvider, + getDefaultRoleAssumer: () => getDefaultRoleAssumer2, + getDefaultRoleAssumerWithWebIdentity: () => getDefaultRoleAssumerWithWebIdentity2 +}); +module.exports = __toCommonJS(index_exports); +__reExport(index_exports, require("./STSClient"), module.exports); + +// src/submodules/sts/STS.ts +var import_smithy_client6 = require("@smithy/smithy-client"); + +// src/submodules/sts/commands/AssumeRoleCommand.ts +var import_middleware_endpoint = require("@smithy/middleware-endpoint"); +var import_middleware_serde = require("@smithy/middleware-serde"); +var import_smithy_client4 = require("@smithy/smithy-client"); +var import_EndpointParameters = require("./endpoint/EndpointParameters"); + +// src/submodules/sts/models/models_0.ts +var import_smithy_client2 = require("@smithy/smithy-client"); + +// src/submodules/sts/models/STSServiceException.ts +var import_smithy_client = require("@smithy/smithy-client"); +var STSServiceException = class _STSServiceException extends import_smithy_client.ServiceException { + static { + __name(this, "STSServiceException"); + } + /** + * @internal + */ + constructor(options) { + super(options); + Object.setPrototypeOf(this, _STSServiceException.prototype); + } +}; + +// src/submodules/sts/models/models_0.ts +var CredentialsFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.SecretAccessKey && { SecretAccessKey: import_smithy_client2.SENSITIVE_STRING } +}), "CredentialsFilterSensitiveLog"); +var AssumeRoleResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) } +}), "AssumeRoleResponseFilterSensitiveLog"); +var ExpiredTokenException = class _ExpiredTokenException extends STSServiceException { + static { + __name(this, "ExpiredTokenException"); + } + name = "ExpiredTokenException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ExpiredTokenException.prototype); + } +}; +var MalformedPolicyDocumentException = class _MalformedPolicyDocumentException extends STSServiceException { + static { + __name(this, "MalformedPolicyDocumentException"); + } + name = "MalformedPolicyDocumentException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "MalformedPolicyDocumentException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _MalformedPolicyDocumentException.prototype); + } +}; +var PackedPolicyTooLargeException = class _PackedPolicyTooLargeException extends STSServiceException { + static { + __name(this, "PackedPolicyTooLargeException"); + } + name = "PackedPolicyTooLargeException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "PackedPolicyTooLargeException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _PackedPolicyTooLargeException.prototype); + } +}; +var RegionDisabledException = class _RegionDisabledException extends STSServiceException { + static { + __name(this, "RegionDisabledException"); + } + name = "RegionDisabledException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "RegionDisabledException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, 
_RegionDisabledException.prototype); + } +}; +var IDPRejectedClaimException = class _IDPRejectedClaimException extends STSServiceException { + static { + __name(this, "IDPRejectedClaimException"); + } + name = "IDPRejectedClaimException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "IDPRejectedClaimException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _IDPRejectedClaimException.prototype); + } +}; +var InvalidIdentityTokenException = class _InvalidIdentityTokenException extends STSServiceException { + static { + __name(this, "InvalidIdentityTokenException"); + } + name = "InvalidIdentityTokenException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidIdentityTokenException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidIdentityTokenException.prototype); + } +}; +var AssumeRoleWithWebIdentityRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.WebIdentityToken && { WebIdentityToken: import_smithy_client2.SENSITIVE_STRING } +}), "AssumeRoleWithWebIdentityRequestFilterSensitiveLog"); +var AssumeRoleWithWebIdentityResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) } +}), "AssumeRoleWithWebIdentityResponseFilterSensitiveLog"); +var IDPCommunicationErrorException = class _IDPCommunicationErrorException extends STSServiceException { + static { + __name(this, "IDPCommunicationErrorException"); + } + name = "IDPCommunicationErrorException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "IDPCommunicationErrorException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _IDPCommunicationErrorException.prototype); + } +}; + +// src/submodules/sts/protocols/Aws_query.ts +var import_core = require("@aws-sdk/core"); +var import_protocol_http = require("@smithy/protocol-http"); +var import_smithy_client3 = require("@smithy/smithy-client"); +var se_AssumeRoleCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleRequest(input, context), + [_A]: _AR, + [_V]: _ + }); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_AssumeRoleCommand"); +var se_AssumeRoleWithWebIdentityCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleWithWebIdentityRequest(input, context), + [_A]: _ARWWI, + [_V]: _ + }); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_AssumeRoleWithWebIdentityCommand"); +var de_AssumeRoleCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseXmlBody)(output.body, context); + let contents = {}; + contents = de_AssumeRoleResponse(data.AssumeRoleResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_AssumeRoleCommand"); +var de_AssumeRoleWithWebIdentityCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseXmlBody)(output.body, context); + let contents = {}; + contents = 
de_AssumeRoleWithWebIdentityResponse(data.AssumeRoleWithWebIdentityResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_AssumeRoleWithWebIdentityCommand"); +var de_CommandError = /* @__PURE__ */ __name(async (output, context) => { + const parsedOutput = { + ...output, + body: await (0, import_core.parseXmlErrorBody)(output.body, context) + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "ExpiredTokenException": + case "com.amazonaws.sts#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "MalformedPolicyDocument": + case "com.amazonaws.sts#MalformedPolicyDocumentException": + throw await de_MalformedPolicyDocumentExceptionRes(parsedOutput, context); + case "PackedPolicyTooLarge": + case "com.amazonaws.sts#PackedPolicyTooLargeException": + throw await de_PackedPolicyTooLargeExceptionRes(parsedOutput, context); + case "RegionDisabledException": + case "com.amazonaws.sts#RegionDisabledException": + throw await de_RegionDisabledExceptionRes(parsedOutput, context); + case "IDPCommunicationError": + case "com.amazonaws.sts#IDPCommunicationErrorException": + throw await de_IDPCommunicationErrorExceptionRes(parsedOutput, context); + case "IDPRejectedClaim": + case "com.amazonaws.sts#IDPRejectedClaimException": + throw await de_IDPRejectedClaimExceptionRes(parsedOutput, context); + case "InvalidIdentityToken": + case "com.amazonaws.sts#InvalidIdentityTokenException": + throw await de_InvalidIdentityTokenExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode + }); + } +}, "de_CommandError"); +var de_ExpiredTokenExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_ExpiredTokenException(body.Error, context); + const exception = new ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_ExpiredTokenExceptionRes"); +var de_IDPCommunicationErrorExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPCommunicationErrorException(body.Error, context); + const exception = new IDPCommunicationErrorException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_IDPCommunicationErrorExceptionRes"); +var de_IDPRejectedClaimExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPRejectedClaimException(body.Error, context); + const exception = new IDPRejectedClaimException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_IDPRejectedClaimExceptionRes"); +var de_InvalidIdentityTokenExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_InvalidIdentityTokenException(body.Error, context); + const exception = new InvalidIdentityTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, 
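de_CommandError above dispatches on the error code twice per exception: once for the short code the AWS Query protocol returns on the wire (for example "MalformedPolicyDocument") and once for the fully qualified Smithy shape ID ("com.amazonaws.sts#MalformedPolicyDocumentException"), with unrecognized codes falling through to a generic STSServiceException. A self-contained sketch of that dual-key dispatch, using stand-in error classes rather than the SDK's:

// Sketch of the dual-key error dispatch in de_CommandError above.
class ExpiredTokenError extends Error {}   // stand-ins for the SDK classes
class RegionDisabledError extends Error {}

const errorForCode = (code: string | undefined): Error => {
  switch (code) {
    case "ExpiredTokenException":
    case "com.amazonaws.sts#ExpiredTokenException":
      return new ExpiredTokenError();
    case "RegionDisabledException":
    case "com.amazonaws.sts#RegionDisabledException":
      return new RegionDisabledError();
    default:
      // Unknown codes fall through to a generic service exception.
      return new Error(`STSServiceException: ${code ?? "Unknown"}`);
  }
};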
body); +}, "de_InvalidIdentityTokenExceptionRes"); +var de_MalformedPolicyDocumentExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_MalformedPolicyDocumentException(body.Error, context); + const exception = new MalformedPolicyDocumentException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_MalformedPolicyDocumentExceptionRes"); +var de_PackedPolicyTooLargeExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_PackedPolicyTooLargeException(body.Error, context); + const exception = new PackedPolicyTooLargeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_PackedPolicyTooLargeExceptionRes"); +var de_RegionDisabledExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_RegionDisabledException(body.Error, context); + const exception = new RegionDisabledException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_RegionDisabledExceptionRes"); +var se_AssumeRoleRequest = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_RA] != null) { + entries[_RA] = input[_RA]; + } + if (input[_RSN] != null) { + entries[_RSN] = input[_RSN]; + } + if (input[_PA] != null) { + const memberEntries = se_policyDescriptorListType(input[_PA], context); + if (input[_PA]?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input[_P] != null) { + entries[_P] = input[_P]; + } + if (input[_DS] != null) { + entries[_DS] = input[_DS]; + } + if (input[_T] != null) { + const memberEntries = se_tagListType(input[_T], context); + if (input[_T]?.length === 0) { + entries.Tags = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `Tags.${key}`; + entries[loc] = value; + }); + } + if (input[_TTK] != null) { + const memberEntries = se_tagKeyListType(input[_TTK], context); + if (input[_TTK]?.length === 0) { + entries.TransitiveTagKeys = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `TransitiveTagKeys.${key}`; + entries[loc] = value; + }); + } + if (input[_EI] != null) { + entries[_EI] = input[_EI]; + } + if (input[_SN] != null) { + entries[_SN] = input[_SN]; + } + if (input[_TC] != null) { + entries[_TC] = input[_TC]; + } + if (input[_SI] != null) { + entries[_SI] = input[_SI]; + } + if (input[_PC] != null) { + const memberEntries = se_ProvidedContextsListType(input[_PC], context); + if (input[_PC]?.length === 0) { + entries.ProvidedContexts = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `ProvidedContexts.${key}`; + entries[loc] = value; + }); + } + return entries; +}, "se_AssumeRoleRequest"); +var se_AssumeRoleWithWebIdentityRequest = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_RA] != null) { + entries[_RA] = input[_RA]; + } + if (input[_RSN] != null) { + entries[_RSN] = input[_RSN]; + } + if (input[_WIT] != null) { + entries[_WIT] = input[_WIT]; + } + if (input[_PI] != 
null) { + entries[_PI] = input[_PI]; + } + if (input[_PA] != null) { + const memberEntries = se_policyDescriptorListType(input[_PA], context); + if (input[_PA]?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input[_P] != null) { + entries[_P] = input[_P]; + } + if (input[_DS] != null) { + entries[_DS] = input[_DS]; + } + return entries; +}, "se_AssumeRoleWithWebIdentityRequest"); +var se_policyDescriptorListType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_PolicyDescriptorType(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}, "se_policyDescriptorListType"); +var se_PolicyDescriptorType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_a] != null) { + entries[_a] = input[_a]; + } + return entries; +}, "se_PolicyDescriptorType"); +var se_ProvidedContext = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_PAr] != null) { + entries[_PAr] = input[_PAr]; + } + if (input[_CA] != null) { + entries[_CA] = input[_CA]; + } + return entries; +}, "se_ProvidedContext"); +var se_ProvidedContextsListType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_ProvidedContext(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}, "se_ProvidedContextsListType"); +var se_Tag = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_K] != null) { + entries[_K] = input[_K]; + } + if (input[_Va] != null) { + entries[_Va] = input[_Va]; + } + return entries; +}, "se_Tag"); +var se_tagKeyListType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + entries[`member.${counter}`] = entry; + counter++; + } + return entries; +}, "se_tagKeyListType"); +var se_tagListType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_Tag(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}, "se_tagListType"); +var de_AssumedRoleUser = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_ARI] != null) { + contents[_ARI] = (0, import_smithy_client3.expectString)(output[_ARI]); + } + if (output[_Ar] != null) { + contents[_Ar] = (0, import_smithy_client3.expectString)(output[_Ar]); + } + return contents; +}, "de_AssumedRoleUser"); +var de_AssumeRoleResponse = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_ARU] != null) { + contents[_ARU] = de_AssumedRoleUser(output[_ARU], context); + } + if (output[_PPS] != null) { + contents[_PPS] = (0, import_smithy_client3.strictParseInt32)(output[_PPS]); + } + if (output[_SI] != null) { + 
contents[_SI] = (0, import_smithy_client3.expectString)(output[_SI]); + } + return contents; +}, "de_AssumeRoleResponse"); +var de_AssumeRoleWithWebIdentityResponse = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_SFWIT] != null) { + contents[_SFWIT] = (0, import_smithy_client3.expectString)(output[_SFWIT]); + } + if (output[_ARU] != null) { + contents[_ARU] = de_AssumedRoleUser(output[_ARU], context); + } + if (output[_PPS] != null) { + contents[_PPS] = (0, import_smithy_client3.strictParseInt32)(output[_PPS]); + } + if (output[_Pr] != null) { + contents[_Pr] = (0, import_smithy_client3.expectString)(output[_Pr]); + } + if (output[_Au] != null) { + contents[_Au] = (0, import_smithy_client3.expectString)(output[_Au]); + } + if (output[_SI] != null) { + contents[_SI] = (0, import_smithy_client3.expectString)(output[_SI]); + } + return contents; +}, "de_AssumeRoleWithWebIdentityResponse"); +var de_Credentials = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_AKI] != null) { + contents[_AKI] = (0, import_smithy_client3.expectString)(output[_AKI]); + } + if (output[_SAK] != null) { + contents[_SAK] = (0, import_smithy_client3.expectString)(output[_SAK]); + } + if (output[_ST] != null) { + contents[_ST] = (0, import_smithy_client3.expectString)(output[_ST]); + } + if (output[_E] != null) { + contents[_E] = (0, import_smithy_client3.expectNonNull)((0, import_smithy_client3.parseRfc3339DateTimeWithOffset)(output[_E])); + } + return contents; +}, "de_Credentials"); +var de_ExpiredTokenException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_ExpiredTokenException"); +var de_IDPCommunicationErrorException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_IDPCommunicationErrorException"); +var de_IDPRejectedClaimException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_IDPRejectedClaimException"); +var de_InvalidIdentityTokenException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_InvalidIdentityTokenException"); +var de_MalformedPolicyDocumentException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_MalformedPolicyDocumentException"); +var de_PackedPolicyTooLargeException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_PackedPolicyTooLargeException"); +var de_RegionDisabledException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_RegionDisabledException"); +var deserializeMetadata = /* @__PURE__ */ 
__name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); +var throwDefaultError = (0, import_smithy_client3.withBaseException)(STSServiceException); +var buildHttpRpcRequest = /* @__PURE__ */ __name(async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? basePath.slice(0, -1) + path : basePath + path, + headers + }; + if (resolvedHostname !== void 0) { + contents.hostname = resolvedHostname; + } + if (body !== void 0) { + contents.body = body; + } + return new import_protocol_http.HttpRequest(contents); +}, "buildHttpRpcRequest"); +var SHARED_HEADERS = { + "content-type": "application/x-www-form-urlencoded" +}; +var _ = "2011-06-15"; +var _A = "Action"; +var _AKI = "AccessKeyId"; +var _AR = "AssumeRole"; +var _ARI = "AssumedRoleId"; +var _ARU = "AssumedRoleUser"; +var _ARWWI = "AssumeRoleWithWebIdentity"; +var _Ar = "Arn"; +var _Au = "Audience"; +var _C = "Credentials"; +var _CA = "ContextAssertion"; +var _DS = "DurationSeconds"; +var _E = "Expiration"; +var _EI = "ExternalId"; +var _K = "Key"; +var _P = "Policy"; +var _PA = "PolicyArns"; +var _PAr = "ProviderArn"; +var _PC = "ProvidedContexts"; +var _PI = "ProviderId"; +var _PPS = "PackedPolicySize"; +var _Pr = "Provider"; +var _RA = "RoleArn"; +var _RSN = "RoleSessionName"; +var _SAK = "SecretAccessKey"; +var _SFWIT = "SubjectFromWebIdentityToken"; +var _SI = "SourceIdentity"; +var _SN = "SerialNumber"; +var _ST = "SessionToken"; +var _T = "Tags"; +var _TC = "TokenCode"; +var _TTK = "TransitiveTagKeys"; +var _V = "Version"; +var _Va = "Value"; +var _WIT = "WebIdentityToken"; +var _a = "arn"; +var _m = "message"; +var buildFormUrlencodedString = /* @__PURE__ */ __name((formEntries) => Object.entries(formEntries).map(([key, value]) => (0, import_smithy_client3.extendedEncodeURIComponent)(key) + "=" + (0, import_smithy_client3.extendedEncodeURIComponent)(value)).join("&"), "buildFormUrlencodedString"); +var loadQueryErrorCode = /* @__PURE__ */ __name((output, data) => { + if (data.Error?.Code !== void 0) { + return data.Error.Code; + } + if (output.statusCode == 404) { + return "NotFound"; + } +}, "loadQueryErrorCode"); + +// src/submodules/sts/commands/AssumeRoleCommand.ts +var AssumeRoleCommand = class extends import_smithy_client4.Command.classBuilder().ep(import_EndpointParameters.commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSecurityTokenServiceV20110615", "AssumeRole", {}).n("STSClient", "AssumeRoleCommand").f(void 0, AssumeRoleResponseFilterSensitiveLog).ser(se_AssumeRoleCommand).de(de_AssumeRoleCommand).build() { + static { + __name(this, "AssumeRoleCommand"); + } +}; + +// src/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.ts +var import_middleware_endpoint2 = require("@smithy/middleware-endpoint"); +var import_middleware_serde2 = require("@smithy/middleware-serde"); +var import_smithy_client5 = require("@smithy/smithy-client"); +var import_EndpointParameters2 = 
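With the serializers and the name constants above in place, the wire format becomes concrete: every STS call is a POST of Action/Version form fields to "/", with nested lists flattened into member.N.* keys and the whole map form-urlencoded under the shared content-type header. An illustrative sketch of the body se_AssumeRoleCommand produces; the values are made up, and plain encodeURIComponent stands in for the SDK's extendedEncodeURIComponent, which additionally escapes !'()*:

// Illustrative AssumeRole request body, matching the constants above
// (_A = "Action", _V = "Version", _ = "2011-06-15").
const formEntries: Record<string, string> = {
  Action: "AssumeRole",
  Version: "2011-06-15",
  RoleArn: "arn:aws:iam::123456789012:role/demo",      // illustrative ARN
  RoleSessionName: "demo-session",
  "PolicyArns.member.1.arn": "arn:aws:iam::aws:policy/ReadOnlyAccess", // list flattening
};

const body = Object.entries(formEntries)
  .map(([k, v]) => `${encodeURIComponent(k)}=${encodeURIComponent(v)}`)
  .join("&");
// Sent with content-type: application/x-www-form-urlencoded (SHARED_HEADERS above).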
require("./endpoint/EndpointParameters"); +var AssumeRoleWithWebIdentityCommand = class extends import_smithy_client5.Command.classBuilder().ep(import_EndpointParameters2.commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde2.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint2.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSecurityTokenServiceV20110615", "AssumeRoleWithWebIdentity", {}).n("STSClient", "AssumeRoleWithWebIdentityCommand").f(AssumeRoleWithWebIdentityRequestFilterSensitiveLog, AssumeRoleWithWebIdentityResponseFilterSensitiveLog).ser(se_AssumeRoleWithWebIdentityCommand).de(de_AssumeRoleWithWebIdentityCommand).build() { + static { + __name(this, "AssumeRoleWithWebIdentityCommand"); + } +}; + +// src/submodules/sts/STS.ts +var import_STSClient = require("./STSClient"); +var commands = { + AssumeRoleCommand, + AssumeRoleWithWebIdentityCommand +}; +var STS = class extends import_STSClient.STSClient { + static { + __name(this, "STS"); + } +}; +(0, import_smithy_client6.createAggregatedClient)(commands, STS); + +// src/submodules/sts/index.ts +var import_EndpointParameters3 = require("./endpoint/EndpointParameters"); + +// src/submodules/sts/defaultStsRoleAssumers.ts +var import_client = require("@aws-sdk/core/client"); +var ASSUME_ROLE_DEFAULT_REGION = "us-east-1"; +var getAccountIdFromAssumedRoleUser = /* @__PURE__ */ __name((assumedRoleUser) => { + if (typeof assumedRoleUser?.Arn === "string") { + const arnComponents = assumedRoleUser.Arn.split(":"); + if (arnComponents.length > 4 && arnComponents[4] !== "") { + return arnComponents[4]; + } + } + return void 0; +}, "getAccountIdFromAssumedRoleUser"); +var resolveRegion = /* @__PURE__ */ __name(async (_region, _parentRegion, credentialProviderLogger) => { + const region = typeof _region === "function" ? await _region() : _region; + const parentRegion = typeof _parentRegion === "function" ? await _parentRegion() : _parentRegion; + credentialProviderLogger?.debug?.( + "@aws-sdk/client-sts::resolveRegion", + "accepting first of:", + `${region} (provider)`, + `${parentRegion} (parent client)`, + `${ASSUME_ROLE_DEFAULT_REGION} (STS default)` + ); + return region ?? parentRegion ?? ASSUME_ROLE_DEFAULT_REGION; +}, "resolveRegion"); +var getDefaultRoleAssumer = /* @__PURE__ */ __name((stsOptions, STSClient3) => { + let stsClient; + let closureSourceCreds; + return async (sourceCreds, params) => { + closureSourceCreds = sourceCreds; + if (!stsClient) { + const { + logger = stsOptions?.parentClientConfig?.logger, + region, + requestHandler = stsOptions?.parentClientConfig?.requestHandler, + credentialProviderLogger + } = stsOptions; + const resolvedRegion = await resolveRegion( + region, + stsOptions?.parentClientConfig?.region, + credentialProviderLogger + ); + const isCompatibleRequestHandler = !isH2(requestHandler); + stsClient = new STSClient3({ + profile: stsOptions?.parentClientConfig?.profile, + // A hack to make sts client uses the credential in current closure. + credentialDefaultProvider: /* @__PURE__ */ __name(() => async () => closureSourceCreds, "credentialDefaultProvider"), + region: resolvedRegion, + requestHandler: isCompatibleRequestHandler ? 
requestHandler : void 0, + logger + }); + } + const { Credentials: Credentials2, AssumedRoleUser: AssumedRoleUser2 } = await stsClient.send(new AssumeRoleCommand(params)); + if (!Credentials2 || !Credentials2.AccessKeyId || !Credentials2.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRole call with role ${params.RoleArn}`); + } + const accountId = getAccountIdFromAssumedRoleUser(AssumedRoleUser2); + const credentials = { + accessKeyId: Credentials2.AccessKeyId, + secretAccessKey: Credentials2.SecretAccessKey, + sessionToken: Credentials2.SessionToken, + expiration: Credentials2.Expiration, + // TODO(credentialScope): access normally when shape is updated. + ...Credentials2.CredentialScope && { credentialScope: Credentials2.CredentialScope }, + ...accountId && { accountId } + }; + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_STS_ASSUME_ROLE", "i"); + return credentials; + }; +}, "getDefaultRoleAssumer"); +var getDefaultRoleAssumerWithWebIdentity = /* @__PURE__ */ __name((stsOptions, STSClient3) => { + let stsClient; + return async (params) => { + if (!stsClient) { + const { + logger = stsOptions?.parentClientConfig?.logger, + region, + requestHandler = stsOptions?.parentClientConfig?.requestHandler, + credentialProviderLogger + } = stsOptions; + const resolvedRegion = await resolveRegion( + region, + stsOptions?.parentClientConfig?.region, + credentialProviderLogger + ); + const isCompatibleRequestHandler = !isH2(requestHandler); + stsClient = new STSClient3({ + profile: stsOptions?.parentClientConfig?.profile, + region: resolvedRegion, + requestHandler: isCompatibleRequestHandler ? requestHandler : void 0, + logger + }); + } + const { Credentials: Credentials2, AssumedRoleUser: AssumedRoleUser2 } = await stsClient.send(new AssumeRoleWithWebIdentityCommand(params)); + if (!Credentials2 || !Credentials2.AccessKeyId || !Credentials2.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRoleWithWebIdentity call with role ${params.RoleArn}`); + } + const accountId = getAccountIdFromAssumedRoleUser(AssumedRoleUser2); + const credentials = { + accessKeyId: Credentials2.AccessKeyId, + secretAccessKey: Credentials2.SecretAccessKey, + sessionToken: Credentials2.SessionToken, + expiration: Credentials2.Expiration, + // TODO(credentialScope): access normally when shape is updated. 
+ ...Credentials2.CredentialScope && { credentialScope: Credentials2.CredentialScope }, + ...accountId && { accountId } + }; + if (accountId) { + (0, import_client.setCredentialFeature)(credentials, "RESOLVED_ACCOUNT_ID", "T"); + } + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_STS_ASSUME_ROLE_WEB_ID", "k"); + return credentials; + }; +}, "getDefaultRoleAssumerWithWebIdentity"); +var isH2 = /* @__PURE__ */ __name((requestHandler) => { + return requestHandler?.metadata?.handlerProtocol === "h2"; +}, "isH2"); + +// src/submodules/sts/defaultRoleAssumers.ts +var import_STSClient2 = require("./STSClient"); +var getCustomizableStsClientCtor = /* @__PURE__ */ __name((baseCtor, customizations) => { + if (!customizations) return baseCtor; + else + return class CustomizableSTSClient extends baseCtor { + static { + __name(this, "CustomizableSTSClient"); + } + constructor(config) { + super(config); + for (const customization of customizations) { + this.middlewareStack.use(customization); + } + } + }; +}, "getCustomizableStsClientCtor"); +var getDefaultRoleAssumer2 = /* @__PURE__ */ __name((stsOptions = {}, stsPlugins) => getDefaultRoleAssumer(stsOptions, getCustomizableStsClientCtor(import_STSClient2.STSClient, stsPlugins)), "getDefaultRoleAssumer"); +var getDefaultRoleAssumerWithWebIdentity2 = /* @__PURE__ */ __name((stsOptions = {}, stsPlugins) => getDefaultRoleAssumerWithWebIdentity(stsOptions, getCustomizableStsClientCtor(import_STSClient2.STSClient, stsPlugins)), "getDefaultRoleAssumerWithWebIdentity"); +var decorateDefaultCredentialProvider = /* @__PURE__ */ __name((provider) => (input) => provider({ + roleAssumer: getDefaultRoleAssumer2(input), + roleAssumerWithWebIdentity: getDefaultRoleAssumerWithWebIdentity2(input), + ...input +}), "decorateDefaultCredentialProvider"); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + AssumeRoleCommand, + AssumeRoleResponseFilterSensitiveLog, + AssumeRoleWithWebIdentityCommand, + AssumeRoleWithWebIdentityRequestFilterSensitiveLog, + AssumeRoleWithWebIdentityResponseFilterSensitiveLog, + ClientInputEndpointParameters, + CredentialsFilterSensitiveLog, + ExpiredTokenException, + IDPCommunicationErrorException, + IDPRejectedClaimException, + InvalidIdentityTokenException, + MalformedPolicyDocumentException, + PackedPolicyTooLargeException, + RegionDisabledException, + STS, + STSServiceException, + decorateDefaultCredentialProvider, + getDefaultRoleAssumer, + getDefaultRoleAssumerWithWebIdentity, + ...require("./STSClient") +}); diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.browser.js b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.browser.js new file mode 100644 index 00000000..63cedb19 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.browser.js @@ -0,0 +1,39 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../../../package.json")); +const sha256_browser_1 = require("@aws-crypto/sha256-browser"); +const util_user_agent_browser_1 = require("@aws-sdk/util-user-agent-browser"); +const config_resolver_1 = require("@smithy/config-resolver"); +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const invalid_dependency_1 = require("@smithy/invalid-dependency"); +const util_body_length_browser_1 = 
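The defaultStsRoleAssumers block completed above exists so credential providers can assume roles without importing the public STS client package: getDefaultRoleAssumer lazily constructs a single STSClient (reusing the parent client's region, logger, and request handler where possible, and refusing an HTTP/2 handler), captures the source credentials in a closure via the credentialDefaultProvider hack, and returns a plain async function. A hedged TypeScript sketch of the shape a provider chain consumes; the types and the call site are illustrative, not the SDK's real signatures:

// Illustrative shape of the role assumer returned above: it takes the
// source credentials plus AssumeRole parameters and yields temporary
// credentials.
type AwsCredentials = {
  accessKeyId: string;
  secretAccessKey: string;
  sessionToken?: string;
  expiration?: Date;
};
type RoleAssumer = (
  sourceCreds: AwsCredentials,
  params: { RoleArn: string; RoleSessionName: string },
) => Promise<AwsCredentials>;

// A provider chain plugs the assumer in roughly like this; the ARN and
// session name are illustrative.
async function resolveViaRole(assumeRole: RoleAssumer, baseCreds: AwsCredentials) {
  return assumeRole(baseCreds, {
    RoleArn: "arn:aws:iam::123456789012:role/app-role",
    RoleSessionName: "app-session",
  });
}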
require("@smithy/util-body-length-browser"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_browser_1 = require("@smithy/util-defaults-mode-browser"); +const getRuntimeConfig = (config) => { + const defaultsMode = (0, util_defaults_mode_browser_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_browser_1.calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? ((_) => () => Promise.reject(new Error("Credential is missing"))), + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_browser_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? util_retry_1.DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? (0, invalid_dependency_1.invalidProvider)("Region is missing"), + requestHandler: fetch_http_handler_1.FetchHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? sha256_browser_1.Sha256, + streamCollector: config?.streamCollector ?? fetch_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? 
(() => Promise.resolve(config_resolver_1.DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.js b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.js new file mode 100644 index 00000000..de3b0e79 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.js @@ -0,0 +1,65 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../../../package.json")); +const core_1 = require("@aws-sdk/core"); +const util_user_agent_node_1 = require("@aws-sdk/util-user-agent-node"); +const config_resolver_1 = require("@smithy/config-resolver"); +const core_2 = require("@smithy/core"); +const hash_node_1 = require("@smithy/hash-node"); +const middleware_retry_1 = require("@smithy/middleware-retry"); +const node_config_provider_1 = require("@smithy/node-config-provider"); +const node_http_handler_1 = require("@smithy/node-http-handler"); +const util_body_length_node_1 = require("@smithy/util-body-length-node"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_node_1 = require("@smithy/util-defaults-mode-node"); +const smithy_client_2 = require("@smithy/smithy-client"); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + (0, core_1.emitWarningIfUnsupportedVersion)(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? (0, node_config_provider_1.loadConfig)(core_1.NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4") || + (async (idProps) => await config.credentialDefaultProvider(idProps?.__config || {})()), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new core_2.NoAuthSigner(), + }, + ], + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, { ...config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: node_http_handler_1.NodeHttpHandler.create(config?.requestHandler ?? 
defaultConfigProvider), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? (0, node_config_provider_1.loadConfig)(util_user_agent_node_1.NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.native.js b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.native.js new file mode 100644 index 00000000..34c5f8ec --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.native.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const sha256_js_1 = require("@aws-crypto/sha256-js"); +const runtimeConfig_browser_1 = require("./runtimeConfig.browser"); +const getRuntimeConfig = (config) => { + const browserDefaults = (0, runtimeConfig_browser_1.getRuntimeConfig)(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? sha256_js_1.Sha256, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.shared.js b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.shared.js new file mode 100644 index 00000000..1e03d8b6 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.shared.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const core_1 = require("@aws-sdk/core"); +const core_2 = require("@smithy/core"); +const smithy_client_1 = require("@smithy/smithy-client"); +const url_parser_1 = require("@smithy/url-parser"); +const util_base64_1 = require("@smithy/util-base64"); +const util_utf8_1 = require("@smithy/util-utf8"); +const httpAuthSchemeProvider_1 = require("./auth/httpAuthSchemeProvider"); +const endpointResolver_1 = require("./endpoint/endpointResolver"); +const getRuntimeConfig = (config) => { + return { + apiVersion: "2011-06-15", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? httpAuthSchemeProvider_1.defaultSTSHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? 
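/* default schemes: SigV4, plus noAuth for unsigned operations such as AssumeRoleWithWebIdentity */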
[ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new core_2.NoAuthSigner(), + }, + ], + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "STS", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? util_utf8_1.toUtf8, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeExtensions.js b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeExtensions.js new file mode 100644 index 00000000..a50ebec3 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeExtensions.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveRuntimeExtensions = void 0; +const region_config_resolver_1 = require("@aws-sdk/region-config-resolver"); +const protocol_http_1 = require("@smithy/protocol-http"); +const smithy_client_1 = require("@smithy/smithy-client"); +const httpAuthExtensionConfiguration_1 = require("./auth/httpAuthExtensionConfiguration"); +const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign((0, region_config_resolver_1.getAwsRegionExtensionConfiguration)(runtimeConfig), (0, smithy_client_1.getDefaultExtensionConfiguration)(runtimeConfig), (0, protocol_http_1.getHttpHandlerExtensionConfiguration)(runtimeConfig), (0, httpAuthExtensionConfiguration_1.getHttpAuthExtensionConfiguration)(runtimeConfig)); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign(runtimeConfig, (0, region_config_resolver_1.resolveAwsRegionExtensionConfiguration)(extensionConfiguration), (0, smithy_client_1.resolveDefaultRuntimeConfig)(extensionConfiguration), (0, protocol_http_1.resolveHttpHandlerRuntimeConfig)(extensionConfiguration), (0, httpAuthExtensionConfiguration_1.resolveHttpAuthRuntimeConfig)(extensionConfiguration)); +}; +exports.resolveRuntimeExtensions = resolveRuntimeExtensions; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/index.js b/node_modules/@aws-sdk/nested-clients/dist-es/index.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/index.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDC.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDC.js new file mode 100644 index 00000000..bcb161f2 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDC.js @@ -0,0 +1,9 @@ +import { createAggregatedClient } from "@smithy/smithy-client"; +import { CreateTokenCommand } from "./commands/CreateTokenCommand"; +import { SSOOIDCClient } from "./SSOOIDCClient"; +const commands = { + CreateTokenCommand, +}; +export class SSOOIDC extends SSOOIDCClient { +} +createAggregatedClient(commands, SSOOIDC); diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDCClient.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDCClient.js new file mode 100644 index 00000000..003cad71 --- /dev/null +++ 
b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDCClient.js @@ -0,0 +1,48 @@ +import { getHostHeaderPlugin, resolveHostHeaderConfig, } from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { getRecursionDetectionPlugin } from "@aws-sdk/middleware-recursion-detection"; +import { getUserAgentPlugin, resolveUserAgentConfig, } from "@aws-sdk/middleware-user-agent"; +import { resolveRegionConfig } from "@smithy/config-resolver"; +import { DefaultIdentityProviderConfig, getHttpAuthSchemeEndpointRuleSetPlugin, getHttpSigningPlugin, } from "@smithy/core"; +import { getContentLengthPlugin } from "@smithy/middleware-content-length"; +import { resolveEndpointConfig } from "@smithy/middleware-endpoint"; +import { getRetryPlugin, resolveRetryConfig } from "@smithy/middleware-retry"; +import { Client as __Client, } from "@smithy/smithy-client"; +import { defaultSSOOIDCHttpAuthSchemeParametersProvider, resolveHttpAuthSchemeConfig, } from "./auth/httpAuthSchemeProvider"; +import { resolveClientEndpointParameters, } from "./endpoint/EndpointParameters"; +import { getRuntimeConfig as __getRuntimeConfig } from "./runtimeConfig"; +import { resolveRuntimeExtensions } from "./runtimeExtensions"; +export { __Client }; +export class SSOOIDCClient extends __Client { + config; + constructor(...[configuration]) { + const _config_0 = __getRuntimeConfig(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = resolveUserAgentConfig(_config_1); + const _config_3 = resolveRetryConfig(_config_2); + const _config_4 = resolveRegionConfig(_config_3); + const _config_5 = resolveHostHeaderConfig(_config_4); + const _config_6 = resolveEndpointConfig(_config_5); + const _config_7 = resolveHttpAuthSchemeConfig(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + this.middlewareStack.use(getRecursionDetectionPlugin(this.config)); + this.middlewareStack.use(getHttpAuthSchemeEndpointRuleSetPlugin(this.config, { + httpAuthSchemeParametersProvider: defaultSSOOIDCHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + }), + })); + this.middlewareStack.use(getHttpSigningPlugin(this.config)); + } + destroy() { + super.destroy(); + } +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.js new file mode 100644 index 00000000..2ba1d48c --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.js @@ -0,0 +1,38 @@ +export const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) 
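/* replaces a scheme with a matching schemeId, otherwise appends it */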
=> scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + }, + }; +}; +export const resolveHttpAuthRuntimeConfig = (config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthSchemeProvider.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthSchemeProvider.js new file mode 100644 index 00000000..a5e9eabd --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthSchemeProvider.js @@ -0,0 +1,50 @@ +import { resolveAwsSdkSigV4Config, } from "@aws-sdk/core"; +import { getSmithyContext, normalizeProvider } from "@smithy/util-middleware"; +export const defaultSSOOIDCHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: getSmithyContext(context).operation, + region: (await normalizeProvider(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "sso-oauth", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +export const defaultSSOOIDCHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "CreateToken": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +export const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = resolveAwsSdkSigV4Config(config); + return Object.assign(config_0, { + authSchemePreference: normalizeProvider(config.authSchemePreference ?? 
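/* defaults to an empty preference list, i.e. no caller-imposed scheme ordering */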
[]), + }); +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/CreateTokenCommand.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/CreateTokenCommand.js new file mode 100644 index 00000000..78632477 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/CreateTokenCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { CreateTokenRequestFilterSensitiveLog, CreateTokenResponseFilterSensitiveLog, } from "../models/models_0"; +import { de_CreateTokenCommand, se_CreateTokenCommand } from "../protocols/Aws_restJson1"; +export { $Command }; +export class CreateTokenCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AWSSSOOIDCService", "CreateToken", {}) + .n("SSOOIDCClient", "CreateTokenCommand") + .f(CreateTokenRequestFilterSensitiveLog, CreateTokenResponseFilterSensitiveLog) + .ser(se_CreateTokenCommand) + .de(de_CreateTokenCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/index.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/index.js new file mode 100644 index 00000000..09214cae --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/index.js @@ -0,0 +1 @@ +export * from "./CreateTokenCommand"; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/EndpointParameters.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/EndpointParameters.js new file mode 100644 index 00000000..2b26c443 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/EndpointParameters.js @@ -0,0 +1,13 @@ +export const resolveClientEndpointParameters = (options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? 
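/* FIPS and dual-stack endpoints are opt-in */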
false, + defaultSigningName: "sso-oauth", + }); +}; +export const commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/endpointResolver.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/endpointResolver.js new file mode 100644 index 00000000..0ac15bcd --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/endpointResolver.js @@ -0,0 +1,14 @@ +import { awsEndpointFunctions } from "@aws-sdk/util-endpoints"; +import { customEndpointFunctions, EndpointCache, resolveEndpoint } from "@smithy/util-endpoints"; +import { ruleSet } from "./ruleset"; +const cache = new EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS"], +}); +export const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => resolveEndpoint(ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +customEndpointFunctions.aws = awsEndpointFunctions; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/ruleset.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/ruleset.js new file mode 100644 index 00000000..040ea399 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/ruleset.js @@ -0,0 +1,4 @@ +const u = "required", v = "fn", w = "argv", x = "ref"; +const a = true, b = "isSet", c = "booleanEquals", d = "error", e = "endpoint", f = "tree", g = "PartitionResult", h = "getAttr", i = { [u]: false, "type": "String" }, j = { [u]: true, "default": false, "type": "Boolean" }, k = { [x]: "Endpoint" }, l = { [v]: c, [w]: [{ [x]: "UseFIPS" }, true] }, m = { [v]: c, [w]: [{ [x]: "UseDualStack" }, true] }, n = {}, o = { [v]: h, [w]: [{ [x]: g }, "supportsFIPS"] }, p = { [x]: g }, q = { [v]: c, [w]: [true, { [v]: h, [w]: [p, "supportsDualStack"] }] }, r = [l], s = [m], t = [{ [x]: "Region" }]; +const _data = { version: "1.0", parameters: { Region: i, UseDualStack: j, UseFIPS: j, Endpoint: i }, rules: [{ conditions: [{ [v]: b, [w]: [k] }], rules: [{ conditions: r, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: d }, { conditions: s, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: d }, { endpoint: { url: k, properties: n, headers: n }, type: e }], type: f }, { conditions: [{ [v]: b, [w]: t }], rules: [{ conditions: [{ [v]: "aws.partition", [w]: t, assign: g }], rules: [{ conditions: [l, m], rules: [{ conditions: [{ [v]: c, [w]: [a, o] }, q], rules: [{ endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: d }], type: f }, { conditions: r, rules: [{ conditions: [{ [v]: c, [w]: [o, a] }], rules: [{ conditions: [{ [v]: "stringEquals", [w]: [{ [v]: h, [w]: [p, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://oidc.{Region}.amazonaws.com", properties: n, headers: n }, type: e }, { endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS is enabled 
but this partition does not support FIPS", type: d }], type: f }, { conditions: s, rules: [{ conditions: [q], rules: [{ endpoint: { url: "https://oidc.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "DualStack is enabled but this partition does not support DualStack", type: d }], type: f }, { endpoint: { url: "https://oidc.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }], type: f }, { error: "Invalid Configuration: Missing Region", type: d }] }; +export const ruleSet = _data; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/extensionConfiguration.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/extensionConfiguration.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/extensionConfiguration.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/index.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/index.js new file mode 100644 index 00000000..c2894a34 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/index.js @@ -0,0 +1,5 @@ +export * from "./SSOOIDCClient"; +export * from "./SSOOIDC"; +export * from "./commands"; +export * from "./models"; +export { SSOOIDCServiceException } from "./models/SSOOIDCServiceException"; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/SSOOIDCServiceException.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/SSOOIDCServiceException.js new file mode 100644 index 00000000..176cec3f --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/SSOOIDCServiceException.js @@ -0,0 +1,8 @@ +import { ServiceException as __ServiceException, } from "@smithy/smithy-client"; +export { __ServiceException }; +export class SSOOIDCServiceException extends __ServiceException { + constructor(options) { + super(options); + Object.setPrototypeOf(this, SSOOIDCServiceException.prototype); + } +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/index.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/index.js new file mode 100644 index 00000000..09c5d6e0 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/index.js @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/models_0.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/models_0.js new file mode 100644 index 00000000..b350ef14 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/models_0.js @@ -0,0 +1,190 @@ +import { SENSITIVE_STRING } from "@smithy/smithy-client"; +import { SSOOIDCServiceException as __BaseException } from "./SSOOIDCServiceException"; +export class AccessDeniedException extends __BaseException { + name = "AccessDeniedException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "AccessDeniedException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, AccessDeniedException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class AuthorizationPendingException extends __BaseException { + name = "AuthorizationPendingException"; + $fault 
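/* client fault: the device-authorization grant is still awaiting user approval */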
= "client"; + error; + error_description; + constructor(opts) { + super({ + name: "AuthorizationPendingException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, AuthorizationPendingException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export const CreateTokenRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.clientSecret && { clientSecret: SENSITIVE_STRING }), + ...(obj.refreshToken && { refreshToken: SENSITIVE_STRING }), + ...(obj.codeVerifier && { codeVerifier: SENSITIVE_STRING }), +}); +export const CreateTokenResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: SENSITIVE_STRING }), + ...(obj.refreshToken && { refreshToken: SENSITIVE_STRING }), + ...(obj.idToken && { idToken: SENSITIVE_STRING }), +}); +export class ExpiredTokenException extends __BaseException { + name = "ExpiredTokenException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ExpiredTokenException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class InternalServerException extends __BaseException { + name = "InternalServerException"; + $fault = "server"; + error; + error_description; + constructor(opts) { + super({ + name: "InternalServerException", + $fault: "server", + ...opts, + }); + Object.setPrototypeOf(this, InternalServerException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class InvalidClientException extends __BaseException { + name = "InvalidClientException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "InvalidClientException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class InvalidGrantException extends __BaseException { + name = "InvalidGrantException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "InvalidGrantException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidGrantException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class InvalidRequestException extends __BaseException { + name = "InvalidRequestException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidRequestException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class InvalidScopeException extends __BaseException { + name = "InvalidScopeException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "InvalidScopeException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidScopeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class SlowDownException extends __BaseException { + name = "SlowDownException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "SlowDownException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, SlowDownException.prototype); + this.error 
= opts.error; + this.error_description = opts.error_description; + } +} +export class UnauthorizedClientException extends __BaseException { + name = "UnauthorizedClientException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "UnauthorizedClientException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, UnauthorizedClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class UnsupportedGrantTypeException extends __BaseException { + name = "UnsupportedGrantTypeException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "UnsupportedGrantTypeException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, UnsupportedGrantTypeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/protocols/Aws_restJson1.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/protocols/Aws_restJson1.js new file mode 100644 index 00000000..b58850b8 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/protocols/Aws_restJson1.js @@ -0,0 +1,255 @@ +import { loadRestJsonErrorCode, parseJsonBody as parseBody, parseJsonErrorBody as parseErrorBody } from "@aws-sdk/core"; +import { requestBuilder as rb } from "@smithy/core"; +import { _json, collectBody, decorateServiceException as __decorateServiceException, expectInt32 as __expectInt32, expectNonNull as __expectNonNull, expectObject as __expectObject, expectString as __expectString, map, take, withBaseException, } from "@smithy/smithy-client"; +import { AccessDeniedException, AuthorizationPendingException, ExpiredTokenException, InternalServerException, InvalidClientException, InvalidGrantException, InvalidRequestException, InvalidScopeException, SlowDownException, UnauthorizedClientException, UnsupportedGrantTypeException, } from "../models/models_0"; +import { SSOOIDCServiceException as __BaseException } from "../models/SSOOIDCServiceException"; +export const se_CreateTokenCommand = async (input, context) => { + const b = rb(input, context); + const headers = { + "content-type": "application/json", + }; + b.bp("/token"); + let body; + body = JSON.stringify(take(input, { + clientId: [], + clientSecret: [], + code: [], + codeVerifier: [], + deviceCode: [], + grantType: [], + redirectUri: [], + refreshToken: [], + scope: (_) => _json(_), + })); + b.m("POST").h(headers).b(body); + return b.build(); +}; +export const de_CreateTokenCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + const doc = take(data, { + accessToken: __expectString, + expiresIn: __expectInt32, + idToken: __expectString, + refreshToken: __expectString, + tokenType: __expectString, + }); + Object.assign(contents, doc); + return contents; +}; +const de_CommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.ssooidc#AccessDeniedException": + throw await 
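/* each modeled error code (bare or namespaced) maps to a typed exception */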
de_AccessDeniedExceptionRes(parsedOutput, context); + case "AuthorizationPendingException": + case "com.amazonaws.ssooidc#AuthorizationPendingException": + throw await de_AuthorizationPendingExceptionRes(parsedOutput, context); + case "ExpiredTokenException": + case "com.amazonaws.ssooidc#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "InternalServerException": + case "com.amazonaws.ssooidc#InternalServerException": + throw await de_InternalServerExceptionRes(parsedOutput, context); + case "InvalidClientException": + case "com.amazonaws.ssooidc#InvalidClientException": + throw await de_InvalidClientExceptionRes(parsedOutput, context); + case "InvalidGrantException": + case "com.amazonaws.ssooidc#InvalidGrantException": + throw await de_InvalidGrantExceptionRes(parsedOutput, context); + case "InvalidRequestException": + case "com.amazonaws.ssooidc#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "InvalidScopeException": + case "com.amazonaws.ssooidc#InvalidScopeException": + throw await de_InvalidScopeExceptionRes(parsedOutput, context); + case "SlowDownException": + case "com.amazonaws.ssooidc#SlowDownException": + throw await de_SlowDownExceptionRes(parsedOutput, context); + case "UnauthorizedClientException": + case "com.amazonaws.ssooidc#UnauthorizedClientException": + throw await de_UnauthorizedClientExceptionRes(parsedOutput, context); + case "UnsupportedGrantTypeException": + case "com.amazonaws.ssooidc#UnsupportedGrantTypeException": + throw await de_UnsupportedGrantTypeExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode, + }); + } +}; +const throwDefaultError = withBaseException(__BaseException); +const de_AccessDeniedExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new AccessDeniedException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_AuthorizationPendingExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new AuthorizationPendingException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_ExpiredTokenExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InternalServerExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new InternalServerException({ + $metadata: 
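/* attach HTTP status and request ids from the response metadata */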
deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InvalidClientExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new InvalidClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InvalidGrantExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new InvalidGrantException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InvalidRequestExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InvalidScopeExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new InvalidScopeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_SlowDownExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new SlowDownException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_UnauthorizedClientExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new UnauthorizedClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_UnsupportedGrantTypeExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new UnsupportedGrantTypeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
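/* the request-id header name varies across AWS services, hence the fallback chain */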
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); +const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.browser.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.browser.js new file mode 100644 index 00000000..94d7b875 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.browser.js @@ -0,0 +1,33 @@ +import packageInfo from "../../../package.json"; +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-browser"; +import { DEFAULT_USE_DUALSTACK_ENDPOINT, DEFAULT_USE_FIPS_ENDPOINT } from "@smithy/config-resolver"; +import { FetchHttpHandler as RequestHandler, streamCollector } from "@smithy/fetch-http-handler"; +import { invalidProvider } from "@smithy/invalid-dependency"; +import { calculateBodyLength } from "@smithy/util-body-length-browser"; +import { DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-browser"; +export const getRuntimeConfig = (config) => { + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + maxAttempts: config?.maxAttempts ?? DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? invalidProvider("Region is missing"), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? Sha256, + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? 
(() => Promise.resolve(DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.js new file mode 100644 index 00000000..32d413c9 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.js @@ -0,0 +1,46 @@ +import packageInfo from "../../../package.json"; +import { NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, emitWarningIfUnsupportedVersion as awsCheckVersion } from "@aws-sdk/core"; +import { NODE_APP_ID_CONFIG_OPTIONS, createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-node"; +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS, NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, } from "@smithy/config-resolver"; +import { Hash } from "@smithy/hash-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS, NODE_RETRY_MODE_CONFIG_OPTIONS } from "@smithy/middleware-retry"; +import { loadConfig as loadNodeConfig } from "@smithy/node-config-provider"; +import { NodeHttpHandler as RequestHandler, streamCollector } from "@smithy/node-http-handler"; +import { calculateBodyLength } from "@smithy/util-body-length-node"; +import { DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-node"; +import { emitWarningIfUnsupportedVersion } from "@smithy/smithy-client"; +export const getRuntimeConfig = (config) => { + emitWarningIfUnsupportedVersion(process.version); + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + awsCheckVersion(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? loadNodeConfig(NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + maxAttempts: config?.maxAttempts ?? loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, { ...NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + loadNodeConfig({ + ...NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? loadNodeConfig(NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? loadNodeConfig(NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? 
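/* optional app id, read from the environment or the shared config profile */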
loadNodeConfig(NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.native.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.native.js new file mode 100644 index 00000000..0b546952 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.native.js @@ -0,0 +1,11 @@ +import { Sha256 } from "@aws-crypto/sha256-js"; +import { getRuntimeConfig as getBrowserRuntimeConfig } from "./runtimeConfig.browser"; +export const getRuntimeConfig = (config) => { + const browserDefaults = getBrowserRuntimeConfig(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? Sha256, + }; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.shared.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.shared.js new file mode 100644 index 00000000..49a0235c --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.shared.js @@ -0,0 +1,36 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { NoOpLogger } from "@smithy/smithy-client"; +import { parseUrl } from "@smithy/url-parser"; +import { fromBase64, toBase64 } from "@smithy/util-base64"; +import { fromUtf8, toUtf8 } from "@smithy/util-utf8"; +import { defaultSSOOIDCHttpAuthSchemeProvider } from "./auth/httpAuthSchemeProvider"; +import { defaultEndpointResolver } from "./endpoint/endpointResolver"; +export const getRuntimeConfig = (config) => { + return { + apiVersion: "2019-06-10", + base64Decoder: config?.base64Decoder ?? fromBase64, + base64Encoder: config?.base64Encoder ?? toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? defaultSSOOIDCHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new NoAuthSigner(), + }, + ], + logger: config?.logger ?? new NoOpLogger(), + serviceId: config?.serviceId ?? "SSO OIDC", + urlParser: config?.urlParser ?? parseUrl, + utf8Decoder: config?.utf8Decoder ?? fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
toUtf8, + }; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeExtensions.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeExtensions.js new file mode 100644 index 00000000..5b296950 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeExtensions.js @@ -0,0 +1,9 @@ +import { getAwsRegionExtensionConfiguration, resolveAwsRegionExtensionConfiguration, } from "@aws-sdk/region-config-resolver"; +import { getHttpHandlerExtensionConfiguration, resolveHttpHandlerRuntimeConfig } from "@smithy/protocol-http"; +import { getDefaultExtensionConfiguration, resolveDefaultRuntimeConfig } from "@smithy/smithy-client"; +import { getHttpAuthExtensionConfiguration, resolveHttpAuthRuntimeConfig } from "./auth/httpAuthExtensionConfiguration"; +export const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign(getAwsRegionExtensionConfiguration(runtimeConfig), getDefaultExtensionConfiguration(runtimeConfig), getHttpHandlerExtensionConfiguration(runtimeConfig), getHttpAuthExtensionConfiguration(runtimeConfig)); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign(runtimeConfig, resolveAwsRegionExtensionConfiguration(extensionConfiguration), resolveDefaultRuntimeConfig(extensionConfiguration), resolveHttpHandlerRuntimeConfig(extensionConfiguration), resolveHttpAuthRuntimeConfig(extensionConfiguration)); +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STS.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STS.js new file mode 100644 index 00000000..71edef7a --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STS.js @@ -0,0 +1,11 @@ +import { createAggregatedClient } from "@smithy/smithy-client"; +import { AssumeRoleCommand } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommand, } from "./commands/AssumeRoleWithWebIdentityCommand"; +import { STSClient } from "./STSClient"; +const commands = { + AssumeRoleCommand, + AssumeRoleWithWebIdentityCommand, +}; +export class STS extends STSClient { +} +createAggregatedClient(commands, STS); diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STSClient.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STSClient.js new file mode 100644 index 00000000..81b10407 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STSClient.js @@ -0,0 +1,48 @@ +import { getHostHeaderPlugin, resolveHostHeaderConfig, } from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { getRecursionDetectionPlugin } from "@aws-sdk/middleware-recursion-detection"; +import { getUserAgentPlugin, resolveUserAgentConfig, } from "@aws-sdk/middleware-user-agent"; +import { resolveRegionConfig } from "@smithy/config-resolver"; +import { DefaultIdentityProviderConfig, getHttpAuthSchemeEndpointRuleSetPlugin, getHttpSigningPlugin, } from "@smithy/core"; +import { getContentLengthPlugin } from "@smithy/middleware-content-length"; +import { resolveEndpointConfig } from "@smithy/middleware-endpoint"; +import { getRetryPlugin, resolveRetryConfig } from "@smithy/middleware-retry"; +import { Client as __Client, } from "@smithy/smithy-client"; +import { defaultSTSHttpAuthSchemeParametersProvider, resolveHttpAuthSchemeConfig, } from "./auth/httpAuthSchemeProvider"; +import { resolveClientEndpointParameters, 
} from "./endpoint/EndpointParameters"; +import { getRuntimeConfig as __getRuntimeConfig } from "./runtimeConfig"; +import { resolveRuntimeExtensions } from "./runtimeExtensions"; +export { __Client }; +export class STSClient extends __Client { + config; + constructor(...[configuration]) { + const _config_0 = __getRuntimeConfig(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = resolveUserAgentConfig(_config_1); + const _config_3 = resolveRetryConfig(_config_2); + const _config_4 = resolveRegionConfig(_config_3); + const _config_5 = resolveHostHeaderConfig(_config_4); + const _config_6 = resolveEndpointConfig(_config_5); + const _config_7 = resolveHttpAuthSchemeConfig(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + this.middlewareStack.use(getRecursionDetectionPlugin(this.config)); + this.middlewareStack.use(getHttpAuthSchemeEndpointRuleSetPlugin(this.config, { + httpAuthSchemeParametersProvider: defaultSTSHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + }), + })); + this.middlewareStack.use(getHttpSigningPlugin(this.config)); + } + destroy() { + super.destroy(); + } +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthExtensionConfiguration.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthExtensionConfiguration.js new file mode 100644 index 00000000..2ba1d48c --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthExtensionConfiguration.js @@ -0,0 +1,38 @@ +export const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + }, + }; +}; +export const resolveHttpAuthRuntimeConfig = (config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthSchemeProvider.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthSchemeProvider.js new file mode 100644 index 00000000..3ea1e498 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthSchemeProvider.js 
@@ -0,0 +1,55 @@ +import { resolveAwsSdkSigV4Config, } from "@aws-sdk/core"; +import { getSmithyContext, normalizeProvider } from "@smithy/util-middleware"; +import { STSClient } from "../STSClient"; +export const defaultSTSHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: getSmithyContext(context).operation, + region: (await normalizeProvider(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "sts", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +export const defaultSTSHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "AssumeRoleWithWebIdentity": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +export const resolveStsAuthConfig = (input) => Object.assign(input, { + stsClientCtor: STSClient, +}); +export const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = resolveStsAuthConfig(config); + const config_1 = resolveAwsSdkSigV4Config(config_0); + return Object.assign(config_1, { + authSchemePreference: normalizeProvider(config.authSchemePreference ?? []), + }); +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleCommand.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleCommand.js new file mode 100644 index 00000000..bcb8589e --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { AssumeRoleResponseFilterSensitiveLog } from "../models/models_0"; +import { de_AssumeRoleCommand, se_AssumeRoleCommand } from "../protocols/Aws_query"; +export { $Command }; +export class AssumeRoleCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AWSSecurityTokenServiceV20110615", "AssumeRole", {}) + .n("STSClient", "AssumeRoleCommand") + .f(void 0, AssumeRoleResponseFilterSensitiveLog) + .ser(se_AssumeRoleCommand) + .de(de_AssumeRoleCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.js new file mode 100644 index 00000000..e4ecc2ed --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from 
"@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { AssumeRoleWithWebIdentityRequestFilterSensitiveLog, AssumeRoleWithWebIdentityResponseFilterSensitiveLog, } from "../models/models_0"; +import { de_AssumeRoleWithWebIdentityCommand, se_AssumeRoleWithWebIdentityCommand } from "../protocols/Aws_query"; +export { $Command }; +export class AssumeRoleWithWebIdentityCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AWSSecurityTokenServiceV20110615", "AssumeRoleWithWebIdentity", {}) + .n("STSClient", "AssumeRoleWithWebIdentityCommand") + .f(AssumeRoleWithWebIdentityRequestFilterSensitiveLog, AssumeRoleWithWebIdentityResponseFilterSensitiveLog) + .ser(se_AssumeRoleWithWebIdentityCommand) + .de(de_AssumeRoleWithWebIdentityCommand) + .build() { +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/index.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/index.js new file mode 100644 index 00000000..0f200f52 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/index.js @@ -0,0 +1,2 @@ +export * from "./AssumeRoleCommand"; +export * from "./AssumeRoleWithWebIdentityCommand"; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultRoleAssumers.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultRoleAssumers.js new file mode 100644 index 00000000..aafb8c4e --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultRoleAssumers.js @@ -0,0 +1,22 @@ +import { getDefaultRoleAssumer as StsGetDefaultRoleAssumer, getDefaultRoleAssumerWithWebIdentity as StsGetDefaultRoleAssumerWithWebIdentity, } from "./defaultStsRoleAssumers"; +import { STSClient } from "./STSClient"; +const getCustomizableStsClientCtor = (baseCtor, customizations) => { + if (!customizations) + return baseCtor; + else + return class CustomizableSTSClient extends baseCtor { + constructor(config) { + super(config); + for (const customization of customizations) { + this.middlewareStack.use(customization); + } + } + }; +}; +export const getDefaultRoleAssumer = (stsOptions = {}, stsPlugins) => StsGetDefaultRoleAssumer(stsOptions, getCustomizableStsClientCtor(STSClient, stsPlugins)); +export const getDefaultRoleAssumerWithWebIdentity = (stsOptions = {}, stsPlugins) => StsGetDefaultRoleAssumerWithWebIdentity(stsOptions, getCustomizableStsClientCtor(STSClient, stsPlugins)); +export const decorateDefaultCredentialProvider = (provider) => (input) => provider({ + roleAssumer: getDefaultRoleAssumer(input), + roleAssumerWithWebIdentity: getDefaultRoleAssumerWithWebIdentity(input), + ...input, +}); diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultStsRoleAssumers.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultStsRoleAssumers.js new file mode 100644 index 00000000..e7c7a90c --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultStsRoleAssumers.js @@ -0,0 +1,95 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { AssumeRoleCommand } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommand, } from "./commands/AssumeRoleWithWebIdentityCommand"; +const ASSUME_ROLE_DEFAULT_REGION = "us-east-1"; +const 
getAccountIdFromAssumedRoleUser = (assumedRoleUser) => { + if (typeof assumedRoleUser?.Arn === "string") { + const arnComponents = assumedRoleUser.Arn.split(":"); + if (arnComponents.length > 4 && arnComponents[4] !== "") { + return arnComponents[4]; + } + } + return undefined; +}; +const resolveRegion = async (_region, _parentRegion, credentialProviderLogger) => { + const region = typeof _region === "function" ? await _region() : _region; + const parentRegion = typeof _parentRegion === "function" ? await _parentRegion() : _parentRegion; + credentialProviderLogger?.debug?.("@aws-sdk/client-sts::resolveRegion", "accepting first of:", `${region} (provider)`, `${parentRegion} (parent client)`, `${ASSUME_ROLE_DEFAULT_REGION} (STS default)`); + return region ?? parentRegion ?? ASSUME_ROLE_DEFAULT_REGION; +}; +export const getDefaultRoleAssumer = (stsOptions, STSClient) => { + let stsClient; + let closureSourceCreds; + return async (sourceCreds, params) => { + closureSourceCreds = sourceCreds; + if (!stsClient) { + const { logger = stsOptions?.parentClientConfig?.logger, region, requestHandler = stsOptions?.parentClientConfig?.requestHandler, credentialProviderLogger, } = stsOptions; + const resolvedRegion = await resolveRegion(region, stsOptions?.parentClientConfig?.region, credentialProviderLogger); + const isCompatibleRequestHandler = !isH2(requestHandler); + stsClient = new STSClient({ + profile: stsOptions?.parentClientConfig?.profile, + credentialDefaultProvider: () => async () => closureSourceCreds, + region: resolvedRegion, + requestHandler: isCompatibleRequestHandler ? requestHandler : undefined, + logger: logger, + }); + } + const { Credentials, AssumedRoleUser } = await stsClient.send(new AssumeRoleCommand(params)); + if (!Credentials || !Credentials.AccessKeyId || !Credentials.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRole call with role ${params.RoleArn}`); + } + const accountId = getAccountIdFromAssumedRoleUser(AssumedRoleUser); + const credentials = { + accessKeyId: Credentials.AccessKeyId, + secretAccessKey: Credentials.SecretAccessKey, + sessionToken: Credentials.SessionToken, + expiration: Credentials.Expiration, + ...(Credentials.CredentialScope && { credentialScope: Credentials.CredentialScope }), + ...(accountId && { accountId }), + }; + setCredentialFeature(credentials, "CREDENTIALS_STS_ASSUME_ROLE", "i"); + return credentials; + }; +}; +export const getDefaultRoleAssumerWithWebIdentity = (stsOptions, STSClient) => { + let stsClient; + return async (params) => { + if (!stsClient) { + const { logger = stsOptions?.parentClientConfig?.logger, region, requestHandler = stsOptions?.parentClientConfig?.requestHandler, credentialProviderLogger, } = stsOptions; + const resolvedRegion = await resolveRegion(region, stsOptions?.parentClientConfig?.region, credentialProviderLogger); + const isCompatibleRequestHandler = !isH2(requestHandler); + stsClient = new STSClient({ + profile: stsOptions?.parentClientConfig?.profile, + region: resolvedRegion, + requestHandler: isCompatibleRequestHandler ? 
requestHandler : undefined, + logger: logger, + }); + } + const { Credentials, AssumedRoleUser } = await stsClient.send(new AssumeRoleWithWebIdentityCommand(params)); + if (!Credentials || !Credentials.AccessKeyId || !Credentials.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRoleWithWebIdentity call with role ${params.RoleArn}`); + } + const accountId = getAccountIdFromAssumedRoleUser(AssumedRoleUser); + const credentials = { + accessKeyId: Credentials.AccessKeyId, + secretAccessKey: Credentials.SecretAccessKey, + sessionToken: Credentials.SessionToken, + expiration: Credentials.Expiration, + ...(Credentials.CredentialScope && { credentialScope: Credentials.CredentialScope }), + ...(accountId && { accountId }), + }; + if (accountId) { + setCredentialFeature(credentials, "RESOLVED_ACCOUNT_ID", "T"); + } + setCredentialFeature(credentials, "CREDENTIALS_STS_ASSUME_ROLE_WEB_ID", "k"); + return credentials; + }; +}; +export const decorateDefaultCredentialProvider = (provider) => (input) => provider({ + roleAssumer: getDefaultRoleAssumer(input, input.stsClientCtor), + roleAssumerWithWebIdentity: getDefaultRoleAssumerWithWebIdentity(input, input.stsClientCtor), + ...input, +}); +const isH2 = (requestHandler) => { + return requestHandler?.metadata?.handlerProtocol === "h2"; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/EndpointParameters.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/EndpointParameters.js new file mode 100644 index 00000000..1c74b013 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/EndpointParameters.js @@ -0,0 +1,15 @@ +export const resolveClientEndpointParameters = (options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + useGlobalEndpoint: options.useGlobalEndpoint ?? 
false, + defaultSigningName: "sts", + }); +}; +export const commonParams = { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/endpointResolver.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/endpointResolver.js new file mode 100644 index 00000000..f54d2790 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/endpointResolver.js @@ -0,0 +1,14 @@ +import { awsEndpointFunctions } from "@aws-sdk/util-endpoints"; +import { customEndpointFunctions, EndpointCache, resolveEndpoint } from "@smithy/util-endpoints"; +import { ruleSet } from "./ruleset"; +const cache = new EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS", "UseGlobalEndpoint"], +}); +export const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => resolveEndpoint(ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +customEndpointFunctions.aws = awsEndpointFunctions; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/ruleset.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/ruleset.js new file mode 100644 index 00000000..99a438a5 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/ruleset.js @@ -0,0 +1,4 @@ +const F = "required", G = "type", H = "fn", I = "argv", J = "ref"; +const a = false, b = true, c = "booleanEquals", d = "stringEquals", e = "sigv4", f = "sts", g = "us-east-1", h = "endpoint", i = "https://sts.{Region}.{PartitionResult#dnsSuffix}", j = "tree", k = "error", l = "getAttr", m = { [F]: false, [G]: "String" }, n = { [F]: true, "default": false, [G]: "Boolean" }, o = { [J]: "Endpoint" }, p = { [H]: "isSet", [I]: [{ [J]: "Region" }] }, q = { [J]: "Region" }, r = { [H]: "aws.partition", [I]: [q], "assign": "PartitionResult" }, s = { [J]: "UseFIPS" }, t = { [J]: "UseDualStack" }, u = { "url": "https://sts.amazonaws.com", "properties": { "authSchemes": [{ "name": e, "signingName": f, "signingRegion": g }] }, "headers": {} }, v = {}, w = { "conditions": [{ [H]: d, [I]: [q, "aws-global"] }], [h]: u, [G]: h }, x = { [H]: c, [I]: [s, true] }, y = { [H]: c, [I]: [t, true] }, z = { [H]: l, [I]: [{ [J]: "PartitionResult" }, "supportsFIPS"] }, A = { [J]: "PartitionResult" }, B = { [H]: c, [I]: [true, { [H]: l, [I]: [A, "supportsDualStack"] }] }, C = [{ [H]: "isSet", [I]: [o] }], D = [x], E = [y]; +const _data = { version: "1.0", parameters: { Region: m, UseDualStack: n, UseFIPS: n, Endpoint: m, UseGlobalEndpoint: n }, rules: [{ conditions: [{ [H]: c, [I]: [{ [J]: "UseGlobalEndpoint" }, b] }, { [H]: "not", [I]: C }, p, r, { [H]: c, [I]: [s, a] }, { [H]: c, [I]: [t, a] }], rules: [{ conditions: [{ [H]: d, [I]: [q, "ap-northeast-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-south-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-southeast-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-southeast-2"] }], endpoint: u, [G]: h }, w, { conditions: [{ [H]: d, [I]: [q, "ca-central-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-central-1"] 
}], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-north-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-2"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-3"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "sa-east-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, g] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-east-2"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-west-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-west-2"] }], endpoint: u, [G]: h }, { endpoint: { url: i, properties: { authSchemes: [{ name: e, signingName: f, signingRegion: "{Region}" }] }, headers: v }, [G]: h }], [G]: j }, { conditions: C, rules: [{ conditions: D, error: "Invalid Configuration: FIPS and custom endpoint are not supported", [G]: k }, { conditions: E, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", [G]: k }, { endpoint: { url: o, properties: v, headers: v }, [G]: h }], [G]: j }, { conditions: [p], rules: [{ conditions: [r], rules: [{ conditions: [x, y], rules: [{ conditions: [{ [H]: c, [I]: [b, z] }, B], rules: [{ endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", [G]: k }], [G]: j }, { conditions: D, rules: [{ conditions: [{ [H]: c, [I]: [z, b] }], rules: [{ conditions: [{ [H]: d, [I]: [{ [H]: l, [I]: [A, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://sts.{Region}.amazonaws.com", properties: v, headers: v }, [G]: h }, { endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "FIPS is enabled but this partition does not support FIPS", [G]: k }], [G]: j }, { conditions: E, rules: [{ conditions: [B], rules: [{ endpoint: { url: "https://sts.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "DualStack is enabled but this partition does not support DualStack", [G]: k }], [G]: j }, w, { endpoint: { url: i, properties: v, headers: v }, [G]: h }], [G]: j }], [G]: j }, { error: "Invalid Configuration: Missing Region", [G]: k }] }; +export const ruleSet = _data; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/extensionConfiguration.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/extensionConfiguration.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/extensionConfiguration.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/index.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/index.js new file mode 100644 index 00000000..fa366be7 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/index.js @@ -0,0 +1,6 @@ +export * from "./STSClient"; +export * from "./STS"; +export * from "./commands"; +export * from "./models"; +export * from "./defaultRoleAssumers"; +export { STSServiceException } from "./models/STSServiceException"; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/STSServiceException.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/STSServiceException.js new file mode 100644 index 
00000000..6d2963c7 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/STSServiceException.js @@ -0,0 +1,8 @@ +import { ServiceException as __ServiceException, } from "@smithy/smithy-client"; +export { __ServiceException }; +export class STSServiceException extends __ServiceException { + constructor(options) { + super(options); + Object.setPrototypeOf(this, STSServiceException.prototype); + } +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/index.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/index.js new file mode 100644 index 00000000..09c5d6e0 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/index.js @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/models_0.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/models_0.js new file mode 100644 index 00000000..63e9c523 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/models_0.js @@ -0,0 +1,102 @@ +import { SENSITIVE_STRING } from "@smithy/smithy-client"; +import { STSServiceException as __BaseException } from "./STSServiceException"; +export const CredentialsFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SecretAccessKey && { SecretAccessKey: SENSITIVE_STRING }), +}); +export const AssumeRoleResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) }), +}); +export class ExpiredTokenException extends __BaseException { + name = "ExpiredTokenException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ExpiredTokenException.prototype); + } +} +export class MalformedPolicyDocumentException extends __BaseException { + name = "MalformedPolicyDocumentException"; + $fault = "client"; + constructor(opts) { + super({ + name: "MalformedPolicyDocumentException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, MalformedPolicyDocumentException.prototype); + } +} +export class PackedPolicyTooLargeException extends __BaseException { + name = "PackedPolicyTooLargeException"; + $fault = "client"; + constructor(opts) { + super({ + name: "PackedPolicyTooLargeException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, PackedPolicyTooLargeException.prototype); + } +} +export class RegionDisabledException extends __BaseException { + name = "RegionDisabledException"; + $fault = "client"; + constructor(opts) { + super({ + name: "RegionDisabledException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, RegionDisabledException.prototype); + } +} +export class IDPRejectedClaimException extends __BaseException { + name = "IDPRejectedClaimException"; + $fault = "client"; + constructor(opts) { + super({ + name: "IDPRejectedClaimException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, IDPRejectedClaimException.prototype); + } +} +export class InvalidIdentityTokenException extends __BaseException { + name = "InvalidIdentityTokenException"; + $fault = "client"; + constructor(opts) { + super({ + name: "InvalidIdentityTokenException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidIdentityTokenException.prototype); + } +} +export const AssumeRoleWithWebIdentityRequestFilterSensitiveLog = (obj) => ({ + ...obj, + 
...(obj.WebIdentityToken && { WebIdentityToken: SENSITIVE_STRING }), +}); +export const AssumeRoleWithWebIdentityResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) }), +}); +export class IDPCommunicationErrorException extends __BaseException { + name = "IDPCommunicationErrorException"; + $fault = "client"; + constructor(opts) { + super({ + name: "IDPCommunicationErrorException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, IDPCommunicationErrorException.prototype); + } +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/protocols/Aws_query.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/protocols/Aws_query.js new file mode 100644 index 00000000..a98e41a8 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/protocols/Aws_query.js @@ -0,0 +1,528 @@ +import { parseXmlBody as parseBody, parseXmlErrorBody as parseErrorBody } from "@aws-sdk/core"; +import { HttpRequest as __HttpRequest } from "@smithy/protocol-http"; +import { collectBody, decorateServiceException as __decorateServiceException, expectNonNull as __expectNonNull, expectString as __expectString, extendedEncodeURIComponent as __extendedEncodeURIComponent, parseRfc3339DateTimeWithOffset as __parseRfc3339DateTimeWithOffset, strictParseInt32 as __strictParseInt32, withBaseException, } from "@smithy/smithy-client"; +import { ExpiredTokenException, IDPCommunicationErrorException, IDPRejectedClaimException, InvalidIdentityTokenException, MalformedPolicyDocumentException, PackedPolicyTooLargeException, RegionDisabledException, } from "../models/models_0"; +import { STSServiceException as __BaseException } from "../models/STSServiceException"; +export const se_AssumeRoleCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleRequest(input, context), + [_A]: _AR, + [_V]: _, + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_AssumeRoleWithWebIdentityCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleWithWebIdentityRequest(input, context), + [_A]: _ARWWI, + [_V]: _, + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const de_AssumeRoleCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_AssumeRoleResponse(data.AssumeRoleResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_AssumeRoleWithWebIdentityCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_AssumeRoleWithWebIdentityResponse(data.AssumeRoleWithWebIdentityResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +const de_CommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "ExpiredTokenException": + case 
"com.amazonaws.sts#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "MalformedPolicyDocument": + case "com.amazonaws.sts#MalformedPolicyDocumentException": + throw await de_MalformedPolicyDocumentExceptionRes(parsedOutput, context); + case "PackedPolicyTooLarge": + case "com.amazonaws.sts#PackedPolicyTooLargeException": + throw await de_PackedPolicyTooLargeExceptionRes(parsedOutput, context); + case "RegionDisabledException": + case "com.amazonaws.sts#RegionDisabledException": + throw await de_RegionDisabledExceptionRes(parsedOutput, context); + case "IDPCommunicationError": + case "com.amazonaws.sts#IDPCommunicationErrorException": + throw await de_IDPCommunicationErrorExceptionRes(parsedOutput, context); + case "IDPRejectedClaim": + case "com.amazonaws.sts#IDPRejectedClaimException": + throw await de_IDPRejectedClaimExceptionRes(parsedOutput, context); + case "InvalidIdentityToken": + case "com.amazonaws.sts#InvalidIdentityTokenException": + throw await de_InvalidIdentityTokenExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode, + }); + } +}; +const de_ExpiredTokenExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_ExpiredTokenException(body.Error, context); + const exception = new ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_IDPCommunicationErrorExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPCommunicationErrorException(body.Error, context); + const exception = new IDPCommunicationErrorException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_IDPRejectedClaimExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPRejectedClaimException(body.Error, context); + const exception = new IDPRejectedClaimException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_InvalidIdentityTokenExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_InvalidIdentityTokenException(body.Error, context); + const exception = new InvalidIdentityTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_MalformedPolicyDocumentExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_MalformedPolicyDocumentException(body.Error, context); + const exception = new MalformedPolicyDocumentException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_PackedPolicyTooLargeExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_PackedPolicyTooLargeException(body.Error, context); + const exception = new PackedPolicyTooLargeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_RegionDisabledExceptionRes = async (parsedOutput, context) => { + 
const body = parsedOutput.body; + const deserialized = de_RegionDisabledException(body.Error, context); + const exception = new RegionDisabledException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const se_AssumeRoleRequest = (input, context) => { + const entries = {}; + if (input[_RA] != null) { + entries[_RA] = input[_RA]; + } + if (input[_RSN] != null) { + entries[_RSN] = input[_RSN]; + } + if (input[_PA] != null) { + const memberEntries = se_policyDescriptorListType(input[_PA], context); + if (input[_PA]?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input[_P] != null) { + entries[_P] = input[_P]; + } + if (input[_DS] != null) { + entries[_DS] = input[_DS]; + } + if (input[_T] != null) { + const memberEntries = se_tagListType(input[_T], context); + if (input[_T]?.length === 0) { + entries.Tags = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `Tags.${key}`; + entries[loc] = value; + }); + } + if (input[_TTK] != null) { + const memberEntries = se_tagKeyListType(input[_TTK], context); + if (input[_TTK]?.length === 0) { + entries.TransitiveTagKeys = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `TransitiveTagKeys.${key}`; + entries[loc] = value; + }); + } + if (input[_EI] != null) { + entries[_EI] = input[_EI]; + } + if (input[_SN] != null) { + entries[_SN] = input[_SN]; + } + if (input[_TC] != null) { + entries[_TC] = input[_TC]; + } + if (input[_SI] != null) { + entries[_SI] = input[_SI]; + } + if (input[_PC] != null) { + const memberEntries = se_ProvidedContextsListType(input[_PC], context); + if (input[_PC]?.length === 0) { + entries.ProvidedContexts = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `ProvidedContexts.${key}`; + entries[loc] = value; + }); + } + return entries; +}; +const se_AssumeRoleWithWebIdentityRequest = (input, context) => { + const entries = {}; + if (input[_RA] != null) { + entries[_RA] = input[_RA]; + } + if (input[_RSN] != null) { + entries[_RSN] = input[_RSN]; + } + if (input[_WIT] != null) { + entries[_WIT] = input[_WIT]; + } + if (input[_PI] != null) { + entries[_PI] = input[_PI]; + } + if (input[_PA] != null) { + const memberEntries = se_policyDescriptorListType(input[_PA], context); + if (input[_PA]?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input[_P] != null) { + entries[_P] = input[_P]; + } + if (input[_DS] != null) { + entries[_DS] = input[_DS]; + } + return entries; +}; +const se_policyDescriptorListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_PolicyDescriptorType(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}; +const se_PolicyDescriptorType = (input, context) => { + const entries = {}; + if (input[_a] != null) { + entries[_a] = input[_a]; + } + return entries; +}; +const se_ProvidedContext = (input, context) => { + const entries = {}; + if (input[_PAr] != null) { + entries[_PAr] = input[_PAr]; + } + if (input[_CA] != null) { + entries[_CA] = 
input[_CA]; + } + return entries; +}; +const se_ProvidedContextsListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_ProvidedContext(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}; +const se_Tag = (input, context) => { + const entries = {}; + if (input[_K] != null) { + entries[_K] = input[_K]; + } + if (input[_Va] != null) { + entries[_Va] = input[_Va]; + } + return entries; +}; +const se_tagKeyListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + entries[`member.${counter}`] = entry; + counter++; + } + return entries; +}; +const se_tagListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_Tag(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}; +const de_AssumedRoleUser = (output, context) => { + const contents = {}; + if (output[_ARI] != null) { + contents[_ARI] = __expectString(output[_ARI]); + } + if (output[_Ar] != null) { + contents[_Ar] = __expectString(output[_Ar]); + } + return contents; +}; +const de_AssumeRoleResponse = (output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_ARU] != null) { + contents[_ARU] = de_AssumedRoleUser(output[_ARU], context); + } + if (output[_PPS] != null) { + contents[_PPS] = __strictParseInt32(output[_PPS]); + } + if (output[_SI] != null) { + contents[_SI] = __expectString(output[_SI]); + } + return contents; +}; +const de_AssumeRoleWithWebIdentityResponse = (output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_SFWIT] != null) { + contents[_SFWIT] = __expectString(output[_SFWIT]); + } + if (output[_ARU] != null) { + contents[_ARU] = de_AssumedRoleUser(output[_ARU], context); + } + if (output[_PPS] != null) { + contents[_PPS] = __strictParseInt32(output[_PPS]); + } + if (output[_Pr] != null) { + contents[_Pr] = __expectString(output[_Pr]); + } + if (output[_Au] != null) { + contents[_Au] = __expectString(output[_Au]); + } + if (output[_SI] != null) { + contents[_SI] = __expectString(output[_SI]); + } + return contents; +}; +const de_Credentials = (output, context) => { + const contents = {}; + if (output[_AKI] != null) { + contents[_AKI] = __expectString(output[_AKI]); + } + if (output[_SAK] != null) { + contents[_SAK] = __expectString(output[_SAK]); + } + if (output[_ST] != null) { + contents[_ST] = __expectString(output[_ST]); + } + if (output[_E] != null) { + contents[_E] = __expectNonNull(__parseRfc3339DateTimeWithOffset(output[_E])); + } + return contents; +}; +const de_ExpiredTokenException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_IDPCommunicationErrorException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_IDPRejectedClaimException = (output, context) => { + const contents = {}; + if 
(output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_InvalidIdentityTokenException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_MalformedPolicyDocumentException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_PackedPolicyTooLargeException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_RegionDisabledException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); +const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); +const throwDefaultError = withBaseException(__BaseException); +const buildHttpRpcRequest = async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? basePath.slice(0, -1) + path : basePath + path, + headers, + }; + if (resolvedHostname !== undefined) { + contents.hostname = resolvedHostname; + } + if (body !== undefined) { + contents.body = body; + } + return new __HttpRequest(contents); +}; +const SHARED_HEADERS = { + "content-type": "application/x-www-form-urlencoded", +}; +const _ = "2011-06-15"; +const _A = "Action"; +const _AKI = "AccessKeyId"; +const _AR = "AssumeRole"; +const _ARI = "AssumedRoleId"; +const _ARU = "AssumedRoleUser"; +const _ARWWI = "AssumeRoleWithWebIdentity"; +const _Ar = "Arn"; +const _Au = "Audience"; +const _C = "Credentials"; +const _CA = "ContextAssertion"; +const _DS = "DurationSeconds"; +const _E = "Expiration"; +const _EI = "ExternalId"; +const _K = "Key"; +const _P = "Policy"; +const _PA = "PolicyArns"; +const _PAr = "ProviderArn"; +const _PC = "ProvidedContexts"; +const _PI = "ProviderId"; +const _PPS = "PackedPolicySize"; +const _Pr = "Provider"; +const _RA = "RoleArn"; +const _RSN = "RoleSessionName"; +const _SAK = "SecretAccessKey"; +const _SFWIT = "SubjectFromWebIdentityToken"; +const _SI = "SourceIdentity"; +const _SN = "SerialNumber"; +const _ST = "SessionToken"; +const _T = "Tags"; +const _TC = "TokenCode"; +const _TTK = "TransitiveTagKeys"; +const _V = "Version"; +const _Va = "Value"; +const _WIT = "WebIdentityToken"; +const _a = "arn"; +const _m = "message"; +const buildFormUrlencodedString = (formEntries) => Object.entries(formEntries) + .map(([key, value]) => __extendedEncodeURIComponent(key) + "=" + __extendedEncodeURIComponent(value)) + .join("&"); +const loadQueryErrorCode = (output, data) => { + if (data.Error?.Code !== undefined) { + return data.Error.Code; + } + if (output.statusCode == 404) { + return "NotFound"; + } +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.browser.js 
b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.browser.js new file mode 100644 index 00000000..f45dbd3a --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.browser.js @@ -0,0 +1,34 @@ +import packageInfo from "../../../package.json"; +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-browser"; +import { DEFAULT_USE_DUALSTACK_ENDPOINT, DEFAULT_USE_FIPS_ENDPOINT } from "@smithy/config-resolver"; +import { FetchHttpHandler as RequestHandler, streamCollector } from "@smithy/fetch-http-handler"; +import { invalidProvider } from "@smithy/invalid-dependency"; +import { calculateBodyLength } from "@smithy/util-body-length-browser"; +import { DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-browser"; +export const getRuntimeConfig = (config) => { + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? ((_) => () => Promise.reject(new Error("Credential is missing"))), + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + maxAttempts: config?.maxAttempts ?? DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? invalidProvider("Region is missing"), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? Sha256, + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? 
(() => Promise.resolve(DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.js new file mode 100644 index 00000000..6ac2412d --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.js @@ -0,0 +1,60 @@ +import packageInfo from "../../../package.json"; +import { AwsSdkSigV4Signer, NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, emitWarningIfUnsupportedVersion as awsCheckVersion, } from "@aws-sdk/core"; +import { NODE_APP_ID_CONFIG_OPTIONS, createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-node"; +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS, NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, } from "@smithy/config-resolver"; +import { NoAuthSigner } from "@smithy/core"; +import { Hash } from "@smithy/hash-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS, NODE_RETRY_MODE_CONFIG_OPTIONS } from "@smithy/middleware-retry"; +import { loadConfig as loadNodeConfig } from "@smithy/node-config-provider"; +import { NodeHttpHandler as RequestHandler, streamCollector } from "@smithy/node-http-handler"; +import { calculateBodyLength } from "@smithy/util-body-length-node"; +import { DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-node"; +import { emitWarningIfUnsupportedVersion } from "@smithy/smithy-client"; +export const getRuntimeConfig = (config) => { + emitWarningIfUnsupportedVersion(process.version); + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + awsCheckVersion(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? loadNodeConfig(NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4") || + (async (idProps) => await config.credentialDefaultProvider(idProps?.__config || {})()), + signer: new AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new NoAuthSigner(), + }, + ], + maxAttempts: config?.maxAttempts ?? loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, { ...NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + loadNodeConfig({ + ...NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? 
Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? loadNodeConfig(NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? loadNodeConfig(NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? loadNodeConfig(NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.native.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.native.js new file mode 100644 index 00000000..0b546952 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.native.js @@ -0,0 +1,11 @@ +import { Sha256 } from "@aws-crypto/sha256-js"; +import { getRuntimeConfig as getBrowserRuntimeConfig } from "./runtimeConfig.browser"; +export const getRuntimeConfig = (config) => { + const browserDefaults = getBrowserRuntimeConfig(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? Sha256, + }; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.shared.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.shared.js new file mode 100644 index 00000000..5c6df203 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.shared.js @@ -0,0 +1,36 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { NoOpLogger } from "@smithy/smithy-client"; +import { parseUrl } from "@smithy/url-parser"; +import { fromBase64, toBase64 } from "@smithy/util-base64"; +import { fromUtf8, toUtf8 } from "@smithy/util-utf8"; +import { defaultSTSHttpAuthSchemeProvider } from "./auth/httpAuthSchemeProvider"; +import { defaultEndpointResolver } from "./endpoint/endpointResolver"; +export const getRuntimeConfig = (config) => { + return { + apiVersion: "2011-06-15", + base64Decoder: config?.base64Decoder ?? fromBase64, + base64Encoder: config?.base64Encoder ?? toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? defaultSTSHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new NoAuthSigner(), + }, + ], + logger: config?.logger ?? new NoOpLogger(), + serviceId: config?.serviceId ?? "STS", + urlParser: config?.urlParser ?? parseUrl, + utf8Decoder: config?.utf8Decoder ?? fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
toUtf8, + }; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeExtensions.js b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeExtensions.js new file mode 100644 index 00000000..5b296950 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeExtensions.js @@ -0,0 +1,9 @@ +import { getAwsRegionExtensionConfiguration, resolveAwsRegionExtensionConfiguration, } from "@aws-sdk/region-config-resolver"; +import { getHttpHandlerExtensionConfiguration, resolveHttpHandlerRuntimeConfig } from "@smithy/protocol-http"; +import { getDefaultExtensionConfiguration, resolveDefaultRuntimeConfig } from "@smithy/smithy-client"; +import { getHttpAuthExtensionConfiguration, resolveHttpAuthRuntimeConfig } from "./auth/httpAuthExtensionConfiguration"; +export const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign(getAwsRegionExtensionConfiguration(runtimeConfig), getDefaultExtensionConfiguration(runtimeConfig), getHttpHandlerExtensionConfiguration(runtimeConfig), getHttpAuthExtensionConfiguration(runtimeConfig)); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign(runtimeConfig, resolveAwsRegionExtensionConfiguration(extensionConfiguration), resolveDefaultRuntimeConfig(extensionConfiguration), resolveHttpHandlerRuntimeConfig(extensionConfiguration), resolveHttpAuthRuntimeConfig(extensionConfiguration)); +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/index.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/index.d.ts new file mode 100644 index 00000000..9d99a73c --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/index.d.ts @@ -0,0 +1,7 @@ +/** + * This package exports nothing at the root. + * Use submodules e.g. \@aws-sdk/nested-clients/client-sts. + * + * @internal + */ +export {}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDC.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDC.d.ts new file mode 100644 index 00000000..ebec5e61 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDC.d.ts @@ -0,0 +1,55 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { CreateTokenCommandInput, CreateTokenCommandOutput } from "./commands/CreateTokenCommand"; +import { SSOOIDCClient } from "./SSOOIDCClient"; +export interface SSOOIDC { + /** + * @see {@link CreateTokenCommand} + */ + createToken(args: CreateTokenCommandInput, options?: __HttpHandlerOptions): Promise<CreateTokenCommandOutput>; + createToken(args: CreateTokenCommandInput, cb: (err: any, data?: CreateTokenCommandOutput) => void): void; + createToken(args: CreateTokenCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: CreateTokenCommandOutput) => void): void; +} +/** + *
IAM Identity Center OpenID Connect (OIDC) is a web service that enables a client (such as CLI or a + * native application) to register with IAM Identity Center. The service also enables the client to fetch the + * user’s access token upon successful authentication and authorization with IAM Identity Center. + * + * API namespaces + * + * IAM Identity Center uses the sso and identitystore API namespaces. IAM Identity Center + * OpenID Connect uses the sso-oidc namespace. + * + * Considerations for using this guide + * + * Before you begin using this guide, we recommend that you first review the following + * important information about how the IAM Identity Center OIDC service works. + * + * - The IAM Identity Center OIDC service currently implements only the portions of the OAuth 2.0 Device + *   Authorization Grant standard (https://tools.ietf.org/html/rfc8628) that are necessary to enable single + *   sign-on authentication with the CLI. + * - With older versions of the CLI, the service only emits OIDC access tokens, so to + *   obtain a new token, users must explicitly re-authenticate. To access the OIDC flow that + *   supports token refresh and doesn’t require re-authentication, update to the latest CLI + *   version (1.27.10 for CLI V1 and 2.9.0 for CLI V2) with support for OIDC token refresh + *   and configurable IAM Identity Center session durations. For more information, see Configure Amazon Web Services access portal session duration. + * - The access tokens provided by this service grant access to all Amazon Web Services account + *   entitlements assigned to an IAM Identity Center user, not just a particular application. + * - The documentation in this guide does not describe the mechanism to convert the access + *   token into Amazon Web Services Auth (“sigv4”) credentials for use with IAM-protected Amazon Web Services service + *   endpoints. For more information, see GetRoleCredentials in the IAM Identity Center Portal API Reference + *   Guide. + * + * For general information about IAM Identity Center, see What is + * IAM Identity Center? in the IAM Identity Center User Guide.
+ * @public + */ +export declare class SSOOIDC extends SSOOIDCClient implements SSOOIDC { +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDCClient.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDCClient.d.ts new file mode 100644 index 00000000..54908894 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDCClient.d.ts @@ -0,0 +1,220 @@ +import { HostHeaderInputConfig, HostHeaderResolvedConfig } from "@aws-sdk/middleware-host-header"; +import { UserAgentInputConfig, UserAgentResolvedConfig } from "@aws-sdk/middleware-user-agent"; +import { RegionInputConfig, RegionResolvedConfig } from "@smithy/config-resolver"; +import { EndpointInputConfig, EndpointResolvedConfig } from "@smithy/middleware-endpoint"; +import { RetryInputConfig, RetryResolvedConfig } from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { Client as __Client, DefaultsMode as __DefaultsMode, SmithyConfiguration as __SmithyConfiguration, SmithyResolvedConfiguration as __SmithyResolvedConfiguration } from "@smithy/smithy-client"; +import { BodyLengthCalculator as __BodyLengthCalculator, CheckOptionalClientConfig as __CheckOptionalClientConfig, ChecksumConstructor as __ChecksumConstructor, Decoder as __Decoder, Encoder as __Encoder, HashConstructor as __HashConstructor, HttpHandlerOptions as __HttpHandlerOptions, Logger as __Logger, Provider as __Provider, Provider, StreamCollector as __StreamCollector, UrlParser as __UrlParser, UserAgent as __UserAgent } from "@smithy/types"; +import { HttpAuthSchemeInputConfig, HttpAuthSchemeResolvedConfig } from "./auth/httpAuthSchemeProvider"; +import { CreateTokenCommandInput, CreateTokenCommandOutput } from "./commands/CreateTokenCommand"; +import { ClientInputEndpointParameters, ClientResolvedEndpointParameters, EndpointParameters } from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +/** + * @public + */ +export type ServiceInputTypes = CreateTokenCommandInput; +/** + * @public + */ +export type ServiceOutputTypes = CreateTokenCommandOutput; +/** + * @public + */ +export interface ClientDefaults extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use or its constructor options. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandlerUserInput; + /** + * A constructor for a class implementing the {@link @smithy/types#ChecksumConstructor} interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + * @internal + */ + sha256?: __ChecksumConstructor | __HashConstructor; + /** + * The function that will be used to convert strings into HTTP endpoints. + * @internal + */ + urlParser?: __UrlParser; + /** + * A function that can calculate the length of a request body. + * @internal + */ + bodyLengthChecker?: __BodyLengthCalculator; + /** + * A function that converts a stream into an array of bytes. + * @internal + */ + streamCollector?: __StreamCollector; + /** + * The function that will be used to convert a base64-encoded string to a byte array. + * @internal + */ + base64Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a base64-encoded string. + * @internal + */ + base64Encoder?: __Encoder; + /** + * The function that will be used to convert a UTF8-encoded string to a byte array. 
+ * @internal + */ + utf8Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a UTF-8 encoded string. + * @internal + */ + utf8Encoder?: __Encoder; + /** + * The runtime environment. + * @internal + */ + runtime?: string; + /** + * Disable dynamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | __Provider<boolean>; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | __Provider<boolean>; + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider<string>; + /** + * Setting a client profile is similar to setting a value for the + * AWS_PROFILE environment variable. Setting a profile on a client + * in code only affects the single client instance, unlike AWS_PROFILE. + * + * When set, and only for environments where an AWS configuration + * file exists, fields configurable by this file will be retrieved + * from the specified profile within that file. + * Conflicting code configuration and environment variables will + * still have higher priority. + * + * For client credential resolution that involves checking the AWS + * configuration file, the client's profile (this value) will be + * used unless a different profile is set in the credential + * provider options. + * + */ + profile?: string; + /** + * The provider populating default tracking information to be sent with `user-agent`, `x-amz-user-agent` header + * @internal + */ + defaultUserAgentProvider?: Provider<__UserAgent>; + /** + * Value for how many times a request will be made at most in case of retry. + */ + maxAttempts?: number | __Provider<number>; + /** + * Specifies which retry algorithm to use. + * @see https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-smithy-util-retry/Enum/RETRY_MODES/ + * + */ + retryMode?: string | __Provider<string>; + /** + * Optional logger for logging debug/info/warn/error. + */ + logger?: __Logger; + /** + * Optional extensions + */ + extensions?: RuntimeExtension[]; + /** + * The {@link @smithy/smithy-client#DefaultsMode} that will be used to determine how certain default configuration options are resolved in the SDK. + */ + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +/** + * @public + */ +export type SSOOIDCClientConfigType = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & ClientDefaults & UserAgentInputConfig & RetryInputConfig & RegionInputConfig & HostHeaderInputConfig & EndpointInputConfig<EndpointParameters> & HttpAuthSchemeInputConfig & ClientInputEndpointParameters; +/** + * @public + * + * The configuration interface of SSOOIDCClient class constructor that set the region, credentials and other options. + */ +export interface SSOOIDCClientConfig extends SSOOIDCClientConfigType { +} +/** + * @public + */ +export type SSOOIDCClientResolvedConfigType = __SmithyResolvedConfiguration<__HttpHandlerOptions> & Required<ClientDefaults> & RuntimeExtensionsConfig & UserAgentResolvedConfig & RetryResolvedConfig & RegionResolvedConfig & HostHeaderResolvedConfig & EndpointResolvedConfig<EndpointParameters> & HttpAuthSchemeResolvedConfig & ClientResolvedEndpointParameters; +/** + * @public + * + * The resolved configuration interface of SSOOIDCClient class. This is resolved and normalized from the {@link SSOOIDCClientConfig | constructor configuration interface}.
+ */ +export interface SSOOIDCClientResolvedConfig extends SSOOIDCClientResolvedConfigType { +} +/** + *
IAM Identity Center OpenID Connect (OIDC) is a web service that enables a client (such as CLI or a + * native application) to register with IAM Identity Center. The service also enables the client to fetch the + * user’s access token upon successful authentication and authorization with IAM Identity Center. + * + * API namespaces + * + * IAM Identity Center uses the sso and identitystore API namespaces. IAM Identity Center + * OpenID Connect uses the sso-oidc namespace. + * + * Considerations for using this guide + * + * Before you begin using this guide, we recommend that you first review the following + * important information about how the IAM Identity Center OIDC service works. + * + * - The IAM Identity Center OIDC service currently implements only the portions of the OAuth 2.0 Device + *   Authorization Grant standard (https://tools.ietf.org/html/rfc8628) that are necessary to enable single + *   sign-on authentication with the CLI. + * - With older versions of the CLI, the service only emits OIDC access tokens, so to + *   obtain a new token, users must explicitly re-authenticate. To access the OIDC flow that + *   supports token refresh and doesn’t require re-authentication, update to the latest CLI + *   version (1.27.10 for CLI V1 and 2.9.0 for CLI V2) with support for OIDC token refresh + *   and configurable IAM Identity Center session durations. For more information, see Configure Amazon Web Services access portal session duration. + * - The access tokens provided by this service grant access to all Amazon Web Services account + *   entitlements assigned to an IAM Identity Center user, not just a particular application. + * - The documentation in this guide does not describe the mechanism to convert the access + *   token into Amazon Web Services Auth (“sigv4”) credentials for use with IAM-protected Amazon Web Services service + *   endpoints. For more information, see GetRoleCredentials in the IAM Identity Center Portal API Reference + *   Guide. + * + * For general information about IAM Identity Center, see What is + * IAM Identity Center? in the IAM Identity Center User Guide.
+ * @public + */ +export declare class SSOOIDCClient extends __Client<__HttpHandlerOptions, ServiceInputTypes, ServiceOutputTypes, SSOOIDCClientResolvedConfig> { + /** + * The resolved configuration of SSOOIDCClient class. This is resolved and normalized from the {@link SSOOIDCClientConfig | constructor configuration interface}. + */ + readonly config: SSOOIDCClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig<SSOOIDCClientConfig>); + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy(): void; +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 00000000..a56a6089 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,29 @@ +import { AwsCredentialIdentity, AwsCredentialIdentityProvider, HttpAuthScheme } from "@smithy/types"; +import { SSOOIDCHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +/** + * @internal + */ +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider(httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider): void; + httpAuthSchemeProvider(): SSOOIDCHttpAuthSchemeProvider; + setCredentials(credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider): void; + credentials(): AwsCredentialIdentity | AwsCredentialIdentityProvider | undefined; +} +/** + * @internal + */ +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +/** + * @internal + */ +export declare const getHttpAuthExtensionConfiguration: (runtimeConfig: HttpAuthRuntimeConfig) => HttpAuthExtensionConfiguration; +/** + * @internal + */ +export declare const resolveHttpAuthRuntimeConfig: (config: HttpAuthExtensionConfiguration) => HttpAuthRuntimeConfig; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 00000000..8fc989a9 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,75 @@ +import { AwsSdkSigV4AuthInputConfig, AwsSdkSigV4AuthResolvedConfig, AwsSdkSigV4PreviouslyResolved } from "@aws-sdk/core"; +import { HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, Provider } from "@smithy/types"; +import { SSOOIDCClientResolvedConfig } from "../SSOOIDCClient"; +/** + * @internal + */ +export interface SSOOIDCHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +/** + * @internal + */ +export interface SSOOIDCHttpAuthSchemeParametersProvider extends HttpAuthSchemeParametersProvider<SSOOIDCClientResolvedConfig, HandlerExecutionContext, SSOOIDCHttpAuthSchemeParameters, object> { +} +/** + * @internal + */ +export declare const defaultSSOOIDCHttpAuthSchemeParametersProvider: (config: 
SSOOIDCClientResolvedConfig, context: HandlerExecutionContext, input: object) => Promise; +/** + * @internal + */ +export interface SSOOIDCHttpAuthSchemeProvider extends HttpAuthSchemeProvider { +} +/** + * @internal + */ +export declare const defaultSSOOIDCHttpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; +/** + * @internal + */ +export interface HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + authSchemePreference?: string[] | Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + httpAuthSchemes?: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. + * @internal + */ + httpAuthSchemeProvider?: SSOOIDCHttpAuthSchemeProvider; +} +/** + * @internal + */ +export interface HttpAuthSchemeResolvedConfig extends AwsSdkSigV4AuthResolvedConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + readonly authSchemePreference: Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + readonly httpAuthSchemes: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. + * @internal + */ + readonly httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; +} +/** + * @internal + */ +export declare const resolveHttpAuthSchemeConfig: (config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved) => T & HttpAuthSchemeResolvedConfig; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/CreateTokenCommand.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/CreateTokenCommand.d.ts new file mode 100644 index 00000000..042fb523 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/CreateTokenCommand.d.ts @@ -0,0 +1,174 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { CreateTokenRequest, CreateTokenResponse } from "../models/models_0"; +import { SSOOIDCClientResolvedConfig } from "../SSOOIDCClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link CreateTokenCommand}. + */ +export interface CreateTokenCommandInput extends CreateTokenRequest { +} +/** + * @public + * + * The output of {@link CreateTokenCommand}. + */ +export interface CreateTokenCommandOutput extends CreateTokenResponse, __MetadataBearer { +} +declare const CreateTokenCommand_base: { + new (input: CreateTokenCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: CreateTokenCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *
Creates and returns access and refresh tokens for clients that are authenticated using + * client secrets. The access token can be used to fetch short-lived credentials for the assigned + * AWS accounts or to access application APIs using bearer authentication.
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SSOOIDCClient, CreateTokenCommand } from "@aws-sdk/client-sso-oidc"; // ES Modules import + * // const { SSOOIDCClient, CreateTokenCommand } = require("@aws-sdk/client-sso-oidc"); // CommonJS import + * const client = new SSOOIDCClient(config); + * const input = { // CreateTokenRequest + * clientId: "STRING_VALUE", // required + * clientSecret: "STRING_VALUE", // required + * grantType: "STRING_VALUE", // required + * deviceCode: "STRING_VALUE", + * code: "STRING_VALUE", + * refreshToken: "STRING_VALUE", + * scope: [ // Scopes + * "STRING_VALUE", + * ], + * redirectUri: "STRING_VALUE", + * codeVerifier: "STRING_VALUE", + * }; + * const command = new CreateTokenCommand(input); + * const response = await client.send(command); + * // { // CreateTokenResponse + * // accessToken: "STRING_VALUE", + * // tokenType: "STRING_VALUE", + * // expiresIn: Number("int"), + * // refreshToken: "STRING_VALUE", + * // idToken: "STRING_VALUE", + * // }; + * + * ``` + * + * @param CreateTokenCommandInput - {@link CreateTokenCommandInput} + * @returns {@link CreateTokenCommandOutput} + * @see {@link CreateTokenCommandInput} for command's `input` shape. + * @see {@link CreateTokenCommandOutput} for command's `response` shape. + * @see {@link SSOOIDCClientResolvedConfig | config} for SSOOIDCClient's `config` shape. + * + * @throws {@link AccessDeniedException} (client fault) + *
You do not have sufficient access to perform this action.
+ * + * @throws {@link AuthorizationPendingException} (client fault) + * Indicates that a request to authorize a client with an access user session token is + * pending.
+ * + * @throws {@link ExpiredTokenException} (client fault) + * Indicates that the token issued by the service is expired and is no longer valid.
+ * + * @throws {@link InternalServerException} (server fault) + * Indicates that an error from the service occurred while trying to process a + * request.
+ * + * @throws {@link InvalidClientException} (client fault) + * Indicates that the clientId or clientSecret in the request is + * invalid. For example, this can occur when a client sends an incorrect clientId or + * an expired clientSecret.
+ * + * @throws {@link InvalidGrantException} (client fault) + * Indicates that a request contains an invalid grant. This can occur if a client makes a + * CreateToken request with an invalid grant type.
+ * + * @throws {@link InvalidRequestException} (client fault) + * Indicates that something is wrong with the input to the request. For example, a required + * parameter might be missing or out of range.
+ * + * @throws {@link InvalidScopeException} (client fault) + * Indicates that the scope provided in the request is invalid.
+ * + * @throws {@link SlowDownException} (client fault) + * Indicates that the client is making the request too frequently and is more than the + * service can handle.
+ * + * @throws {@link UnauthorizedClientException} (client fault) + * Indicates that the client is not currently authorized to make the request. This can happen + * when a clientId is not issued for a public client.
+ * + * @throws {@link UnsupportedGrantTypeException} (client fault) + * Indicates that the grant type in the request is not supported by the service.
+ * + * @throws {@link SSOOIDCServiceException} + * Base exception class for all service exceptions from SSOOIDC service.
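Each fault listed above is exported as a concrete exception class (see models_0.d.ts later in this diff), so callers can branch on failures with instanceof rather than matching error strings. A minimal sketch, assuming the public @aws-sdk/client-sso-oidc package, which exposes the same classes as this nested submodule; the "return undefined to keep polling" policy is an illustrative assumption, not SDK behavior:

```typescript
import {
  SSOOIDCClient,
  CreateTokenCommand,
  type CreateTokenCommandInput,
  AuthorizationPendingException,
  SlowDownException,
} from "@aws-sdk/client-sso-oidc";

// Returns tokens once the user approves; undefined while approval is pending.
async function pollOnce(client: SSOOIDCClient, input: CreateTokenCommandInput) {
  try {
    return await client.send(new CreateTokenCommand(input));
  } catch (err) {
    if (err instanceof AuthorizationPendingException) return undefined; // keep polling
    if (err instanceof SlowDownException) return undefined; // caller should widen its polling interval
    throw err; // InvalidClientException, ExpiredTokenException, etc.
  }
}
```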
+ * + * + * @example Call OAuth/OIDC /token endpoint for Device Code grant with Secret authentication + * ```javascript + * // + * const input = { + * clientId: "_yzkThXVzLWVhc3QtMQEXAMPLECLIENTID", + * clientSecret: "VERYLONGSECRETeyJraWQiOiJrZXktMTU2NDAyODA5OSIsImFsZyI6IkhTMzg0In0", + * deviceCode: "yJraWQiOiJrZXktMTU2Njk2ODA4OCIsImFsZyI6IkhTMzIn0EXAMPLEDEVICECODE", + * grantType: "urn:ietf:params:oauth:grant-type:device-code" + * }; + * const command = new CreateTokenCommand(input); + * const response = await client.send(command); + * /* response is + * { + * accessToken: "aoal-YigITUDiNX1xZwOMXM5MxOWDL0E0jg9P6_C_jKQPxS_SKCP6f0kh1Up4g7TtvQqkMnD-GJiU_S1gvug6SrggAkc0:MGYCMQD3IatVjV7jAJU91kK3PkS/SfA2wtgWzOgZWDOR7sDGN9t0phCZz5It/aes/3C1Zj0CMQCKWOgRaiz6AIhza3DSXQNMLjRKXC8F8ceCsHlgYLMZ7hZidEXAMPLEACCESSTOKEN", + * expiresIn: 1579729529, + * refreshToken: "aorvJYubGpU6i91YnH7Mfo-AT2fIVa1zCfA_Rvq9yjVKIP3onFmmykuQ7E93y2I-9Nyj-A_sVvMufaLNL0bqnDRtgAkc0:MGUCMFrRsktMRVlWaOR70XGMFGLL0SlcCw4DiYveIiOVx1uK9BbD0gvAddsW3UTLozXKMgIxAJ3qxUvjpnlLIOaaKOoa/FuNgqJVvr9GMwDtnAtlh9iZzAkEXAMPLEREFRESHTOKEN", + * tokenType: "Bearer" + * } + * *\/ + * ``` + * + * @example Call OAuth/OIDC /token endpoint for Refresh Token grant with Secret authentication + * ```javascript + * // + * const input = { + * clientId: "_yzkThXVzLWVhc3QtMQEXAMPLECLIENTID", + * clientSecret: "VERYLONGSECRETeyJraWQiOiJrZXktMTU2NDAyODA5OSIsImFsZyI6IkhTMzg0In0", + * grantType: "refresh_token", + * refreshToken: "aorvJYubGpU6i91YnH7Mfo-AT2fIVa1zCfA_Rvq9yjVKIP3onFmmykuQ7E93y2I-9Nyj-A_sVvMufaLNL0bqnDRtgAkc0:MGUCMFrRsktMRVlWaOR70XGMFGLL0SlcCw4DiYveIiOVx1uK9BbD0gvAddsW3UTLozXKMgIxAJ3qxUvjpnlLIOaaKOoa/FuNgqJVvr9GMwDtnAtlh9iZzAkEXAMPLEREFRESHTOKEN", + * scope: [ + * "codewhisperer:completions" + * ] + * }; + * const command = new CreateTokenCommand(input); + * const response = await client.send(command); + * /* response is + * { + * accessToken: "aoal-YigITUDiNX1xZwOMXM5MxOWDL0E0jg9P6_C_jKQPxS_SKCP6f0kh1Up4g7TtvQqkMnD-GJiU_S1gvug6SrggAkc0:MGYCMQD3IatVjV7jAJU91kK3PkS/SfA2wtgWzOgZWDOR7sDGN9t0phCZz5It/aes/3C1Zj0CMQCKWOgRaiz6AIhza3DSXQNMLjRKXC8F8ceCsHlgYLMZ7hZidEXAMPLEACCESSTOKEN", + * expiresIn: 1579729529, + * refreshToken: "aorvJYubGpU6i91YnH7Mfo-AT2fIVa1zCfA_Rvq9yjVKIP3onFmmykuQ7E93y2I-9Nyj-A_sVvMufaLNL0bqnDRtgAkc0:MGUCMFrRsktMRVlWaOR70XGMFGLL0SlcCw4DiYveIiOVx1uK9BbD0gvAddsW3UTLozXKMgIxAJ3qxUvjpnlLIOaaKOoa/FuNgqJVvr9GMwDtnAtlh9iZzAkEXAMPLEREFRESHTOKEN", + * tokenType: "Bearer" + * } + * *\/ + * ``` + * + * @public + */ +export declare class CreateTokenCommand extends CreateTokenCommand_base { + /** @internal type navigation helper, not in runtime. 
*/ + protected static __types: { + api: { + input: CreateTokenRequest; + output: CreateTokenResponse; + }; + sdk: { + input: CreateTokenCommandInput; + output: CreateTokenCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/index.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/index.d.ts new file mode 100644 index 00000000..09214cae --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/index.d.ts @@ -0,0 +1 @@ +export * from "./CreateTokenCommand"; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/EndpointParameters.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/EndpointParameters.d.ts new file mode 100644 index 00000000..23f42e36 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/EndpointParameters.d.ts @@ -0,0 +1,40 @@ +import { Endpoint, EndpointParameters as __EndpointParameters, EndpointV2, Provider } from "@smithy/types"; +/** + * @public + */ +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: string | Provider | Endpoint | Provider | EndpointV2 | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: (options: T & ClientInputEndpointParameters) => T & ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const commonParams: { + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/endpointResolver.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/endpointResolver.d.ts new file mode 100644 index 00000000..70a8eaec --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/endpointResolver.d.ts @@ -0,0 +1,5 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: (endpointParams: EndpointParameters, context?: { + logger?: Logger; +}) => EndpointV2; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/ruleset.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/ruleset.d.ts new file mode 100644 index 00000000..4b238994 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/extensionConfiguration.d.ts 
b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/extensionConfiguration.d.ts new file mode 100644 index 00000000..c78de858 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +/** + * @internal + */ +export interface SSOOIDCExtensionConfiguration extends HttpHandlerExtensionConfiguration, DefaultExtensionConfiguration, AwsRegionExtensionConfiguration, HttpAuthExtensionConfiguration { +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/index.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/index.d.ts new file mode 100644 index 00000000..54c46dda --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/index.d.ts @@ -0,0 +1,51 @@ +/** + *
IAM Identity Center OpenID Connect (OIDC) is a web service that enables a client (such as CLI or a + * native application) to register with IAM Identity Center. The service also enables the client to fetch the + * user’s access token upon successful authentication and authorization with IAM Identity Center.
+ * + * API namespaces + *
+ * IAM Identity Center uses the sso and identitystore API namespaces. IAM Identity Center + * OpenID Connect uses the sso-oidc namespace.
+ * + * Considerations for using this guide + *
+ * Before you begin using this guide, we recommend that you first review the following + * important information about how the IAM Identity Center OIDC service works.
+ *   • The IAM Identity Center OIDC service currently implements only the portions of the OAuth 2.0 Device + * Authorization Grant standard (https://tools.ietf.org/html/rfc8628) that are necessary to enable single + * sign-on authentication with the CLI.
+ *   • With older versions of the CLI, the service only emits OIDC access tokens, so to + * obtain a new token, users must explicitly re-authenticate. To access the OIDC flow that + * supports token refresh and doesn’t require re-authentication, update to the latest CLI + * version (1.27.10 for CLI V1 and 2.9.0 for CLI V2) with support for OIDC token refresh + * and configurable IAM Identity Center session durations. For more information, see Configure Amazon Web Services access portal session duration.
+ *   • The access tokens provided by this service grant access to all Amazon Web Services account + * entitlements assigned to an IAM Identity Center user, not just a particular application.
+ *   • The documentation in this guide does not describe the mechanism to convert the access + * token into Amazon Web Services Auth (“sigv4”) credentials for use with IAM-protected Amazon Web Services service + * endpoints. For more information, see GetRoleCredentials in the IAM Identity Center Portal API Reference + * Guide.
+ * + * For general information about IAM Identity Center, see What is + * IAM Identity Center? in the IAM Identity Center User Guide.
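The device-authorization flow described in the first consideration above ties these pieces together. A hedged end-to-end sketch: StartDeviceAuthorizationCommand ships with the full @aws-sdk/client-sso-oidc package (this nested submodule bundles only CreateTokenCommand), the region is a placeholder, and a prior RegisterClient call is assumed to have produced the clientId/clientSecret pair:

```typescript
import {
  SSOOIDCClient,
  StartDeviceAuthorizationCommand,
  CreateTokenCommand,
  AuthorizationPendingException,
} from "@aws-sdk/client-sso-oidc";

async function deviceCodeSignIn(clientId: string, clientSecret: string, startUrl: string) {
  const client = new SSOOIDCClient({ region: "us-east-1" }); // placeholder region
  const auth = await client.send(
    new StartDeviceAuthorizationCommand({ clientId, clientSecret, startUrl })
  );
  console.log(`Approve this device at: ${auth.verificationUriComplete}`);

  for (;;) {
    // Honor the server-suggested polling interval (seconds).
    await new Promise((resolve) => setTimeout(resolve, (auth.interval ?? 5) * 1000));
    try {
      return await client.send(
        new CreateTokenCommand({
          clientId,
          clientSecret,
          grantType: "urn:ietf:params:oauth:grant-type:device_code",
          deviceCode: auth.deviceCode,
        })
      );
    } catch (err) {
      if (err instanceof AuthorizationPendingException) continue; // user has not approved yet
      throw err; // SlowDownException, ExpiredTokenException, etc.
    }
  }
}
```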
+ * + * @packageDocumentation + */ +export * from "./SSOOIDCClient"; +export * from "./SSOOIDC"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export type { RuntimeExtension } from "./runtimeExtensions"; +export type { SSOOIDCExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./models"; +export { SSOOIDCServiceException } from "./models/SSOOIDCServiceException"; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts new file mode 100644 index 00000000..d45f71a0 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts @@ -0,0 +1,14 @@ +import { ServiceException as __ServiceException, ServiceExceptionOptions as __ServiceExceptionOptions } from "@smithy/smithy-client"; +export type { __ServiceExceptionOptions }; +export { __ServiceException }; +/** + * @public + * + * Base exception class for all service exceptions from SSOOIDC service. + */ +export declare class SSOOIDCServiceException extends __ServiceException { + /** + * @internal + */ + constructor(options: __ServiceExceptionOptions); +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/index.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/index.d.ts new file mode 100644 index 00000000..09c5d6e0 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/models_0.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/models_0.d.ts new file mode 100644 index 00000000..2d3c3f1e --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/models_0.d.ts @@ -0,0 +1,387 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { SSOOIDCServiceException as __BaseException } from "./SSOOIDCServiceException"; +/** + *
You do not have sufficient access to perform this action.
+ * @public + */ +export declare class AccessDeniedException extends __BaseException { + readonly name: "AccessDeniedException"; + readonly $fault: "client"; + /** + *
Single error code. For this exception the value will be access_denied.
+ * @public + */ + error?: string | undefined; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *
Indicates that a request to authorize a client with an access user session token is + * pending.
+ * @public + */ +export declare class AuthorizationPendingException extends __BaseException { + readonly name: "AuthorizationPendingException"; + readonly $fault: "client"; + /** + *
Single error code. For this exception the value will be + * authorization_pending.
+ * @public + */ + error?: string | undefined; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface CreateTokenRequest { + /** + *
The unique identifier string for the client or application. This value comes from the + * result of the RegisterClient API.
+ * @public + */ + clientId: string | undefined; + /** + *
A secret string generated for the client. This value should come from the persisted result + * of the RegisterClient API.
+ * @public + */ + clientSecret: string | undefined; + /** + *
Supports the following OAuth grant types: Authorization Code, Device Code, and Refresh + * Token. Specify one of the following values, depending on the grant type that you want:
+ *   • Authorization Code - authorization_code
+ *   • Device Code - urn:ietf:params:oauth:grant-type:device_code
+ *   • Refresh Token - refresh_token
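As a type-level reminder of the three values just listed, a caller might narrow grantType before building the request. This union is a hypothetical local convenience for the sketch; the SDK itself types grantType as a plain string:

```typescript
// Hypothetical local helper; not exported by the SDK.
type CreateTokenGrantType =
  | "authorization_code"
  | "urn:ietf:params:oauth:grant-type:device_code"
  | "refresh_token";

const grantType: CreateTokenGrantType = "refresh_token";
```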
+ * @public + */ + grantType: string | undefined; + /** + *
Used only when calling this API for the Device Code grant type. This short-lived code is + * used to identify this authorization request. This comes from the result of the StartDeviceAuthorization API.
+ * @public + */ + deviceCode?: string | undefined; + /** + *
Used only when calling this API for the Authorization Code grant type. The short-lived + * code is used to identify this authorization request.
+ * @public + */ + code?: string | undefined; + /** + *
Used only when calling this API for the Refresh Token grant type. This token is used to + * refresh short-lived tokens, such as the access token, that might expire. + * + * For more information about the features and limitations of the current IAM Identity Center OIDC + * implementation, see Considerations for Using this Guide in the IAM Identity Center + * OIDC API Reference.
+ * @public + */ + refreshToken?: string | undefined; + /** + *
The list of scopes for which authorization is requested. The access token that is issued + * is limited to the scopes that are granted. If this value is not specified, IAM Identity Center authorizes + * all scopes that are configured for the client during the call to RegisterClient.
+ * @public + */ + scope?: string[] | undefined; + /** + *
Used only when calling this API for the Authorization Code grant type. This value + * specifies the location of the client or application that has registered to receive the + * authorization code.
+ * @public + */ + redirectUri?: string | undefined; + /** + *
Used only when calling this API for the Authorization Code grant type. This value is + * generated by the client and presented to validate the original code challenge value the client + * passed at authorization time.
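Taken together, code, redirectUri, and codeVerifier describe the PKCE-style Authorization Code exchange. A sketch under assumed placeholder values; the verifier must match the code challenge sent when the authorization request was made:

```typescript
import { SSOOIDCClient, CreateTokenCommand } from "@aws-sdk/client-sso-oidc";

async function exchangeAuthorizationCode(code: string, codeVerifier: string) {
  const client = new SSOOIDCClient({ region: "us-east-1" }); // placeholder region
  return client.send(
    new CreateTokenCommand({
      clientId: "exampleClientId", // from RegisterClient (placeholder)
      clientSecret: "exampleClientSecret", // from RegisterClient (placeholder)
      grantType: "authorization_code",
      code, // short-lived code from the redirect
      redirectUri: "http://127.0.0.1:8000/callback", // must match the registered URI
      codeVerifier, // validates the original PKCE challenge
    })
  );
}
```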
+ * @public + */ + codeVerifier?: string | undefined; +} +/** + * @internal + */ +export declare const CreateTokenRequestFilterSensitiveLog: (obj: CreateTokenRequest) => any; +/** + * @public + */ +export interface CreateTokenResponse { + /** + *
A bearer token to access Amazon Web Services accounts and applications assigned to a user.
+ * @public + */ + accessToken?: string | undefined; + /** + *
Used to notify the client that the returned token is an access token. The supported token + * type is Bearer.
+ * @public + */ + tokenType?: string | undefined; + /** + *
Indicates the time in seconds when an access token will expire.
+ * @public + */ + expiresIn?: number | undefined; + /** + *
A token that, if present, can be used to refresh a previously issued access token that + * might have expired. + * + * For more information about the features and limitations of the current IAM Identity Center OIDC + * implementation, see Considerations for Using this Guide in the IAM Identity Center + * OIDC API Reference.
+ * @public + */ + refreshToken?: string | undefined; + /** + *
The idToken is not implemented or supported. For more information about the + * features and limitations of the current IAM Identity Center OIDC implementation, see + * Considerations for Using this Guide in the IAM Identity Center + * OIDC API Reference. + * + * A JSON Web Token (JWT) that identifies who is associated with the issued access token. + *
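Because expiresIn is a relative lifetime in seconds (standard OAuth semantics) while callers usually want an absolute timestamp, a cache layer typically converts it at receipt time. A sketch; the cache shape here is an assumption of this example, not an SDK type:

```typescript
import type { CreateTokenResponse } from "@aws-sdk/client-sso-oidc";

// Assumed cache shape for this sketch only.
interface CachedSsoToken {
  accessToken?: string;
  refreshToken?: string;
  expiresAt: string; // ISO-8601 absolute expiry
}

function toCachedToken(response: CreateTokenResponse): CachedSsoToken {
  return {
    accessToken: response.accessToken,
    refreshToken: response.refreshToken, // absent => full re-authentication needed later
    expiresAt: new Date(Date.now() + (response.expiresIn ?? 0) * 1000).toISOString(),
  };
}
```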
+ * @public + */ + idToken?: string | undefined; +} +/** + * @internal + */ +export declare const CreateTokenResponseFilterSensitiveLog: (obj: CreateTokenResponse) => any; +/** + *
Indicates that the token issued by the service is expired and is no longer valid.
+ * @public + */ +export declare class ExpiredTokenException extends __BaseException { + readonly name: "ExpiredTokenException"; + readonly $fault: "client"; + /** + *
Single error code. For this exception the value will be expired_token.
+ * @public + */ + error?: string | undefined; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *
Indicates that an error from the service occurred while trying to process a + * request.
+ * @public + */ +export declare class InternalServerException extends __BaseException { + readonly name: "InternalServerException"; + readonly $fault: "server"; + /** + *
Single error code. For this exception the value will be server_error.
+ * @public + */ + error?: string | undefined; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *
Indicates that the clientId or clientSecret in the request is + * invalid. For example, this can occur when a client sends an incorrect clientId or + * an expired clientSecret.
+ * @public + */ +export declare class InvalidClientException extends __BaseException { + readonly name: "InvalidClientException"; + readonly $fault: "client"; + /** + *
Single error code. For this exception the value will be + * invalid_client.
+ * @public + */ + error?: string | undefined; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *
Indicates that a request contains an invalid grant. This can occur if a client makes a + * CreateToken request with an invalid grant type.
+ * @public + */ +export declare class InvalidGrantException extends __BaseException { + readonly name: "InvalidGrantException"; + readonly $fault: "client"; + /** + *
Single error code. For this exception the value will be invalid_grant.
+ * @public + */ + error?: string | undefined; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *
Indicates that something is wrong with the input to the request. For example, a required + * parameter might be missing or out of range.
+ * @public + */ +export declare class InvalidRequestException extends __BaseException { + readonly name: "InvalidRequestException"; + readonly $fault: "client"; + /** + *
Single error code. For this exception the value will be + * invalid_request.
+ * @public + */ + error?: string | undefined; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *
Indicates that the scope provided in the request is invalid.
+ * @public + */ +export declare class InvalidScopeException extends __BaseException { + readonly name: "InvalidScopeException"; + readonly $fault: "client"; + /** + *
Single error code. For this exception the value will be invalid_scope.
+ * @public + */ + error?: string | undefined; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *
Indicates that the client is making the request too frequently and is more than the + * service can handle.
+ * @public + */ +export declare class SlowDownException extends __BaseException { + readonly name: "SlowDownException"; + readonly $fault: "client"; + /** + *
Single error code. For this exception the value will be slow_down.
+ * @public + */ + error?: string | undefined; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *
Indicates that the client is not currently authorized to make the request. This can happen + * when a clientId is not issued for a public client.
+ * @public + */ +export declare class UnauthorizedClientException extends __BaseException { + readonly name: "UnauthorizedClientException"; + readonly $fault: "client"; + /** + *
Single error code. For this exception the value will be + * unauthorized_client.
+ * @public + */ + error?: string | undefined; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *
Indicates that the grant type in the request is not supported by the service.
+ * @public + */ +export declare class UnsupportedGrantTypeException extends __BaseException { + readonly name: "UnsupportedGrantTypeException"; + readonly $fault: "client"; + /** + *
Single error code. For this exception the value will be + * unsupported_grant_type.
+ * @public + */ + error?: string | undefined; + /** + *
Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.
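Every exception class above carries the same optional OAuth-style pair — error (the machine-readable code) and error_description (developer-facing text) — so a single formatter can cover all of them. A sketch; the intersection cast is only needed because the fields are declared on the subclasses, not on the base class:

```typescript
import { SSOOIDCServiceException } from "@aws-sdk/client-sso-oidc";

function formatOidcError(err: unknown): string {
  if (err instanceof SSOOIDCServiceException) {
    // error / error_description live on the concrete subclasses.
    const { error, error_description } = err as SSOOIDCServiceException & {
      error?: string;
      error_description?: string;
    };
    return `${err.name} (${error ?? "unknown"}): ${error_description ?? err.message}`;
  }
  return String(err);
}
```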
+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/protocols/Aws_restJson1.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/protocols/Aws_restJson1.d.ts new file mode 100644 index 00000000..d4e38b49 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/protocols/Aws_restJson1.d.ts @@ -0,0 +1,11 @@ +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { CreateTokenCommandInput, CreateTokenCommandOutput } from "../commands/CreateTokenCommand"; +/** + * serializeAws_restJson1CreateTokenCommand + */ +export declare const se_CreateTokenCommand: (input: CreateTokenCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * deserializeAws_restJson1CreateTokenCommand + */ +export declare const de_CreateTokenCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.browser.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.browser.d.ts new file mode 100644 index 00000000..26c727f2 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.browser.d.ts @@ -0,0 +1,57 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: import("@smithy/protocol-http").HttpHandler | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | 
import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.d.ts new file mode 100644 index 00000000..1819a97e --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.d.ts @@ -0,0 +1,57 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-node").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: RequestHandler | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: 
import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.native.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.native.d.ts new file mode 100644 index 00000000..86acac73 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.native.d.ts @@ -0,0 +1,56 @@ +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: import("@smithy/types").NodeHttpHandlerOptions | import("@smithy/types").FetchHttpHandlerOptions | Record | import("@smithy/protocol-http").HttpHandler | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | 
import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: import("@smithy/smithy-client").DefaultsMode | import("@smithy/types").Provider; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.shared.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.shared.d.ts new file mode 100644 index 00000000..e1100177 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.shared.d.ts @@ -0,0 +1,32 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + 
disableHostPrefix: boolean; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: NoAuthSigner; + })[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeExtensions.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeExtensions.d.ts new file mode 100644 index 00000000..1bdf704a --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeExtensions.d.ts @@ -0,0 +1,17 @@ +import { SSOOIDCExtensionConfiguration } from "./extensionConfiguration"; +/** + * @public + */ +export interface RuntimeExtension { + configure(extensionConfiguration: SSOOIDCExtensionConfiguration): void; +} +/** + * @public + */ +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +/** + * @internal + */ +export declare const resolveRuntimeExtensions: (runtimeConfig: any, extensions: RuntimeExtension[]) => any; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STS.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STS.d.ts new file mode 100644 index 00000000..bee83a5d --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STS.d.ts @@ -0,0 +1,27 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { AssumeRoleCommandInput, AssumeRoleCommandOutput } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommandInput, AssumeRoleWithWebIdentityCommandOutput } from "./commands/AssumeRoleWithWebIdentityCommand"; +import { STSClient } from "./STSClient"; +export interface STS { + /** + * @see {@link AssumeRoleCommand} + */ + assumeRole(args: AssumeRoleCommandInput, options?: __HttpHandlerOptions): Promise; + assumeRole(args: AssumeRoleCommandInput, cb: (err: any, data?: AssumeRoleCommandOutput) => void): void; + assumeRole(args: AssumeRoleCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: AssumeRoleCommandOutput) => void): void; + /** + * @see {@link AssumeRoleWithWebIdentityCommand} + */ + assumeRoleWithWebIdentity(args: AssumeRoleWithWebIdentityCommandInput, options?: __HttpHandlerOptions): Promise; + assumeRoleWithWebIdentity(args: AssumeRoleWithWebIdentityCommandInput, cb: (err: any, data?: AssumeRoleWithWebIdentityCommandOutput) => void): void; + assumeRoleWithWebIdentity(args: AssumeRoleWithWebIdentityCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: AssumeRoleWithWebIdentityCommandOutput) => void): void; +} +/** + * Security Token Service + *
Security Token Service (STS) enables you to request temporary, limited-privilege + * credentials for users. This guide provides descriptions of the STS API. For + * more information about using this service, see Temporary Security Credentials.
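A minimal sketch of the temporary-credentials request this interface describes, assuming the public @aws-sdk/client-sts package (which shares these shapes with the nested submodule) and placeholder account, role, and region values:

```typescript
import { STSClient, AssumeRoleCommand } from "@aws-sdk/client-sts";

async function getTemporaryCredentials() {
  const sts = new STSClient({ region: "us-east-1" }); // placeholder region
  const { Credentials } = await sts.send(
    new AssumeRoleCommand({
      RoleArn: "arn:aws:iam::123456789012:role/demo-role", // placeholder
      RoleSessionName: "demo-session",
      DurationSeconds: 900, // shortest duration STS allows
    })
  );
  return Credentials; // AccessKeyId, SecretAccessKey, SessionToken, Expiration
}
```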
+ * @public + */ +export declare class STS extends STSClient implements STS { +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STSClient.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STSClient.d.ts new file mode 100644 index 00000000..bd21c4ba --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STSClient.d.ts @@ -0,0 +1,192 @@ +import { HostHeaderInputConfig, HostHeaderResolvedConfig } from "@aws-sdk/middleware-host-header"; +import { UserAgentInputConfig, UserAgentResolvedConfig } from "@aws-sdk/middleware-user-agent"; +import { RegionInputConfig, RegionResolvedConfig } from "@smithy/config-resolver"; +import { EndpointInputConfig, EndpointResolvedConfig } from "@smithy/middleware-endpoint"; +import { RetryInputConfig, RetryResolvedConfig } from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { Client as __Client, DefaultsMode as __DefaultsMode, SmithyConfiguration as __SmithyConfiguration, SmithyResolvedConfiguration as __SmithyResolvedConfiguration } from "@smithy/smithy-client"; +import { AwsCredentialIdentityProvider, BodyLengthCalculator as __BodyLengthCalculator, CheckOptionalClientConfig as __CheckOptionalClientConfig, ChecksumConstructor as __ChecksumConstructor, Decoder as __Decoder, Encoder as __Encoder, HashConstructor as __HashConstructor, HttpHandlerOptions as __HttpHandlerOptions, Logger as __Logger, Provider as __Provider, Provider, StreamCollector as __StreamCollector, UrlParser as __UrlParser, UserAgent as __UserAgent } from "@smithy/types"; +import { HttpAuthSchemeInputConfig, HttpAuthSchemeResolvedConfig } from "./auth/httpAuthSchemeProvider"; +import { AssumeRoleCommandInput, AssumeRoleCommandOutput } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommandInput, AssumeRoleWithWebIdentityCommandOutput } from "./commands/AssumeRoleWithWebIdentityCommand"; +import { ClientInputEndpointParameters, ClientResolvedEndpointParameters, EndpointParameters } from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +/** + * @public + */ +export type ServiceInputTypes = AssumeRoleCommandInput | AssumeRoleWithWebIdentityCommandInput; +/** + * @public + */ +export type ServiceOutputTypes = AssumeRoleCommandOutput | AssumeRoleWithWebIdentityCommandOutput; +/** + * @public + */ +export interface ClientDefaults extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use or its constructor options. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandlerUserInput; + /** + * A constructor for a class implementing the {@link @smithy/types#ChecksumConstructor} interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + * @internal + */ + sha256?: __ChecksumConstructor | __HashConstructor; + /** + * The function that will be used to convert strings into HTTP endpoints. + * @internal + */ + urlParser?: __UrlParser; + /** + * A function that can calculate the length of a request body. + * @internal + */ + bodyLengthChecker?: __BodyLengthCalculator; + /** + * A function that converts a stream into an array of bytes. + * @internal + */ + streamCollector?: __StreamCollector; + /** + * The function that will be used to convert a base64-encoded string to a byte array. 
+ * @internal + */ + base64Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a base64-encoded string. + * @internal + */ + base64Encoder?: __Encoder; + /** + * The function that will be used to convert a UTF8-encoded string to a byte array. + * @internal + */ + utf8Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a UTF-8 encoded string. + * @internal + */ + utf8Encoder?: __Encoder; + /** + * The runtime environment. + * @internal + */ + runtime?: string; + /** + * Disable dynamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | __Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | __Provider; + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider; + /** + * Setting a client profile is similar to setting a value for the + * AWS_PROFILE environment variable. Setting a profile on a client + * in code only affects the single client instance, unlike AWS_PROFILE. + * + * When set, and only for environments where an AWS configuration + * file exists, fields configurable by this file will be retrieved + * from the specified profile within that file. + * Conflicting code configuration and environment variables will + * still have higher priority. + * + * For client credential resolution that involves checking the AWS + * configuration file, the client's profile (this value) will be + * used unless a different profile is set in the credential + * provider options. + * + */ + profile?: string; + /** + * The provider populating default tracking information to be sent with `user-agent`, `x-amz-user-agent` header + * @internal + */ + defaultUserAgentProvider?: Provider<__UserAgent>; + /** + * Default credentials provider; Not available in browser runtime. + * @deprecated + * @internal + */ + credentialDefaultProvider?: (input: any) => AwsCredentialIdentityProvider; + /** + * Value for how many times a request will be made at most in case of retry. + */ + maxAttempts?: number | __Provider; + /** + * Specifies which retry algorithm to use. + * @see https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-smithy-util-retry/Enum/RETRY_MODES/ + * + */ + retryMode?: string | __Provider; + /** + * Optional logger for logging debug/info/warn/error. + */ + logger?: __Logger; + /** + * Optional extensions + */ + extensions?: RuntimeExtension[]; + /** + * The {@link @smithy/smithy-client#DefaultsMode} that will be used to determine how certain default configuration options are resolved in the SDK. + */ + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +/** + * @public + */ +export type STSClientConfigType = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & ClientDefaults & UserAgentInputConfig & RetryInputConfig & RegionInputConfig & HostHeaderInputConfig & EndpointInputConfig & HttpAuthSchemeInputConfig & ClientInputEndpointParameters; +/** + * @public + * + * The configuration interface of STSClient class constructor that set the region, credentials and other options. 
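The profile option documented in the ClientDefaults block above scopes shared-config lookup to a single client instance, unlike the process-wide AWS_PROFILE variable. A sketch with placeholder profile and region names:

```typescript
import { STSClient } from "@aws-sdk/client-sts";

// Only this instance reads the "ci-deployer" profile from the AWS config file;
// explicit code configuration and environment variables still win on conflict.
const client = new STSClient({ profile: "ci-deployer", region: "eu-west-1" });
```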
+ */ +export interface STSClientConfig extends STSClientConfigType { +} +/** + * @public + */ +export type STSClientResolvedConfigType = __SmithyResolvedConfiguration<__HttpHandlerOptions> & Required & RuntimeExtensionsConfig & UserAgentResolvedConfig & RetryResolvedConfig & RegionResolvedConfig & HostHeaderResolvedConfig & EndpointResolvedConfig & HttpAuthSchemeResolvedConfig & ClientResolvedEndpointParameters; +/** + * @public + * + * The resolved configuration interface of STSClient class. This is resolved and normalized from the {@link STSClientConfig | constructor configuration interface}. + */ +export interface STSClientResolvedConfig extends STSClientResolvedConfigType { +} +/** + * Security Token Service + *
Security Token Service (STS) enables you to request temporary, limited-privilege + * credentials for users. This guide provides descriptions of the STS API. For + * more information about using this service, see Temporary Security Credentials.
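The other operation bundled in this submodule, AssumeRoleWithWebIdentity, exchanges a token from an external identity provider for AWS credentials. A sketch that also exercises the destroy() lifecycle hook declared on the client below; the role ARN is a placeholder and the token is assumed to come from the caller's OIDC provider:

```typescript
import { STSClient, AssumeRoleWithWebIdentityCommand } from "@aws-sdk/client-sts";

async function credentialsFromWebIdentity(webIdentityToken: string) {
  const client = new STSClient({ region: "us-east-1" }); // placeholder region
  try {
    const { Credentials } = await client.send(
      new AssumeRoleWithWebIdentityCommand({
        RoleArn: "arn:aws:iam::123456789012:role/web-identity-demo", // placeholder
        RoleSessionName: "web-identity-session",
        WebIdentityToken: webIdentityToken, // OIDC token from the external provider
      })
    );
    return Credentials;
  } finally {
    client.destroy(); // release sockets once the client is no longer needed
  }
}
```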
+ * @public + */ +export declare class STSClient extends __Client<__HttpHandlerOptions, ServiceInputTypes, ServiceOutputTypes, STSClientResolvedConfig> { + /** + * The resolved configuration of STSClient class. This is resolved and normalized from the {@link STSClientConfig | constructor configuration interface}. + */ + readonly config: STSClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy(): void; +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 00000000..1066c88b --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,29 @@ +import { AwsCredentialIdentity, AwsCredentialIdentityProvider, HttpAuthScheme } from "@smithy/types"; +import { STSHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +/** + * @internal + */ +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider(httpAuthSchemeProvider: STSHttpAuthSchemeProvider): void; + httpAuthSchemeProvider(): STSHttpAuthSchemeProvider; + setCredentials(credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider): void; + credentials(): AwsCredentialIdentity | AwsCredentialIdentityProvider | undefined; +} +/** + * @internal + */ +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: STSHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +/** + * @internal + */ +export declare const getHttpAuthExtensionConfiguration: (runtimeConfig: HttpAuthRuntimeConfig) => HttpAuthExtensionConfiguration; +/** + * @internal + */ +export declare const resolveHttpAuthRuntimeConfig: (config: HttpAuthExtensionConfiguration) => HttpAuthRuntimeConfig; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthSchemeProvider.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 00000000..8e39cbe0 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,85 @@ +import { AwsSdkSigV4AuthInputConfig, AwsSdkSigV4AuthResolvedConfig, AwsSdkSigV4PreviouslyResolved } from "@aws-sdk/core"; +import { Client, HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, Provider } from "@smithy/types"; +import { STSClientResolvedConfig } from "../STSClient"; +/** + * @internal + */ +export interface STSHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +/** + * @internal + */ +export interface STSHttpAuthSchemeParametersProvider extends HttpAuthSchemeParametersProvider { +} +/** + * @internal + */ +export declare const defaultSTSHttpAuthSchemeParametersProvider: (config: STSClientResolvedConfig, context: HandlerExecutionContext, input: object) => Promise; +/** + * 
@internal + */ +export interface STSHttpAuthSchemeProvider extends HttpAuthSchemeProvider { +} +/** + * @internal + */ +export declare const defaultSTSHttpAuthSchemeProvider: STSHttpAuthSchemeProvider; +export interface StsAuthInputConfig { +} +export interface StsAuthResolvedConfig { + /** + * Reference to STSClient class constructor. + * @internal + */ + stsClientCtor: new (clientConfig: any) => Client; +} +export declare const resolveStsAuthConfig: (input: T & StsAuthInputConfig) => T & StsAuthResolvedConfig; +/** + * @internal + */ +export interface HttpAuthSchemeInputConfig extends StsAuthInputConfig, AwsSdkSigV4AuthInputConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + authSchemePreference?: string[] | Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + httpAuthSchemes?: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. + * @internal + */ + httpAuthSchemeProvider?: STSHttpAuthSchemeProvider; +} +/** + * @internal + */ +export interface HttpAuthSchemeResolvedConfig extends StsAuthResolvedConfig, AwsSdkSigV4AuthResolvedConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + readonly authSchemePreference: Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + readonly httpAuthSchemes: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. + * @internal + */ + readonly httpAuthSchemeProvider: STSHttpAuthSchemeProvider; +} +/** + * @internal + */ +export declare const resolveHttpAuthSchemeConfig: (config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved) => T & HttpAuthSchemeResolvedConfig; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleCommand.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleCommand.d.ts new file mode 100644 index 00000000..f9e6ccd3 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleCommand.d.ts @@ -0,0 +1,269 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { AssumeRoleRequest, AssumeRoleResponse } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, STSClientResolvedConfig } from "../STSClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link AssumeRoleCommand}. + */ +export interface AssumeRoleCommandInput extends AssumeRoleRequest { +} +/** + * @public + * + * The output of {@link AssumeRoleCommand}. 
+ */ +export interface AssumeRoleCommandOutput extends AssumeRoleResponse, __MetadataBearer { +} +declare const AssumeRoleCommand_base: { + new (input: AssumeRoleCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: AssumeRoleCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *
+ * Returns a set of temporary security credentials that you can use to access Amazon Web Services
+ * resources. These temporary credentials consist of an access key ID, a secret access key,
+ * and a security token. Typically, you use AssumeRole within your account or for
+ * cross-account access. For a comparison of AssumeRole with other API operations
+ * that produce temporary credentials, see Requesting Temporary Security
+ * Credentials and Compare STS
+ * credentials in the IAM User Guide.
+ *
+ * Permissions
+ *
+ * The temporary security credentials created by AssumeRole can be used to
+ * make API calls to any Amazon Web Services service with the following exception: You cannot call the
+ * Amazon Web Services STS GetFederationToken or GetSessionToken API
+ * operations.
+ *
+ * (Optional) You can pass inline or managed session policies to this operation. You can
+ * pass a single JSON policy document to use as an inline session policy. You can also specify
+ * up to 10 managed policy Amazon Resource Names (ARNs) to use as managed session policies.
+ * The plaintext that you use for both inline and managed session policies can't exceed 2,048
+ * characters. Passing policies to this operation returns new
+ * temporary credentials. The resulting session's permissions are the intersection of the
+ * role's identity-based policy and the session policies. You can use the role's temporary
+ * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns
+ * the role. You cannot use session policies to grant more permissions than those allowed
+ * by the identity-based policy of the role that is being assumed. For more information, see
+ * Session
+ * Policies in the IAM User Guide.
+ *
+ * When you create a role, you create two policies: a role trust policy that specifies
+ * who can assume the role, and a permissions policy that specifies
+ * what can be done with the role. You specify the trusted principal
+ * that is allowed to assume the role in the role trust policy.
+ *
+ * To assume a role from a different account, your Amazon Web Services account must be trusted by the
+ * role. The trust relationship is defined in the role's trust policy when the role is
+ * created. That trust policy states which accounts are allowed to delegate that access to
+ * users in the account.
+ *
+ * A user who wants to access a role in a different account must also have permissions that
+ * are delegated from the account administrator. The administrator must attach a policy that
+ * allows the user to call AssumeRole for the ARN of the role in the other
+ * account.
+ *
+ * To allow a user to assume a role in the same account, you can do either of the
+ * following:
+ *
+ *   • Attach a policy to the user that allows the user to call AssumeRole
+ *     (as long as the role's trust policy trusts the account).
+ *   • Add the user as a principal directly in the role's trust policy.
+ *
+ * You can do either because the role’s trust policy acts as an IAM resource-based
+ * policy. When a resource-based policy grants access to a principal in the same account, no
+ * additional identity-based policy is required. For more information about trust policies and
+ * resource-based policies, see IAM Policies in the
+ * IAM User Guide.
+ *
+ * Tags
+ *
+ * (Optional) You can pass tag key-value pairs to your session. These tags are called
+ * session tags. For more information about session tags, see Passing Session Tags in STS in the
+ * IAM User Guide.
+ *
+ * An administrator must grant you the permissions necessary to pass session tags. The
+ * administrator can also create granular permissions to allow you to pass only specific
+ * session tags. For more information, see Tutorial: Using Tags
+ * for Attribute-Based Access Control in the
+ * IAM User Guide.
+ *
+ * You can set the session tags as transitive. Transitive tags persist during role
+ * chaining. For more information, see Chaining Roles
+ * with Session Tags in the IAM User Guide.
+ *
+ * Using MFA with AssumeRole
+ *
+ * (Optional) You can include multi-factor authentication (MFA) information when you call
+ * AssumeRole. This is useful for cross-account scenarios to ensure that the
+ * user that assumes the role has been authenticated with an Amazon Web Services MFA device. In that
+ * scenario, the trust policy of the role being assumed includes a condition that tests for
+ * MFA authentication. If the caller does not include valid MFA information, the request to
+ * assume the role is denied. The condition in a trust policy that tests for MFA
+ * authentication might look like the following example.
+ *
+ * "Condition": \{"Bool": \{"aws:MultiFactorAuthPresent": true\}\}
+ *
+ * For more information, see Configuring MFA-Protected API Access
+ * in the IAM User Guide.
+ *
+ * To use MFA with AssumeRole, you pass values for the
+ * SerialNumber and TokenCode parameters. The
+ * SerialNumber value identifies the user's hardware or virtual MFA device.
+ * The TokenCode is the time-based one-time password (TOTP) that the MFA device
+ * produces.
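+ *
+ * A hedged sketch of the MFA flow described above (the ARNs and token value are
+ * placeholders, not real identifiers):
+ * ```typescript
+ * import { STSClient, AssumeRoleCommand } from "@aws-sdk/client-sts";
+ * const sts = new STSClient({ region: "us-east-1" });
+ * const response = await sts.send(new AssumeRoleCommand({
+ *   RoleArn: "arn:aws:iam::111122223333:role/example-role",
+ *   RoleSessionName: "mfa-protected-session",
+ *   SerialNumber: "arn:aws:iam::111122223333:mfa/example-user", // virtual MFA device ARN
+ *   TokenCode: "123456", // six-digit TOTP from the MFA device
+ * }));
+ * ```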
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { STSClient, AssumeRoleCommand } from "@aws-sdk/client-sts"; // ES Modules import + * // const { STSClient, AssumeRoleCommand } = require("@aws-sdk/client-sts"); // CommonJS import + * const client = new STSClient(config); + * const input = { // AssumeRoleRequest + * RoleArn: "STRING_VALUE", // required + * RoleSessionName: "STRING_VALUE", // required + * PolicyArns: [ // policyDescriptorListType + * { // PolicyDescriptorType + * arn: "STRING_VALUE", + * }, + * ], + * Policy: "STRING_VALUE", + * DurationSeconds: Number("int"), + * Tags: [ // tagListType + * { // Tag + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * ], + * TransitiveTagKeys: [ // tagKeyListType + * "STRING_VALUE", + * ], + * ExternalId: "STRING_VALUE", + * SerialNumber: "STRING_VALUE", + * TokenCode: "STRING_VALUE", + * SourceIdentity: "STRING_VALUE", + * ProvidedContexts: [ // ProvidedContextsListType + * { // ProvidedContext + * ProviderArn: "STRING_VALUE", + * ContextAssertion: "STRING_VALUE", + * }, + * ], + * }; + * const command = new AssumeRoleCommand(input); + * const response = await client.send(command); + * // { // AssumeRoleResponse + * // Credentials: { // Credentials + * // AccessKeyId: "STRING_VALUE", // required + * // SecretAccessKey: "STRING_VALUE", // required + * // SessionToken: "STRING_VALUE", // required + * // Expiration: new Date("TIMESTAMP"), // required + * // }, + * // AssumedRoleUser: { // AssumedRoleUser + * // AssumedRoleId: "STRING_VALUE", // required + * // Arn: "STRING_VALUE", // required + * // }, + * // PackedPolicySize: Number("int"), + * // SourceIdentity: "STRING_VALUE", + * // }; + * + * ``` + * + * @param AssumeRoleCommandInput - {@link AssumeRoleCommandInput} + * @returns {@link AssumeRoleCommandOutput} + * @see {@link AssumeRoleCommandInput} for command's `input` shape. + * @see {@link AssumeRoleCommandOutput} for command's `response` shape. + * @see {@link STSClientResolvedConfig | config} for STSClient's `config` shape. + * + * @throws {@link ExpiredTokenException} (client fault) + *
+ * The web identity token that was passed is expired or is not valid. Get a new identity
+ * token from the identity provider and then retry the request.
+ *
+ * @throws {@link MalformedPolicyDocumentException} (client fault)
+ * The request was rejected because the policy document was malformed. The error message
+ * describes the specific error.
+ *
+ * @throws {@link PackedPolicyTooLargeException} (client fault)
+ * The request was rejected because the total packed size of the session policies and
+ * session tags combined was too large. An Amazon Web Services conversion compresses the session policy
+ * document, session policy ARNs, and session tags into a packed binary format that has a
+ * separate limit. The error message indicates by percentage how close the policies and
+ * tags are to the upper size limit. For more information, see Passing Session Tags in STS in
+ * the IAM User Guide.
+ *
+ * You could receive this error even though you meet other defined session policy and
+ * session tag limits. For more information, see IAM and STS Entity Character Limits in the IAM User
+ * Guide.
+ *
+ * @throws {@link RegionDisabledException} (client fault)
+ * STS is not activated in the requested region for the account that is being asked to
+ * generate credentials. The account administrator must use the IAM console to activate
+ * STS in that region. For more information, see Activating and
+ * Deactivating STS in an Amazon Web Services Region in the IAM User
+ * Guide.
+ *
+ * @throws {@link STSServiceException}
+ * Base exception class for all service exceptions from STS service.
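+ *
+ * A sketch of distinguishing these faults by exception class (the classes are exported
+ * from the models below; the handling policy itself is an assumption):
+ * ```typescript
+ * import {
+ *   STSClient, AssumeRoleCommand,
+ *   MalformedPolicyDocumentException, RegionDisabledException,
+ * } from "@aws-sdk/client-sts";
+ * const client = new STSClient({ region: "us-east-1" });
+ * const command = new AssumeRoleCommand({
+ *   RoleArn: "arn:aws:iam::111122223333:role/example-role", // placeholder
+ *   RoleSessionName: "example-session",
+ * });
+ * try {
+ *   await client.send(command);
+ * } catch (err) {
+ *   if (err instanceof MalformedPolicyDocumentException) {
+ *     // repair the session policy JSON before retrying
+ *   } else if (err instanceof RegionDisabledException) {
+ *     // an administrator must activate STS in the target Region
+ *   } else {
+ *     throw err;
+ *   }
+ * }
+ * ```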
+ * + * + * @example To assume a role + * ```javascript + * // + * const input = { + * ExternalId: "123ABC", + * Policy: `{"Version":"2012-10-17","Statement":[{"Sid":"Stmt1","Effect":"Allow","Action":"s3:ListAllMyBuckets","Resource":"*"}]}`, + * RoleArn: "arn:aws:iam::123456789012:role/demo", + * RoleSessionName: "testAssumeRoleSession", + * Tags: [ + * { + * Key: "Project", + * Value: "Unicorn" + * }, + * { + * Key: "Team", + * Value: "Automation" + * }, + * { + * Key: "Cost-Center", + * Value: "12345" + * } + * ], + * TransitiveTagKeys: [ + * "Project", + * "Cost-Center" + * ] + * }; + * const command = new AssumeRoleCommand(input); + * const response = await client.send(command); + * /* response is + * { + * AssumedRoleUser: { + * Arn: "arn:aws:sts::123456789012:assumed-role/demo/Bob", + * AssumedRoleId: "ARO123EXAMPLE123:Bob" + * }, + * Credentials: { + * AccessKeyId: "AKIAIOSFODNN7EXAMPLE", + * Expiration: "2011-07-15T23:28:33.359Z", + * SecretAccessKey: "wJalrXUtnFEMI/K7MDENG/bPxRfiCYzEXAMPLEKEY", + * SessionToken: "AQoDYXdzEPT//////////wEXAMPLEtc764bNrC9SAPBSM22wDOk4x4HIZ8j4FZTwdQWLWsKWHGBuFqwAeMicRXmxfpSPfIeoIYRqTflfKD8YUuwthAx7mSEI/qkPpKPi/kMcGdQrmGdeehM4IC1NtBmUpp2wUE8phUZampKsburEDy0KPkyQDYwT7WZ0wq5VSXDvp75YU9HFvlRd8Tx6q6fE8YQcHNVXAkiY9q6d+xo0rKwT38xVqr7ZD0u0iPPkUL64lIZbqBAz+scqKmlzm8FDrypNC9Yjc8fPOLn9FX9KSYvKTr4rvx3iSIlTJabIQwj2ICCR/oLxBA==" + * }, + * PackedPolicySize: 8 + * } + * *\/ + * ``` + * + * @public + */ +export declare class AssumeRoleCommand extends AssumeRoleCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: AssumeRoleRequest; + output: AssumeRoleResponse; + }; + sdk: { + input: AssumeRoleCommandInput; + output: AssumeRoleCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts new file mode 100644 index 00000000..58d7df8a --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts @@ -0,0 +1,288 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { AssumeRoleWithWebIdentityRequest, AssumeRoleWithWebIdentityResponse } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, STSClientResolvedConfig } from "../STSClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link AssumeRoleWithWebIdentityCommand}. + */ +export interface AssumeRoleWithWebIdentityCommandInput extends AssumeRoleWithWebIdentityRequest { +} +/** + * @public + * + * The output of {@link AssumeRoleWithWebIdentityCommand}. + */ +export interface AssumeRoleWithWebIdentityCommandOutput extends AssumeRoleWithWebIdentityResponse, __MetadataBearer { +} +declare const AssumeRoleWithWebIdentityCommand_base: { + new (input: AssumeRoleWithWebIdentityCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: AssumeRoleWithWebIdentityCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *
+ * Returns a set of temporary security credentials for users who have been authenticated in
+ * a mobile or web application with a web identity provider. Example providers include the
+ * OAuth 2.0 providers Login with Amazon and Facebook, or any OpenID Connect-compatible
+ * identity provider such as Google or Amazon Cognito federated identities.
+ *
+ * For mobile applications, we recommend that you use Amazon Cognito. You can use Amazon Cognito with the
+ * Amazon Web Services SDK for iOS Developer Guide and the Amazon Web Services SDK for Android Developer Guide to uniquely
+ * identify a user. You can also supply the user with a consistent identity throughout the
+ * lifetime of an application.
+ *
+ * To learn more about Amazon Cognito, see Amazon Cognito identity
+ * pools in Amazon Cognito Developer Guide.
+ *
+ * Calling AssumeRoleWithWebIdentity does not require the use of Amazon Web Services
+ * security credentials. Therefore, you can distribute an application (for example, on mobile
+ * devices) that requests temporary security credentials without including long-term Amazon Web Services
+ * credentials in the application. You also don't need to deploy server-based proxy services
+ * that use long-term Amazon Web Services credentials. Instead, the identity of the caller is validated by
+ * using a token from the web identity provider. For a comparison of
+ * AssumeRoleWithWebIdentity with the other API operations that produce
+ * temporary credentials, see Requesting Temporary Security
+ * Credentials and Compare STS
+ * credentials in the IAM User Guide.
+ *
+ * The temporary security credentials returned by this API consist of an access key ID, a
+ * secret access key, and a security token. Applications can use these temporary security
+ * credentials to sign calls to Amazon Web Services service API operations.
+ *
+ * Session Duration
+ *
+ * By default, the temporary security credentials created by
+ * AssumeRoleWithWebIdentity last for one hour. However, you can use the
+ * optional DurationSeconds parameter to specify the duration of your session.
+ * You can provide a value from 900 seconds (15 minutes) up to the maximum session duration
+ * setting for the role. This setting can have a value from 1 hour to 12 hours. To learn how
+ * to view the maximum value for your role, see Update the maximum session duration for a role in the
+ * IAM User Guide. The maximum session duration limit applies when
+ * you use the AssumeRole* API operations or the assume-role* CLI
+ * commands. However, the limit does not apply when you use those operations to create a
+ * console URL. For more information, see Using IAM Roles in the
+ * IAM User Guide.
+ *
+ * Permissions
+ *
+ * The temporary security credentials created by AssumeRoleWithWebIdentity can
+ * be used to make API calls to any Amazon Web Services service with the following exception: you cannot
+ * call the STS GetFederationToken or GetSessionToken API
+ * operations.
+ *
+ * (Optional) You can pass inline or managed session policies to
+ * this operation. You can pass a single JSON policy document to use as an inline session
+ * policy. You can also specify up to 10 managed policy Amazon Resource Names (ARNs) to use as
+ * managed session policies. The plaintext that you use for both inline and managed session
+ * policies can't exceed 2,048 characters. Passing policies to this operation returns new
+ * temporary credentials. The resulting session's permissions are the intersection of the
+ * role's identity-based policy and the session policies. You can use the role's temporary
+ * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns
+ * the role. You cannot use session policies to grant more permissions than those allowed
+ * by the identity-based policy of the role that is being assumed. For more information, see
+ * Session
+ * Policies in the IAM User Guide.
+ *
+ * Tags
+ *
+ * (Optional) You can configure your IdP to pass attributes into your web identity token as
+ * session tags. Each session tag consists of a key name and an associated value. For more
+ * information about session tags, see Passing Session Tags in STS in the
+ * IAM User Guide.
+ *
+ * You can pass up to 50 session tags. The plaintext session tag keys can’t exceed 128
+ * characters and the values can’t exceed 256 characters. For these and additional limits, see
+ * IAM
+ * and STS Character Limits in the IAM User Guide.
+ *
+ * An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs,
+ * and session tags into a packed binary format that has a separate limit. Your request can
+ * fail for this limit even if your plaintext meets the other requirements. The
+ * PackedPolicySize response element indicates by percentage how close the
+ * policies and tags for your request are to the upper size limit.
+ *
+ * You can pass a session tag with the same key as a tag that is attached to the role. When
+ * you do, the session tag overrides the role tag with the same key.
+ *
+ * An administrator must grant you the permissions necessary to pass session tags. The
+ * administrator can also create granular permissions to allow you to pass only specific
+ * session tags. For more information, see Tutorial: Using Tags
+ * for Attribute-Based Access Control in the
+ * IAM User Guide.
+ *
+ * You can set the session tags as transitive. Transitive tags persist during role
+ * chaining. For more information, see Chaining Roles
+ * with Session Tags in the IAM User Guide.
+ *
+ * Identities
+ *
+ * Before your application can call AssumeRoleWithWebIdentity, you must have
+ * an identity token from a supported identity provider and create a role that the application
+ * can assume. The role that your application assumes must trust the identity provider that is
+ * associated with the identity token. In other words, the identity provider must be specified
+ * in the role's trust policy.
+ *
+ * Calling AssumeRoleWithWebIdentity can result in an entry in your
+ * CloudTrail logs. The entry includes the Subject of
+ * the provided web identity token. We recommend that you avoid using any personally
+ * identifiable information (PII) in this field. For example, you could instead use a GUID
+ * or a pairwise identifier, as suggested
+ * in the OIDC specification.
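+ *
+ * Because this call is unsigned, a client without configured long-term credentials can
+ * invoke it; a hedged sketch (the role ARN is a placeholder and the token is assumed to
+ * come from your OIDC provider):
+ * ```typescript
+ * import { STSClient, AssumeRoleWithWebIdentityCommand } from "@aws-sdk/client-sts";
+ * declare const idToken: string; // assumption: JWT issued by the identity provider
+ * const sts = new STSClient({ region: "us-east-1" }); // no long-term credentials required
+ * const { Credentials } = await sts.send(new AssumeRoleWithWebIdentityCommand({
+ *   RoleArn: "arn:aws:iam::111122223333:role/web-identity-role", // placeholder
+ *   RoleSessionName: "web-session",
+ *   WebIdentityToken: idToken,
+ * }));
+ * ```
+ *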
+ * For more information about how to use OIDC federation and the
+ * AssumeRoleWithWebIdentity API, see the following resources:
+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { STSClient, AssumeRoleWithWebIdentityCommand } from "@aws-sdk/client-sts"; // ES Modules import + * // const { STSClient, AssumeRoleWithWebIdentityCommand } = require("@aws-sdk/client-sts"); // CommonJS import + * const client = new STSClient(config); + * const input = { // AssumeRoleWithWebIdentityRequest + * RoleArn: "STRING_VALUE", // required + * RoleSessionName: "STRING_VALUE", // required + * WebIdentityToken: "STRING_VALUE", // required + * ProviderId: "STRING_VALUE", + * PolicyArns: [ // policyDescriptorListType + * { // PolicyDescriptorType + * arn: "STRING_VALUE", + * }, + * ], + * Policy: "STRING_VALUE", + * DurationSeconds: Number("int"), + * }; + * const command = new AssumeRoleWithWebIdentityCommand(input); + * const response = await client.send(command); + * // { // AssumeRoleWithWebIdentityResponse + * // Credentials: { // Credentials + * // AccessKeyId: "STRING_VALUE", // required + * // SecretAccessKey: "STRING_VALUE", // required + * // SessionToken: "STRING_VALUE", // required + * // Expiration: new Date("TIMESTAMP"), // required + * // }, + * // SubjectFromWebIdentityToken: "STRING_VALUE", + * // AssumedRoleUser: { // AssumedRoleUser + * // AssumedRoleId: "STRING_VALUE", // required + * // Arn: "STRING_VALUE", // required + * // }, + * // PackedPolicySize: Number("int"), + * // Provider: "STRING_VALUE", + * // Audience: "STRING_VALUE", + * // SourceIdentity: "STRING_VALUE", + * // }; + * + * ``` + * + * @param AssumeRoleWithWebIdentityCommandInput - {@link AssumeRoleWithWebIdentityCommandInput} + * @returns {@link AssumeRoleWithWebIdentityCommandOutput} + * @see {@link AssumeRoleWithWebIdentityCommandInput} for command's `input` shape. + * @see {@link AssumeRoleWithWebIdentityCommandOutput} for command's `response` shape. + * @see {@link STSClientResolvedConfig | config} for STSClient's `config` shape. + * + * @throws {@link ExpiredTokenException} (client fault) + *
+ * The web identity token that was passed is expired or is not valid. Get a new identity
+ * token from the identity provider and then retry the request.
+ *
+ * @throws {@link IDPCommunicationErrorException} (client fault)
+ * The request could not be fulfilled because the identity provider (IDP) that was asked
+ * to verify the incoming identity token could not be reached. This is often a transient
+ * error caused by network conditions. Retry the request a limited number of times so that
+ * you don't exceed the request rate. If the error persists, the identity provider might be
+ * down or not responding.
+ *
+ * @throws {@link IDPRejectedClaimException} (client fault)
+ * The identity provider (IdP) reported that authentication failed. This might be because
+ * the claim is invalid.
+ *
+ * If this error is returned for the AssumeRoleWithWebIdentity operation, it
+ * can also mean that the claim has expired or has been explicitly revoked.
+ *
+ * @throws {@link InvalidIdentityTokenException} (client fault)
+ * The web identity token that was passed could not be validated by Amazon Web Services. Get a new
+ * identity token from the identity provider and then retry the request.
+ *
+ * @throws {@link MalformedPolicyDocumentException} (client fault)
+ * The request was rejected because the policy document was malformed. The error message
+ * describes the specific error.
+ *
+ * @throws {@link PackedPolicyTooLargeException} (client fault)
+ * The request was rejected because the total packed size of the session policies and
+ * session tags combined was too large. An Amazon Web Services conversion compresses the session policy
+ * document, session policy ARNs, and session tags into a packed binary format that has a
+ * separate limit. The error message indicates by percentage how close the policies and
+ * tags are to the upper size limit. For more information, see Passing Session Tags in STS in
+ * the IAM User Guide.
+ *
+ * You could receive this error even though you meet other defined session policy and
+ * session tag limits. For more information, see IAM and STS Entity Character Limits in the IAM User
+ * Guide.
+ *
+ * @throws {@link RegionDisabledException} (client fault)
+ * STS is not activated in the requested region for the account that is being asked to
+ * generate credentials. The account administrator must use the IAM console to activate
+ * STS in that region. For more information, see Activating and
+ * Deactivating STS in an Amazon Web Services Region in the IAM User
+ * Guide.
+ *
+ * @throws {@link STSServiceException}
+ * Base exception class for all service exceptions from STS service.
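+ *
+ * The IDPCommunicationErrorException guidance above suggests a bounded retry; a sketch
+ * (the attempt count and backoff are assumptions, not SDK defaults):
+ * ```typescript
+ * import { STSClient, AssumeRoleWithWebIdentityCommand, IDPCommunicationErrorException } from "@aws-sdk/client-sts";
+ * async function sendWithRetry(client: STSClient, command: AssumeRoleWithWebIdentityCommand, attempts = 3) {
+ *   for (let i = 0; i < attempts; i++) {
+ *     try {
+ *       return await client.send(command);
+ *     } catch (err) {
+ *       if (!(err instanceof IDPCommunicationErrorException) || i === attempts - 1) throw err;
+ *       await new Promise((r) => setTimeout(r, 2 ** i * 200)); // simple exponential backoff
+ *     }
+ *   }
+ * }
+ * ```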
+ * + * + * @example To assume a role as an OpenID Connect-federated user + * ```javascript + * // + * const input = { + * DurationSeconds: 3600, + * Policy: `{"Version":"2012-10-17","Statement":[{"Sid":"Stmt1","Effect":"Allow","Action":"s3:ListAllMyBuckets","Resource":"*"}]}`, + * ProviderId: "www.amazon.com", + * RoleArn: "arn:aws:iam::123456789012:role/FederatedWebIdentityRole", + * RoleSessionName: "app1", + * WebIdentityToken: "Atza%7CIQEBLjAsAhRFiXuWpUXuRvQ9PZL3GMFcYevydwIUFAHZwXZXXXXXXXXJnrulxKDHwy87oGKPznh0D6bEQZTSCzyoCtL_8S07pLpr0zMbn6w1lfVZKNTBdDansFBmtGnIsIapjI6xKR02Yc_2bQ8LZbUXSGm6Ry6_BG7PrtLZtj_dfCTj92xNGed-CrKqjG7nPBjNIL016GGvuS5gSvPRUxWES3VYfm1wl7WTI7jn-Pcb6M-buCgHhFOzTQxod27L9CqnOLio7N3gZAGpsp6n1-AJBOCJckcyXe2c6uD0srOJeZlKUm2eTDVMf8IehDVI0r1QOnTV6KzzAI3OY87Vd_cVMQ" + * }; + * const command = new AssumeRoleWithWebIdentityCommand(input); + * const response = await client.send(command); + * /* response is + * { + * AssumedRoleUser: { + * Arn: "arn:aws:sts::123456789012:assumed-role/FederatedWebIdentityRole/app1", + * AssumedRoleId: "AROACLKWSDQRAOEXAMPLE:app1" + * }, + * Audience: "client.5498841531868486423.1548@apps.example.com", + * Credentials: { + * AccessKeyId: "AKIAIOSFODNN7EXAMPLE", + * Expiration: "2014-10-24T23:00:23Z", + * SecretAccessKey: "wJalrXUtnFEMI/K7MDENG/bPxRfiCYzEXAMPLEKEY", + * SessionToken: "AQoDYXdzEE0a8ANXXXXXXXXNO1ewxE5TijQyp+IEXAMPLE" + * }, + * PackedPolicySize: 123, + * Provider: "www.amazon.com", + * SubjectFromWebIdentityToken: "amzn1.account.AF6RHO7KZU5XRVQJGXK6HEXAMPLE" + * } + * *\/ + * ``` + * + * @public + */ +export declare class AssumeRoleWithWebIdentityCommand extends AssumeRoleWithWebIdentityCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: AssumeRoleWithWebIdentityRequest; + output: AssumeRoleWithWebIdentityResponse; + }; + sdk: { + input: AssumeRoleWithWebIdentityCommandInput; + output: AssumeRoleWithWebIdentityCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/index.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/index.d.ts new file mode 100644 index 00000000..0f200f52 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/index.d.ts @@ -0,0 +1,2 @@ +export * from "./AssumeRoleCommand"; +export * from "./AssumeRoleWithWebIdentityCommand"; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultRoleAssumers.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultRoleAssumers.d.ts new file mode 100644 index 00000000..0e25207e --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultRoleAssumers.d.ts @@ -0,0 +1,23 @@ +import { Pluggable } from "@smithy/types"; +import { DefaultCredentialProvider, RoleAssumer, RoleAssumerWithWebIdentity, STSRoleAssumerOptions } from "./defaultStsRoleAssumers"; +import { ServiceInputTypes, ServiceOutputTypes } from "./STSClient"; +/** + * The default role assumer that used by credential providers when sts:AssumeRole API is needed. + */ +export declare const getDefaultRoleAssumer: (stsOptions?: STSRoleAssumerOptions, stsPlugins?: Pluggable[]) => RoleAssumer; +/** + * The default role assumer that used by credential providers when sts:AssumeRoleWithWebIdentity API is needed. 
+ */ +export declare const getDefaultRoleAssumerWithWebIdentity: (stsOptions?: STSRoleAssumerOptions, stsPlugins?: Pluggable[]) => RoleAssumerWithWebIdentity; +/** + * The default credential providers depend STS client to assume role with desired API: sts:assumeRole, + * sts:assumeRoleWithWebIdentity, etc. This function decorates the default credential provider with role assumers which + * encapsulates the process of calling STS commands. This can only be imported by AWS client packages to avoid circular + * dependencies. + * + * @internal + * + * @deprecated this is no longer needed. Use the defaultProvider directly, + * which will load STS if needed. + */ +export declare const decorateDefaultCredentialProvider: (provider: DefaultCredentialProvider) => DefaultCredentialProvider; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultStsRoleAssumers.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultStsRoleAssumers.d.ts new file mode 100644 index 00000000..c4ba0c47 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultStsRoleAssumers.d.ts @@ -0,0 +1,43 @@ +import type { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentity, Logger, Provider } from "@smithy/types"; +import { AssumeRoleCommandInput } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommandInput } from "./commands/AssumeRoleWithWebIdentityCommand"; +import type { STSClient, STSClientConfig } from "./STSClient"; +/** + * @public + */ +export type STSRoleAssumerOptions = Pick & { + credentialProviderLogger?: Logger; + parentClientConfig?: CredentialProviderOptions["parentClientConfig"]; +}; +/** + * @internal + */ +export type RoleAssumer = (sourceCreds: AwsCredentialIdentity, params: AssumeRoleCommandInput) => Promise; +/** + * The default role assumer that used by credential providers when sts:AssumeRole API is needed. + * @internal + */ +export declare const getDefaultRoleAssumer: (stsOptions: STSRoleAssumerOptions, STSClient: new (options: STSClientConfig) => STSClient) => RoleAssumer; +/** + * @internal + */ +export type RoleAssumerWithWebIdentity = (params: AssumeRoleWithWebIdentityCommandInput) => Promise; +/** + * The default role assumer that used by credential providers when sts:AssumeRoleWithWebIdentity API is needed. + * @internal + */ +export declare const getDefaultRoleAssumerWithWebIdentity: (stsOptions: STSRoleAssumerOptions, STSClient: new (options: STSClientConfig) => STSClient) => RoleAssumerWithWebIdentity; +/** + * @internal + */ +export type DefaultCredentialProvider = (input: any) => Provider; +/** + * The default credential providers depend STS client to assume role with desired API: sts:assumeRole, + * sts:assumeRoleWithWebIdentity, etc. This function decorates the default credential provider with role assumers which + * encapsulates the process of calling STS commands. This can only be imported by AWS client packages to avoid circular + * dependencies. 
+ * + * @internal + */ +export declare const decorateDefaultCredentialProvider: (provider: DefaultCredentialProvider) => DefaultCredentialProvider; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/EndpointParameters.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/EndpointParameters.d.ts new file mode 100644 index 00000000..39f6c7ef --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/EndpointParameters.d.ts @@ -0,0 +1,46 @@ +import { Endpoint, EndpointParameters as __EndpointParameters, EndpointV2, Provider } from "@smithy/types"; +/** + * @public + */ +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: string | Provider | Endpoint | Provider | EndpointV2 | Provider; + useGlobalEndpoint?: boolean | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: (options: T & ClientInputEndpointParameters) => T & ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const commonParams: { + readonly UseGlobalEndpoint: { + readonly type: "builtInParams"; + readonly name: "useGlobalEndpoint"; + }; + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; + UseGlobalEndpoint?: boolean; +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/endpointResolver.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/endpointResolver.d.ts new file mode 100644 index 00000000..70a8eaec --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/endpointResolver.d.ts @@ -0,0 +1,5 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: (endpointParams: EndpointParameters, context?: { + logger?: Logger; +}) => EndpointV2; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/ruleset.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/ruleset.d.ts new file mode 100644 index 00000000..4b238994 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/extensionConfiguration.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/extensionConfiguration.d.ts new file mode 100644 index 00000000..970e12bd --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; 
+import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +/** + * @internal + */ +export interface STSExtensionConfiguration extends HttpHandlerExtensionConfiguration, DefaultExtensionConfiguration, AwsRegionExtensionConfiguration, HttpAuthExtensionConfiguration { +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/index.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/index.d.ts new file mode 100644 index 00000000..98b87b4d --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/index.d.ts @@ -0,0 +1,17 @@ +/** + * Security Token Service + *
+ * Security Token Service (STS) enables you to request temporary, limited-privilege
+ * credentials for users. This guide provides descriptions of the STS API. For
+ * more information about using this service, see Temporary Security Credentials.
+ * + * @packageDocumentation + */ +export * from "./STSClient"; +export * from "./STS"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export type { RuntimeExtension } from "./runtimeExtensions"; +export type { STSExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./models"; +export * from "./defaultRoleAssumers"; +export { STSServiceException } from "./models/STSServiceException"; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/STSServiceException.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/STSServiceException.d.ts new file mode 100644 index 00000000..fd1a9a2c --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/STSServiceException.d.ts @@ -0,0 +1,14 @@ +import { ServiceException as __ServiceException, ServiceExceptionOptions as __ServiceExceptionOptions } from "@smithy/smithy-client"; +export type { __ServiceExceptionOptions }; +export { __ServiceException }; +/** + * @public + * + * Base exception class for all service exceptions from STS service. + */ +export declare class STSServiceException extends __ServiceException { + /** + * @internal + */ + constructor(options: __ServiceExceptionOptions); +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/index.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/index.d.ts new file mode 100644 index 00000000..09c5d6e0 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/models_0.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/models_0.d.ts new file mode 100644 index 00000000..5b58b938 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/models_0.d.ts @@ -0,0 +1,712 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { STSServiceException as __BaseException } from "./STSServiceException"; +/** + *
+ * The identifiers for the temporary security credentials that the operation
+ * returns.
+ * @public + */ +export interface AssumedRoleUser { + /** + *
+ * A unique identifier that contains the role ID and the role session name of the role that
+ * is being assumed. The role ID is generated by Amazon Web Services when the role is created.
+ * @public + */ + AssumedRoleId: string | undefined; + /** + *
+ * The ARN of the temporary security credentials that are returned from the AssumeRole action. For more information about ARNs and how to use them in
+ * policies, see IAM Identifiers in the
+ * IAM User Guide.
+ * @public + */ + Arn: string | undefined; +} +/** + *
+ * A reference to the IAM managed policy that is passed as a session policy for a role
+ * session or a federated user session.
+ * @public + */ +export interface PolicyDescriptorType { + /** + *
+ * The Amazon Resource Name (ARN) of the IAM managed policy to use as a session policy
+ * for the role. For more information about ARNs, see Amazon Resource Names (ARNs) and Amazon Web Services
+ * Service Namespaces in the Amazon Web Services General Reference.
+ * @public + */ + arn?: string | undefined; +} +/** + *
+ * Contains information about the provided context. This includes the signed and encrypted
+ * trusted context assertion and the context provider ARN from which the trusted context
+ * assertion was generated.
+ * @public + */ +export interface ProvidedContext { + /** + *
+ * The context provider ARN from which the trusted context assertion was generated.
+ * @public + */ + ProviderArn?: string | undefined; + /** + *
+ * The signed and encrypted trusted context assertion generated by the context provider.
+ * The trusted context assertion is signed and encrypted by Amazon Web Services STS.
+ * @public + */ + ContextAssertion?: string | undefined; +} +/** + *
+ * You can pass custom key-value pair attributes when you assume a role or federate a user.
+ * These are called session tags. You can then use the session tags to control access to
+ * resources. For more information, see Tagging Amazon Web Services STS Sessions in the
+ * IAM User Guide.
+ * @public + */ +export interface Tag { + /** + *
+ * The key for a session tag.
+ *
+ * You can pass up to 50 session tags. The plain text session tag keys can’t exceed 128
+ * characters. For these and additional limits, see IAM
+ * and STS Character Limits in the IAM User Guide.
+ * @public + */ + Key: string | undefined; + /** + *
+ * The value for a session tag.
+ *
+ * You can pass up to 50 session tags. The plain text session tag values can’t exceed 256
+ * characters. For these and additional limits, see IAM
+ * and STS Character Limits in the IAM User Guide.
+ * @public + */ + Value: string | undefined; +} +/** + * @public + */ +export interface AssumeRoleRequest { + /** + *
+ * The Amazon Resource Name (ARN) of the role to assume.
+ * @public + */ + RoleArn: string | undefined; + /** + *
+ * An identifier for the assumed role session.
+ *
+ * Use the role session name to uniquely identify a session when the same role is assumed
+ * by different principals or for different reasons. In cross-account scenarios, the role
+ * session name is visible to, and can be logged by the account that owns the role. The role
+ * session name is also used in the ARN of the assumed role principal. This means that
+ * subsequent cross-account API requests that use the temporary security credentials will
+ * expose the role session name to the external account in their CloudTrail logs.
+ *
+ * For security purposes, administrators can view this field in CloudTrail logs to help identify who performed an action in Amazon Web Services. Your
+ * administrator might require that you specify your user name as the session name when you
+ * assume the role. For more information, see sts:RoleSessionName.
+ *
+ * The regex used to validate this parameter is a string of characters
+ * consisting of upper- and lower-case alphanumeric characters with no spaces. You can
+ * also include underscores or any of the following characters: =,.@-
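+ *
+ * For instance, a quick client-side check against the documented character set (an
+ * illustrative sketch, not an official validator; length limits are not enforced here):
+ * ```typescript
+ * const isValidRoleSessionName = (name: string) => /^[A-Za-z0-9_=,.@-]+$/.test(name);
+ * ```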
+ * @public + */ + RoleSessionName: string | undefined; + /** + *
+ * The Amazon Resource Names (ARNs) of the IAM managed policies that you want to use as
+ * managed session policies. The policies must exist in the same account as the role.
+ *
+ * This parameter is optional. You can provide up to 10 managed policy ARNs. However, the
+ * plaintext that you use for both inline and managed session policies can't exceed 2,048
+ * characters. For more information about ARNs, see Amazon Resource Names (ARNs) and Amazon Web Services
+ * Service Namespaces in the Amazon Web Services General Reference.
+ *
+ * An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs,
+ * and session tags into a packed binary format that has a separate limit. Your request can
+ * fail for this limit even if your plaintext meets the other requirements. The
+ * PackedPolicySize response element indicates by percentage how close the
+ * policies and tags for your request are to the upper size limit.
+ *
+ * Passing policies to this operation returns new
+ * temporary credentials. The resulting session's permissions are the intersection of the
+ * role's identity-based policy and the session policies. You can use the role's temporary
+ * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns
+ * the role. You cannot use session policies to grant more permissions than those allowed
+ * by the identity-based policy of the role that is being assumed. For more information, see
+ * Session
+ * Policies in the IAM User Guide.
+ * @public + */ + PolicyArns?: PolicyDescriptorType[] | undefined; + /** + *
+ * An IAM policy in JSON format that you want to use as an inline session policy.
+ *
+ * This parameter is optional. Passing policies to this operation returns new
+ * temporary credentials. The resulting session's permissions are the intersection of the
+ * role's identity-based policy and the session policies. You can use the role's temporary
+ * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns
+ * the role. You cannot use session policies to grant more permissions than those allowed
+ * by the identity-based policy of the role that is being assumed. For more information, see
+ * Session
+ * Policies in the IAM User Guide.
+ *
+ * The plaintext that you use for both inline and managed session policies can't exceed
+ * 2,048 characters. The JSON policy characters can be any ASCII character from the space
+ * character to the end of the valid character list (\u0020 through \u00FF). It can also
+ * include the tab (\u0009), linefeed (\u000A), and carriage return (\u000D)
+ * characters.
+ *
+ * An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs,
+ * and session tags into a packed binary format that has a separate limit. Your request can
+ * fail for this limit even if your plaintext meets the other requirements. The
+ * PackedPolicySize response element indicates by percentage how close the
+ * policies and tags for your request are to the upper size limit.
+ *
+ * For more information about role session permissions, see Session
+ * policies.
+ * @public + */ + Policy?: string | undefined; + /** + *
+ * The duration, in seconds, of the role session. The value specified can range from 900
+ * seconds (15 minutes) up to the maximum session duration set for the role. The maximum
+ * session duration setting can have a value from 1 hour to 12 hours. If you specify a value
+ * higher than this setting or the administrator setting (whichever is lower), the operation
+ * fails. For example, if you specify a session duration of 12 hours, but your administrator
+ * set the maximum session duration to 6 hours, your operation fails.
+ *
+ * Role chaining limits your Amazon Web Services CLI or Amazon Web Services API role session to a maximum of one hour.
+ * When you use the AssumeRole API operation to assume a role, you can specify
+ * the duration of your role session with the DurationSeconds parameter. You can
+ * specify a parameter value of up to 43200 seconds (12 hours), depending on the maximum
+ * session duration setting for your role. However, if you assume a role using role chaining
+ * and provide a DurationSeconds parameter value greater than one hour, the
+ * operation fails. To learn how to view the maximum value for your role, see Update the maximum session duration for a role.
+ *
+ * By default, the value is set to 3600 seconds.
+ *
+ * The DurationSeconds parameter is separate from the duration of a console
+ * session that you might request using the returned credentials. The request to the
+ * federation endpoint for a console sign-in token takes a SessionDuration
+ * parameter that specifies the maximum length of the console session. For more
+ * information, see Creating a URL
+ * that Enables Federated Users to Access the Amazon Web Services Management Console in the
+ * IAM User Guide.
+ * @public + */ + DurationSeconds?: number | undefined; + /** + *
+ * A list of session tags that you want to pass. Each session tag consists of a key name
+ * and an associated value. For more information about session tags, see Tagging Amazon Web Services STS
+ * Sessions in the IAM User Guide.
+ *
+ * This parameter is optional. You can pass up to 50 session tags. The plaintext session
+ * tag keys can’t exceed 128 characters, and the values can’t exceed 256 characters. For these
+ * and additional limits, see IAM
+ * and STS Character Limits in the IAM User Guide.
+ *
+ * An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs,
+ * and session tags into a packed binary format that has a separate limit. Your request can
+ * fail for this limit even if your plaintext meets the other requirements. The
+ * PackedPolicySize response element indicates by percentage how close the
+ * policies and tags for your request are to the upper size limit.
+ *
+ * You can pass a session tag with the same key as a tag that is already attached to the
+ * role. When you do, session tags override a role tag with the same key.
+ *
+ * Tag key–value pairs are not case sensitive, but case is preserved. This means that you
+ * cannot have separate Department and department tag keys. Assume
+ * that the role has the Department=Marketing tag and you pass the
+ * department=engineering session tag. Department
+ * and department are not saved as separate tags, and the session tag passed in
+ * the request takes precedence over the role tag.
+ *
+ * Additionally, if you used temporary credentials to perform this operation, the new
+ * session inherits any transitive session tags from the calling session. If you pass a
+ * session tag with the same key as an inherited tag, the operation fails. To view the
+ * inherited tags for a session, see the CloudTrail logs. For more information, see Viewing Session Tags in CloudTrail in the
+ * IAM User Guide.
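+ *
+ * A sketch of the tagging behavior described above (the tag names and ARN are
+ * illustrative placeholders):
+ * ```typescript
+ * import type { AssumeRoleCommandInput } from "@aws-sdk/client-sts";
+ * const input: AssumeRoleCommandInput = {
+ *   RoleArn: "arn:aws:iam::111122223333:role/example-role",
+ *   RoleSessionName: "tagged-session",
+ *   Tags: [{ Key: "Department", Value: "Engineering" }], // overrides a role tag with the same key
+ *   TransitiveTagKeys: ["Department"], // persists through role chaining
+ * };
+ * ```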
+ * @public + */ + Tags?: Tag[] | undefined; + /** + *
+ * A list of keys for session tags that you want to set as transitive. If you set a tag key
+ * as transitive, the corresponding key and value passes to subsequent sessions in a role
+ * chain. For more information, see Chaining Roles
+ * with Session Tags in the IAM User Guide.
+ *
+ * This parameter is optional. The transitive status of a session tag does not impact its
+ * packed binary size.
+ *
+ * If you choose not to specify a transitive tag key, then no tags are passed from this
+ * session to any subsequent sessions.
+ * @public + */ + TransitiveTagKeys?: string[] | undefined; + /** + *
+ * A unique identifier that might be required when you assume a role in another account. If
+ * the administrator of the account to which the role belongs provided you with an external
+ * ID, then provide that value in the ExternalId parameter. This value can be any
+ * string, such as a passphrase or account number. A cross-account role is usually set up to
+ * trust everyone in an account. Therefore, the administrator of the trusting account might
+ * send an external ID to the administrator of the trusted account. That way, only someone
+ * with the ID can assume the role, rather than everyone in the account. For more information
+ * about the external ID, see How to Use an External ID
+ * When Granting Access to Your Amazon Web Services Resources to a Third Party in the
+ * IAM User Guide.
+ *
+ * The regex used to validate this parameter is a string of
+ * characters consisting of upper- and lower-case alphanumeric characters with no spaces.
+ * You can also include underscores or any of the following characters: =,.@:/-
+ * @public + */ + ExternalId?: string | undefined; + /** + *
+ * The identification number of the MFA device that is associated with the user who is
+ * making the AssumeRole call. Specify this value if the trust policy of the role
+ * being assumed includes a condition that requires MFA authentication. The value is either
+ * the serial number for a hardware device (such as GAHT12345678) or an Amazon
+ * Resource Name (ARN) for a virtual device (such as
+ * arn:aws:iam::123456789012:mfa/user).
+ *
+ * The regex used to validate this parameter is a string of characters
+ * consisting of upper- and lower-case alphanumeric characters with no spaces. You can
+ * also include underscores or any of the following characters: =,.@-
+ * @public + */ + SerialNumber?: string | undefined; + /** + *
+ * The value provided by the MFA device, if the trust policy of the role being assumed
+ * requires MFA. (In other words, if the policy includes a condition that tests for MFA). If
+ * the role being assumed requires MFA and if the TokenCode value is missing or
+ * expired, the AssumeRole call returns an "access denied" error.
+ *
+ * The format for this parameter, as described by its regex pattern, is a sequence of six
+ * numeric digits.
+ * @public + */ + TokenCode?: string | undefined; + /** + *
+ * The source identity specified by the principal that is calling the
+ * AssumeRole operation. The source identity value persists across chained role sessions.
+ *
+ * You can require users to specify a source identity when they assume a role. You do this
+ * by using the sts:SourceIdentity condition key in a role trust policy. You
+ * can use source identity information in CloudTrail logs to determine who took actions with a
+ * role. You can use the aws:SourceIdentity condition key to further control
+ * access to Amazon Web Services resources based on the value of source identity. For more information about
+ * using source identity, see Monitor and control
+ * actions taken with assumed roles in the
+ * IAM User Guide.
+ *
+ * The regex used to validate this parameter is a string of characters consisting of upper-
+ * and lower-case alphanumeric characters with no spaces. You can also include underscores or
+ * any of the following characters: +=,.@-. You cannot use a value that begins with the text
+ * aws:. This prefix is reserved for Amazon Web Services internal use.
+ * @public + */ + SourceIdentity?: string | undefined; + /** + *
+ * A list of previously acquired trusted context assertions in the format of a JSON array.
+ * The trusted context assertion is signed and encrypted by Amazon Web Services STS.
+ *
+ * The following is an example of a ProvidedContext value that includes a
+ * single trusted context assertion and the ARN of the context provider from which the trusted
+ * context assertion was generated.
+ *
+ * [\{"ProviderArn":"arn:aws:iam::aws:contextProvider/IdentityCenter","ContextAssertion":"trusted-context-assertion"\}]
+ * @public + */ + ProvidedContexts?: ProvidedContext[] | undefined; +} +/** + *
+ * Amazon Web Services credentials for API authentication.
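+ *
+ * A sketch of wiring these returned values into another client's credentials (the S3
+ * client and region are assumptions used only for illustration):
+ * ```typescript
+ * import { S3Client } from "@aws-sdk/client-s3";
+ * import type { Credentials } from "@aws-sdk/client-sts";
+ * function s3ClientFromStsCredentials(creds: Credentials): S3Client {
+ *   return new S3Client({
+ *     region: "us-east-1", // assumption
+ *     credentials: {
+ *       accessKeyId: creds.AccessKeyId!,
+ *       secretAccessKey: creds.SecretAccessKey!,
+ *       sessionToken: creds.SessionToken!,
+ *       expiration: creds.Expiration,
+ *     },
+ *   });
+ * }
+ * ```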
+ * @public + */ +export interface Credentials { + /** + *
+ * The access key ID that identifies the temporary security credentials.
+ * @public + */ + AccessKeyId: string | undefined; + /** + *
+ * The secret access key that can be used to sign requests.
+ * @public + */ + SecretAccessKey: string | undefined; + /** + *
+ * The token that users must pass to the service API to use the temporary
+ * credentials.
+ * @public + */ + SessionToken: string | undefined; + /** + *
+ * The date on which the current credentials expire.
+ * @public + */ + Expiration: Date | undefined; +} +/** + * @internal + */ +export declare const CredentialsFilterSensitiveLog: (obj: Credentials) => any; +/** + *

+/**
+ * Contains the response to a successful AssumeRole request, including
+ * temporary Amazon Web Services credentials that can be used to make Amazon Web Services requests.
+ * @public
+ */
+export interface AssumeRoleResponse {
+  /**
+   * The temporary security credentials, which include an access key ID, a secret access key,
+   * and a security (or session) token.
+   *
+   * The size of the security token that STS API operations return is not fixed. We
+   * strongly recommend that you make no assumptions about the maximum size.
+   * @public
+   */
+  Credentials?: Credentials | undefined;
+  /**
+   * The Amazon Resource Name (ARN) and the assumed role ID, which are identifiers that you
+   * can use to refer to the resulting temporary security credentials. For example, you can
+   * reference these credentials as a principal in a resource-based policy by using the ARN or
+   * assumed role ID. The ARN and ID include the RoleSessionName that you specified
+   * when you called AssumeRole.
+   * @public
+   */
+  AssumedRoleUser?: AssumedRoleUser | undefined;
+  /**
+   * A percentage value that indicates the packed size of the session policies and session
+   * tags combined passed in the request. The request fails if the packed size is greater than 100 percent,
+   * which means the policies and tags exceeded the allowed space.
+   * @public
+   */
+  PackedPolicySize?: number | undefined;
+  /**
+   * The source identity specified by the principal that is calling the
+   * AssumeRole operation.
+   *
+   * You can require users to specify a source identity when they assume a role. You do this
+   * by using the sts:SourceIdentity condition key in a role trust policy. You can
+   * use source identity information in CloudTrail logs to determine who took actions with a role.
+   * You can use the aws:SourceIdentity condition key to further control access to
+   * Amazon Web Services resources based on the value of source identity. For more information about using
+   * source identity, see Monitor and control actions taken with assumed roles in the
+   * IAM User Guide.
+   *
+   * The regex used to validate this parameter is a string of characters consisting of upper-
+   * and lower-case alphanumeric characters with no spaces. You can also include underscores or
+   * any of the following characters: =,.@-
+   * @public
+   */
+  SourceIdentity?: string | undefined;
+}
+/**
+ * @internal
+ */
+export declare const AssumeRoleResponseFilterSensitiveLog: (obj: AssumeRoleResponse) => any;
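The Credentials shape in this response plugs directly into another client's credentials option. A hedged sketch of that handoff (the S3 client, region, and helper name are stand-ins, not part of this diff):

import { S3Client } from "@aws-sdk/client-s3";
import type { Credentials } from "@aws-sdk/client-sts";

// Builds a client from an AssumeRoleResponse's Credentials member. All
// three string fields are needed to sign requests, so check them first.
export function clientFromAssumedRole(creds: Credentials | undefined): S3Client {
  if (!creds?.AccessKeyId || !creds.SecretAccessKey || !creds.SessionToken) {
    throw new Error("incomplete temporary credentials");
  }
  return new S3Client({
    region: "us-east-1", // placeholder region
    credentials: {
      accessKeyId: creds.AccessKeyId,
      secretAccessKey: creds.SecretAccessKey,
      sessionToken: creds.SessionToken,
      expiration: creds.Expiration,
    },
  });
}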

+/**
+ * The web identity token that was passed is expired or is not valid. Get a new identity
+ * token from the identity provider and then retry the request.
+ * @public
+ */
+export declare class ExpiredTokenException extends __BaseException {
+  readonly name: "ExpiredTokenException";
+  readonly $fault: "client";
+  /**
+   * @internal
+   */
+  constructor(opts: __ExceptionOptionType<ExpiredTokenException, __BaseException>);
+}
+/**
+ * The request was rejected because the policy document was malformed. The error message
+ * describes the specific error.
+ * @public
+ */
+export declare class MalformedPolicyDocumentException extends __BaseException {
+  readonly name: "MalformedPolicyDocumentException";
+  readonly $fault: "client";
+  /**
+   * @internal
+   */
+  constructor(opts: __ExceptionOptionType<MalformedPolicyDocumentException, __BaseException>);
+}
+/**
+ * The request was rejected because the total packed size of the session policies and
+ * session tags combined was too large. An Amazon Web Services conversion compresses the session policy
+ * document, session policy ARNs, and session tags into a packed binary format that has a
+ * separate limit. The error message indicates by percentage how close the policies and
+ * tags are to the upper size limit. For more information, see Passing Session Tags in STS in
+ * the IAM User Guide.
+ *
+ * You could receive this error even though you meet other defined session policy and
+ * session tag limits. For more information, see IAM and STS Entity Character Limits in the IAM User
+ * Guide.
+ * @public
+ */
+export declare class PackedPolicyTooLargeException extends __BaseException {
+  readonly name: "PackedPolicyTooLargeException";
+  readonly $fault: "client";
+  /**
+   * @internal
+   */
+  constructor(opts: __ExceptionOptionType<PackedPolicyTooLargeException, __BaseException>);
+}
+/**
+ * STS is not activated in the requested region for the account that is being asked to
+ * generate credentials. The account administrator must use the IAM console to activate
+ * STS in that region. For more information, see Activating and
+ * Deactivating STS in an Amazon Web Services Region in the IAM User
+ * Guide.
+ * @public
+ */
+export declare class RegionDisabledException extends __BaseException {
+  readonly name: "RegionDisabledException";
+  readonly $fault: "client";
+  /**
+   * @internal
+   */
+  constructor(opts: __ExceptionOptionType<RegionDisabledException, __BaseException>);
+}
+/**
+ * The identity provider (IdP) reported that authentication failed. This might be because
+ * the claim is invalid.
+ *
+ * If this error is returned for the AssumeRoleWithWebIdentity operation, it
+ * can also mean that the claim has expired or has been explicitly revoked.
+ * @public
+ */
+export declare class IDPRejectedClaimException extends __BaseException {
+  readonly name: "IDPRejectedClaimException";
+  readonly $fault: "client";
+  /**
+   * @internal
+   */
+  constructor(opts: __ExceptionOptionType<IDPRejectedClaimException, __BaseException>);
+}
+/**
+ * The web identity token that was passed could not be validated by Amazon Web Services. Get a new
+ * identity token from the identity provider and then retry the request.
+ * @public
+ */
+export declare class InvalidIdentityTokenException extends __BaseException {
+  readonly name: "InvalidIdentityTokenException";
+  readonly $fault: "client";
+  /**
+   * @internal
+   */
+  constructor(opts: __ExceptionOptionType<InvalidIdentityTokenException, __BaseException>);
+}
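Because each exception class above is exported with a distinct name, callers can branch on instanceof rather than string-matching error messages. A sketch under the same assumption that the public @aws-sdk/client-sts package is in use:

import {
  STSClient,
  AssumeRoleWithWebIdentityCommand,
  type AssumeRoleWithWebIdentityCommandInput,
  ExpiredTokenException,
  IDPRejectedClaimException,
} from "@aws-sdk/client-sts";

// Distinguishes "get a fresh token and retry" failures from hard rejections.
export async function assumeWithWebIdentity(
  client: STSClient,
  input: AssumeRoleWithWebIdentityCommandInput
) {
  try {
    return await client.send(new AssumeRoleWithWebIdentityCommand(input));
  } catch (err) {
    if (err instanceof ExpiredTokenException) {
      // Recoverable: fetch a new identity token from the IdP, then retry.
    } else if (err instanceof IDPRejectedClaimException) {
      // Not recoverable with the same token: the IdP rejected the claim.
    }
    throw err;
  }
}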

+/**
+ * @public
+ */
+export interface AssumeRoleWithWebIdentityRequest {
+  /**
+   * The Amazon Resource Name (ARN) of the role that the caller is assuming.
+   *
+   * Additional considerations apply to Amazon Cognito identity pools that assume cross-account IAM roles. The trust policies of these roles must accept the
+   * cognito-identity.amazonaws.com service principal and must contain the
+   * cognito-identity.amazonaws.com:aud condition key to restrict role
+   * assumption to users from your intended identity pools. A policy that trusts Amazon Cognito
+   * identity pools without this condition creates a risk that a user from an unintended
+   * identity pool can assume the role. For more information, see Trust policies for
+   * IAM roles in Basic (Classic) authentication in the Amazon Cognito
+   * Developer Guide.
+   * @public
+   */
+  RoleArn: string | undefined;
+  /**
+   * An identifier for the assumed role session. Typically, you pass the name or identifier
+   * that is associated with the user who is using your application. That way, the temporary
+   * security credentials that your application will use are associated with that user. This
+   * session name is included as part of the ARN and assumed role ID in the
+   * AssumedRoleUser response element.
+   *
+   * For security purposes, administrators can view this field in CloudTrail logs to help identify who performed an action in Amazon Web Services. Your
+   * administrator might require that you specify your user name as the session name when you
+   * assume the role. For more information, see sts:RoleSessionName.
+   *
+   * The regex used to validate this parameter is a string of characters
+   * consisting of upper- and lower-case alphanumeric characters with no spaces. You can
+   * also include underscores or any of the following characters: =,.@-
+   * @public
+   */
+  RoleSessionName: string | undefined;
+  /**
+   * The OAuth 2.0 access token or OpenID Connect ID token that is provided by the identity
+   * provider. Your application must get this token by authenticating the user who is using your
+   * application with a web identity provider before the application makes an
+   * AssumeRoleWithWebIdentity call. Timestamps in the token must be formatted
+   * as either an integer or a long integer. Tokens must be signed using either RSA keys (RS256,
+   * RS384, or RS512) or ECDSA keys (ES256, ES384, or ES512).
+   * @public
+   */
+  WebIdentityToken: string | undefined;
+  /**
+   * The fully qualified host component of the domain name of the OAuth 2.0 identity
+   * provider. Do not specify this value for an OpenID Connect identity provider.
+   *
+   * Currently www.amazon.com and graph.facebook.com are the only
+   * supported identity providers for OAuth 2.0 access tokens. Do not include URL schemes and
+   * port numbers.
+   *
+   * Do not specify this value for OpenID Connect ID tokens.
+   * @public
+   */
+  ProviderId?: string | undefined;
+  /**
+   * The Amazon Resource Names (ARNs) of the IAM managed policies that you want to use as
+   * managed session policies. The policies must exist in the same account as the role.
+   *
+   * This parameter is optional. You can provide up to 10 managed policy ARNs. However, the
+   * plaintext that you use for both inline and managed session policies can't exceed 2,048
+   * characters. For more information about ARNs, see Amazon Resource Names (ARNs) and Amazon Web Services
+   * Service Namespaces in the Amazon Web Services General Reference.
+   *
+   * An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs,
+   * and session tags into a packed binary format that has a separate limit. Your request can
+   * fail for this limit even if your plaintext meets the other requirements. The
+   * PackedPolicySize response element indicates by percentage how close the
+   * policies and tags for your request are to the upper size limit.
+   *
+   * Passing policies to this operation returns new
+   * temporary credentials. The resulting session's permissions are the intersection of the
+   * role's identity-based policy and the session policies. You can use the role's temporary
+   * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns
+   * the role. You cannot use session policies to grant more permissions than those allowed
+   * by the identity-based policy of the role that is being assumed. For more information, see
+   * Session Policies in the IAM User Guide.
+   * @public
+   */
+  PolicyArns?: PolicyDescriptorType[] | undefined;
+  /**
+   * An IAM policy in JSON format that you want to use as an inline session policy.
+   *
+   * This parameter is optional. Passing policies to this operation returns new
+   * temporary credentials. The resulting session's permissions are the intersection of the
+   * role's identity-based policy and the session policies. You can use the role's temporary
+   * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns
+   * the role. You cannot use session policies to grant more permissions than those allowed
+   * by the identity-based policy of the role that is being assumed. For more information, see
+   * Session Policies in the IAM User Guide.
+   *
+   * The plaintext that you use for both inline and managed session policies can't exceed
+   * 2,048 characters. The JSON policy characters can be any ASCII character from the space
+   * character to the end of the valid character list (\u0020 through \u00FF). It can also
+   * include the tab (\u0009), linefeed (\u000A), and carriage return (\u000D)
+   * characters.
+   *
+   * For more information about role session permissions, see Session
+   * policies.
+   *
+   * An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs,
+   * and session tags into a packed binary format that has a separate limit. Your request can
+   * fail for this limit even if your plaintext meets the other requirements. The
+   * PackedPolicySize response element indicates by percentage how close the
+   * policies and tags for your request are to the upper size limit.
+   * @public
+   */
+  Policy?: string | undefined;
+  /**
+   * The duration, in seconds, of the role session. The value can range from 900 seconds (15
+   * minutes) up to the maximum session duration setting for the role. This setting can have a
+   * value from 1 hour to 12 hours. If you specify a value higher than this setting, the
+   * operation fails. For example, if you specify a session duration of 12 hours, but your
+   * administrator set the maximum session duration to 6 hours, your operation fails. To learn
+   * how to view the maximum value for your role, see View the
+   * Maximum Session Duration Setting for a Role in the
+   * IAM User Guide.
+   *
+   * By default, the value is set to 3600 seconds.
+   *
+   * The DurationSeconds parameter is separate from the duration of a console
+   * session that you might request using the returned credentials. The request to the
+   * federation endpoint for a console sign-in token takes a SessionDuration
+   * parameter that specifies the maximum length of the console session. For more
+   * information, see Creating a URL
+   * that Enables Federated Users to Access the Amazon Web Services Management Console in the
+   * IAM User Guide.
+   * @public
+   */
+  DurationSeconds?: number | undefined;
+}
+/**
+ * @internal
+ */
+export declare const AssumeRoleWithWebIdentityRequestFilterSensitiveLog: (obj: AssumeRoleWithWebIdentityRequest) => any;
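Putting the request shape together: a minimal sketch of the call itself. AssumeRoleWithWebIdentity is an unsigned operation, so the client needs no long-term credentials; the package name, role ARN, and session name are assumptions/placeholders as above:

import { STSClient, AssumeRoleWithWebIdentityCommand } from "@aws-sdk/client-sts";

// No credentials provider is configured: the web identity token itself
// authenticates this call.
const sts = new STSClient({ region: "us-east-1" });

// Exchanges an OIDC ID token for temporary credentials.
export async function exchangeOidcToken(oidcIdToken: string) {
  return sts.send(
    new AssumeRoleWithWebIdentityCommand({
      RoleArn: "arn:aws:iam::123456789012:role/web-app-role", // placeholder
      RoleSessionName: "web-user-session", // surfaces in CloudTrail and the assumed-role ARN
      WebIdentityToken: oidcIdToken,
      DurationSeconds: 3600, // the documented default; capped by the role's maximum
    })
  );
}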

+/**
+ * Contains the response to a successful AssumeRoleWithWebIdentity
+ * request, including temporary Amazon Web Services credentials that can be used to make Amazon Web Services requests.
+ * @public
+ */
+export interface AssumeRoleWithWebIdentityResponse {
+  /**
+   * The temporary security credentials, which include an access key ID, a secret access key,
+   * and a security token.
+   *
+   * The size of the security token that STS API operations return is not fixed. We
+   * strongly recommend that you make no assumptions about the maximum size.
+   * @public
+   */
+  Credentials?: Credentials | undefined;
+  /**
+   * The unique user identifier that is returned by the identity provider. This identifier is
+   * associated with the WebIdentityToken that was submitted with the
+   * AssumeRoleWithWebIdentity call. The identifier is typically unique to the
+   * user and the application that acquired the WebIdentityToken (pairwise
+   * identifier). For OpenID Connect ID tokens, this field contains the value returned by the
+   * identity provider as the token's sub (Subject) claim.
+   * @public
+   */
+  SubjectFromWebIdentityToken?: string | undefined;
+  /**
+   * The Amazon Resource Name (ARN) and the assumed role ID, which are identifiers that you
+   * can use to refer to the resulting temporary security credentials. For example, you can
+   * reference these credentials as a principal in a resource-based policy by using the ARN or
+   * assumed role ID. The ARN and ID include the RoleSessionName that you specified
+   * when you called AssumeRole.
+   * @public
+   */
+  AssumedRoleUser?: AssumedRoleUser | undefined;
+  /**
+   * A percentage value that indicates the packed size of the session policies and session
+   * tags combined passed in the request. The request fails if the packed size is greater than 100 percent,
+   * which means the policies and tags exceeded the allowed space.
+   * @public
+   */
+  PackedPolicySize?: number | undefined;
+  /**
+   * The issuing authority of the web identity token presented. For OpenID Connect ID
+   * tokens, this contains the value of the iss field. For OAuth 2.0 access tokens,
+   * this contains the value of the ProviderId parameter that was passed in the
+   * AssumeRoleWithWebIdentity request.
+   * @public
+   */
+  Provider?: string | undefined;
+  /**
+   * The intended audience (also known as client ID) of the web identity token. This is
+   * traditionally the client identifier issued to the application that requested the web
+   * identity token.
+   * @public
+   */
+  Audience?: string | undefined;
+  /**
+   * The value of the source identity that is returned in the JSON web token (JWT) from the
+   * identity provider.
+   *
+   * You can require users to set a source identity value when they assume a role. You do
+   * this by using the sts:SourceIdentity condition key in a role trust policy.
+   * That way, actions that are taken with the role are associated with that user. After the
+   * source identity is set, the value cannot be changed. It is present in the request for all
+   * actions that are taken by the role and persists across chained role
+   * sessions. You can configure your identity provider to use an attribute associated with your
+   * users, like user name or email, as the source identity when calling
+   * AssumeRoleWithWebIdentity. You do this by adding a claim to the JSON web
+   * token. To learn more about OIDC tokens and claims, see Using Tokens with User Pools in the Amazon Cognito Developer Guide.
+   * For more information about using source identity, see Monitor and control
+   * actions taken with assumed roles in the IAM User Guide.
+   *
+   * The regex used to validate this parameter is a string of characters
+   * consisting of upper- and lower-case alphanumeric characters with no spaces. You can
+   * also include underscores or any of the following characters: =,.@-
+   * @public
+   */
+  SourceIdentity?: string | undefined;
+}
+/**
+ * @internal
+ */
+export declare const AssumeRoleWithWebIdentityResponseFilterSensitiveLog: (obj: AssumeRoleWithWebIdentityResponse) => any;
+/**
+ * The request could not be fulfilled because the identity provider (IDP) that was asked
+ * to verify the incoming identity token could not be reached. This is often a transient
+ * error caused by network conditions. Retry the request a limited number of times so that
+ * you don't exceed the request rate. If the error persists, the identity provider might be
+ * down or not responding.
+ * @public + */ +export declare class IDPCommunicationErrorException extends __BaseException { + readonly name: "IDPCommunicationErrorException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/protocols/Aws_query.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/protocols/Aws_query.d.ts new file mode 100644 index 00000000..db11c3a7 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/protocols/Aws_query.d.ts @@ -0,0 +1,20 @@ +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { AssumeRoleCommandInput, AssumeRoleCommandOutput } from "../commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommandInput, AssumeRoleWithWebIdentityCommandOutput } from "../commands/AssumeRoleWithWebIdentityCommand"; +/** + * serializeAws_queryAssumeRoleCommand + */ +export declare const se_AssumeRoleCommand: (input: AssumeRoleCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_queryAssumeRoleWithWebIdentityCommand + */ +export declare const se_AssumeRoleWithWebIdentityCommand: (input: AssumeRoleWithWebIdentityCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * deserializeAws_queryAssumeRoleCommand + */ +export declare const de_AssumeRoleCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_queryAssumeRoleWithWebIdentityCommand + */ +export declare const de_AssumeRoleWithWebIdentityCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.browser.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.browser.d.ts new file mode 100644 index 00000000..5513a9b9 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.browser.d.ts @@ -0,0 +1,59 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { STSClientConfig } from "./STSClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + credentialDefaultProvider: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | ((_: unknown) => () => Promise); + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: import("@smithy/protocol-http").HttpHandler | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | 
Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (params: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + } | undefined) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + useGlobalEndpoint?: boolean | import("@smithy/types").Provider | undefined; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.d.ts new file mode 100644 index 00000000..c9924b4d --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.d.ts @@ -0,0 +1,57 @@ +import { NoAuthSigner } from "@smithy/core"; +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { IdentityProviderConfig } from "@smithy/types"; +import { STSClientConfig } from "./STSClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-node").PreviouslyResolved | undefined) => Promise; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | { + schemeId: string; + 
identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: NoAuthSigner; + }[]; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: RequestHandler | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + credentialDefaultProvider?: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (params: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + } | undefined) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + useGlobalEndpoint?: boolean | import("@smithy/types").Provider | undefined; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.native.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.native.d.ts new file mode 100644 index 00000000..5bf519f0 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.native.d.ts @@ -0,0 +1,58 @@ +import { STSClientConfig } from "./STSClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: 
import("@smithy/types").NodeHttpHandlerOptions | import("@smithy/types").FetchHttpHandlerOptions | Record | import("@smithy/protocol-http").HttpHandler | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + credentialDefaultProvider: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | ((_: unknown) => () => Promise); + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: import("@smithy/smithy-client").DefaultsMode | import("@smithy/types").Provider; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (params: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + } | undefined) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: 
import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + useGlobalEndpoint?: boolean | import("@smithy/types").Provider | undefined; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.shared.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.shared.d.ts new file mode 100644 index 00000000..5b99276d --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.shared.d.ts @@ -0,0 +1,32 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { STSClientConfig } from "./STSClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: STSClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: (params: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + } | undefined) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: NoAuthSigner; + })[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeExtensions.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeExtensions.d.ts new file mode 100644 index 00000000..ebd8567f --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeExtensions.d.ts @@ -0,0 +1,17 @@ +import { STSExtensionConfiguration } from "./extensionConfiguration"; +/** + * @public + */ +export interface RuntimeExtension { + configure(extensionConfiguration: STSExtensionConfiguration): void; +} +/** + * @public + */ +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +/** + * @internal + */ +export declare const resolveRuntimeExtensions: (runtimeConfig: any, extensions: RuntimeExtension[]) => any; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDC.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDC.d.ts new file mode 100644 index 00000000..10ee8491 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDC.d.ts @@ -0,0 +1,22 @@ +import { HttpHandlerOptions as 
__HttpHandlerOptions } from "@smithy/types"; +import { + CreateTokenCommandInput, + CreateTokenCommandOutput, +} from "./commands/CreateTokenCommand"; +import { SSOOIDCClient } from "./SSOOIDCClient"; +export interface SSOOIDC { + createToken( + args: CreateTokenCommandInput, + options?: __HttpHandlerOptions + ): Promise; + createToken( + args: CreateTokenCommandInput, + cb: (err: any, data?: CreateTokenCommandOutput) => void + ): void; + createToken( + args: CreateTokenCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateTokenCommandOutput) => void + ): void; +} +export declare class SSOOIDC extends SSOOIDCClient implements SSOOIDC {} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDCClient.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDCClient.d.ts new file mode 100644 index 00000000..d44b7af7 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDCClient.d.ts @@ -0,0 +1,121 @@ +import { + HostHeaderInputConfig, + HostHeaderResolvedConfig, +} from "@aws-sdk/middleware-host-header"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, +} from "@aws-sdk/middleware-user-agent"; +import { + RegionInputConfig, + RegionResolvedConfig, +} from "@smithy/config-resolver"; +import { + EndpointInputConfig, + EndpointResolvedConfig, +} from "@smithy/middleware-endpoint"; +import { + RetryInputConfig, + RetryResolvedConfig, +} from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { + Client as __Client, + DefaultsMode as __DefaultsMode, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@smithy/smithy-client"; +import { + BodyLengthCalculator as __BodyLengthCalculator, + CheckOptionalClientConfig as __CheckOptionalClientConfig, + ChecksumConstructor as __ChecksumConstructor, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, + UserAgent as __UserAgent, +} from "@smithy/types"; +import { + HttpAuthSchemeInputConfig, + HttpAuthSchemeResolvedConfig, +} from "./auth/httpAuthSchemeProvider"; +import { + CreateTokenCommandInput, + CreateTokenCommandOutput, +} from "./commands/CreateTokenCommand"; +import { + ClientInputEndpointParameters, + ClientResolvedEndpointParameters, + EndpointParameters, +} from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +export type ServiceInputTypes = CreateTokenCommandInput; +export type ServiceOutputTypes = CreateTokenCommandOutput; +export interface ClientDefaults + extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + requestHandler?: __HttpHandlerUserInput; + sha256?: __ChecksumConstructor | __HashConstructor; + urlParser?: __UrlParser; + bodyLengthChecker?: __BodyLengthCalculator; + streamCollector?: __StreamCollector; + base64Decoder?: __Decoder; + base64Encoder?: __Encoder; + utf8Decoder?: __Decoder; + utf8Encoder?: __Encoder; + runtime?: string; + disableHostPrefix?: boolean; + serviceId?: string; + useDualstackEndpoint?: boolean | __Provider; + useFipsEndpoint?: boolean | __Provider; + region?: string | __Provider; + profile?: string; + 
defaultUserAgentProvider?: Provider<__UserAgent>; + maxAttempts?: number | __Provider; + retryMode?: string | __Provider; + logger?: __Logger; + extensions?: RuntimeExtension[]; + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +export type SSOOIDCClientConfigType = Partial< + __SmithyConfiguration<__HttpHandlerOptions> +> & + ClientDefaults & + UserAgentInputConfig & + RetryInputConfig & + RegionInputConfig & + HostHeaderInputConfig & + EndpointInputConfig & + HttpAuthSchemeInputConfig & + ClientInputEndpointParameters; +export interface SSOOIDCClientConfig extends SSOOIDCClientConfigType {} +export type SSOOIDCClientResolvedConfigType = + __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required & + RuntimeExtensionsConfig & + UserAgentResolvedConfig & + RetryResolvedConfig & + RegionResolvedConfig & + HostHeaderResolvedConfig & + EndpointResolvedConfig & + HttpAuthSchemeResolvedConfig & + ClientResolvedEndpointParameters; +export interface SSOOIDCClientResolvedConfig + extends SSOOIDCClientResolvedConfigType {} +export declare class SSOOIDCClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + SSOOIDCClientResolvedConfig +> { + readonly config: SSOOIDCClientResolvedConfig; + constructor( + ...[configuration]: __CheckOptionalClientConfig + ); + destroy(): void; +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 00000000..c39ba915 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,32 @@ +import { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + HttpAuthScheme, +} from "@smithy/types"; +import { SSOOIDCHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider( + httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider + ): void; + httpAuthSchemeProvider(): SSOOIDCHttpAuthSchemeProvider; + setCredentials( + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider + ): void; + credentials(): + | AwsCredentialIdentity + | AwsCredentialIdentityProvider + | undefined; +} +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +export declare const getHttpAuthExtensionConfiguration: ( + runtimeConfig: HttpAuthRuntimeConfig +) => HttpAuthExtensionConfiguration; +export declare const resolveHttpAuthRuntimeConfig: ( + config: HttpAuthExtensionConfiguration +) => HttpAuthRuntimeConfig; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 00000000..936b1011 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,47 @@ +import { + AwsSdkSigV4AuthInputConfig, + AwsSdkSigV4AuthResolvedConfig, + AwsSdkSigV4PreviouslyResolved, +} from "@aws-sdk/core"; +import { + HandlerExecutionContext, + HttpAuthScheme, 
+ HttpAuthSchemeParameters, + HttpAuthSchemeParametersProvider, + HttpAuthSchemeProvider, + Provider, +} from "@smithy/types"; +import { SSOOIDCClientResolvedConfig } from "../SSOOIDCClient"; +export interface SSOOIDCHttpAuthSchemeParameters + extends HttpAuthSchemeParameters { + region?: string; +} +export interface SSOOIDCHttpAuthSchemeParametersProvider + extends HttpAuthSchemeParametersProvider< + SSOOIDCClientResolvedConfig, + HandlerExecutionContext, + SSOOIDCHttpAuthSchemeParameters, + object + > {} +export declare const defaultSSOOIDCHttpAuthSchemeParametersProvider: ( + config: SSOOIDCClientResolvedConfig, + context: HandlerExecutionContext, + input: object +) => Promise; +export interface SSOOIDCHttpAuthSchemeProvider + extends HttpAuthSchemeProvider {} +export declare const defaultSSOOIDCHttpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; +export interface HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + authSchemePreference?: string[] | Provider; + httpAuthSchemes?: HttpAuthScheme[]; + httpAuthSchemeProvider?: SSOOIDCHttpAuthSchemeProvider; +} +export interface HttpAuthSchemeResolvedConfig + extends AwsSdkSigV4AuthResolvedConfig { + readonly authSchemePreference: Provider; + readonly httpAuthSchemes: HttpAuthScheme[]; + readonly httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; +} +export declare const resolveHttpAuthSchemeConfig: ( + config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & HttpAuthSchemeResolvedConfig; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/CreateTokenCommand.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/CreateTokenCommand.d.ts new file mode 100644 index 00000000..cb1de8b5 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/CreateTokenCommand.d.ts @@ -0,0 +1,43 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { CreateTokenRequest, CreateTokenResponse } from "../models/models_0"; +import { SSOOIDCClientResolvedConfig } from "../SSOOIDCClient"; +export { __MetadataBearer }; +export { $Command }; +export interface CreateTokenCommandInput extends CreateTokenRequest {} +export interface CreateTokenCommandOutput + extends CreateTokenResponse, + __MetadataBearer {} +declare const CreateTokenCommand_base: { + new ( + input: CreateTokenCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateTokenCommandInput, + CreateTokenCommandOutput, + SSOOIDCClientResolvedConfig, + CreateTokenCommandInput, + CreateTokenCommandOutput + >; + new ( + __0_0: CreateTokenCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateTokenCommandInput, + CreateTokenCommandOutput, + SSOOIDCClientResolvedConfig, + CreateTokenCommandInput, + CreateTokenCommandOutput + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class CreateTokenCommand extends CreateTokenCommand_base { + protected static __types: { + api: { + input: CreateTokenRequest; + output: CreateTokenResponse; + }; + sdk: { + input: CreateTokenCommandInput; + output: CreateTokenCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/index.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/index.d.ts new file mode 100644 index 
00000000..09214cae --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/index.d.ts @@ -0,0 +1 @@ +export * from "./CreateTokenCommand"; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/EndpointParameters.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/EndpointParameters.d.ts new file mode 100644 index 00000000..7f245406 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/EndpointParameters.d.ts @@ -0,0 +1,51 @@ +import { + Endpoint, + EndpointParameters as __EndpointParameters, + EndpointV2, + Provider, +} from "@smithy/types"; +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: + | string + | Provider + | Endpoint + | Provider + | EndpointV2 + | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: ( + options: T & ClientInputEndpointParameters +) => T & + ClientInputEndpointParameters & { + defaultSigningName: string; + }; +export declare const commonParams: { + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/endpointResolver.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/endpointResolver.d.ts new file mode 100644 index 00000000..59099254 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/endpointResolver.d.ts @@ -0,0 +1,8 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: ( + endpointParams: EndpointParameters, + context?: { + logger?: Logger; + } +) => EndpointV2; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/ruleset.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/ruleset.d.ts new file mode 100644 index 00000000..4b238994 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/extensionConfiguration.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/extensionConfiguration.d.ts new file mode 100644 index 00000000..c208e338 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from 
"@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +export interface SSOOIDCExtensionConfiguration + extends HttpHandlerExtensionConfiguration, + DefaultExtensionConfiguration, + AwsRegionExtensionConfiguration, + HttpAuthExtensionConfiguration {} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/index.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/index.d.ts new file mode 100644 index 00000000..1e9247f6 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/index.d.ts @@ -0,0 +1,8 @@ +export * from "./SSOOIDCClient"; +export * from "./SSOOIDC"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export { RuntimeExtension } from "./runtimeExtensions"; +export { SSOOIDCExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./models"; +export { SSOOIDCServiceException } from "./models/SSOOIDCServiceException"; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts new file mode 100644 index 00000000..dae636f0 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts @@ -0,0 +1,9 @@ +import { + ServiceException as __ServiceException, + ServiceExceptionOptions as __ServiceExceptionOptions, +} from "@smithy/smithy-client"; +export { __ServiceExceptionOptions }; +export { __ServiceException }; +export declare class SSOOIDCServiceException extends __ServiceException { + constructor(options: __ServiceExceptionOptions); +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/index.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/index.d.ts new file mode 100644 index 00000000..09c5d6e0 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/models_0.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/models_0.d.ts new file mode 100644 index 00000000..68de7147 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/models_0.d.ts @@ -0,0 +1,123 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { SSOOIDCServiceException as __BaseException } from "./SSOOIDCServiceException"; +export declare class AccessDeniedException extends __BaseException { + readonly name: "AccessDeniedException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class AuthorizationPendingException extends __BaseException { + readonly name: "AuthorizationPendingException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export interface CreateTokenRequest { + clientId: string | undefined; + clientSecret: string | undefined; + grantType: string | undefined; + 
deviceCode?: string | undefined; + code?: string | undefined; + refreshToken?: string | undefined; + scope?: string[] | undefined; + redirectUri?: string | undefined; + codeVerifier?: string | undefined; +} +export declare const CreateTokenRequestFilterSensitiveLog: ( + obj: CreateTokenRequest +) => any; +export interface CreateTokenResponse { + accessToken?: string | undefined; + tokenType?: string | undefined; + expiresIn?: number | undefined; + refreshToken?: string | undefined; + idToken?: string | undefined; +} +export declare const CreateTokenResponseFilterSensitiveLog: ( + obj: CreateTokenResponse +) => any; +export declare class ExpiredTokenException extends __BaseException { + readonly name: "ExpiredTokenException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InternalServerException extends __BaseException { + readonly name: "InternalServerException"; + readonly $fault: "server"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidClientException extends __BaseException { + readonly name: "InvalidClientException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidGrantException extends __BaseException { + readonly name: "InvalidGrantException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidRequestException extends __BaseException { + readonly name: "InvalidRequestException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidScopeException extends __BaseException { + readonly name: "InvalidScopeException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class SlowDownException extends __BaseException { + readonly name: "SlowDownException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor(opts: __ExceptionOptionType); +} +export declare class UnauthorizedClientException extends __BaseException { + readonly name: "UnauthorizedClientException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class UnsupportedGrantTypeException extends __BaseException { + readonly name: "UnsupportedGrantTypeException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/protocols/Aws_restJson1.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/protocols/Aws_restJson1.d.ts new file mode 100644 index 00000000..d0657b8b --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/protocols/Aws_restJson1.d.ts @@ -0,0 +1,17 @@ +import { + HttpRequest as __HttpRequest, + HttpResponse as __HttpResponse, +} from 
"@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { + CreateTokenCommandInput, + CreateTokenCommandOutput, +} from "../commands/CreateTokenCommand"; +export declare const se_CreateTokenCommand: ( + input: CreateTokenCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const de_CreateTokenCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.browser.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.browser.d.ts new file mode 100644 index 00000000..c469a24b --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.browser.d.ts @@ -0,0 +1,120 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | import("@smithy/protocol-http").HttpHandler + | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: 
string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.d.ts new file mode 100644 index 00000000..a24c900a --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.d.ts @@ -0,0 +1,114 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-node").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | RequestHandler + | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | 
import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.native.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.native.d.ts new file mode 100644 index 00000000..c3610fd0 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.native.d.ts @@ -0,0 +1,124 @@ +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: + | import("@smithy/types").NodeHttpHandlerOptions + | import("@smithy/types").FetchHttpHandlerOptions + | Record + | import("@smithy/protocol-http").HttpHandler + | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + 
defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: + | import("@smithy/smithy-client").DefaultsMode + | import("@smithy/types").Provider< + import("@smithy/smithy-client").DefaultsMode + >; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.shared.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.shared.d.ts new file mode 100644 index 00000000..130a1e36 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.shared.d.ts @@ -0,0 +1,49 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +export declare const 
getRuntimeConfig: (config: SSOOIDCClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: NoAuthSigner; + } + )[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeExtensions.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeExtensions.d.ts new file mode 100644 index 00000000..d226882e --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeExtensions.d.ts @@ -0,0 +1,11 @@ +import { SSOOIDCExtensionConfiguration } from "./extensionConfiguration"; +export interface RuntimeExtension { + configure(extensionConfiguration: SSOOIDCExtensionConfiguration): void; +} +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +export declare const resolveRuntimeExtensions: ( + runtimeConfig: any, + extensions: RuntimeExtension[] +) => any; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STS.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STS.d.ts new file mode 100644 index 00000000..cca9cbba --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STS.d.ts @@ -0,0 +1,39 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + AssumeRoleCommandInput, + AssumeRoleCommandOutput, +} from "./commands/AssumeRoleCommand"; +import { + AssumeRoleWithWebIdentityCommandInput, + AssumeRoleWithWebIdentityCommandOutput, +} from "./commands/AssumeRoleWithWebIdentityCommand"; +import { STSClient } from "./STSClient"; +export interface STS { + assumeRole( + args: AssumeRoleCommandInput, + options?: __HttpHandlerOptions + ): Promise<AssumeRoleCommandOutput>; + assumeRole( + args: AssumeRoleCommandInput, + cb: (err: any, data?: AssumeRoleCommandOutput) => void + ): void; + assumeRole( + args: AssumeRoleCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: AssumeRoleCommandOutput) => void + ): void; + assumeRoleWithWebIdentity( + args: AssumeRoleWithWebIdentityCommandInput, + options?: __HttpHandlerOptions + ): Promise<AssumeRoleWithWebIdentityCommandOutput>; + assumeRoleWithWebIdentity( + args: AssumeRoleWithWebIdentityCommandInput, + cb: (err: any, data?: AssumeRoleWithWebIdentityCommandOutput) => void + ): void; + assumeRoleWithWebIdentity( + args: AssumeRoleWithWebIdentityCommandInput, + options:
__HttpHandlerOptions, + cb: (err: any, data?: AssumeRoleWithWebIdentityCommandOutput) => void + ): void; +} +export declare class STS extends STSClient implements STS {} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STSClient.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STSClient.d.ts new file mode 100644 index 00000000..8bffddfc --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STSClient.d.ts @@ -0,0 +1,128 @@ +import { + HostHeaderInputConfig, + HostHeaderResolvedConfig, +} from "@aws-sdk/middleware-host-header"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, +} from "@aws-sdk/middleware-user-agent"; +import { + RegionInputConfig, + RegionResolvedConfig, +} from "@smithy/config-resolver"; +import { + EndpointInputConfig, + EndpointResolvedConfig, +} from "@smithy/middleware-endpoint"; +import { + RetryInputConfig, + RetryResolvedConfig, +} from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { + Client as __Client, + DefaultsMode as __DefaultsMode, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@smithy/smithy-client"; +import { + AwsCredentialIdentityProvider, + BodyLengthCalculator as __BodyLengthCalculator, + CheckOptionalClientConfig as __CheckOptionalClientConfig, + ChecksumConstructor as __ChecksumConstructor, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, + UserAgent as __UserAgent, +} from "@smithy/types"; +import { + HttpAuthSchemeInputConfig, + HttpAuthSchemeResolvedConfig, +} from "./auth/httpAuthSchemeProvider"; +import { + AssumeRoleCommandInput, + AssumeRoleCommandOutput, +} from "./commands/AssumeRoleCommand"; +import { + AssumeRoleWithWebIdentityCommandInput, + AssumeRoleWithWebIdentityCommandOutput, +} from "./commands/AssumeRoleWithWebIdentityCommand"; +import { + ClientInputEndpointParameters, + ClientResolvedEndpointParameters, + EndpointParameters, +} from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +export type ServiceInputTypes = + | AssumeRoleCommandInput + | AssumeRoleWithWebIdentityCommandInput; +export type ServiceOutputTypes = + | AssumeRoleCommandOutput + | AssumeRoleWithWebIdentityCommandOutput; +export interface ClientDefaults + extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + requestHandler?: __HttpHandlerUserInput; + sha256?: __ChecksumConstructor | __HashConstructor; + urlParser?: __UrlParser; + bodyLengthChecker?: __BodyLengthCalculator; + streamCollector?: __StreamCollector; + base64Decoder?: __Decoder; + base64Encoder?: __Encoder; + utf8Decoder?: __Decoder; + utf8Encoder?: __Encoder; + runtime?: string; + disableHostPrefix?: boolean; + serviceId?: string; + useDualstackEndpoint?: boolean | __Provider<boolean>; + useFipsEndpoint?: boolean | __Provider<boolean>; + region?: string | __Provider<string>; + profile?: string; + defaultUserAgentProvider?: Provider<__UserAgent>; + credentialDefaultProvider?: (input: any) => AwsCredentialIdentityProvider; + maxAttempts?: number | __Provider<number>; + retryMode?: string | __Provider<string>; + logger?: __Logger; + extensions?: RuntimeExtension[]; + defaultsMode?:
__DefaultsMode | __Provider<__DefaultsMode>; +} +export type STSClientConfigType = Partial< + __SmithyConfiguration<__HttpHandlerOptions> +> & + ClientDefaults & + UserAgentInputConfig & + RetryInputConfig & + RegionInputConfig & + HostHeaderInputConfig & + EndpointInputConfig & + HttpAuthSchemeInputConfig & + ClientInputEndpointParameters; +export interface STSClientConfig extends STSClientConfigType {} +export type STSClientResolvedConfigType = + __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required<ClientDefaults> & + RuntimeExtensionsConfig & + UserAgentResolvedConfig & + RetryResolvedConfig & + RegionResolvedConfig & + HostHeaderResolvedConfig & + EndpointResolvedConfig & + HttpAuthSchemeResolvedConfig & + ClientResolvedEndpointParameters; +export interface STSClientResolvedConfig extends STSClientResolvedConfigType {} +export declare class STSClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + STSClientResolvedConfig +> { + readonly config: STSClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig<STSClientConfig>); + destroy(): void; +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 00000000..ef83018d --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,32 @@ +import { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + HttpAuthScheme, +} from "@smithy/types"; +import { STSHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider( + httpAuthSchemeProvider: STSHttpAuthSchemeProvider + ): void; + httpAuthSchemeProvider(): STSHttpAuthSchemeProvider; + setCredentials( + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider + ): void; + credentials(): + | AwsCredentialIdentity + | AwsCredentialIdentityProvider + | undefined; +} +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: STSHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +export declare const getHttpAuthExtensionConfiguration: ( + runtimeConfig: HttpAuthRuntimeConfig +) => HttpAuthExtensionConfiguration; +export declare const resolveHttpAuthRuntimeConfig: ( + config: HttpAuthExtensionConfiguration +) => HttpAuthRuntimeConfig; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthSchemeProvider.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 00000000..0e17e2f4 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,57 @@ +import { + AwsSdkSigV4AuthInputConfig, + AwsSdkSigV4AuthResolvedConfig, + AwsSdkSigV4PreviouslyResolved, +} from "@aws-sdk/core"; +import { + Client, + HandlerExecutionContext, + HttpAuthScheme, + HttpAuthSchemeParameters, + HttpAuthSchemeParametersProvider, + HttpAuthSchemeProvider, + Provider, +} from "@smithy/types"; +import { STSClientResolvedConfig } from "../STSClient"; +export interface STSHttpAuthSchemeParameters extends HttpAuthSchemeParameters {
region?: string; +} +export interface STSHttpAuthSchemeParametersProvider + extends HttpAuthSchemeParametersProvider< + STSClientResolvedConfig, + HandlerExecutionContext, + STSHttpAuthSchemeParameters, + object + > {} +export declare const defaultSTSHttpAuthSchemeParametersProvider: ( + config: STSClientResolvedConfig, + context: HandlerExecutionContext, + input: object +) => Promise<STSHttpAuthSchemeParameters>; +export interface STSHttpAuthSchemeProvider + extends HttpAuthSchemeProvider<STSHttpAuthSchemeParameters> {} +export declare const defaultSTSHttpAuthSchemeProvider: STSHttpAuthSchemeProvider; +export interface StsAuthInputConfig {} +export interface StsAuthResolvedConfig { + stsClientCtor: new (clientConfig: any) => Client<any, any, any>; +} +export declare const resolveStsAuthConfig: <T>( + input: T & StsAuthInputConfig +) => T & StsAuthResolvedConfig; +export interface HttpAuthSchemeInputConfig + extends StsAuthInputConfig, + AwsSdkSigV4AuthInputConfig { + authSchemePreference?: string[] | Provider<string[]>; + httpAuthSchemes?: HttpAuthScheme[]; + httpAuthSchemeProvider?: STSHttpAuthSchemeProvider; +} +export interface HttpAuthSchemeResolvedConfig + extends StsAuthResolvedConfig, + AwsSdkSigV4AuthResolvedConfig { + readonly authSchemePreference: Provider<string[]>; + readonly httpAuthSchemes: HttpAuthScheme[]; + readonly httpAuthSchemeProvider: STSHttpAuthSchemeProvider; +} +export declare const resolveHttpAuthSchemeConfig: <T>( + config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & HttpAuthSchemeResolvedConfig; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleCommand.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleCommand.d.ts new file mode 100644 index 00000000..9333fbbc --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { AssumeRoleRequest, AssumeRoleResponse } from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + STSClientResolvedConfig, +} from "../STSClient"; +export { __MetadataBearer }; +export { $Command }; +export interface AssumeRoleCommandInput extends AssumeRoleRequest {} +export interface AssumeRoleCommandOutput + extends AssumeRoleResponse, + __MetadataBearer {} +declare const AssumeRoleCommand_base: { + new ( + input: AssumeRoleCommandInput + ): import("@smithy/smithy-client").CommandImpl< + AssumeRoleCommandInput, + AssumeRoleCommandOutput, + STSClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: AssumeRoleCommandInput + ): import("@smithy/smithy-client").CommandImpl< + AssumeRoleCommandInput, + AssumeRoleCommandOutput, + STSClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class AssumeRoleCommand extends AssumeRoleCommand_base { + protected static __types: { + api: { + input: AssumeRoleRequest; + output: AssumeRoleResponse; + }; + sdk: { + input: AssumeRoleCommandInput; + output: AssumeRoleCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts new file mode 100644 index 00000000..222e034e
--- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + AssumeRoleWithWebIdentityRequest, + AssumeRoleWithWebIdentityResponse, +} from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + STSClientResolvedConfig, +} from "../STSClient"; +export { __MetadataBearer }; +export { $Command }; +export interface AssumeRoleWithWebIdentityCommandInput + extends AssumeRoleWithWebIdentityRequest {} +export interface AssumeRoleWithWebIdentityCommandOutput + extends AssumeRoleWithWebIdentityResponse, + __MetadataBearer {} +declare const AssumeRoleWithWebIdentityCommand_base: { + new ( + input: AssumeRoleWithWebIdentityCommandInput + ): import("@smithy/smithy-client").CommandImpl< + AssumeRoleWithWebIdentityCommandInput, + AssumeRoleWithWebIdentityCommandOutput, + STSClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: AssumeRoleWithWebIdentityCommandInput + ): import("@smithy/smithy-client").CommandImpl< + AssumeRoleWithWebIdentityCommandInput, + AssumeRoleWithWebIdentityCommandOutput, + STSClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class AssumeRoleWithWebIdentityCommand extends AssumeRoleWithWebIdentityCommand_base { + protected static __types: { + api: { + input: AssumeRoleWithWebIdentityRequest; + output: AssumeRoleWithWebIdentityResponse; + }; + sdk: { + input: AssumeRoleWithWebIdentityCommandInput; + output: AssumeRoleWithWebIdentityCommandOutput; + }; + }; +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/index.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/index.d.ts new file mode 100644 index 00000000..0f200f52 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/index.d.ts @@ -0,0 +1,2 @@ +export * from "./AssumeRoleCommand"; +export * from "./AssumeRoleWithWebIdentityCommand"; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultRoleAssumers.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultRoleAssumers.d.ts new file mode 100644 index 00000000..b6f22ccb --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultRoleAssumers.d.ts @@ -0,0 +1,19 @@ +import { Pluggable } from "@smithy/types"; +import { + DefaultCredentialProvider, + RoleAssumer, + RoleAssumerWithWebIdentity, + STSRoleAssumerOptions, +} from "./defaultStsRoleAssumers"; +import { ServiceInputTypes, ServiceOutputTypes } from "./STSClient"; +export declare const getDefaultRoleAssumer: ( + stsOptions?: STSRoleAssumerOptions, + stsPlugins?: Pluggable<ServiceInputTypes, ServiceOutputTypes>[] +) => RoleAssumer; +export declare const getDefaultRoleAssumerWithWebIdentity: ( + stsOptions?: STSRoleAssumerOptions, + stsPlugins?: Pluggable<ServiceInputTypes, ServiceOutputTypes>[] +) => RoleAssumerWithWebIdentity; +export declare const decorateDefaultCredentialProvider: ( + provider: DefaultCredentialProvider +) => DefaultCredentialProvider; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultStsRoleAssumers.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultStsRoleAssumers.d.ts new
file mode 100644 index 00000000..38313794 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultStsRoleAssumers.d.ts @@ -0,0 +1,33 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentity, Logger, Provider } from "@smithy/types"; +import { AssumeRoleCommandInput } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommandInput } from "./commands/AssumeRoleWithWebIdentityCommand"; +import { STSClient, STSClientConfig } from "./STSClient"; +export type STSRoleAssumerOptions = Pick< + STSClientConfig, + "logger" | "region" | "requestHandler" +> & { + credentialProviderLogger?: Logger; + parentClientConfig?: CredentialProviderOptions["parentClientConfig"]; +}; +export type RoleAssumer = ( + sourceCreds: AwsCredentialIdentity, + params: AssumeRoleCommandInput +) => Promise<AwsCredentialIdentity>; +export declare const getDefaultRoleAssumer: ( + stsOptions: STSRoleAssumerOptions, + STSClient: new (options: STSClientConfig) => STSClient +) => RoleAssumer; +export type RoleAssumerWithWebIdentity = ( + params: AssumeRoleWithWebIdentityCommandInput +) => Promise<AwsCredentialIdentity>; +export declare const getDefaultRoleAssumerWithWebIdentity: ( + stsOptions: STSRoleAssumerOptions, + STSClient: new (options: STSClientConfig) => STSClient +) => RoleAssumerWithWebIdentity; +export type DefaultCredentialProvider = ( + input: any +) => Provider<AwsCredentialIdentity>; +export declare const decorateDefaultCredentialProvider: ( + provider: DefaultCredentialProvider +) => DefaultCredentialProvider; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/EndpointParameters.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/EndpointParameters.d.ts new file mode 100644 index 00000000..33567fda --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/EndpointParameters.d.ts @@ -0,0 +1,57 @@ +import { + Endpoint, + EndpointParameters as __EndpointParameters, + EndpointV2, + Provider, +} from "@smithy/types"; +export interface ClientInputEndpointParameters { + region?: string | Provider<string>; + useDualstackEndpoint?: boolean | Provider<boolean>; + useFipsEndpoint?: boolean | Provider<boolean>; + endpoint?: + | string + | Provider<string> + | Endpoint + | Provider<Endpoint> + | EndpointV2 + | Provider<EndpointV2>; + useGlobalEndpoint?: boolean | Provider<boolean>; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: <T>( + options: T & ClientInputEndpointParameters +) => T & + ClientInputEndpointParameters & { + defaultSigningName: string; + }; +export declare const commonParams: { + readonly UseGlobalEndpoint: { + readonly type: "builtInParams"; + readonly name: "useGlobalEndpoint"; + }; + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; + UseGlobalEndpoint?: boolean; +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/endpointResolver.d.ts
b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/endpointResolver.d.ts new file mode 100644 index 00000000..59099254 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/endpointResolver.d.ts @@ -0,0 +1,8 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: ( + endpointParams: EndpointParameters, + context?: { + logger?: Logger; + } +) => EndpointV2; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/ruleset.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/ruleset.d.ts new file mode 100644 index 00000000..4b238994 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/extensionConfiguration.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/extensionConfiguration.d.ts new file mode 100644 index 00000000..14b124b3 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +export interface STSExtensionConfiguration + extends HttpHandlerExtensionConfiguration, + DefaultExtensionConfiguration, + AwsRegionExtensionConfiguration, + HttpAuthExtensionConfiguration {} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/index.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/index.d.ts new file mode 100644 index 00000000..157a3069 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/index.d.ts @@ -0,0 +1,9 @@ +export * from "./STSClient"; +export * from "./STS"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export { RuntimeExtension } from "./runtimeExtensions"; +export { STSExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./models"; +export * from "./defaultRoleAssumers"; +export { STSServiceException } from "./models/STSServiceException"; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/STSServiceException.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/STSServiceException.d.ts new file mode 100644 index 00000000..95fc485e --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/STSServiceException.d.ts @@ -0,0 +1,9 @@ +import { + ServiceException as __ServiceException, + ServiceExceptionOptions as __ServiceExceptionOptions, +} from "@smithy/smithy-client"; +export { __ServiceExceptionOptions }; +export { __ServiceException }; +export declare class STSServiceException extends __ServiceException { + constructor(options: __ServiceExceptionOptions); +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/index.d.ts 
b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/index.d.ts new file mode 100644 index 00000000..09c5d6e0 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/models_0.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/models_0.d.ts new file mode 100644 index 00000000..1cba3715 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/models_0.d.ts @@ -0,0 +1,123 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { STSServiceException as __BaseException } from "./STSServiceException"; +export interface AssumedRoleUser { + AssumedRoleId: string | undefined; + Arn: string | undefined; +} +export interface PolicyDescriptorType { + arn?: string | undefined; +} +export interface ProvidedContext { + ProviderArn?: string | undefined; + ContextAssertion?: string | undefined; +} +export interface Tag { + Key: string | undefined; + Value: string | undefined; +} +export interface AssumeRoleRequest { + RoleArn: string | undefined; + RoleSessionName: string | undefined; + PolicyArns?: PolicyDescriptorType[] | undefined; + Policy?: string | undefined; + DurationSeconds?: number | undefined; + Tags?: Tag[] | undefined; + TransitiveTagKeys?: string[] | undefined; + ExternalId?: string | undefined; + SerialNumber?: string | undefined; + TokenCode?: string | undefined; + SourceIdentity?: string | undefined; + ProvidedContexts?: ProvidedContext[] | undefined; +} +export interface Credentials { + AccessKeyId: string | undefined; + SecretAccessKey: string | undefined; + SessionToken: string | undefined; + Expiration: Date | undefined; +} +export declare const CredentialsFilterSensitiveLog: (obj: Credentials) => any; +export interface AssumeRoleResponse { + Credentials?: Credentials | undefined; + AssumedRoleUser?: AssumedRoleUser | undefined; + PackedPolicySize?: number | undefined; + SourceIdentity?: string | undefined; +} +export declare const AssumeRoleResponseFilterSensitiveLog: ( + obj: AssumeRoleResponse +) => any; +export declare class ExpiredTokenException extends __BaseException { + readonly name: "ExpiredTokenException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType<ExpiredTokenException, __BaseException> + ); +} +export declare class MalformedPolicyDocumentException extends __BaseException { + readonly name: "MalformedPolicyDocumentException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + MalformedPolicyDocumentException, + __BaseException + > + ); +} +export declare class PackedPolicyTooLargeException extends __BaseException { + readonly name: "PackedPolicyTooLargeException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType<PackedPolicyTooLargeException, __BaseException> + ); +} +export declare class RegionDisabledException extends __BaseException { + readonly name: "RegionDisabledException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType<RegionDisabledException, __BaseException> + ); +} +export declare class IDPRejectedClaimException extends __BaseException { + readonly name: "IDPRejectedClaimException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType<IDPRejectedClaimException, __BaseException> + ); +} +export declare class InvalidIdentityTokenException extends __BaseException { + readonly name: "InvalidIdentityTokenException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType<InvalidIdentityTokenException, __BaseException> + ); +}
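// A minimal usage sketch to make the request/response shapes above concrete.
// Assumptions: the internal "@aws-sdk/nested-clients/sts" submodule resolves as
// declared in this package's "exports" map (application code would normally
// depend on @aws-sdk/client-sts instead), and the role ARN and session name
// below are hypothetical placeholders:
//
//   import { STSClient, AssumeRoleCommand } from "@aws-sdk/nested-clients/sts";
//
//   const sts = new STSClient({ region: "us-east-1" });
//   const { Credentials } = await sts.send(
//     new AssumeRoleCommand({
//       RoleArn: "arn:aws:iam::123456789012:role/example-role", // hypothetical
//       RoleSessionName: "example-session",
//       DurationSeconds: 900,
//     })
//   );
//   // On success, Credentials carries the AccessKeyId, SecretAccessKey,
//   // SessionToken, and Expiration fields of the Credentials interface above.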
+export interface AssumeRoleWithWebIdentityRequest { + RoleArn: string | undefined; + RoleSessionName: string | undefined; + WebIdentityToken: string | undefined; + ProviderId?: string | undefined; + PolicyArns?: PolicyDescriptorType[] | undefined; + Policy?: string | undefined; + DurationSeconds?: number | undefined; +} +export declare const AssumeRoleWithWebIdentityRequestFilterSensitiveLog: ( + obj: AssumeRoleWithWebIdentityRequest +) => any; +export interface AssumeRoleWithWebIdentityResponse { + Credentials?: Credentials | undefined; + SubjectFromWebIdentityToken?: string | undefined; + AssumedRoleUser?: AssumedRoleUser | undefined; + PackedPolicySize?: number | undefined; + Provider?: string | undefined; + Audience?: string | undefined; + SourceIdentity?: string | undefined; +} +export declare const AssumeRoleWithWebIdentityResponseFilterSensitiveLog: ( + obj: AssumeRoleWithWebIdentityResponse +) => any; +export declare class IDPCommunicationErrorException extends __BaseException { + readonly name: "IDPCommunicationErrorException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType<IDPCommunicationErrorException, __BaseException> + ); +} diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/protocols/Aws_query.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/protocols/Aws_query.d.ts new file mode 100644 index 00000000..1d03deb8 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/protocols/Aws_query.d.ts @@ -0,0 +1,29 @@ +import { + HttpRequest as __HttpRequest, + HttpResponse as __HttpResponse, +} from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { + AssumeRoleCommandInput, + AssumeRoleCommandOutput, +} from "../commands/AssumeRoleCommand"; +import { + AssumeRoleWithWebIdentityCommandInput, + AssumeRoleWithWebIdentityCommandOutput, +} from "../commands/AssumeRoleWithWebIdentityCommand"; +export declare const se_AssumeRoleCommand: ( + input: AssumeRoleCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_AssumeRoleWithWebIdentityCommand: ( + input: AssumeRoleWithWebIdentityCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const de_AssumeRoleCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise<AssumeRoleCommandOutput>; +export declare const de_AssumeRoleWithWebIdentityCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise<AssumeRoleWithWebIdentityCommandOutput>; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.browser.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.browser.d.ts new file mode 100644 index 00000000..54a4e794 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.browser.d.ts @@ -0,0 +1,131 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { STSClientConfig } from "./STSClient"; +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + credentialDefaultProvider: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | (( + _: unknown + ) => () => Promise<never>); + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise<import("@smithy/types").UserAgent>; +
maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | import("@smithy/protocol-http").HttpHandler + | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + params: import("./endpoint/EndpointParameters").EndpointParameters, + context?: + | { + logger?: import("@smithy/types").Logger | undefined; + } + | undefined + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; + useGlobalEndpoint?: + | boolean + | import("@smithy/types").Provider + | undefined; +}; diff --git 
a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.d.ts new file mode 100644 index 00000000..50cd2c7f --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.d.ts @@ -0,0 +1,112 @@ +import { NoAuthSigner } from "@smithy/core"; +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { IdentityProviderConfig } from "@smithy/types"; +import { STSClientConfig } from "./STSClient"; +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-node").PreviouslyResolved + | undefined + ) => Promise; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: NoAuthSigner; + }[]; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | RequestHandler + | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + credentialDefaultProvider?: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + params: import("./endpoint/EndpointParameters").EndpointParameters, + context?: + | { + logger?: import("@smithy/types").Logger | undefined; + } + | undefined + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: + | 
import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; + useGlobalEndpoint?: + | boolean + | import("@smithy/types").Provider + | undefined; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.native.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.native.d.ts new file mode 100644 index 00000000..5eda45e8 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.native.d.ts @@ -0,0 +1,135 @@ +import { STSClientConfig } from "./STSClient"; +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: + | import("@smithy/types").NodeHttpHandlerOptions + | import("@smithy/types").FetchHttpHandlerOptions + | Record + | import("@smithy/protocol-http").HttpHandler + | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + credentialDefaultProvider: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | (( + _: unknown + ) => () => Promise); + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: + | import("@smithy/smithy-client").DefaultsMode + | import("@smithy/types").Provider< + import("@smithy/smithy-client").DefaultsMode + >; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + 
| undefined; + endpointProvider: ( + params: import("./endpoint/EndpointParameters").EndpointParameters, + context?: + | { + logger?: import("@smithy/types").Logger | undefined; + } + | undefined + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; + useGlobalEndpoint?: + | boolean + | import("@smithy/types").Provider + | undefined; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.shared.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.shared.d.ts new file mode 100644 index 00000000..860b0c8c --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.shared.d.ts @@ -0,0 +1,51 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { STSClientConfig } from "./STSClient"; +export declare const getRuntimeConfig: (config: STSClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: ( + params: import("./endpoint/EndpointParameters").EndpointParameters, + context?: + | { + logger?: import("@smithy/types").Logger | undefined; + } + | undefined + ) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: NoAuthSigner; + } + )[]; + logger: 
import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeExtensions.d.ts b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeExtensions.d.ts new file mode 100644 index 00000000..d3cd411e --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeExtensions.d.ts @@ -0,0 +1,11 @@ +import { STSExtensionConfiguration } from "./extensionConfiguration"; +export interface RuntimeExtension { + configure(extensionConfiguration: STSExtensionConfiguration): void; +} +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +export declare const resolveRuntimeExtensions: ( + runtimeConfig: any, + extensions: RuntimeExtension[] +) => any; diff --git a/node_modules/@aws-sdk/nested-clients/package.json b/node_modules/@aws-sdk/nested-clients/package.json new file mode 100644 index 00000000..4a37b55b --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/package.json @@ -0,0 +1,115 @@ +{ + "name": "@aws-sdk/nested-clients", + "version": "3.799.0", + "description": "Nested clients for AWS SDK packages.", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "scripts": { + "build": "yarn lint && concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline nested-clients", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "lint": "node ../../scripts/validation/submodules-linter.js --pkg nested-clients", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "engines": { + "node": ">=18.0.0" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.1", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + 
"@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.2", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "./sso-oidc.d.ts", + "./sso-oidc.js", + "./sts.d.ts", + "./sts.js", + "dist-*/**" + ], + "browser": { + "./dist-es/submodules/sso-oidc/runtimeConfig": "./dist-es/submodules/sso-oidc/runtimeConfig.browser", + "./dist-es/submodules/sts/runtimeConfig": "./dist-es/submodules/sts/runtimeConfig.browser" + }, + "react-native": {}, + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/nested-clients", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/nested-clients" + }, + "exports": { + "./sso-oidc": { + "types": "./dist-types/submodules/sso-oidc/index.d.ts", + "module": "./dist-es/submodules/sso-oidc/index.js", + "node": "./dist-cjs/submodules/sso-oidc/index.js", + "import": "./dist-es/submodules/sso-oidc/index.js", + "require": "./dist-cjs/submodules/sso-oidc/index.js" + }, + "./sts": { + "types": "./dist-types/submodules/sts/index.d.ts", + "module": "./dist-es/submodules/sts/index.js", + "node": "./dist-cjs/submodules/sts/index.js", + "import": "./dist-es/submodules/sts/index.js", + "require": "./dist-cjs/submodules/sts/index.js" + } + } +} diff --git a/node_modules/@aws-sdk/nested-clients/sso-oidc.d.ts b/node_modules/@aws-sdk/nested-clients/sso-oidc.d.ts new file mode 100644 index 00000000..ab472825 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/sso-oidc.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@aws-sdk/nested-clients/sso-oidc" { + export * from "@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/index.d"; +} diff --git a/node_modules/@aws-sdk/nested-clients/sso-oidc.js b/node_modules/@aws-sdk/nested-clients/sso-oidc.js new file mode 100644 index 00000000..896865cf --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/sso-oidc.js @@ -0,0 +1,5 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/sso-oidc/index.js"); diff --git a/node_modules/@aws-sdk/nested-clients/sts.d.ts b/node_modules/@aws-sdk/nested-clients/sts.d.ts new file mode 100644 index 00000000..03b8e689 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/sts.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@aws-sdk/nested-clients/sts" { + export * from "@aws-sdk/nested-clients/dist-types/submodules/sts/index.d"; +} diff --git a/node_modules/@aws-sdk/nested-clients/sts.js b/node_modules/@aws-sdk/nested-clients/sts.js new file mode 100644 index 00000000..8976f123 --- /dev/null +++ b/node_modules/@aws-sdk/nested-clients/sts.js @@ -0,0 +1,5 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +module.exports = require("./dist-cjs/submodules/sts/index.js"); diff --git a/node_modules/@aws-sdk/region-config-resolver/LICENSE b/node_modules/@aws-sdk/region-config-resolver/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@aws-sdk/region-config-resolver/README.md b/node_modules/@aws-sdk/region-config-resolver/README.md new file mode 100644 index 00000000..389b7655 --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/README.md @@ -0,0 +1,12 @@ +# @aws-sdk/region-config-resolver + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/region-config-resolver/latest.svg)](https://www.npmjs.com/package/@aws-sdk/region-config-resolver) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/region-config-resolver.svg)](https://www.npmjs.com/package/@aws-sdk/region-config-resolver) + +> An internal package + +This package provides utilities for AWS region config resolvers. + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-cjs/index.js b/node_modules/@aws-sdk/region-config-resolver/dist-cjs/index.js new file mode 100644 index 00000000..ddc184f9 --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-cjs/index.js @@ -0,0 +1,105 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + NODE_REGION_CONFIG_FILE_OPTIONS: () => NODE_REGION_CONFIG_FILE_OPTIONS, + NODE_REGION_CONFIG_OPTIONS: () => NODE_REGION_CONFIG_OPTIONS, + REGION_ENV_NAME: () => REGION_ENV_NAME, + REGION_INI_NAME: () => REGION_INI_NAME, + getAwsRegionExtensionConfiguration: () => getAwsRegionExtensionConfiguration, + resolveAwsRegionExtensionConfiguration: () => resolveAwsRegionExtensionConfiguration, + resolveRegionConfig: () => resolveRegionConfig +}); +module.exports = __toCommonJS(index_exports); + +// src/extensions/index.ts +var getAwsRegionExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return { + setRegion(region) { + runtimeConfig.region = region; + }, + region() { + return 
runtimeConfig.region; + } + }; +}, "getAwsRegionExtensionConfiguration"); +var resolveAwsRegionExtensionConfiguration = /* @__PURE__ */ __name((awsRegionExtensionConfiguration) => { + return { + region: awsRegionExtensionConfiguration.region() + }; +}, "resolveAwsRegionExtensionConfiguration"); + +// src/regionConfig/config.ts +var REGION_ENV_NAME = "AWS_REGION"; +var REGION_INI_NAME = "region"; +var NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: /* @__PURE__ */ __name((env) => env[REGION_ENV_NAME], "environmentVariableSelector"), + configFileSelector: /* @__PURE__ */ __name((profile) => profile[REGION_INI_NAME], "configFileSelector"), + default: /* @__PURE__ */ __name(() => { + throw new Error("Region is missing"); + }, "default") +}; +var NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials" +}; + +// src/regionConfig/isFipsRegion.ts +var isFipsRegion = /* @__PURE__ */ __name((region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")), "isFipsRegion"); + +// src/regionConfig/getRealRegion.ts +var getRealRegion = /* @__PURE__ */ __name((region) => isFipsRegion(region) ? ["fips-aws-global", "aws-fips"].includes(region) ? "us-east-1" : region.replace(/fips-(dkr-|prod-)?|-fips/, "") : region, "getRealRegion"); + +// src/regionConfig/resolveRegionConfig.ts +var resolveRegionConfig = /* @__PURE__ */ __name((input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return Object.assign(input, { + region: /* @__PURE__ */ __name(async () => { + if (typeof region === "string") { + return getRealRegion(region); + } + const providedRegion = await region(); + return getRealRegion(providedRegion); + }, "region"), + useFipsEndpoint: /* @__PURE__ */ __name(async () => { + const providedRegion = typeof region === "string" ? region : await region(); + if (isFipsRegion(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? 
Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); + }, "useFipsEndpoint") + }); +}, "resolveRegionConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getAwsRegionExtensionConfiguration, + resolveAwsRegionExtensionConfiguration, + REGION_ENV_NAME, + REGION_INI_NAME, + NODE_REGION_CONFIG_OPTIONS, + NODE_REGION_CONFIG_FILE_OPTIONS, + resolveRegionConfig +}); + diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-es/extensions/index.js b/node_modules/@aws-sdk/region-config-resolver/dist-es/extensions/index.js new file mode 100644 index 00000000..eb033149 --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-es/extensions/index.js @@ -0,0 +1,15 @@ +export const getAwsRegionExtensionConfiguration = (runtimeConfig) => { + return { + setRegion(region) { + runtimeConfig.region = region; + }, + region() { + return runtimeConfig.region; + }, + }; +}; +export const resolveAwsRegionExtensionConfiguration = (awsRegionExtensionConfiguration) => { + return { + region: awsRegionExtensionConfiguration.region(), + }; +}; diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-es/index.js b/node_modules/@aws-sdk/region-config-resolver/dist-es/index.js new file mode 100644 index 00000000..6f4e482f --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./extensions"; +export * from "./regionConfig"; diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/config.js b/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/config.js new file mode 100644 index 00000000..7db98960 --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/config.js @@ -0,0 +1,12 @@ +export const REGION_ENV_NAME = "AWS_REGION"; +export const REGION_INI_NAME = "region"; +export const NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[REGION_ENV_NAME], + configFileSelector: (profile) => profile[REGION_INI_NAME], + default: () => { + throw new Error("Region is missing"); + }, +}; +export const NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials", +}; diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/getRealRegion.js b/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/getRealRegion.js new file mode 100644 index 00000000..8d1246bf --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/getRealRegion.js @@ -0,0 +1,6 @@ +import { isFipsRegion } from "./isFipsRegion"; +export const getRealRegion = (region) => isFipsRegion(region) + ? ["fips-aws-global", "aws-fips"].includes(region) + ? 
"us-east-1" + : region.replace(/fips-(dkr-|prod-)?|-fips/, "") + : region; diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/index.js b/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/index.js new file mode 100644 index 00000000..83675f77 --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/index.js @@ -0,0 +1,2 @@ +export * from "./config"; +export * from "./resolveRegionConfig"; diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/isFipsRegion.js b/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/isFipsRegion.js new file mode 100644 index 00000000..d758967d --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/isFipsRegion.js @@ -0,0 +1 @@ +export const isFipsRegion = (region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")); diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/resolveRegionConfig.js b/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/resolveRegionConfig.js new file mode 100644 index 00000000..f88e00fb --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/resolveRegionConfig.js @@ -0,0 +1,24 @@ +import { getRealRegion } from "./getRealRegion"; +import { isFipsRegion } from "./isFipsRegion"; +export const resolveRegionConfig = (input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return Object.assign(input, { + region: async () => { + if (typeof region === "string") { + return getRealRegion(region); + } + const providedRegion = await region(); + return getRealRegion(providedRegion); + }, + useFipsEndpoint: async () => { + const providedRegion = typeof region === "string" ? region : await region(); + if (isFipsRegion(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? 
diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-types/extensions/index.d.ts b/node_modules/@aws-sdk/region-config-resolver/dist-types/extensions/index.d.ts new file mode 100644 index 00000000..7756bad0 --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-types/extensions/index.d.ts @@ -0,0 +1,16 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { Provider } from "@smithy/types"; +export type RegionExtensionRuntimeConfigType = Partial<{ + region: string | Provider<string>; +}>; +/** + * @internal + */ +export declare const getAwsRegionExtensionConfiguration: (runtimeConfig: RegionExtensionRuntimeConfigType) => { + setRegion(region: Provider<string>): void; + region(): Provider<string>; +}; +/** + * @internal + */ +export declare const resolveAwsRegionExtensionConfiguration: (awsRegionExtensionConfiguration: AwsRegionExtensionConfiguration) => RegionExtensionRuntimeConfigType; diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-types/index.d.ts b/node_modules/@aws-sdk/region-config-resolver/dist-types/index.d.ts new file mode 100644 index 00000000..6f4e482f --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./extensions"; +export * from "./regionConfig"; diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/config.d.ts b/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/config.d.ts new file mode 100644 index 00000000..d203bb05 --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/config.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors, LocalConfigOptions } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const REGION_ENV_NAME = "AWS_REGION"; +/** + * @internal + */ +export declare const REGION_INI_NAME = "region"; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_OPTIONS: LoadedConfigSelectors<string>; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_FILE_OPTIONS: LocalConfigOptions; diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/getRealRegion.d.ts b/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/getRealRegion.d.ts new file mode 100644 index 00000000..c70fb5b6 --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/getRealRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getRealRegion: (region: string) => string; diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/index.d.ts b/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/index.d.ts new file mode 100644 index 00000000..6dcf5e55 --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./config"; +/** + * @internal + */ +export * from "./resolveRegionConfig"; diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/isFipsRegion.d.ts b/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/isFipsRegion.d.ts new file mode 100644 index 00000000..b42cee72 --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/isFipsRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isFipsRegion: (region: string) => boolean; diff --git
a/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts b/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts new file mode 100644 index 00000000..84ed4d01 --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts @@ -0,0 +1,37 @@ +import { Provider } from "@smithy/types"; +/** + * @public + */ +export interface RegionInputConfig { + /** + * The AWS region to which this client will send requests + */ + region?: string | Provider<string>; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | Provider<boolean>; +} +/** + * @internal + */ +interface PreviouslyResolved { +} +/** + * @internal + */ +export interface RegionResolvedConfig { + /** + * Resolved value for input config {@link RegionInputConfig.region} + */ + region: Provider<string>; + /** + * Resolved value for input {@link RegionInputConfig.useFipsEndpoint} + */ + useFipsEndpoint: Provider<boolean>; +} +/** + * @internal + */ +export declare const resolveRegionConfig: <T>(input: T & RegionInputConfig & PreviouslyResolved) => T & RegionResolvedConfig; +export {}; diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/extensions/index.d.ts b/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/extensions/index.d.ts new file mode 100644 index 00000000..c1328e3e --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/extensions/index.d.ts @@ -0,0 +1,14 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { Provider } from "@smithy/types"; +export type RegionExtensionRuntimeConfigType = Partial<{ + region: string | Provider<string>; +}>; +export declare const getAwsRegionExtensionConfiguration: ( + runtimeConfig: RegionExtensionRuntimeConfigType +) => { + setRegion(region: Provider<string>): void; + region(): Provider<string>; +}; +export declare const resolveAwsRegionExtensionConfiguration: ( + awsRegionExtensionConfiguration: AwsRegionExtensionConfiguration +) => RegionExtensionRuntimeConfigType; diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..6f4e482f --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./extensions"; +export * from "./regionConfig"; diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/config.d.ts b/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/config.d.ts new file mode 100644 index 00000000..ceb3e02b --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/config.d.ts @@ -0,0 +1,8 @@ +import { + LoadedConfigSelectors, + LocalConfigOptions, +} from "@smithy/node-config-provider"; +export declare const REGION_ENV_NAME = "AWS_REGION"; +export declare const REGION_INI_NAME = "region"; +export declare const NODE_REGION_CONFIG_OPTIONS: LoadedConfigSelectors<string>; +export declare const NODE_REGION_CONFIG_FILE_OPTIONS: LocalConfigOptions;
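These selectors are consumed by `loadConfig` from `@smithy/node-config-provider` to build the SDK's region provider. A minimal sketch of that wiring, assuming direct use of this internal package (which its README above discourages):

```javascript
import { loadConfig } from "@smithy/node-config-provider";
import {
  NODE_REGION_CONFIG_OPTIONS,
  NODE_REGION_CONFIG_FILE_OPTIONS,
} from "@aws-sdk/region-config-resolver";

// Checks process.env.AWS_REGION first, then the "region" key of the shared
// config/credentials files; the default selector throws "Region is missing".
const regionProvider = loadConfig(NODE_REGION_CONFIG_OPTIONS, NODE_REGION_CONFIG_FILE_OPTIONS);
const region = await regionProvider();
```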
diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts b/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts new file mode 100644 index 00000000..f06119bd --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts @@ -0,0 +1 @@ +export declare const getRealRegion: (region: string) => string; diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/index.d.ts b/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/index.d.ts new file mode 100644 index 00000000..83675f77 --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/index.d.ts @@ -0,0 +1,2 @@ +export * from "./config"; +export * from "./resolveRegionConfig"; diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts b/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts new file mode 100644 index 00000000..13d34f29 --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts @@ -0,0 +1 @@ +export declare const isFipsRegion: (region: string) => boolean; diff --git a/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts b/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts new file mode 100644 index 00000000..86b83642 --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts @@ -0,0 +1,14 @@ +import { Provider } from "@smithy/types"; +export interface RegionInputConfig { + region?: string | Provider<string>; + useFipsEndpoint?: boolean | Provider<boolean>; +} +interface PreviouslyResolved {} +export interface RegionResolvedConfig { + region: Provider<string>; + useFipsEndpoint: Provider<boolean>; +} +export declare const resolveRegionConfig: <T>( + input: T & RegionInputConfig & PreviouslyResolved +) => T & RegionResolvedConfig; +export {}; diff --git a/node_modules/@aws-sdk/region-config-resolver/package.json b/node_modules/@aws-sdk/region-config-resolver/package.json new file mode 100644 index 00000000..605f5309 --- /dev/null +++ b/node_modules/@aws-sdk/region-config-resolver/package.json @@ -0,0 +1,59 @@ +{ + "name": "@aws-sdk/region-config-resolver", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline region-config-resolver", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "jest": "28.1.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage":
"https://github.com/awslabs/smithy-typescript/tree/main/packages/region-config-resolver", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/region-config-resolver" + } +} diff --git a/node_modules/@aws-sdk/s3-request-presigner/LICENSE b/node_modules/@aws-sdk/s3-request-presigner/LICENSE new file mode 100644 index 00000000..e907b586 --- /dev/null +++ b/node_modules/@aws-sdk/s3-request-presigner/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@aws-sdk/s3-request-presigner/README.md b/node_modules/@aws-sdk/s3-request-presigner/README.md new file mode 100644 index 00000000..eb914d3e --- /dev/null +++ b/node_modules/@aws-sdk/s3-request-presigner/README.md @@ -0,0 +1,125 @@ +# @aws-sdk/s3-request-presigner + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/s3-request-presigner/latest.svg)](https://www.npmjs.com/package/@aws-sdk/s3-request-presigner) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/s3-request-presigner.svg)](https://www.npmjs.com/package/@aws-sdk/s3-request-presigner) + +This package provides a presigner based on Signature V4 that generates +presigned URLs for S3. + +### Get Presigned URL with Client and Command + +You can generate a presigned URL from an S3 client and command. Here's an example: + +```javascript +import { getSignedUrl } from "@aws-sdk/s3-request-presigner"; +import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3"; +const client = new S3Client(clientParams); +const command = new GetObjectCommand(getObjectParams); +const url = await getSignedUrl(client, command, { expiresIn: 3600 }); +``` + +You can get a signed URL for other S3 operations too, like `PutObjectCommand`. +The `expiresIn` config in the examples above is optional. If not set, it defaults +to `900` seconds. + +If you already have a request, you can pre-sign it following the +section below. + +### Get Presigned URL from an Existing Request + +```javascript +import { S3RequestPresigner } from "@aws-sdk/s3-request-presigner"; +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { Hash } from "@aws-sdk/hash-node"; +const signer = new S3RequestPresigner({ + region: regionProvider, + credentials: credentialsProvider, + sha256: Hash.bind(null, "sha256"), // In Node.js + //sha256: Sha256 // In browsers +}); +const presigned = await signer.presign(request); +``` + +To avoid redundant construction parameters when instantiating the S3 presigner, +you can simply spread the configuration of an existing S3 client and supply it to +the presigner's constructor. + +```javascript +//s3 is instantiated from S3Client from @aws-sdk/client-s3-* packages +const signer = new S3RequestPresigner({ + ...s3.config, +}); +``` + +### Get Presigned URL with headers that cannot be signed + +By using `getSignedUrl` with an `S3Client`, you can sign your +headers, improving the security of the presigned URL. Importantly, if you want to +sign any `x-amz-*` headers (like the ChecksumSHA256 header in this example), +you need to provide those headers to the set of `unhoistableHeaders` in the +`getSignedUrl` params, which will force those headers to be present in the +upload request.
+ +```javascript +import { PutObjectCommand, S3Client } from "@aws-sdk/client-s3"; +import { getSignedUrl } from "@aws-sdk/s3-request-presigner"; + +const s3Client = new S3Client({ region: "us-east-1" }); +const command = new PutObjectCommand({ + Bucket: bucket, + Key: key, + ChecksumSHA256: sha, +}); + +const presigned = await getSignedUrl(s3Client, command, { + expiresIn: expiration, + // Set of all x-amz-* headers you wish to have signed + unhoistableHeaders: new Set(["x-amz-checksum-sha256"]), +}); +``` + +### Get Presigned URL with headers that should be signed + +For headers that are not `x-amz-*`, you can add them to the set of +`signableHeaders` to have them enforced in the presigned URL's request. + +```javascript +import { PutObjectCommand, S3Client } from "@aws-sdk/client-s3"; +import { getSignedUrl } from "@aws-sdk/s3-request-presigner"; + +const s3Client = new S3Client({ region: "us-east-1" }); +const command = new PutObjectCommand({ + Bucket: bucket, + Key: key, + ContentType: contentType, +}); + +const presigned = await getSignedUrl(s3Client, command, { + signableHeaders: new Set(["content-type"]), + expiresIn: expiration, +}); +``` + +### PutObject with use of `hoistableHeaders` + +`hoistableHeaders` overrides the default behavior of not hoisting +any headers that begin with `x-amz-*`. + +```js +// example: Server Side Encryption headers +import { getSignedUrl } from "@aws-sdk/s3-request-presigner"; +import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3"; + +const params = { + Key: "...", + Bucket: "...", + ServerSideEncryption: "aws:kms", + SSEKMSKeyId: "arn:aws:kms:us-west-2:0000:key/abcd-1234-abcd", +}; +const s3Client = new S3Client(); +const command = new PutObjectCommand(params); + +const preSignedUrl = await getSignedUrl(s3Client, command, { + hoistableHeaders: new Set(["x-amz-server-side-encryption", "x-amz-server-side-encryption-aws-kms-key-id"]), +}); +```
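Because the checksum header in the first example above is signed but not hoisted into the query string, the party performing the upload must send it with the exact signed value. A minimal consumer-side sketch, assuming `presignedUrl` came from that example and `sha` and `fileBytes` are in scope:

```javascript
// Sketch: uploading through a presigned PUT URL whose x-amz-checksum-sha256
// header was kept signed via `unhoistableHeaders` (values assumed).
const response = await fetch(presignedUrl, {
  method: "PUT",
  headers: {
    // Must match the value that was signed, or S3 rejects the signature.
    "x-amz-checksum-sha256": sha,
  },
  body: fileBytes,
});
if (!response.ok) {
  throw new Error(`Upload failed with status ${response.status}`);
}
```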
diff --git a/node_modules/@aws-sdk/s3-request-presigner/dist-cjs/index.js b/node_modules/@aws-sdk/s3-request-presigner/dist-cjs/index.js new file mode 100644 index 00000000..056acf8d --- /dev/null +++ b/node_modules/@aws-sdk/s3-request-presigner/dist-cjs/index.js @@ -0,0 +1,186 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + S3RequestPresigner: () => S3RequestPresigner, + getSignedUrl: () => getSignedUrl +}); +module.exports = __toCommonJS(index_exports); + +// src/getSignedUrl.ts +var import_util_format_url = require("@aws-sdk/util-format-url"); +var import_middleware_endpoint = require("@smithy/middleware-endpoint"); +var import_protocol_http = require("@smithy/protocol-http"); + +// src/presigner.ts +var import_signature_v4_multi_region = require("@aws-sdk/signature-v4-multi-region"); + +// src/constants.ts +var UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +var SHA256_HEADER = "X-Amz-Content-Sha256"; + +// src/presigner.ts +var S3RequestPresigner = class { + static { + __name(this, "S3RequestPresigner"); + } + signer; + constructor(options) { + const resolvedOptions = { + // Allow `signingName` because we want to support the use case of supplying the client's + // resolved config directly, where service equals signingName. + service: options.signingName || options.service || "s3", + uriEscapePath: options.uriEscapePath || false, + applyChecksum: options.applyChecksum || false, + ...options + }; + this.signer = new import_signature_v4_multi_region.SignatureV4MultiRegion(resolvedOptions); + } + presign(requestToSign, { + unsignableHeaders = /* @__PURE__ */ new Set(), + hoistableHeaders = /* @__PURE__ */ new Set(), + unhoistableHeaders = /* @__PURE__ */ new Set(), + ...options + } = {}) { + this.prepareRequest(requestToSign, { + unsignableHeaders, + unhoistableHeaders, + hoistableHeaders + }); + return this.signer.presign(requestToSign, { + expiresIn: 900, + unsignableHeaders, + unhoistableHeaders, + ...options + }); + } + presignWithCredentials(requestToSign, credentials, { + unsignableHeaders = /* @__PURE__ */ new Set(), + hoistableHeaders = /* @__PURE__ */ new Set(), + unhoistableHeaders = /* @__PURE__ */ new Set(), + ...options + } = {}) { + this.prepareRequest(requestToSign, { + unsignableHeaders, + unhoistableHeaders, + hoistableHeaders + }); + return this.signer.presignWithCredentials(requestToSign, credentials, { + expiresIn: 900, + unsignableHeaders, + unhoistableHeaders, + ...options + }); + } + prepareRequest(requestToSign, { + unsignableHeaders = /* @__PURE__ */ new Set(), + unhoistableHeaders = /* @__PURE__ */ new Set(), + hoistableHeaders = /* @__PURE__ */ new Set() + } = {}) { + unsignableHeaders.add("content-type"); + Object.keys(requestToSign.headers).map((header) => header.toLowerCase()).filter((header) => header.startsWith("x-amz-server-side-encryption")).forEach((header) => { + if (!hoistableHeaders.has(header)) { + unhoistableHeaders.add(header); + } + }); + requestToSign.headers[SHA256_HEADER] = UNSIGNED_PAYLOAD; + const currentHostHeader = requestToSign.headers.host; + const port = requestToSign.port; + const expectedHostHeader = `${requestToSign.hostname}${requestToSign.port != null ?
":" + port : ""}`; + if (!currentHostHeader || currentHostHeader === requestToSign.hostname && requestToSign.port != null) { + requestToSign.headers.host = expectedHostHeader; + } + } +}; + +// src/getSignedUrl.ts +var getSignedUrl = /* @__PURE__ */ __name(async (client, command, options = {}) => { + let s3Presigner; + let region; + if (typeof client.config.endpointProvider === "function") { + const endpointV2 = await (0, import_middleware_endpoint.getEndpointFromInstructions)( + command.input, + command.constructor, + client.config + ); + const authScheme = endpointV2.properties?.authSchemes?.[0]; + if (authScheme?.name === "sigv4a") { + region = authScheme?.signingRegionSet?.join(","); + } else { + region = authScheme?.signingRegion; + } + s3Presigner = new S3RequestPresigner({ + ...client.config, + signingName: authScheme?.signingName, + region: /* @__PURE__ */ __name(async () => region, "region") + }); + } else { + s3Presigner = new S3RequestPresigner(client.config); + } + const presignInterceptMiddleware = /* @__PURE__ */ __name((next, context) => async (args) => { + const { request } = args; + if (!import_protocol_http.HttpRequest.isInstance(request)) { + throw new Error("Request to be presigned is not an valid HTTP request."); + } + delete request.headers["amz-sdk-invocation-id"]; + delete request.headers["amz-sdk-request"]; + delete request.headers["x-amz-user-agent"]; + let presigned2; + const presignerOptions = { + ...options, + signingRegion: options.signingRegion ?? context["signing_region"] ?? region, + signingService: options.signingService ?? context["signing_service"] + }; + if (context.s3ExpressIdentity) { + presigned2 = await s3Presigner.presignWithCredentials(request, context.s3ExpressIdentity, presignerOptions); + } else { + presigned2 = await s3Presigner.presign(request, presignerOptions); + } + return { + // Intercept the middleware stack by returning fake response + response: {}, + output: { + $metadata: { httpStatusCode: 200 }, + presigned: presigned2 + } + }; + }, "presignInterceptMiddleware"); + const middlewareName = "presignInterceptMiddleware"; + const clientStack = client.middlewareStack.clone(); + clientStack.addRelativeTo(presignInterceptMiddleware, { + name: middlewareName, + relation: "before", + toMiddleware: "awsAuthMiddleware", + override: true + }); + const handler = command.resolveMiddleware(clientStack, client.config, {}); + const { output } = await handler({ input: command.input }); + const { presigned } = output; + return (0, import_util_format_url.formatUrl)(presigned); +}, "getSignedUrl"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getSignedUrl, + S3RequestPresigner +}); + diff --git a/node_modules/@aws-sdk/s3-request-presigner/dist-es/constants.js b/node_modules/@aws-sdk/s3-request-presigner/dist-es/constants.js new file mode 100644 index 00000000..938f00b9 --- /dev/null +++ b/node_modules/@aws-sdk/s3-request-presigner/dist-es/constants.js @@ -0,0 +1,9 @@ +export const UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +export const SHA256_HEADER = "X-Amz-Content-Sha256"; +export const ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +export const CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +export const AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +export const SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +export const EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +export const HOST_HEADER = "host"; +export const ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; diff --git 
a/node_modules/@aws-sdk/s3-request-presigner/dist-es/getSignedUrl.js b/node_modules/@aws-sdk/s3-request-presigner/dist-es/getSignedUrl.js new file mode 100644 index 00000000..a6a17e1f --- /dev/null +++ b/node_modules/@aws-sdk/s3-request-presigner/dist-es/getSignedUrl.js @@ -0,0 +1,66 @@ +import { formatUrl } from "@aws-sdk/util-format-url"; +import { getEndpointFromInstructions } from "@smithy/middleware-endpoint"; +import { HttpRequest } from "@smithy/protocol-http"; +import { S3RequestPresigner } from "./presigner"; +export const getSignedUrl = async (client, command, options = {}) => { + let s3Presigner; + let region; + if (typeof client.config.endpointProvider === "function") { + const endpointV2 = await getEndpointFromInstructions(command.input, command.constructor, client.config); + const authScheme = endpointV2.properties?.authSchemes?.[0]; + if (authScheme?.name === "sigv4a") { + region = authScheme?.signingRegionSet?.join(","); + } + else { + region = authScheme?.signingRegion; + } + s3Presigner = new S3RequestPresigner({ + ...client.config, + signingName: authScheme?.signingName, + region: async () => region, + }); + } + else { + s3Presigner = new S3RequestPresigner(client.config); + } + const presignInterceptMiddleware = (next, context) => async (args) => { + const { request } = args; + if (!HttpRequest.isInstance(request)) { + throw new Error("Request to be presigned is not an valid HTTP request."); + } + delete request.headers["amz-sdk-invocation-id"]; + delete request.headers["amz-sdk-request"]; + delete request.headers["x-amz-user-agent"]; + let presigned; + const presignerOptions = { + ...options, + signingRegion: options.signingRegion ?? context["signing_region"] ?? region, + signingService: options.signingService ?? context["signing_service"], + }; + if (context.s3ExpressIdentity) { + presigned = await s3Presigner.presignWithCredentials(request, context.s3ExpressIdentity, presignerOptions); + } + else { + presigned = await s3Presigner.presign(request, presignerOptions); + } + return { + response: {}, + output: { + $metadata: { httpStatusCode: 200 }, + presigned, + }, + }; + }; + const middlewareName = "presignInterceptMiddleware"; + const clientStack = client.middlewareStack.clone(); + clientStack.addRelativeTo(presignInterceptMiddleware, { + name: middlewareName, + relation: "before", + toMiddleware: "awsAuthMiddleware", + override: true, + }); + const handler = command.resolveMiddleware(clientStack, client.config, {}); + const { output } = await handler({ input: command.input }); + const { presigned } = output; + return formatUrl(presigned); +}; diff --git a/node_modules/@aws-sdk/s3-request-presigner/dist-es/index.js b/node_modules/@aws-sdk/s3-request-presigner/dist-es/index.js new file mode 100644 index 00000000..f20b818e --- /dev/null +++ b/node_modules/@aws-sdk/s3-request-presigner/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./getSignedUrl"; +export * from "./presigner"; diff --git a/node_modules/@aws-sdk/s3-request-presigner/dist-es/presigner.js b/node_modules/@aws-sdk/s3-request-presigner/dist-es/presigner.js new file mode 100644 index 00000000..8b46fe10 --- /dev/null +++ b/node_modules/@aws-sdk/s3-request-presigner/dist-es/presigner.js @@ -0,0 +1,58 @@ +import { SignatureV4MultiRegion } from "@aws-sdk/signature-v4-multi-region"; +import { SHA256_HEADER, UNSIGNED_PAYLOAD } from "./constants"; +export class S3RequestPresigner { + signer; + constructor(options) { + const resolvedOptions = { + service: options.signingName || options.service || "s3", + 
uriEscapePath: options.uriEscapePath || false, + applyChecksum: options.applyChecksum || false, + ...options, + }; + this.signer = new SignatureV4MultiRegion(resolvedOptions); + } + presign(requestToSign, { unsignableHeaders = new Set(), hoistableHeaders = new Set(), unhoistableHeaders = new Set(), ...options } = {}) { + this.prepareRequest(requestToSign, { + unsignableHeaders, + unhoistableHeaders, + hoistableHeaders, + }); + return this.signer.presign(requestToSign, { + expiresIn: 900, + unsignableHeaders, + unhoistableHeaders, + ...options, + }); + } + presignWithCredentials(requestToSign, credentials, { unsignableHeaders = new Set(), hoistableHeaders = new Set(), unhoistableHeaders = new Set(), ...options } = {}) { + this.prepareRequest(requestToSign, { + unsignableHeaders, + unhoistableHeaders, + hoistableHeaders, + }); + return this.signer.presignWithCredentials(requestToSign, credentials, { + expiresIn: 900, + unsignableHeaders, + unhoistableHeaders, + ...options, + }); + } + prepareRequest(requestToSign, { unsignableHeaders = new Set(), unhoistableHeaders = new Set(), hoistableHeaders = new Set(), } = {}) { + unsignableHeaders.add("content-type"); + Object.keys(requestToSign.headers) + .map((header) => header.toLowerCase()) + .filter((header) => header.startsWith("x-amz-server-side-encryption")) + .forEach((header) => { + if (!hoistableHeaders.has(header)) { + unhoistableHeaders.add(header); + } + }); + requestToSign.headers[SHA256_HEADER] = UNSIGNED_PAYLOAD; + const currentHostHeader = requestToSign.headers.host; + const port = requestToSign.port; + const expectedHostHeader = `${requestToSign.hostname}${requestToSign.port != null ? ":" + port : ""}`; + if (!currentHostHeader || (currentHostHeader === requestToSign.hostname && requestToSign.port != null)) { + requestToSign.headers.host = expectedHostHeader; + } + } +} diff --git a/node_modules/@aws-sdk/s3-request-presigner/dist-types/constants.d.ts b/node_modules/@aws-sdk/s3-request-presigner/dist-types/constants.d.ts new file mode 100644 index 00000000..41ae278f --- /dev/null +++ b/node_modules/@aws-sdk/s3-request-presigner/dist-types/constants.d.ts @@ -0,0 +1,9 @@ +export declare const UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +export declare const SHA256_HEADER = "X-Amz-Content-Sha256"; +export declare const ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +export declare const CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +export declare const AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +export declare const SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +export declare const EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +export declare const HOST_HEADER = "host"; +export declare const ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; diff --git a/node_modules/@aws-sdk/s3-request-presigner/dist-types/getSignedUrl.d.ts b/node_modules/@aws-sdk/s3-request-presigner/dist-types/getSignedUrl.d.ts new file mode 100644 index 00000000..c31a5416 --- /dev/null +++ b/node_modules/@aws-sdk/s3-request-presigner/dist-types/getSignedUrl.d.ts @@ -0,0 +1,6 @@ +import { Client, Command } from "@smithy/smithy-client"; +import { MetadataBearer, RequestPresigningArguments } from "@smithy/types"; +/** + * @public + */ +export declare const getSignedUrl: <InputTypesUnion extends object, InputType extends InputTypesUnion, OutputType extends MetadataBearer = MetadataBearer>(client: Client<any, InputTypesUnion, MetadataBearer, any>, command: Command<InputType, OutputType, any, InputTypesUnion, MetadataBearer>, options?: RequestPresigningArguments) => Promise<string>; diff --git a/node_modules/@aws-sdk/s3-request-presigner/dist-types/index.d.ts b/node_modules/@aws-sdk/s3-request-presigner/dist-types/index.d.ts new file mode 100644 index 00000000..f20b818e --- /dev/null +++ 
b/node_modules/@aws-sdk/s3-request-presigner/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./getSignedUrl"; +export * from "./presigner"; diff --git a/node_modules/@aws-sdk/s3-request-presigner/dist-types/presigner.d.ts b/node_modules/@aws-sdk/s3-request-presigner/dist-types/presigner.d.ts new file mode 100644 index 00000000..db3a77a5 --- /dev/null +++ b/node_modules/@aws-sdk/s3-request-presigner/dist-types/presigner.d.ts @@ -0,0 +1,15 @@ +import { SignatureV4MultiRegionInit } from "@aws-sdk/signature-v4-multi-region"; +import { AwsCredentialIdentity, RequestPresigner, RequestPresigningArguments } from "@smithy/types"; +import { HttpRequest as IHttpRequest } from "@smithy/types"; +type PartialBy<T, K extends keyof T> = Omit<T, K> & Partial<Pick<T, K>>; +export type S3RequestPresignerOptions = PartialBy<SignatureV4MultiRegionInit, "service" | "uriEscapePath"> & { + signingName?: string; +}; +export declare class S3RequestPresigner implements RequestPresigner { + private readonly signer; + constructor(options: S3RequestPresignerOptions); + presign(requestToSign: IHttpRequest, { unsignableHeaders, hoistableHeaders, unhoistableHeaders, ...options }?: RequestPresigningArguments): Promise<IHttpRequest>; + presignWithCredentials(requestToSign: IHttpRequest, credentials: AwsCredentialIdentity, { unsignableHeaders, hoistableHeaders, unhoistableHeaders, ...options }?: RequestPresigningArguments): Promise<IHttpRequest>; + private prepareRequest; +} +export {}; diff --git a/node_modules/@aws-sdk/s3-request-presigner/dist-types/ts3.4/constants.d.ts b/node_modules/@aws-sdk/s3-request-presigner/dist-types/ts3.4/constants.d.ts new file mode 100644 index 00000000..41ae278f --- /dev/null +++ b/node_modules/@aws-sdk/s3-request-presigner/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,9 @@ +export declare const UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +export declare const SHA256_HEADER = "X-Amz-Content-Sha256"; +export declare const ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +export declare const CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +export declare const AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +export declare const SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +export declare const EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +export declare const HOST_HEADER = "host"; +export declare const ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; diff --git a/node_modules/@aws-sdk/s3-request-presigner/dist-types/ts3.4/getSignedUrl.d.ts b/node_modules/@aws-sdk/s3-request-presigner/dist-types/ts3.4/getSignedUrl.d.ts new file mode 100644 index 00000000..ad0bbf19 --- /dev/null +++ b/node_modules/@aws-sdk/s3-request-presigner/dist-types/ts3.4/getSignedUrl.d.ts @@ -0,0 +1,11 @@ +import { Client, Command } from "@smithy/smithy-client"; +import { MetadataBearer, RequestPresigningArguments } from "@smithy/types"; +export declare const getSignedUrl: < + InputTypesUnion extends object, + InputType extends InputTypesUnion, + OutputType extends MetadataBearer = MetadataBearer +>( + client: Client<any, InputTypesUnion, MetadataBearer, any>, + command: Command<InputType, OutputType, any, InputTypesUnion, MetadataBearer>, + options?: RequestPresigningArguments +) => Promise<string>; diff --git a/node_modules/@aws-sdk/s3-request-presigner/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/s3-request-presigner/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..f20b818e --- /dev/null +++ b/node_modules/@aws-sdk/s3-request-presigner/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./getSignedUrl"; +export * from "./presigner"; diff --git a/node_modules/@aws-sdk/s3-request-presigner/dist-types/ts3.4/presigner.d.ts b/node_modules/@aws-sdk/s3-request-presigner/dist-types/ts3.4/presigner.d.ts new file mode 100644 index 00000000..bc5d0fa8 --- 
/dev/null +++ b/node_modules/@aws-sdk/s3-request-presigner/dist-types/ts3.4/presigner.d.ts @@ -0,0 +1,40 @@ +import { SignatureV4MultiRegionInit } from "@aws-sdk/signature-v4-multi-region"; +import { + AwsCredentialIdentity, + RequestPresigner, + RequestPresigningArguments, +} from "@smithy/types"; +import { HttpRequest as IHttpRequest } from "@smithy/types"; +type PartialBy<T, K extends keyof T> = Pick<T, Exclude<keyof T, K>> & + Partial<Pick<T, K>>; +export type S3RequestPresignerOptions = PartialBy< + SignatureV4MultiRegionInit, + "service" | "uriEscapePath" +> & { + signingName?: string; +}; +export declare class S3RequestPresigner implements RequestPresigner { + private readonly signer; + constructor(options: S3RequestPresignerOptions); + presign( + requestToSign: IHttpRequest, + { + unsignableHeaders, + hoistableHeaders, + unhoistableHeaders, + ...options + }?: RequestPresigningArguments + ): Promise<IHttpRequest>; + presignWithCredentials( + requestToSign: IHttpRequest, + credentials: AwsCredentialIdentity, + { + unsignableHeaders, + hoistableHeaders, + unhoistableHeaders, + ...options + }?: RequestPresigningArguments + ): Promise<IHttpRequest>; + private prepareRequest; +} +export {}; diff --git a/node_modules/@aws-sdk/s3-request-presigner/package.json b/node_modules/@aws-sdk/s3-request-presigner/package.json new file mode 100644 index 00000000..ea3b16c5 --- /dev/null +++ b/node_modules/@aws-sdk/s3-request-presigner/package.json @@ -0,0 +1,63 @@ +{ + "name": "@aws-sdk/s3-request-presigner", + "version": "3.802.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline s3-request-presigner", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/signature-v4-multi-region": "3.800.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-format-url": "3.775.0", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@aws-sdk/client-s3": "3.802.0", + "@smithy/hash-node": "^4.0.2", + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/s3-request-presigner", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/s3-request-presigner" + } +} diff --git a/node_modules/@aws-sdk/signature-v4-multi-region/LICENSE b/node_modules/@aws-sdk/signature-v4-multi-region/LICENSE new file mode 100644 index 00000000..e907b586 --- /dev/null +++ b/node_modules/@aws-sdk/signature-v4-multi-region/LICENSE @@ -0,0 +1,201 @@ + Apache 
License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@aws-sdk/signature-v4-multi-region/README.md b/node_modules/@aws-sdk/signature-v4-multi-region/README.md new file mode 100644 index 00000000..4120f788 --- /dev/null +++ b/node_modules/@aws-sdk/signature-v4-multi-region/README.md @@ -0,0 +1,32 @@ +# @aws-sdk/signature-v4-multi-region + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/signature-v4-multi-region/latest.svg)](https://www.npmjs.com/package/@aws-sdk/signature-v4-multi-region) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/signature-v4-multi-region.svg)](https://www.npmjs.com/package/@aws-sdk/signature-v4-multi-region) + +See also https://github.com/aws/aws-sdk-js-v3/tree/main#functionality-requiring-aws-common-runtime-crt. + +## Usage + +This package contains the optional dependency [`@aws-sdk/signature-v4-crt`](https://www.npmjs.com/package/@aws-sdk/signature-v4-crt). +You need to install that package explicitly to sign a multi-region request with the SigV4a algorithm. The package contains +a Node.js native implementation which requires building at installation. The installed package MAY NOT work if the +instance building the package runs a different operating system than the instance running the application. + +`@aws-sdk/signature-v4-crt` is currently only supported in Node.js because it relies on a native dependency. + +Please refer to [this issue](https://github.com/aws/aws-sdk-js-v3/issues/2822) for more information. + +Note: You can also use a pure-JS (non-CRT) implementation of the SigV4a signer; instructions are here: +https://github.com/aws/aws-sdk-js-v3/tree/main#functionality-requiring-aws-common-runtime-crt + +Please refer to the note regarding bundle size at the link above before deciding to use the JS SigV4a signer (including in browsers). + +## Description + +This package provides a SigV4-compatible request signer that wraps a pure-JS SigV4 signer +([`@aws-sdk/signature-v4`](https://www.npmjs.com/package/@aws-sdk/signature-v4)) for regional requests, and attempts to +call a native implementation of the SigV4a signer ([`@aws-sdk/signature-v4-crt`](https://www.npmjs.com/package/@aws-sdk/signature-v4-crt)) +if the request is multi-region. + +A multi-region request is identified by the `signingRegion` parameter. A request is multi-region if the `signingRegion` +parameter is set to `*`. 
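For context before the remaining vendored files, here is a minimal sketch (not part of this diff) of how `getSignedUrl` from the vendored `@aws-sdk/s3-request-presigner` is typically called against an S3-compatible store such as MinIO. The endpoint, credentials, bucket, and key below are hypothetical placeholders; the `getSignedUrl(client, command, options)` signature and the 900-second default for `expiresIn` come from the `dist-types` declarations and `presigner.ts` source added above.

```ts
import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";

// Hypothetical MinIO configuration; MinIO usually serves buckets at
// path-style URLs, hence forcePathStyle.
const client = new S3Client({
  region: "us-east-1",
  endpoint: "http://localhost:9000",
  forcePathStyle: true,
  credentials: { accessKeyId: "minioadmin", secretAccessKey: "minioadmin" },
});

// Presign a GET for one object; expiresIn overrides the 900-second default.
export async function presignDownload(bucket: string, key: string): Promise<string> {
  return getSignedUrl(client, new GetObjectCommand({ Bucket: bucket, Key: key }), {
    expiresIn: 3600,
  });
}
```

Because a single-endpoint setup like this never signs with `signingRegion` set to `*`, the pure-JS SigV4 path in `SignatureV4MultiRegion` below is used, and neither `@aws-sdk/signature-v4-crt` nor `@aws-sdk/signature-v4a` needs to be installed.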
diff --git a/node_modules/@aws-sdk/signature-v4-multi-region/dist-cjs/index.js b/node_modules/@aws-sdk/signature-v4-multi-region/dist-cjs/index.js new file mode 100644 index 00000000..b4b5502c --- /dev/null +++ b/node_modules/@aws-sdk/signature-v4-multi-region/dist-cjs/index.js @@ -0,0 +1,142 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + SignatureV4MultiRegion: () => SignatureV4MultiRegion, + signatureV4CrtContainer: () => signatureV4CrtContainer +}); +module.exports = __toCommonJS(index_exports); + +// src/SignatureV4MultiRegion.ts +var import_middleware_sdk_s3 = require("@aws-sdk/middleware-sdk-s3"); +var import_signature_v4 = require("@smithy/signature-v4"); + +// src/signature-v4-crt-container.ts +var signatureV4CrtContainer = { + CrtSignerV4: null +}; + +// src/SignatureV4MultiRegion.ts +var SignatureV4MultiRegion = class { + static { + __name(this, "SignatureV4MultiRegion"); + } + sigv4aSigner; + sigv4Signer; + signerOptions; + constructor(options) { + this.sigv4Signer = new import_middleware_sdk_s3.SignatureV4S3Express(options); + this.signerOptions = options; + } + async sign(requestToSign, options = {}) { + if (options.signingRegion === "*") { + return this.getSigv4aSigner().sign(requestToSign, options); + } + return this.sigv4Signer.sign(requestToSign, options); + } + /** + * Sign with alternate credentials to the ones provided in the constructor. + * Note: This is only supported for SigV4a when using the CRT implementation. + */ + async signWithCredentials(requestToSign, credentials, options = {}) { + if (options.signingRegion === "*") { + const signer = this.getSigv4aSigner(); + const CrtSignerV4 = signatureV4CrtContainer.CrtSignerV4; + if (CrtSignerV4 && signer instanceof CrtSignerV4) { + return signer.signWithCredentials(requestToSign, credentials, options); + } else { + throw new Error( + `signWithCredentials with signingRegion '*' is only supported when using the CRT dependency @aws-sdk/signature-v4-crt. Please check whether you have installed the "@aws-sdk/signature-v4-crt" package explicitly. You must also register the package by calling [require("@aws-sdk/signature-v4-crt");] or an ESM equivalent such as [import "@aws-sdk/signature-v4-crt";]. For more information please go to https://github.com/aws/aws-sdk-js-v3#functionality-requiring-aws-common-runtime-crt` + ); + } + } + return this.sigv4Signer.signWithCredentials(requestToSign, credentials, options); + } + /** + * Presign a request. + * Note: This is only supported for SigV4a when using the CRT implementation. 
+ */ + async presign(originalRequest, options = {}) { + if (options.signingRegion === "*") { + const signer = this.getSigv4aSigner(); + const CrtSignerV4 = signatureV4CrtContainer.CrtSignerV4; + if (CrtSignerV4 && signer instanceof CrtSignerV4) { + return signer.presign(originalRequest, options); + } else { + throw new Error( + `presign with signingRegion '*' is only supported when using the CRT dependency @aws-sdk/signature-v4-crt. Please check whether you have installed the "@aws-sdk/signature-v4-crt" package explicitly. You must also register the package by calling [require("@aws-sdk/signature-v4-crt");] or an ESM equivalent such as [import "@aws-sdk/signature-v4-crt";]. For more information please go to https://github.com/aws/aws-sdk-js-v3#functionality-requiring-aws-common-runtime-crt` + ); + } + } + return this.sigv4Signer.presign(originalRequest, options); + } + async presignWithCredentials(originalRequest, credentials, options = {}) { + if (options.signingRegion === "*") { + throw new Error("Method presignWithCredentials is not supported for [signingRegion=*]."); + } + return this.sigv4Signer.presignWithCredentials(originalRequest, credentials, options); + } + getSigv4aSigner() { + if (!this.sigv4aSigner) { + const CrtSignerV4 = signatureV4CrtContainer.CrtSignerV4; + const JsSigV4aSigner = import_signature_v4.signatureV4aContainer.SignatureV4a; + if (this.signerOptions.runtime === "node") { + if (!CrtSignerV4 && !JsSigV4aSigner) { + throw new Error( + "Neither CRT nor JS SigV4a implementation is available. Please load either @aws-sdk/signature-v4-crt or @aws-sdk/signature-v4a. For more information please go to https://github.com/aws/aws-sdk-js-v3#functionality-requiring-aws-common-runtime-crt" + ); + } + if (CrtSignerV4 && typeof CrtSignerV4 === "function") { + this.sigv4aSigner = new CrtSignerV4({ + ...this.signerOptions, + signingAlgorithm: 1 + }); + } else if (JsSigV4aSigner && typeof JsSigV4aSigner === "function") { + this.sigv4aSigner = new JsSigV4aSigner({ + ...this.signerOptions + }); + } else { + throw new Error( + "Available SigV4a implementation is not a valid constructor. Please ensure you've properly imported @aws-sdk/signature-v4-crt or @aws-sdk/signature-v4a. For more information please go to https://github.com/aws/aws-sdk-js-v3#functionality-requiring-aws-common-runtime-crt" + ); + } + } else { + if (!JsSigV4aSigner || typeof JsSigV4aSigner !== "function") { + throw new Error( + "JS SigV4a implementation is not available or not a valid constructor. Please check whether you have installed the @aws-sdk/signature-v4a package explicitly. The CRT implementation is not available for browsers. You must also register the package by calling [require('@aws-sdk/signature-v4a');] or an ESM equivalent such as [import '@aws-sdk/signature-v4a';]. 
For more information please go to https://github.com/aws/aws-sdk-js-v3#using-javascript-non-crt-implementation-of-sigv4a" + ); + } + this.sigv4aSigner = new JsSigV4aSigner({ + ...this.signerOptions + }); + } + } + return this.sigv4aSigner; + } +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + SignatureV4MultiRegion, + signatureV4CrtContainer +}); + diff --git a/node_modules/@aws-sdk/signature-v4-multi-region/dist-es/SignatureV4MultiRegion.js b/node_modules/@aws-sdk/signature-v4-multi-region/dist-es/SignatureV4MultiRegion.js new file mode 100644 index 00000000..86dff960 --- /dev/null +++ b/node_modules/@aws-sdk/signature-v4-multi-region/dist-es/SignatureV4MultiRegion.js @@ -0,0 +1,103 @@ +import { SignatureV4S3Express } from "@aws-sdk/middleware-sdk-s3"; +import { signatureV4aContainer, } from "@smithy/signature-v4"; +import { signatureV4CrtContainer } from "./signature-v4-crt-container"; +export class SignatureV4MultiRegion { + sigv4aSigner; + sigv4Signer; + signerOptions; + constructor(options) { + this.sigv4Signer = new SignatureV4S3Express(options); + this.signerOptions = options; + } + async sign(requestToSign, options = {}) { + if (options.signingRegion === "*") { + return this.getSigv4aSigner().sign(requestToSign, options); + } + return this.sigv4Signer.sign(requestToSign, options); + } + async signWithCredentials(requestToSign, credentials, options = {}) { + if (options.signingRegion === "*") { + const signer = this.getSigv4aSigner(); + const CrtSignerV4 = signatureV4CrtContainer.CrtSignerV4; + if (CrtSignerV4 && signer instanceof CrtSignerV4) { + return signer.signWithCredentials(requestToSign, credentials, options); + } + else { + throw new Error(`signWithCredentials with signingRegion '*' is only supported when using the CRT dependency @aws-sdk/signature-v4-crt. ` + + `Please check whether you have installed the "@aws-sdk/signature-v4-crt" package explicitly. ` + + `You must also register the package by calling [require("@aws-sdk/signature-v4-crt");] ` + + `or an ESM equivalent such as [import "@aws-sdk/signature-v4-crt";]. ` + + `For more information please go to https://github.com/aws/aws-sdk-js-v3#functionality-requiring-aws-common-runtime-crt`); + } + } + return this.sigv4Signer.signWithCredentials(requestToSign, credentials, options); + } + async presign(originalRequest, options = {}) { + if (options.signingRegion === "*") { + const signer = this.getSigv4aSigner(); + const CrtSignerV4 = signatureV4CrtContainer.CrtSignerV4; + if (CrtSignerV4 && signer instanceof CrtSignerV4) { + return signer.presign(originalRequest, options); + } + else { + throw new Error(`presign with signingRegion '*' is only supported when using the CRT dependency @aws-sdk/signature-v4-crt. ` + + `Please check whether you have installed the "@aws-sdk/signature-v4-crt" package explicitly. ` + + `You must also register the package by calling [require("@aws-sdk/signature-v4-crt");] ` + + `or an ESM equivalent such as [import "@aws-sdk/signature-v4-crt";]. 
` + + `For more information please go to https://github.com/aws/aws-sdk-js-v3#functionality-requiring-aws-common-runtime-crt`); + } + } + return this.sigv4Signer.presign(originalRequest, options); + } + async presignWithCredentials(originalRequest, credentials, options = {}) { + if (options.signingRegion === "*") { + throw new Error("Method presignWithCredentials is not supported for [signingRegion=*]."); + } + return this.sigv4Signer.presignWithCredentials(originalRequest, credentials, options); + } + getSigv4aSigner() { + if (!this.sigv4aSigner) { + const CrtSignerV4 = signatureV4CrtContainer.CrtSignerV4; + const JsSigV4aSigner = signatureV4aContainer.SignatureV4a; + if (this.signerOptions.runtime === "node") { + if (!CrtSignerV4 && !JsSigV4aSigner) { + throw new Error("Neither CRT nor JS SigV4a implementation is available. " + + "Please load either @aws-sdk/signature-v4-crt or @aws-sdk/signature-v4a. " + + "For more information please go to " + + "https://github.com/aws/aws-sdk-js-v3#functionality-requiring-aws-common-runtime-crt"); + } + if (CrtSignerV4 && typeof CrtSignerV4 === "function") { + this.sigv4aSigner = new CrtSignerV4({ + ...this.signerOptions, + signingAlgorithm: 1, + }); + } + else if (JsSigV4aSigner && typeof JsSigV4aSigner === "function") { + this.sigv4aSigner = new JsSigV4aSigner({ + ...this.signerOptions, + }); + } + else { + throw new Error("Available SigV4a implementation is not a valid constructor. " + + "Please ensure you've properly imported @aws-sdk/signature-v4-crt or @aws-sdk/signature-v4a. " + + "For more information please go to " + + "https://github.com/aws/aws-sdk-js-v3#functionality-requiring-aws-common-runtime-crt"); + } + } + else { + if (!JsSigV4aSigner || typeof JsSigV4aSigner !== "function") { + throw new Error("JS SigV4a implementation is not available or not a valid constructor. " + + "Please check whether you have installed the @aws-sdk/signature-v4a package explicitly. The CRT implementation is not available for browsers. " + + "You must also register the package by calling [require('@aws-sdk/signature-v4a');] " + + "or an ESM equivalent such as [import '@aws-sdk/signature-v4a';]. 
" + + "For more information please go to " + + "https://github.com/aws/aws-sdk-js-v3#using-javascript-non-crt-implementation-of-sigv4a"); + } + this.sigv4aSigner = new JsSigV4aSigner({ + ...this.signerOptions, + }); + } + } + return this.sigv4aSigner; + } +} diff --git a/node_modules/@aws-sdk/signature-v4-multi-region/dist-es/index.js b/node_modules/@aws-sdk/signature-v4-multi-region/dist-es/index.js new file mode 100644 index 00000000..1e32dd29 --- /dev/null +++ b/node_modules/@aws-sdk/signature-v4-multi-region/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./SignatureV4MultiRegion"; +export * from "./signature-v4-crt-container"; diff --git a/node_modules/@aws-sdk/signature-v4-multi-region/dist-es/signature-v4-crt-container.js b/node_modules/@aws-sdk/signature-v4-multi-region/dist-es/signature-v4-crt-container.js new file mode 100644 index 00000000..c4bcc64c --- /dev/null +++ b/node_modules/@aws-sdk/signature-v4-multi-region/dist-es/signature-v4-crt-container.js @@ -0,0 +1,3 @@ +export const signatureV4CrtContainer = { + CrtSignerV4: null, +}; diff --git a/node_modules/@aws-sdk/signature-v4-multi-region/dist-types/SignatureV4MultiRegion.d.ts b/node_modules/@aws-sdk/signature-v4-multi-region/dist-types/SignatureV4MultiRegion.d.ts new file mode 100644 index 00000000..637deb8e --- /dev/null +++ b/node_modules/@aws-sdk/signature-v4-multi-region/dist-types/SignatureV4MultiRegion.d.ts @@ -0,0 +1,34 @@ +import { SignatureV4CryptoInit, SignatureV4Init } from "@smithy/signature-v4"; +import { AwsCredentialIdentity, HttpRequest, RequestPresigner, RequestPresigningArguments, RequestSigner, RequestSigningArguments } from "@smithy/types"; +/** + * @internal + */ +export type SignatureV4MultiRegionInit = SignatureV4Init & SignatureV4CryptoInit & { + runtime?: string; +}; +/** + * A SigV4-compatible signer for S3 service. In order to support SigV4a algorithm according to the operation input + * dynamically, the signer wraps native module SigV4a signer and JS SigV4 signer. It signs the request with SigV4a + * algorithm if the request needs to be signed with `*` region. Otherwise, it signs the request with normal SigV4 + * signer. + * @internal + */ +export declare class SignatureV4MultiRegion implements RequestPresigner, RequestSigner { + private sigv4aSigner?; + private readonly sigv4Signer; + private readonly signerOptions; + constructor(options: SignatureV4MultiRegionInit); + sign(requestToSign: HttpRequest, options?: RequestSigningArguments): Promise<HttpRequest>; + /** + * Sign with alternate credentials to the ones provided in the constructor. + * Note: This is only supported for SigV4a when using the CRT implementation. + */ + signWithCredentials(requestToSign: HttpRequest, credentials: AwsCredentialIdentity, options?: RequestSigningArguments): Promise<HttpRequest>; + /** + * Presign a request. + * Note: This is only supported for SigV4a when using the CRT implementation. 
+ */ + presign(originalRequest: HttpRequest, options?: RequestPresigningArguments): Promise<HttpRequest>; + presignWithCredentials(originalRequest: HttpRequest, credentials: AwsCredentialIdentity, options?: RequestPresigningArguments): Promise<HttpRequest>; + private getSigv4aSigner; +} diff --git a/node_modules/@aws-sdk/signature-v4-multi-region/dist-types/index.d.ts b/node_modules/@aws-sdk/signature-v4-multi-region/dist-types/index.d.ts new file mode 100644 index 00000000..1a5cf5f7 --- /dev/null +++ b/node_modules/@aws-sdk/signature-v4-multi-region/dist-types/index.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + */ +export * from "./SignatureV4MultiRegion"; +export * from "./signature-v4-crt-container"; diff --git a/node_modules/@aws-sdk/signature-v4-multi-region/dist-types/signature-v4-crt-container.d.ts b/node_modules/@aws-sdk/signature-v4-multi-region/dist-types/signature-v4-crt-container.d.ts new file mode 100644 index 00000000..853fbd5b --- /dev/null +++ b/node_modules/@aws-sdk/signature-v4-multi-region/dist-types/signature-v4-crt-container.d.ts @@ -0,0 +1,28 @@ +import type { AwsCredentialIdentity } from "@aws-sdk/types"; +import type { HttpRequest, RequestPresigner, RequestSigner, RequestSigningArguments } from "@smithy/types"; +/** + * @public + */ +export type OptionalCrtSignerV4 = { + /** + * This constructor is not typed so as not to require a type import + * from the signature-v4-crt package. + * + * The true type is CrtSignerV4 from \@aws-sdk/signature-v4-crt. + */ + new (options: any): RequestPresigner & RequestSigner & { + signWithCredentials(requestToSign: HttpRequest, credentials: AwsCredentialIdentity, options: RequestSigningArguments): Promise<HttpRequest>; + }; +}; +/** + * @public + * + * \@aws-sdk/signature-v4-crt will install the constructor in this + * container if it is installed. + * + * This avoids a runtime-require being interpreted statically by bundlers. 
+ * + */ +export declare const signatureV4CrtContainer: { + CrtSignerV4: null | OptionalCrtSignerV4; +}; diff --git a/node_modules/@aws-sdk/signature-v4-multi-region/dist-types/ts3.4/SignatureV4MultiRegion.d.ts b/node_modules/@aws-sdk/signature-v4-multi-region/dist-types/ts3.4/SignatureV4MultiRegion.d.ts new file mode 100644 index 00000000..6eaa80d7 --- /dev/null +++ b/node_modules/@aws-sdk/signature-v4-multi-region/dist-types/ts3.4/SignatureV4MultiRegion.d.ts @@ -0,0 +1,40 @@ +import { SignatureV4CryptoInit, SignatureV4Init } from "@smithy/signature-v4"; +import { + AwsCredentialIdentity, + HttpRequest, + RequestPresigner, + RequestPresigningArguments, + RequestSigner, + RequestSigningArguments, +} from "@smithy/types"; +export type SignatureV4MultiRegionInit = SignatureV4Init & + SignatureV4CryptoInit & { + runtime?: string; + }; +export declare class SignatureV4MultiRegion + implements RequestPresigner, RequestSigner +{ + private sigv4aSigner?; + private readonly sigv4Signer; + private readonly signerOptions; + constructor(options: SignatureV4MultiRegionInit); + sign( + requestToSign: HttpRequest, + options?: RequestSigningArguments + ): Promise<HttpRequest>; + signWithCredentials( + requestToSign: HttpRequest, + credentials: AwsCredentialIdentity, + options?: RequestSigningArguments + ): Promise<HttpRequest>; + presign( + originalRequest: HttpRequest, + options?: RequestPresigningArguments + ): Promise<HttpRequest>; + presignWithCredentials( + originalRequest: HttpRequest, + credentials: AwsCredentialIdentity, + options?: RequestPresigningArguments + ): Promise<HttpRequest>; + private getSigv4aSigner; +} diff --git a/node_modules/@aws-sdk/signature-v4-multi-region/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/signature-v4-multi-region/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..1e32dd29 --- /dev/null +++ b/node_modules/@aws-sdk/signature-v4-multi-region/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./SignatureV4MultiRegion"; +export * from "./signature-v4-crt-container"; diff --git a/node_modules/@aws-sdk/signature-v4-multi-region/dist-types/ts3.4/signature-v4-crt-container.d.ts b/node_modules/@aws-sdk/signature-v4-multi-region/dist-types/ts3.4/signature-v4-crt-container.d.ts new file mode 100644 index 00000000..c91bec13 --- /dev/null +++ b/node_modules/@aws-sdk/signature-v4-multi-region/dist-types/ts3.4/signature-v4-crt-container.d.ts @@ -0,0 +1,20 @@ +import { AwsCredentialIdentity } from "@aws-sdk/types"; +import { + HttpRequest, + RequestPresigner, + RequestSigner, + RequestSigningArguments, +} from "@smithy/types"; +export type OptionalCrtSignerV4 = { + new (options: any): RequestPresigner & + RequestSigner & { + signWithCredentials( + requestToSign: HttpRequest, + credentials: AwsCredentialIdentity, + options: RequestSigningArguments + ): Promise<HttpRequest>; + }; +}; +export declare const signatureV4CrtContainer: { + CrtSignerV4: null | OptionalCrtSignerV4; +}; diff --git a/node_modules/@aws-sdk/signature-v4-multi-region/package.json b/node_modules/@aws-sdk/signature-v4-multi-region/package.json new file mode 100644 index 00000000..0fa5a313 --- /dev/null +++ b/node_modules/@aws-sdk/signature-v4-multi-region/package.json @@ -0,0 +1,60 @@ +{ + "name": "@aws-sdk/signature-v4-multi-region", + "version": "3.800.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline signature-v4-multi-region", + "build:es": "tsc -p tsconfig.es.json", + "build:browser": "node ./test-browser/browser-build/esbuild", 
"build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:e2e": "yarn g:vitest run -c vitest.config.e2e.ts", + "test:browser": "yarn build:browser && yarn g:vitest run -c vitest.config.browser.ts", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/middleware-sdk-s3": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/signature-v4-multi-region", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/signature-v4-multi-region" + } +} diff --git a/node_modules/@aws-sdk/token-providers/LICENSE b/node_modules/@aws-sdk/token-providers/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@aws-sdk/token-providers/README.md b/node_modules/@aws-sdk/token-providers/README.md new file mode 100644 index 00000000..9078019d --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/README.md @@ -0,0 +1,53 @@ +# @aws-sdk/token-providers + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/token-providers/latest.svg)](https://www.npmjs.com/package/@aws-sdk/token-providers) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/token-providers.svg)](https://www.npmjs.com/package/@aws-sdk/token-providers) + +A collection of all token providers. The token providers should be used when the authorization +type is going to be token based. For example, the `bearer` authorization type set using +[httpBearerAuth trait][http-bearer-auth-trait] in Smithy. 
+ +## Static Token Provider + +```ts +import { fromStatic } from "@aws-sdk/token-providers"; + +const token = { token: "TOKEN" }; +const staticTokenProvider = fromStatic(token); + +const staticToken = await staticTokenProvider(); // returns { token: "TOKEN" } +``` + +## SSO Token Provider + +```ts +import { fromSso } from "@aws-sdk/token-providers"; + +// returns token from SSO token cache or ssoOidc.createToken() call. +const ssoToken = await fromSso(); +``` + +## Token Provider Chain + +```ts +import { nodeProvider } from "@aws-sdk/token-providers"; + +// returns token from default providers. +const token = await nodeProvider(); +``` + +[http-bearer-auth-trait]: https://smithy.io/2.0/spec/authentication-traits.html#smithy-api-httpbearerauth-trait + +--- + +### Development + +This package contains a minimal copy of the SSO OIDC client rather than a dependency on the full client, +which would create a circular dependency. + +When regenerating the bundled version of the SSO OIDC client, run the esbuild.js script and then make the following changes: + +- Remove any dependency of the generated client on the credential chain that would create + a circular dependency back to this package. Because we only need the client, the `CreateTokenCommand`, and this command's + associated `Exception`s, it is possible to remove the auth dependencies. +- Ensure all required packages are declared in the `package.json` of token-providers. diff --git a/node_modules/@aws-sdk/token-providers/dist-cjs/index.js b/node_modules/@aws-sdk/token-providers/dist-cjs/index.js new file mode 100644 index 00000000..51a38dff --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-cjs/index.js @@ -0,0 +1,217 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ?
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + fromSso: () => fromSso, + fromStatic: () => fromStatic, + nodeProvider: () => nodeProvider +}); +module.exports = __toCommonJS(index_exports); + +// src/fromSso.ts + + + +// src/constants.ts +var EXPIRE_WINDOW_MS = 5 * 60 * 1e3; +var REFRESH_MESSAGE = `To refresh this SSO session run 'aws sso login' with the corresponding profile.`; + +// src/getSsoOidcClient.ts +var getSsoOidcClient = /* @__PURE__ */ __name(async (ssoRegion, init = {}) => { + const { SSOOIDCClient } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/nested-clients/sso-oidc"))); + const ssoOidcClient = new SSOOIDCClient( + Object.assign({}, init.clientConfig ?? {}, { + region: ssoRegion ?? init.clientConfig?.region, + logger: init.clientConfig?.logger ?? init.parentClientConfig?.logger + }) + ); + return ssoOidcClient; +}, "getSsoOidcClient"); + +// src/getNewSsoOidcToken.ts +var getNewSsoOidcToken = /* @__PURE__ */ __name(async (ssoToken, ssoRegion, init = {}) => { + const { CreateTokenCommand } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/nested-clients/sso-oidc"))); + const ssoOidcClient = await getSsoOidcClient(ssoRegion, init); + return ssoOidcClient.send( + new CreateTokenCommand({ + clientId: ssoToken.clientId, + clientSecret: ssoToken.clientSecret, + refreshToken: ssoToken.refreshToken, + grantType: "refresh_token" + }) + ); +}, "getNewSsoOidcToken"); + +// src/validateTokenExpiry.ts +var import_property_provider = require("@smithy/property-provider"); +var validateTokenExpiry = /* @__PURE__ */ __name((token) => { + if (token.expiration && token.expiration.getTime() < Date.now()) { + throw new import_property_provider.TokenProviderError(`Token is expired. ${REFRESH_MESSAGE}`, false); + } +}, "validateTokenExpiry"); + +// src/validateTokenKey.ts + +var validateTokenKey = /* @__PURE__ */ __name((key, value, forRefresh = false) => { + if (typeof value === "undefined") { + throw new import_property_provider.TokenProviderError( + `Value not present for '${key}' in SSO Token${forRefresh ? ". Cannot refresh" : ""}. ${REFRESH_MESSAGE}`, + false + ); + } +}, "validateTokenKey"); + +// src/writeSSOTokenToFile.ts +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); +var import_fs = require("fs"); +var { writeFile } = import_fs.promises; +var writeSSOTokenToFile = /* @__PURE__ */ __name((id, ssoToken) => { + const tokenFilepath = (0, import_shared_ini_file_loader.getSSOTokenFilepath)(id); + const tokenString = JSON.stringify(ssoToken, null, 2); + return writeFile(tokenFilepath, tokenString); +}, "writeSSOTokenToFile"); + +// src/fromSso.ts +var lastRefreshAttemptTime = /* @__PURE__ */ new Date(0); +var fromSso = /* @__PURE__ */ __name((_init = {}) => async ({ callerClientConfig } = {}) => { + const init = { + ..._init, + parentClientConfig: { + ...callerClientConfig, + ..._init.parentClientConfig + } + }; + init.logger?.debug("@aws-sdk/token-providers - fromSso"); + const profiles = await (0, import_shared_ini_file_loader.parseKnownFiles)(init); + const profileName = (0, import_shared_ini_file_loader.getProfileName)({ + profile: init.profile ?? 
callerClientConfig?.profile + }); + const profile = profiles[profileName]; + if (!profile) { + throw new import_property_provider.TokenProviderError(`Profile '${profileName}' could not be found in shared credentials file.`, false); + } else if (!profile["sso_session"]) { + throw new import_property_provider.TokenProviderError(`Profile '${profileName}' is missing required property 'sso_session'.`); + } + const ssoSessionName = profile["sso_session"]; + const ssoSessions = await (0, import_shared_ini_file_loader.loadSsoSessionData)(init); + const ssoSession = ssoSessions[ssoSessionName]; + if (!ssoSession) { + throw new import_property_provider.TokenProviderError( + `Sso session '${ssoSessionName}' could not be found in shared credentials file.`, + false + ); + } + for (const ssoSessionRequiredKey of ["sso_start_url", "sso_region"]) { + if (!ssoSession[ssoSessionRequiredKey]) { + throw new import_property_provider.TokenProviderError( + `Sso session '${ssoSessionName}' is missing required property '${ssoSessionRequiredKey}'.`, + false + ); + } + } + const ssoStartUrl = ssoSession["sso_start_url"]; + const ssoRegion = ssoSession["sso_region"]; + let ssoToken; + try { + ssoToken = await (0, import_shared_ini_file_loader.getSSOTokenFromFile)(ssoSessionName); + } catch (e) { + throw new import_property_provider.TokenProviderError( + `The SSO session token associated with profile=${profileName} was not found or is invalid. ${REFRESH_MESSAGE}`, + false + ); + } + validateTokenKey("accessToken", ssoToken.accessToken); + validateTokenKey("expiresAt", ssoToken.expiresAt); + const { accessToken, expiresAt } = ssoToken; + const existingToken = { token: accessToken, expiration: new Date(expiresAt) }; + if (existingToken.expiration.getTime() - Date.now() > EXPIRE_WINDOW_MS) { + return existingToken; + } + if (Date.now() - lastRefreshAttemptTime.getTime() < 30 * 1e3) { + validateTokenExpiry(existingToken); + return existingToken; + } + validateTokenKey("clientId", ssoToken.clientId, true); + validateTokenKey("clientSecret", ssoToken.clientSecret, true); + validateTokenKey("refreshToken", ssoToken.refreshToken, true); + try { + lastRefreshAttemptTime.setTime(Date.now()); + const newSsoOidcToken = await getNewSsoOidcToken(ssoToken, ssoRegion, init); + validateTokenKey("accessToken", newSsoOidcToken.accessToken); + validateTokenKey("expiresIn", newSsoOidcToken.expiresIn); + const newTokenExpiration = new Date(Date.now() + newSsoOidcToken.expiresIn * 1e3); + try { + await writeSSOTokenToFile(ssoSessionName, { + ...ssoToken, + accessToken: newSsoOidcToken.accessToken, + expiresAt: newTokenExpiration.toISOString(), + refreshToken: newSsoOidcToken.refreshToken + }); + } catch (error) { + } + return { + token: newSsoOidcToken.accessToken, + expiration: newTokenExpiration + }; + } catch (error) { + validateTokenExpiry(existingToken); + return existingToken; + } +}, "fromSso"); + +// src/fromStatic.ts + +var fromStatic = /* @__PURE__ */ __name(({ token, logger }) => async () => { + logger?.debug("@aws-sdk/token-providers - fromStatic"); + if (!token || !token.token) { + throw new import_property_provider.TokenProviderError(`Please pass a valid token to fromStatic`, false); + } + return token; +}, "fromStatic"); + +// src/nodeProvider.ts + +var nodeProvider = /* @__PURE__ */ __name((init = {}) => (0, import_property_provider.memoize)( + (0, import_property_provider.chain)(fromSso(init), async () => { + throw new import_property_provider.TokenProviderError("Could not load token from any providers", false); + }), + 
(token) => token.expiration !== void 0 && token.expiration.getTime() - Date.now() < 3e5, + (token) => token.expiration !== void 0 +), "nodeProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromSso, + fromStatic, + nodeProvider +}); + diff --git a/node_modules/@aws-sdk/token-providers/dist-es/constants.js b/node_modules/@aws-sdk/token-providers/dist-es/constants.js new file mode 100644 index 00000000..b84a1267 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-es/constants.js @@ -0,0 +1,2 @@ +export const EXPIRE_WINDOW_MS = 5 * 60 * 1000; +export const REFRESH_MESSAGE = `To refresh this SSO session run 'aws sso login' with the corresponding profile.`; diff --git a/node_modules/@aws-sdk/token-providers/dist-es/fromSso.js b/node_modules/@aws-sdk/token-providers/dist-es/fromSso.js new file mode 100644 index 00000000..61d20750 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-es/fromSso.js @@ -0,0 +1,88 @@ +import { TokenProviderError } from "@smithy/property-provider"; +import { getProfileName, getSSOTokenFromFile, loadSsoSessionData, parseKnownFiles, } from "@smithy/shared-ini-file-loader"; +import { EXPIRE_WINDOW_MS, REFRESH_MESSAGE } from "./constants"; +import { getNewSsoOidcToken } from "./getNewSsoOidcToken"; +import { validateTokenExpiry } from "./validateTokenExpiry"; +import { validateTokenKey } from "./validateTokenKey"; +import { writeSSOTokenToFile } from "./writeSSOTokenToFile"; +const lastRefreshAttemptTime = new Date(0); +export const fromSso = (_init = {}) => async ({ callerClientConfig } = {}) => { + const init = { + ..._init, + parentClientConfig: { + ...callerClientConfig, + ..._init.parentClientConfig, + }, + }; + init.logger?.debug("@aws-sdk/token-providers - fromSso"); + const profiles = await parseKnownFiles(init); + const profileName = getProfileName({ + profile: init.profile ?? callerClientConfig?.profile, + }); + const profile = profiles[profileName]; + if (!profile) { + throw new TokenProviderError(`Profile '${profileName}' could not be found in shared credentials file.`, false); + } + else if (!profile["sso_session"]) { + throw new TokenProviderError(`Profile '${profileName}' is missing required property 'sso_session'.`); + } + const ssoSessionName = profile["sso_session"]; + const ssoSessions = await loadSsoSessionData(init); + const ssoSession = ssoSessions[ssoSessionName]; + if (!ssoSession) { + throw new TokenProviderError(`Sso session '${ssoSessionName}' could not be found in shared credentials file.`, false); + } + for (const ssoSessionRequiredKey of ["sso_start_url", "sso_region"]) { + if (!ssoSession[ssoSessionRequiredKey]) { + throw new TokenProviderError(`Sso session '${ssoSessionName}' is missing required property '${ssoSessionRequiredKey}'.`, false); + } + } + const ssoStartUrl = ssoSession["sso_start_url"]; + const ssoRegion = ssoSession["sso_region"]; + let ssoToken; + try { + ssoToken = await getSSOTokenFromFile(ssoSessionName); + } + catch (e) { + throw new TokenProviderError(`The SSO session token associated with profile=${profileName} was not found or is invalid. 
${REFRESH_MESSAGE}`, false); + } + validateTokenKey("accessToken", ssoToken.accessToken); + validateTokenKey("expiresAt", ssoToken.expiresAt); + const { accessToken, expiresAt } = ssoToken; + const existingToken = { token: accessToken, expiration: new Date(expiresAt) }; + if (existingToken.expiration.getTime() - Date.now() > EXPIRE_WINDOW_MS) { + return existingToken; + } + if (Date.now() - lastRefreshAttemptTime.getTime() < 30 * 1000) { + validateTokenExpiry(existingToken); + return existingToken; + } + validateTokenKey("clientId", ssoToken.clientId, true); + validateTokenKey("clientSecret", ssoToken.clientSecret, true); + validateTokenKey("refreshToken", ssoToken.refreshToken, true); + try { + lastRefreshAttemptTime.setTime(Date.now()); + const newSsoOidcToken = await getNewSsoOidcToken(ssoToken, ssoRegion, init); + validateTokenKey("accessToken", newSsoOidcToken.accessToken); + validateTokenKey("expiresIn", newSsoOidcToken.expiresIn); + const newTokenExpiration = new Date(Date.now() + newSsoOidcToken.expiresIn * 1000); + try { + await writeSSOTokenToFile(ssoSessionName, { + ...ssoToken, + accessToken: newSsoOidcToken.accessToken, + expiresAt: newTokenExpiration.toISOString(), + refreshToken: newSsoOidcToken.refreshToken, + }); + } + catch (error) { + } + return { + token: newSsoOidcToken.accessToken, + expiration: newTokenExpiration, + }; + } + catch (error) { + validateTokenExpiry(existingToken); + return existingToken; + } +}; diff --git a/node_modules/@aws-sdk/token-providers/dist-es/fromStatic.js b/node_modules/@aws-sdk/token-providers/dist-es/fromStatic.js new file mode 100644 index 00000000..0704ae08 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-es/fromStatic.js @@ -0,0 +1,8 @@ +import { TokenProviderError } from "@smithy/property-provider"; +export const fromStatic = ({ token, logger }) => async () => { + logger?.debug("@aws-sdk/token-providers - fromStatic"); + if (!token || !token.token) { + throw new TokenProviderError(`Please pass a valid token to fromStatic`, false); + } + return token; +}; diff --git a/node_modules/@aws-sdk/token-providers/dist-es/getNewSsoOidcToken.js b/node_modules/@aws-sdk/token-providers/dist-es/getNewSsoOidcToken.js new file mode 100644 index 00000000..00f7b2c0 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-es/getNewSsoOidcToken.js @@ -0,0 +1,11 @@ +import { getSsoOidcClient } from "./getSsoOidcClient"; +export const getNewSsoOidcToken = async (ssoToken, ssoRegion, init = {}) => { + const { CreateTokenCommand } = await import("@aws-sdk/nested-clients/sso-oidc"); + const ssoOidcClient = await getSsoOidcClient(ssoRegion, init); + return ssoOidcClient.send(new CreateTokenCommand({ + clientId: ssoToken.clientId, + clientSecret: ssoToken.clientSecret, + refreshToken: ssoToken.refreshToken, + grantType: "refresh_token", + })); +}; diff --git a/node_modules/@aws-sdk/token-providers/dist-es/getSsoOidcClient.js b/node_modules/@aws-sdk/token-providers/dist-es/getSsoOidcClient.js new file mode 100644 index 00000000..689be721 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-es/getSsoOidcClient.js @@ -0,0 +1,8 @@ +export const getSsoOidcClient = async (ssoRegion, init = {}) => { + const { SSOOIDCClient } = await import("@aws-sdk/nested-clients/sso-oidc"); + const ssoOidcClient = new SSOOIDCClient(Object.assign({}, init.clientConfig ?? {}, { + region: ssoRegion ?? init.clientConfig?.region, + logger: init.clientConfig?.logger ?? 
init.parentClientConfig?.logger, + })); + return ssoOidcClient; +}; diff --git a/node_modules/@aws-sdk/token-providers/dist-es/index.js b/node_modules/@aws-sdk/token-providers/dist-es/index.js new file mode 100644 index 00000000..a0b176b4 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./fromSso"; +export * from "./fromStatic"; +export * from "./nodeProvider"; diff --git a/node_modules/@aws-sdk/token-providers/dist-es/nodeProvider.js b/node_modules/@aws-sdk/token-providers/dist-es/nodeProvider.js new file mode 100644 index 00000000..a0c7b520 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-es/nodeProvider.js @@ -0,0 +1,5 @@ +import { chain, memoize, TokenProviderError } from "@smithy/property-provider"; +import { fromSso } from "./fromSso"; +export const nodeProvider = (init = {}) => memoize(chain(fromSso(init), async () => { + throw new TokenProviderError("Could not load token from any providers", false); +}), (token) => token.expiration !== undefined && token.expiration.getTime() - Date.now() < 300000, (token) => token.expiration !== undefined); diff --git a/node_modules/@aws-sdk/token-providers/dist-es/validateTokenExpiry.js b/node_modules/@aws-sdk/token-providers/dist-es/validateTokenExpiry.js new file mode 100644 index 00000000..8118d7c7 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-es/validateTokenExpiry.js @@ -0,0 +1,7 @@ +import { TokenProviderError } from "@smithy/property-provider"; +import { REFRESH_MESSAGE } from "./constants"; +export const validateTokenExpiry = (token) => { + if (token.expiration && token.expiration.getTime() < Date.now()) { + throw new TokenProviderError(`Token is expired. ${REFRESH_MESSAGE}`, false); + } +}; diff --git a/node_modules/@aws-sdk/token-providers/dist-es/validateTokenKey.js b/node_modules/@aws-sdk/token-providers/dist-es/validateTokenKey.js new file mode 100644 index 00000000..49796380 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-es/validateTokenKey.js @@ -0,0 +1,7 @@ +import { TokenProviderError } from "@smithy/property-provider"; +import { REFRESH_MESSAGE } from "./constants"; +export const validateTokenKey = (key, value, forRefresh = false) => { + if (typeof value === "undefined") { + throw new TokenProviderError(`Value not present for '${key}' in SSO Token${forRefresh ? ". Cannot refresh" : ""}. ${REFRESH_MESSAGE}`, false); + } +}; diff --git a/node_modules/@aws-sdk/token-providers/dist-es/writeSSOTokenToFile.js b/node_modules/@aws-sdk/token-providers/dist-es/writeSSOTokenToFile.js new file mode 100644 index 00000000..6da2c9b5 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-es/writeSSOTokenToFile.js @@ -0,0 +1,8 @@ +import { getSSOTokenFilepath } from "@smithy/shared-ini-file-loader"; +import { promises as fsPromises } from "fs"; +const { writeFile } = fsPromises; +export const writeSSOTokenToFile = (id, ssoToken) => { + const tokenFilepath = getSSOTokenFilepath(id); + const tokenString = JSON.stringify(ssoToken, null, 2); + return writeFile(tokenFilepath, tokenString); +}; diff --git a/node_modules/@aws-sdk/token-providers/dist-types/constants.d.ts b/node_modules/@aws-sdk/token-providers/dist-types/constants.d.ts new file mode 100644 index 00000000..de28cde9 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-types/constants.d.ts @@ -0,0 +1,8 @@ +/** + * The time window (5 minutes) before the token's stated expiration date during which the SDK treats the SSO token as expired.
+ * This is needed because the server side may have invalidated the token before the defined expiration date. + * + * @internal + */ +export declare const EXPIRE_WINDOW_MS: number; +export declare const REFRESH_MESSAGE = "To refresh this SSO session run 'aws sso login' with the corresponding profile."; diff --git a/node_modules/@aws-sdk/token-providers/dist-types/fromSso.d.ts b/node_modules/@aws-sdk/token-providers/dist-types/fromSso.d.ts new file mode 100644 index 00000000..03f5359a --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-types/fromSso.d.ts @@ -0,0 +1,12 @@ +import { CredentialProviderOptions, RuntimeConfigIdentityProvider, TokenIdentity } from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +export interface FromSsoInit extends SourceProfileInit, CredentialProviderOptions { + /** + * @see SSOOIDCClientConfig in \@aws-sdk/client-sso-oidc. + */ + clientConfig?: any; +} +/** + * Creates a token provider that will read from the SSO token cache or an ssoOidc.createToken() call. + */ +export declare const fromSso: (_init?: FromSsoInit) => RuntimeConfigIdentityProvider<TokenIdentity>; diff --git a/node_modules/@aws-sdk/token-providers/dist-types/fromStatic.d.ts b/node_modules/@aws-sdk/token-providers/dist-types/fromStatic.d.ts new file mode 100644 index 00000000..d4961724 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-types/fromStatic.d.ts @@ -0,0 +1,9 @@ +import { CredentialProviderOptions, TokenIdentity, TokenIdentityProvider } from "@aws-sdk/types"; +export interface FromStaticInit extends CredentialProviderOptions { + token?: TokenIdentity; +} +/** + * Creates a token provider that will read from a static token. + * @public + */ +export declare const fromStatic: ({ token, logger }: FromStaticInit) => TokenIdentityProvider; diff --git a/node_modules/@aws-sdk/token-providers/dist-types/getNewSsoOidcToken.d.ts b/node_modules/@aws-sdk/token-providers/dist-types/getNewSsoOidcToken.d.ts new file mode 100644 index 00000000..75c63225 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-types/getNewSsoOidcToken.d.ts @@ -0,0 +1,8 @@ +/// +import { SSOToken } from "@smithy/shared-ini-file-loader"; +import { FromSsoInit } from "./fromSso"; +/** + * Returns a new SSO OIDC token from an ssoOidc.createToken() API call. + * @internal + */ +export declare const getNewSsoOidcToken: (ssoToken: SSOToken, ssoRegion: string, init?: FromSsoInit) => Promise<import("@aws-sdk/nested-clients/sso-oidc").CreateTokenCommandOutput>; diff --git a/node_modules/@aws-sdk/token-providers/dist-types/getSsoOidcClient.d.ts b/node_modules/@aws-sdk/token-providers/dist-types/getSsoOidcClient.d.ts new file mode 100644 index 00000000..5c9dcb48 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-types/getSsoOidcClient.d.ts @@ -0,0 +1,7 @@ +/// +import { FromSsoInit } from "./fromSso"; +/** + * Returns an SSOOIDC client for the given region.
+ * @internal + */ +export declare const getSsoOidcClient: (ssoRegion: string, init?: FromSsoInit) => Promise; diff --git a/node_modules/@aws-sdk/token-providers/dist-types/index.d.ts b/node_modules/@aws-sdk/token-providers/dist-types/index.d.ts new file mode 100644 index 00000000..a0b176b4 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromSso"; +export * from "./fromStatic"; +export * from "./nodeProvider"; diff --git a/node_modules/@aws-sdk/token-providers/dist-types/nodeProvider.d.ts b/node_modules/@aws-sdk/token-providers/dist-types/nodeProvider.d.ts new file mode 100644 index 00000000..e4846ec5 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-types/nodeProvider.d.ts @@ -0,0 +1,18 @@ +import { TokenIdentityProvider } from "@aws-sdk/types"; +import { FromSsoInit } from "./fromSso"; +/** + * Creates a token provider that will attempt to find a token from the + * following sources (listed in order of precedence): + * * SSO token from SSO cache or ssoOidc.createToken() call + * + * The default token provider is designed to invoke one provider at a time and only + * continue to the next if no token has been located. It currently has only the SSO + * Token Provider in the chain. + * + * @param init Configuration that is passed to each individual + * provider + * + * @see fromSso The function used to source credentials from + * SSO cache or ssoOidc.createToken() call + */ +export declare const nodeProvider: (init?: FromSsoInit) => TokenIdentityProvider; diff --git a/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/constants.d.ts b/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/constants.d.ts new file mode 100644 index 00000000..d7e75772 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,3 @@ +export declare const EXPIRE_WINDOW_MS: number; +export declare const REFRESH_MESSAGE = + "To refresh this SSO session run 'aws sso login' with the corresponding profile."; diff --git a/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromSso.d.ts b/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromSso.d.ts new file mode 100644 index 00000000..3b5bb602 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromSso.d.ts @@ -0,0 +1,14 @@ +import { + CredentialProviderOptions, + RuntimeConfigIdentityProvider, + TokenIdentity, +} from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +export interface FromSsoInit + extends SourceProfileInit, + CredentialProviderOptions { + clientConfig?: any; +} +export declare const fromSso: ( + _init?: FromSsoInit +) => RuntimeConfigIdentityProvider<TokenIdentity>; diff --git a/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromStatic.d.ts b/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromStatic.d.ts new file mode 100644 index 00000000..e6800123 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromStatic.d.ts @@ -0,0 +1,12 @@ +import { + CredentialProviderOptions, + TokenIdentity, + TokenIdentityProvider, +} from "@aws-sdk/types"; +export interface FromStaticInit extends CredentialProviderOptions { + token?: TokenIdentity; +} +export declare const fromStatic: ({ + token, + logger, +}: FromStaticInit) => TokenIdentityProvider; diff --git a/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getNewSsoOidcToken.d.ts b/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getNewSsoOidcToken.d.ts new file mode 100644 index
00000000..6bcd71dd --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getNewSsoOidcToken.d.ts @@ -0,0 +1,9 @@ +import { SSOToken } from "@smithy/shared-ini-file-loader"; +import { FromSsoInit } from "./fromSso"; +export declare const getNewSsoOidcToken: ( + ssoToken: SSOToken, + ssoRegion: string, + init?: FromSsoInit +) => Promise< + import("@aws-sdk/nested-clients/sso-oidc").CreateTokenCommandOutput +>; diff --git a/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getSsoOidcClient.d.ts b/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getSsoOidcClient.d.ts new file mode 100644 index 00000000..c07dc691 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getSsoOidcClient.d.ts @@ -0,0 +1,5 @@ +import { FromSsoInit } from "./fromSso"; +export declare const getSsoOidcClient: ( + ssoRegion: string, + init?: FromSsoInit +) => Promise; diff --git a/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..a0b176b4 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromSso"; +export * from "./fromStatic"; +export * from "./nodeProvider"; diff --git a/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/nodeProvider.d.ts b/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/nodeProvider.d.ts new file mode 100644 index 00000000..11a9bd43 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/nodeProvider.d.ts @@ -0,0 +1,5 @@ +import { TokenIdentityProvider } from "@aws-sdk/types"; +import { FromSsoInit } from "./fromSso"; +export declare const nodeProvider: ( + init?: FromSsoInit +) => TokenIdentityProvider; diff --git a/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenExpiry.d.ts b/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenExpiry.d.ts new file mode 100644 index 00000000..90036052 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenExpiry.d.ts @@ -0,0 +1,2 @@ +import { TokenIdentity } from "@aws-sdk/types"; +export declare const validateTokenExpiry: (token: TokenIdentity) => void; diff --git a/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenKey.d.ts b/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenKey.d.ts new file mode 100644 index 00000000..105b2b4f --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenKey.d.ts @@ -0,0 +1,5 @@ +export declare const validateTokenKey: ( + key: string, + value: unknown, + forRefresh?: boolean +) => void; diff --git a/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/writeSSOTokenToFile.d.ts b/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/writeSSOTokenToFile.d.ts new file mode 100644 index 00000000..a6d025f3 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/writeSSOTokenToFile.d.ts @@ -0,0 +1,5 @@ +import { SSOToken } from "@smithy/shared-ini-file-loader"; +export declare const writeSSOTokenToFile: ( + id: string, + ssoToken: SSOToken +) => Promise; diff --git a/node_modules/@aws-sdk/token-providers/dist-types/validateTokenExpiry.d.ts b/node_modules/@aws-sdk/token-providers/dist-types/validateTokenExpiry.d.ts new file mode 100644 index 00000000..1253784a --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-types/validateTokenExpiry.d.ts @@ -0,0 +1,5 @@ +import { TokenIdentity } from "@aws-sdk/types"; 
+/** + * Throws TokenProviderError if the token is expired. + */ +export declare const validateTokenExpiry: (token: TokenIdentity) => void; diff --git a/node_modules/@aws-sdk/token-providers/dist-types/validateTokenKey.d.ts b/node_modules/@aws-sdk/token-providers/dist-types/validateTokenKey.d.ts new file mode 100644 index 00000000..a9618fd8 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-types/validateTokenKey.d.ts @@ -0,0 +1,4 @@ +/** + * Throws TokenProviderError if the value is undefined for the given key. + */ +export declare const validateTokenKey: (key: string, value: unknown, forRefresh?: boolean) => void; diff --git a/node_modules/@aws-sdk/token-providers/dist-types/writeSSOTokenToFile.d.ts b/node_modules/@aws-sdk/token-providers/dist-types/writeSSOTokenToFile.d.ts new file mode 100644 index 00000000..a1e17e88 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/dist-types/writeSSOTokenToFile.d.ts @@ -0,0 +1,5 @@ +import { SSOToken } from "@smithy/shared-ini-file-loader"; +/** + * Writes the SSO token to a file, at the filepath computed from the ssoStartUrl or session name. + */ +export declare const writeSSOTokenToFile: (id: string, ssoToken: SSOToken) => Promise<void>; diff --git a/node_modules/@aws-sdk/token-providers/package.json b/node_modules/@aws-sdk/token-providers/package.json new file mode 100644 index 00000000..930187e2 --- /dev/null +++ b/node_modules/@aws-sdk/token-providers/package.json @@ -0,0 +1,67 @@ +{ + "name": "@aws-sdk/token-providers", + "version": "3.799.0", + "description": "A collection of token providers", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "sideEffects": false, + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline token-providers", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "token" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/nested-clients": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": {}, + "react-native": {}, + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/token-providers", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/token-providers" + } +} diff --git a/node_modules/@aws-sdk/types/LICENSE b/node_modules/@aws-sdk/types/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@aws-sdk/types/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004
+ http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@aws-sdk/types/README.md b/node_modules/@aws-sdk/types/README.md new file mode 100644 index 00000000..a5658db8 --- /dev/null +++ b/node_modules/@aws-sdk/types/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/types + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/types/latest.svg)](https://www.npmjs.com/package/@aws-sdk/types) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/types.svg)](https://www.npmjs.com/package/@aws-sdk/types) diff --git a/node_modules/@aws-sdk/types/dist-cjs/index.js b/node_modules/@aws-sdk/types/dist-cjs/index.js new file mode 100644 index 00000000..8114db0e --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-cjs/index.js @@ -0,0 +1,294 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + AbortController: () => import_types.AbortController, + AbortHandler: () => import_types.AbortHandler, + AbortSignal: () => import_types.AbortSignal, + AbsoluteLocation: () => import_types.AbsoluteLocation, + AuthScheme: () => import_types.AuthScheme, + AvailableMessage: () => import_types.AvailableMessage, + AvailableMessages: () => import_types.AvailableMessages, + AwsCredentialIdentity: () => import_types.AwsCredentialIdentity, + AwsCredentialIdentityProvider: () => import_types.AwsCredentialIdentityProvider, + BinaryHeaderValue: () => import_types.BinaryHeaderValue, + BlobTypes: () => import_types.BlobTypes, + BodyLengthCalculator: () => import_types.BodyLengthCalculator, + BooleanHeaderValue: () => import_types.BooleanHeaderValue, + BuildHandler: () => import_types.BuildHandler, + BuildHandlerArguments: () => import_types.BuildHandlerArguments, + BuildHandlerOptions: () => import_types.BuildHandlerOptions, + BuildHandlerOutput: () => import_types.BuildHandlerOutput, + BuildMiddleware: () => import_types.BuildMiddleware, + ByteHeaderValue: () => import_types.ByteHeaderValue, + Checksum: () => import_types.Checksum, + ChecksumConstructor: () => import_types.ChecksumConstructor, + Client: () => import_types.Client, + Command: () => import_types.Command, + ConnectConfiguration: () => import_types.ConnectConfiguration, + ConnectionManager: () => import_types.ConnectionManager, + ConnectionManagerConfiguration: () => import_types.ConnectionManagerConfiguration, + ConnectionPool: () => import_types.ConnectionPool, + 
DateInput: () => import_types.DateInput, + Decoder: () => import_types.Decoder, + DeserializeHandler: () => import_types.DeserializeHandler, + DeserializeHandlerArguments: () => import_types.DeserializeHandlerArguments, + DeserializeHandlerOptions: () => import_types.DeserializeHandlerOptions, + DeserializeHandlerOutput: () => import_types.DeserializeHandlerOutput, + DeserializeMiddleware: () => import_types.DeserializeMiddleware, + DocumentType: () => import_types.DocumentType, + Encoder: () => import_types.Encoder, + Endpoint: () => import_types.Endpoint, + EndpointARN: () => import_types.EndpointARN, + EndpointBearer: () => import_types.EndpointBearer, + EndpointObjectProperty: () => import_types.EndpointObjectProperty, + EndpointParameters: () => import_types.EndpointParameters, + EndpointPartition: () => import_types.EndpointPartition, + EndpointURL: () => import_types.EndpointURL, + EndpointURLScheme: () => import_types.EndpointURLScheme, + EndpointV2: () => import_types.EndpointV2, + EventSigner: () => import_types.EventSigner, + EventSigningArguments: () => import_types.EventSigningArguments, + EventStreamMarshaller: () => import_types.EventStreamMarshaller, + EventStreamMarshallerDeserFn: () => import_types.EventStreamMarshallerDeserFn, + EventStreamMarshallerSerFn: () => import_types.EventStreamMarshallerSerFn, + EventStreamPayloadHandler: () => import_types.EventStreamPayloadHandler, + EventStreamPayloadHandlerProvider: () => import_types.EventStreamPayloadHandlerProvider, + EventStreamRequestSigner: () => import_types.EventStreamRequestSigner, + EventStreamSerdeContext: () => import_types.EventStreamSerdeContext, + EventStreamSerdeProvider: () => import_types.EventStreamSerdeProvider, + EventStreamSignerProvider: () => import_types.EventStreamSignerProvider, + ExponentialBackoffJitterType: () => import_types.ExponentialBackoffJitterType, + ExponentialBackoffStrategyOptions: () => import_types.ExponentialBackoffStrategyOptions, + FinalizeHandler: () => import_types.FinalizeHandler, + FinalizeHandlerArguments: () => import_types.FinalizeHandlerArguments, + FinalizeHandlerOutput: () => import_types.FinalizeHandlerOutput, + FinalizeRequestHandlerOptions: () => import_types.FinalizeRequestHandlerOptions, + FinalizeRequestMiddleware: () => import_types.FinalizeRequestMiddleware, + FormattedEvent: () => import_types.FormattedEvent, + GetAwsChunkedEncodingStream: () => import_types.GetAwsChunkedEncodingStream, + GetAwsChunkedEncodingStreamOptions: () => import_types.GetAwsChunkedEncodingStreamOptions, + Handler: () => import_types.Handler, + HandlerExecutionContext: () => import_types.HandlerExecutionContext, + HandlerOptions: () => import_types.HandlerOptions, + Hash: () => import_types.Hash, + HashConstructor: () => import_types.HashConstructor, + HeaderBag: () => import_types.HeaderBag, + HostAddressType: () => HostAddressType, + HttpAuthDefinition: () => import_types.HttpAuthDefinition, + HttpAuthLocation: () => import_types.HttpAuthLocation, + HttpHandlerOptions: () => import_types.HttpHandlerOptions, + HttpMessage: () => import_types.HttpMessage, + HttpRequest: () => import_types.HttpRequest, + HttpResponse: () => import_types.HttpResponse, + Identity: () => import_types.Identity, + IniSection: () => import_types.IniSection, + InitializeHandler: () => import_types.InitializeHandler, + InitializeHandlerArguments: () => import_types.InitializeHandlerArguments, + InitializeHandlerOptions: () => import_types.InitializeHandlerOptions, + InitializeHandlerOutput: () => 
import_types.InitializeHandlerOutput, + InitializeMiddleware: () => import_types.InitializeMiddleware, + Int64: () => import_types.Int64, + IntegerHeaderValue: () => import_types.IntegerHeaderValue, + LongHeaderValue: () => import_types.LongHeaderValue, + MemoizedProvider: () => import_types.MemoizedProvider, + Message: () => import_types.Message, + MessageDecoder: () => import_types.MessageDecoder, + MessageEncoder: () => import_types.MessageEncoder, + MessageHeaderValue: () => import_types.MessageHeaderValue, + MessageHeaders: () => import_types.MessageHeaders, + MessageSigner: () => import_types.MessageSigner, + MetadataBearer: () => import_types.MetadataBearer, + MiddlewareStack: () => import_types.MiddlewareStack, + MiddlewareType: () => import_types.MiddlewareType, + PaginationConfiguration: () => import_types.PaginationConfiguration, + Paginator: () => import_types.Paginator, + ParsedIniData: () => import_types.ParsedIniData, + Pluggable: () => import_types.Pluggable, + Priority: () => import_types.Priority, + Profile: () => import_types.Profile, + Provider: () => import_types.Provider, + QueryParameterBag: () => import_types.QueryParameterBag, + RegionInfo: () => import_types.RegionInfo, + RegionInfoProvider: () => import_types.RegionInfoProvider, + RegionInfoProviderOptions: () => import_types.RegionInfoProviderOptions, + Relation: () => import_types.Relation, + RelativeLocation: () => import_types.RelativeLocation, + RelativeMiddlewareOptions: () => import_types.RelativeMiddlewareOptions, + RequestContext: () => import_types.RequestContext, + RequestHandler: () => import_types.RequestHandler, + RequestHandlerMetadata: () => import_types.RequestHandlerMetadata, + RequestHandlerOutput: () => import_types.RequestHandlerOutput, + RequestHandlerProtocol: () => import_types.RequestHandlerProtocol, + RequestPresigner: () => import_types.RequestPresigner, + RequestPresigningArguments: () => import_types.RequestPresigningArguments, + RequestSerializer: () => import_types.RequestSerializer, + RequestSigner: () => import_types.RequestSigner, + RequestSigningArguments: () => import_types.RequestSigningArguments, + ResponseDeserializer: () => import_types.ResponseDeserializer, + ResponseMetadata: () => import_types.ResponseMetadata, + RetryBackoffStrategy: () => import_types.RetryBackoffStrategy, + RetryErrorInfo: () => import_types.RetryErrorInfo, + RetryErrorType: () => import_types.RetryErrorType, + RetryStrategy: () => import_types.RetryStrategy, + RetryStrategyOptions: () => import_types.RetryStrategyOptions, + RetryStrategyV2: () => import_types.RetryStrategyV2, + RetryToken: () => import_types.RetryToken, + RetryableTrait: () => import_types.RetryableTrait, + SdkError: () => import_types.SdkError, + SdkStream: () => import_types.SdkStream, + SdkStreamMixin: () => import_types.SdkStreamMixin, + SdkStreamMixinInjector: () => import_types.SdkStreamMixinInjector, + SdkStreamSerdeContext: () => import_types.SdkStreamSerdeContext, + SerdeContext: () => import_types.SerdeContext, + SerializeHandler: () => import_types.SerializeHandler, + SerializeHandlerArguments: () => import_types.SerializeHandlerArguments, + SerializeHandlerOptions: () => import_types.SerializeHandlerOptions, + SerializeHandlerOutput: () => import_types.SerializeHandlerOutput, + SerializeMiddleware: () => import_types.SerializeMiddleware, + SharedConfigFiles: () => import_types.SharedConfigFiles, + ShortHeaderValue: () => import_types.ShortHeaderValue, + SignableMessage: () => import_types.SignableMessage, + SignedMessage: 
() => import_types.SignedMessage, + SigningArguments: () => import_types.SigningArguments, + SmithyException: () => import_types.SmithyException, + SourceData: () => import_types.SourceData, + StandardRetryBackoffStrategy: () => import_types.StandardRetryBackoffStrategy, + StandardRetryToken: () => import_types.StandardRetryToken, + Step: () => import_types.Step, + StreamCollector: () => import_types.StreamCollector, + StreamHasher: () => import_types.StreamHasher, + StringHeaderValue: () => import_types.StringHeaderValue, + StringSigner: () => import_types.StringSigner, + Terminalware: () => import_types.Terminalware, + TimestampHeaderValue: () => import_types.TimestampHeaderValue, + TokenIdentity: () => import_types.TokenIdentity, + TokenIdentityProvider: () => import_types.TokenIdentityProvider, + URI: () => import_types.URI, + UrlParser: () => import_types.UrlParser, + UserAgent: () => import_types.UserAgent, + UserAgentPair: () => import_types.UserAgentPair, + UuidHeaderValue: () => import_types.UuidHeaderValue, + WaiterConfiguration: () => import_types.WaiterConfiguration, + WithSdkStreamMixin: () => import_types.WithSdkStreamMixin, + randomValues: () => import_types.randomValues +}); +module.exports = __toCommonJS(index_exports); + +// src/abort.ts +var import_types = require("@smithy/types"); + +// src/auth.ts + + +// src/blob/blob-types.ts + + +// src/checksum.ts + + +// src/client.ts + + +// src/command.ts + + +// src/connection.ts + + +// src/crypto.ts + + +// src/dns.ts +var HostAddressType = /* @__PURE__ */ ((HostAddressType2) => { + HostAddressType2["AAAA"] = "AAAA"; + HostAddressType2["A"] = "A"; + return HostAddressType2; +})(HostAddressType || {}); + +// src/encode.ts + + +// src/endpoint.ts + + +// src/eventStream.ts + + +// src/http.ts + + +// src/identity/AwsCredentialIdentity.ts + + +// src/identity/Identity.ts + + +// src/identity/TokenIdentity.ts + + +// src/middleware.ts + + +// src/pagination.ts + + +// src/profile.ts + + +// src/response.ts + + +// src/retry.ts + + +// src/serde.ts + + +// src/shapes.ts + + +// src/signature.ts + + +// src/stream.ts + + +// src/transfer.ts + + +// src/uri.ts + + +// src/util.ts + + +// src/waiter.ts + +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + HttpAuthLocation, + HostAddressType, + EndpointURLScheme, + RequestHandlerProtocol +}); + diff --git a/node_modules/@aws-sdk/types/dist-es/abort.js b/node_modules/@aws-sdk/types/dist-es/abort.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/abort.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/auth.js b/node_modules/@aws-sdk/types/dist-es/auth.js new file mode 100644 index 00000000..81f903b2 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/auth.js @@ -0,0 +1 @@ +export { HttpAuthLocation } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-es/blob/blob-types.js b/node_modules/@aws-sdk/types/dist-es/blob/blob-types.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/blob/blob-types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/checksum.js b/node_modules/@aws-sdk/types/dist-es/checksum.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/checksum.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/client.js b/node_modules/@aws-sdk/types/dist-es/client.js new file mode 100644 
index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/client.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/command.js b/node_modules/@aws-sdk/types/dist-es/command.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/command.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/connection.js b/node_modules/@aws-sdk/types/dist-es/connection.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/connection.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/credentials.js b/node_modules/@aws-sdk/types/dist-es/credentials.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/credentials.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/crypto.js b/node_modules/@aws-sdk/types/dist-es/crypto.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/crypto.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/dns.js b/node_modules/@aws-sdk/types/dist-es/dns.js new file mode 100644 index 00000000..c6a2cd96 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/dns.js @@ -0,0 +1,5 @@ +export var HostAddressType; +(function (HostAddressType) { + HostAddressType["AAAA"] = "AAAA"; + HostAddressType["A"] = "A"; +})(HostAddressType || (HostAddressType = {})); diff --git a/node_modules/@aws-sdk/types/dist-es/encode.js b/node_modules/@aws-sdk/types/dist-es/encode.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/encode.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/endpoint.js b/node_modules/@aws-sdk/types/dist-es/endpoint.js new file mode 100644 index 00000000..ec53acc8 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/endpoint.js @@ -0,0 +1 @@ +export { EndpointURLScheme, } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-es/eventStream.js b/node_modules/@aws-sdk/types/dist-es/eventStream.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/eventStream.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/extensions/index.js b/node_modules/@aws-sdk/types/dist-es/extensions/index.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/extensions/index.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/feature-ids.js b/node_modules/@aws-sdk/types/dist-es/feature-ids.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/feature-ids.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/function.js b/node_modules/@aws-sdk/types/dist-es/function.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/function.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/http.js b/node_modules/@aws-sdk/types/dist-es/http.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/http.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/identity/AnonymousIdentity.js b/node_modules/@aws-sdk/types/dist-es/identity/AnonymousIdentity.js new file mode 100644 index 00000000..cb0ff5c3 --- 
/dev/null +++ b/node_modules/@aws-sdk/types/dist-es/identity/AnonymousIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/identity/AwsCredentialIdentity.js b/node_modules/@aws-sdk/types/dist-es/identity/AwsCredentialIdentity.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/identity/AwsCredentialIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/identity/Identity.js b/node_modules/@aws-sdk/types/dist-es/identity/Identity.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/identity/Identity.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/identity/LoginIdentity.js b/node_modules/@aws-sdk/types/dist-es/identity/LoginIdentity.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/identity/LoginIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/identity/TokenIdentity.js b/node_modules/@aws-sdk/types/dist-es/identity/TokenIdentity.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/identity/TokenIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/identity/index.js b/node_modules/@aws-sdk/types/dist-es/identity/index.js new file mode 100644 index 00000000..863e78e8 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/identity/index.js @@ -0,0 +1,5 @@ +export * from "./AnonymousIdentity"; +export * from "./AwsCredentialIdentity"; +export * from "./Identity"; +export * from "./LoginIdentity"; +export * from "./TokenIdentity"; diff --git a/node_modules/@aws-sdk/types/dist-es/index.js b/node_modules/@aws-sdk/types/dist-es/index.js new file mode 100644 index 00000000..a7f99d93 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/index.js @@ -0,0 +1,34 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./credentials"; +export * from "./crypto"; +export * from "./dns"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./function"; +export * from "./http"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./request"; +export * from "./response"; +export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./token"; +export * from "./transfer"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/node_modules/@aws-sdk/types/dist-es/logger.js b/node_modules/@aws-sdk/types/dist-es/logger.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/logger.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/middleware.js b/node_modules/@aws-sdk/types/dist-es/middleware.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/middleware.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/pagination.js b/node_modules/@aws-sdk/types/dist-es/pagination.js new file mode 
100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/pagination.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/profile.js b/node_modules/@aws-sdk/types/dist-es/profile.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/profile.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/request.js b/node_modules/@aws-sdk/types/dist-es/request.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/request.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/response.js b/node_modules/@aws-sdk/types/dist-es/response.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/response.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/retry.js b/node_modules/@aws-sdk/types/dist-es/retry.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/retry.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/serde.js b/node_modules/@aws-sdk/types/dist-es/serde.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/serde.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/shapes.js b/node_modules/@aws-sdk/types/dist-es/shapes.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/shapes.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/signature.js b/node_modules/@aws-sdk/types/dist-es/signature.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/signature.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/stream.js b/node_modules/@aws-sdk/types/dist-es/stream.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/stream.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/token.js b/node_modules/@aws-sdk/types/dist-es/token.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/token.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/transfer.js b/node_modules/@aws-sdk/types/dist-es/transfer.js new file mode 100644 index 00000000..ba575896 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/transfer.js @@ -0,0 +1 @@ +export { RequestHandlerProtocol, } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-es/uri.js b/node_modules/@aws-sdk/types/dist-es/uri.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/uri.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/util.js b/node_modules/@aws-sdk/types/dist-es/util.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/util.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-es/waiter.js b/node_modules/@aws-sdk/types/dist-es/waiter.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-es/waiter.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/types/dist-types/abort.d.ts b/node_modules/@aws-sdk/types/dist-types/abort.d.ts new file mode 100644 index 00000000..dad6079f --- /dev/null +++ 
b/node_modules/@aws-sdk/types/dist-types/abort.d.ts @@ -0,0 +1 @@ +export { AbortController, AbortHandler, AbortSignal } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/auth.d.ts b/node_modules/@aws-sdk/types/dist-types/auth.d.ts new file mode 100644 index 00000000..6626c16d --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/auth.d.ts @@ -0,0 +1 @@ +export { AuthScheme, HttpAuthDefinition, HttpAuthLocation } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/blob/blob-types.d.ts b/node_modules/@aws-sdk/types/dist-types/blob/blob-types.d.ts new file mode 100644 index 00000000..fedb3d52 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/blob/blob-types.d.ts @@ -0,0 +1,2 @@ +import { BlobTypes } from '@smithy/types'; +export { BlobTypes }; diff --git a/node_modules/@aws-sdk/types/dist-types/checksum.d.ts b/node_modules/@aws-sdk/types/dist-types/checksum.d.ts new file mode 100644 index 00000000..f805d729 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/checksum.d.ts @@ -0,0 +1 @@ +export { Checksum, ChecksumConstructor } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/client.d.ts b/node_modules/@aws-sdk/types/dist-types/client.d.ts new file mode 100644 index 00000000..d6b3dcf9 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/client.d.ts @@ -0,0 +1 @@ +export { Client } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/command.d.ts b/node_modules/@aws-sdk/types/dist-types/command.d.ts new file mode 100644 index 00000000..38872670 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/command.d.ts @@ -0,0 +1 @@ +export { Command } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/connection.d.ts b/node_modules/@aws-sdk/types/dist-types/connection.d.ts new file mode 100644 index 00000000..efcb4d77 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/connection.d.ts @@ -0,0 +1 @@ +export { ConnectConfiguration, ConnectionManager, ConnectionManagerConfiguration, ConnectionPool } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/credentials.d.ts b/node_modules/@aws-sdk/types/dist-types/credentials.d.ts new file mode 100644 index 00000000..181bf8b4 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/credentials.d.ts @@ -0,0 +1,50 @@ +import { Logger } from "@smithy/types"; +import { AwsCredentialIdentity } from "./identity"; +import { Provider } from "./util"; +/** + * @public + * + * An object representing temporary or permanent AWS credentials. + * + * @deprecated Use {@link AwsCredentialIdentity} + */ +export interface Credentials extends AwsCredentialIdentity { +} +/** + * @public + * + * @deprecated Use {@link AwsCredentialIdentityProvider} + */ +export type CredentialProvider = Provider<Credentials>; +/** + * @public + * + * Common options for credential providers. + */ +export type CredentialProviderOptions = { + /** + * This logger is only used to provide information + * on what credential providers were used during resolution. + * + * It does not log credentials. + */ + logger?: Logger; + /** + * Present if the credential provider was created by calling + * the defaultCredentialProvider in a client's middleware, having + * access to the client's config. + * + * The region of that parent or outer client is important because + * an inner client used by the credential provider may need + * to match its default partition or region with that of + * the outer client.
+ * + * @internal + * @deprecated - not truly deprecated, marked as a warning to not use this. + */ + parentClientConfig?: { + region?: string | Provider<string>; + profile?: string; + [key: string]: unknown; + }; +}; diff --git a/node_modules/@aws-sdk/types/dist-types/crypto.d.ts b/node_modules/@aws-sdk/types/dist-types/crypto.d.ts new file mode 100644 index 00000000..aeeea502 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/crypto.d.ts @@ -0,0 +1 @@ +export { Hash, HashConstructor, StreamHasher, randomValues, SourceData } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/dns.d.ts b/node_modules/@aws-sdk/types/dist-types/dns.d.ts new file mode 100644 index 00000000..8348cc49 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/dns.d.ts @@ -0,0 +1,85 @@ +/** + * @public + * + * DNS record types + */ +export declare enum HostAddressType { + /** + * IPv6 + */ + AAAA = "AAAA", + /** + * IPv4 + */ + A = "A" +} +/** + * @public + */ +export interface HostAddress { + /** + * The {@link HostAddressType} of the host address. + */ + addressType: HostAddressType; + /** + * The resolved numerical address represented as a + * string. + */ + address: string; + /** + * The host name the {@link address} was resolved from. + */ + hostName: string; + /** + * The service record of {@link hostName}. + */ + service?: string; +} +/** + * @public + */ +export interface HostResolverArguments { + /** + * The host name to resolve. + */ + hostName: string; + /** + * The service record of {@link hostName}. + */ + service?: string; +} +/** + * @public + * + * Host Resolver interface for DNS queries + */ +export interface HostResolver { + /** + * Resolves the address(es) for {@link HostResolverArguments} and returns a + * list of addresses with (most likely) two addresses, one {@link HostAddressType.AAAA} + * and one {@link HostAddressType.A}. Calls to this function will likely alter + * the cache (if implemented) so that if there's multiple addresses, a different + * set will be returned on the next call. + * In the case of multi-answer, still only a maximum of two records should be + * returned. The resolver implementation is responsible for caching and rotation + * of the multiple addresses that get returned. + * Implementations don't have to explicitly call getaddrinfo(), they can use + * high level abstractions provided in their language runtimes/libraries. + * @param args - arguments with host name query addresses for + * @returns promise with a list of {@link HostAddress} + */ + resolveAddress(args: HostResolverArguments): Promise<HostAddress[]>; + /** + * Reports a failure on a {@link HostAddress} so that the cache (if implemented) + * can accommodate the failure and likely not return the address until it recovers. + * @param addr - host address to report a failure on + */ + reportFailureOnAddress(addr: HostAddress): void; + /** + * Empties the cache (if implemented) for a {@link HostResolverArguments.hostName}. + * If {@link HostResolverArguments.hostName} is not provided, the cache (if + * implemented) is emptied for all host names.
+ * @param args - optional arguments to empty the cache for + */ + purgeCache(args?: HostResolverArguments): void; +} diff --git a/node_modules/@aws-sdk/types/dist-types/encode.d.ts b/node_modules/@aws-sdk/types/dist-types/encode.d.ts new file mode 100644 index 00000000..128ee570 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/encode.d.ts @@ -0,0 +1 @@ +export { MessageDecoder, MessageEncoder, AvailableMessage, AvailableMessages } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/endpoint.d.ts b/node_modules/@aws-sdk/types/dist-types/endpoint.d.ts new file mode 100644 index 00000000..f2ffaf55 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/endpoint.d.ts @@ -0,0 +1 @@ +export { EndpointARN, EndpointPartition, EndpointURLScheme, EndpointURL, EndpointObjectProperty, EndpointV2, EndpointParameters, } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/eventStream.d.ts b/node_modules/@aws-sdk/types/dist-types/eventStream.d.ts new file mode 100644 index 00000000..cee02f7e --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/eventStream.d.ts @@ -0,0 +1 @@ +export { Message, MessageHeaders, BooleanHeaderValue, ByteHeaderValue, ShortHeaderValue, IntegerHeaderValue, LongHeaderValue, BinaryHeaderValue, StringHeaderValue, TimestampHeaderValue, UuidHeaderValue, MessageHeaderValue, Int64, EventStreamSerdeContext, EventStreamMarshaller, EventStreamMarshallerDeserFn, EventStreamMarshallerSerFn, EventStreamPayloadHandler, EventStreamPayloadHandlerProvider, EventStreamRequestSigner, EventStreamSerdeProvider, EventStreamSignerProvider, } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/extensions/index.d.ts b/node_modules/@aws-sdk/types/dist-types/extensions/index.d.ts new file mode 100644 index 00000000..5a45bcbc --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/extensions/index.d.ts @@ -0,0 +1,8 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface AwsRegionExtensionConfiguration { + setRegion(region: Provider<string>): void; + region(): Provider<string>; +} diff --git a/node_modules/@aws-sdk/types/dist-types/feature-ids.d.ts b/node_modules/@aws-sdk/types/dist-types/feature-ids.d.ts new file mode 100644 index 00000000..f1679fa7 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/feature-ids.d.ts @@ -0,0 +1,59 @@ +/** + * @internal + */ +export type AwsSdkFeatures = Partial<{ + RESOURCE_MODEL: "A"; + WAITER: "B"; + PAGINATOR: "C"; + RETRY_MODE_LEGACY: "D"; + RETRY_MODE_STANDARD: "E"; + RETRY_MODE_ADAPTIVE: "F"; + S3_EXPRESS_BUCKET: "J"; + S3_ACCESS_GRANTS: "K"; + GZIP_REQUEST_COMPRESSION: "L"; + PROTOCOL_RPC_V2_CBOR: "M"; + ENDPOINT_OVERRIDE: "N"; + ACCOUNT_ID_ENDPOINT: "O"; + ACCOUNT_ID_MODE_PREFERRED: "P"; + ACCOUNT_ID_MODE_DISABLED: "Q"; + ACCOUNT_ID_MODE_REQUIRED: "R"; + SIGV4A_SIGNING: "S"; + FLEXIBLE_CHECKSUMS_REQ_CRC32: "U"; + FLEXIBLE_CHECKSUMS_REQ_CRC32C: "V"; + FLEXIBLE_CHECKSUMS_REQ_CRC64: "W"; + FLEXIBLE_CHECKSUMS_REQ_SHA1: "X"; + FLEXIBLE_CHECKSUMS_REQ_SHA256: "Y"; + FLEXIBLE_CHECKSUMS_REQ_WHEN_SUPPORTED: "Z"; + FLEXIBLE_CHECKSUMS_REQ_WHEN_REQUIRED: "a"; + FLEXIBLE_CHECKSUMS_RES_WHEN_SUPPORTED: "b"; + FLEXIBLE_CHECKSUMS_RES_WHEN_REQUIRED: "c"; + DDB_MAPPER: "d"; +}> & AwsSdkCredentialsFeatures; +/** + * @internal + */ +export type AwsSdkCredentialsFeatures = Partial<{ + RESOLVED_ACCOUNT_ID: "T"; + CREDENTIALS_CODE: "e"; + CREDENTIALS_ENV_VARS: "g"; + CREDENTIALS_ENV_VARS_STS_WEB_ID_TOKEN: "h"; + CREDENTIALS_STS_ASSUME_ROLE: "i"; + CREDENTIALS_STS_ASSUME_ROLE_SAML:
"j"; + CREDENTIALS_STS_ASSUME_ROLE_WEB_ID: "k"; + CREDENTIALS_STS_FEDERATION_TOKEN: "l"; + CREDENTIALS_STS_SESSION_TOKEN: "m"; + CREDENTIALS_PROFILE: "n"; + CREDENTIALS_PROFILE_SOURCE_PROFILE: "o"; + CREDENTIALS_PROFILE_NAMED_PROVIDER: "p"; + CREDENTIALS_PROFILE_STS_WEB_ID_TOKEN: "q"; + CREDENTIALS_PROFILE_SSO: "r"; + CREDENTIALS_SSO: "s"; + CREDENTIALS_PROFILE_SSO_LEGACY: "t"; + CREDENTIALS_SSO_LEGACY: "u"; + CREDENTIALS_PROFILE_PROCESS: "v"; + CREDENTIALS_PROCESS: "w"; + CREDENTIALS_BOTO2_CONFIG_FILE: "x"; + CREDENTIALS_AWS_SDK_STORE: "y"; + CREDENTIALS_HTTP: "z"; + CREDENTIALS_IMDS: "0"; +}>; diff --git a/node_modules/@aws-sdk/types/dist-types/function.d.ts b/node_modules/@aws-sdk/types/dist-types/function.d.ts new file mode 100644 index 00000000..3c777fae --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/function.d.ts @@ -0,0 +1,7 @@ +/** + * Resolves a function that accepts both the object argument fields of F1 and F2. + * The function returns an intersection of what F1 and F2 return. + * + * @public + */ +export type MergeFunctions<F1, F2> = F1 extends (arg: infer A1) => infer R1 ? F2 extends (arg: infer A2) => infer R2 ? R1 extends Promise<any> ? (arg?: A1 & A2) => Promise<Awaited<R1> & Awaited<R2>> : (arg?: A1 & A2) => R1 & R2 : never : never; diff --git a/node_modules/@aws-sdk/types/dist-types/http.d.ts b/node_modules/@aws-sdk/types/dist-types/http.d.ts new file mode 100644 index 00000000..7594b5ad --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/http.d.ts @@ -0,0 +1,33 @@ +import { HttpResponse } from "@smithy/types"; +export { Endpoint, HeaderBag, HttpHandlerOptions, HttpMessage, HttpRequest, HttpResponse, QueryParameterBag, } from "@smithy/types"; +/** + * @public + * + * A collection of key/value pairs with case-insensitive keys. + */ +export interface Headers extends Map<string, string> { + /** + * Returns a new instance of Headers with the specified header set to the + * provided value. Does not modify the original Headers instance. + * + * @param headerName - The name of the header to add or overwrite + * @param headerValue - The value to which the header should be set + */ + withHeader(headerName: string, headerValue: string): Headers; + /** + * Returns a new instance of Headers without the specified header. Does not + * modify the original Headers instance. + * + * @param headerName - The name of the header to remove + */ + withoutHeader(headerName: string): Headers; +} +/** + * @public + * + * Represents HTTP message whose body has been resolved to a string. This is + * used in parsing http message.
+ */ +export interface ResolvedHttpResponse extends HttpResponse { + body: string; +} diff --git a/node_modules/@aws-sdk/types/dist-types/identity/AnonymousIdentity.d.ts b/node_modules/@aws-sdk/types/dist-types/identity/AnonymousIdentity.d.ts new file mode 100644 index 00000000..c7006e3d --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/identity/AnonymousIdentity.d.ts @@ -0,0 +1,6 @@ +import { Identity } from "./Identity"; +/** + * @public + */ +export interface AnonymousIdentity extends Identity { +} diff --git a/node_modules/@aws-sdk/types/dist-types/identity/AwsCredentialIdentity.d.ts b/node_modules/@aws-sdk/types/dist-types/identity/AwsCredentialIdentity.d.ts new file mode 100644 index 00000000..c94b6c4d --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/identity/AwsCredentialIdentity.d.ts @@ -0,0 +1,60 @@ +import type { AwsCredentialIdentity, AwsCredentialIdentityProvider, Logger, RequestHandler } from "@smithy/types"; +import type { AwsSdkCredentialsFeatures } from "../feature-ids"; +export { AwsCredentialIdentity, AwsCredentialIdentityProvider, IdentityProvider } from "@smithy/types"; +/** + * @public + */ +export interface AwsIdentityProperties { + /** + * These are resolved client config values, and may be async providers. + */ + callerClientConfig?: { + /** + * It is likely a programming error if you use + * the caller client config credentials in a credential provider, since + * it will recurse. + * + * @deprecated do not use. + */ + credentials?: AwsCredentialIdentity | AwsCredentialIdentityProvider; + /** + * @internal + * @deprecated minimize use. + */ + credentialDefaultProvider?: (input?: any) => AwsCredentialIdentityProvider; + logger?: Logger; + profile?: string; + region(): Promise<string>; + requestHandler?: RequestHandler; + }; +} +/** + * @public + * + * Variation of {@link IdentityProvider} which accepts a contextual + * client configuration that includes an AWS region and potentially other + * configurable fields. + * + * Used to link a credential provider to a client if it is being called + * in the context of a client. + */ +export type RuntimeConfigIdentityProvider<T> = (awsIdentityProperties?: AwsIdentityProperties) => Promise<T>; +/** + * @public + * + * Variation of {@link AwsCredentialIdentityProvider} which accepts a contextual + * client configuration that includes an AWS region and potentially other + * configurable fields. + * + * Used to link a credential provider to a client if it is being called + * in the context of a client. + */ +export type RuntimeConfigAwsCredentialIdentityProvider = RuntimeConfigIdentityProvider<AwsCredentialIdentity>; +/** + * @public + * + * AwsCredentialIdentity with source attribution metadata.
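Aside (not part of the patch): the `RuntimeConfigAwsCredentialIdentityProvider` shape vendored just above is the contextual variant of a credential provider, letting a client pass its own resolved config into the provider it invokes. A minimal sketch of a provider consuming that context; the provider name and the placeholder key values are hypothetical, not taken from this change:

```ts
import type {
  AwsIdentityProperties,
  RuntimeConfigAwsCredentialIdentityProvider,
} from "@aws-sdk/types";

// Hypothetical provider: inspects the calling client's resolved region and
// logger (only present when invoked in the context of a client) before
// returning placeholder credentials.
const fromStaticWithContext: RuntimeConfigAwsCredentialIdentityProvider = async (
  props?: AwsIdentityProperties
) => {
  // callerClientConfig is undefined when the provider is called standalone.
  const region = await props?.callerClientConfig?.region();
  props?.callerClientConfig?.logger?.debug(
    `resolving credentials for region ${region ?? "<unresolved>"}`
  );
  return {
    accessKeyId: "EXAMPLE_ACCESS_KEY_ID", // placeholder, not a real credential
    secretAccessKey: "EXAMPLE_SECRET_KEY", // placeholder, not a real credential
  };
};
```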
+ */ +export type AttributedAwsCredentialIdentity = AwsCredentialIdentity & { + $source?: AwsSdkCredentialsFeatures; +}; diff --git a/node_modules/@aws-sdk/types/dist-types/identity/Identity.d.ts b/node_modules/@aws-sdk/types/dist-types/identity/Identity.d.ts new file mode 100644 index 00000000..4175fd34 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/identity/Identity.d.ts @@ -0,0 +1 @@ +export { Identity, IdentityProvider } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/identity/LoginIdentity.d.ts b/node_modules/@aws-sdk/types/dist-types/identity/LoginIdentity.d.ts new file mode 100644 index 00000000..13793f98 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/identity/LoginIdentity.d.ts @@ -0,0 +1,18 @@ +import { Identity, IdentityProvider } from "./Identity"; +/** + * @public + */ +export interface LoginIdentity extends Identity { + /** + * Identity username + */ + readonly username: string; + /** + * Identity password + */ + readonly password: string; +} +/** + * @public + */ +export type LoginIdentityProvider = IdentityProvider<LoginIdentity>; diff --git a/node_modules/@aws-sdk/types/dist-types/identity/TokenIdentity.d.ts b/node_modules/@aws-sdk/types/dist-types/identity/TokenIdentity.d.ts new file mode 100644 index 00000000..66301bc7 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/identity/TokenIdentity.d.ts @@ -0,0 +1 @@ +export { TokenIdentity, TokenIdentityProvider } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/identity/index.d.ts b/node_modules/@aws-sdk/types/dist-types/identity/index.d.ts new file mode 100644 index 00000000..863e78e8 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/identity/index.d.ts @@ -0,0 +1,5 @@ +export * from "./AnonymousIdentity"; +export * from "./AwsCredentialIdentity"; +export * from "./Identity"; +export * from "./LoginIdentity"; +export * from "./TokenIdentity"; diff --git a/node_modules/@aws-sdk/types/dist-types/index.d.ts b/node_modules/@aws-sdk/types/dist-types/index.d.ts new file mode 100644 index 00000000..a7f99d93 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/index.d.ts @@ -0,0 +1,34 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./credentials"; +export * from "./crypto"; +export * from "./dns"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./function"; +export * from "./http"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./request"; +export * from "./response"; +export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./token"; +export * from "./transfer"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/node_modules/@aws-sdk/types/dist-types/logger.d.ts b/node_modules/@aws-sdk/types/dist-types/logger.d.ts new file mode 100644 index 00000000..11a33c69 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/logger.d.ts @@ -0,0 +1,22 @@ +import type { Logger } from "@smithy/types"; +export type { Logger } from "@smithy/types"; +/** + * @public + * + * A list of logger's log level.
These levels are sorted in + * order of increasing severity. Each log level includes itself and all + * the levels behind itself. + * + * @example `new Logger({logLevel: 'warn'})` will print all the warn and error + * message. + */ +export type LogLevel = "all" | "trace" | "debug" | "log" | "info" | "warn" | "error" | "off"; +/** + * @public + * + * An object consumed by Logger constructor to initiate a logger object. + */ +export interface LoggerOptions { + logger?: Logger; + logLevel?: LogLevel; +} diff --git a/node_modules/@aws-sdk/types/dist-types/middleware.d.ts b/node_modules/@aws-sdk/types/dist-types/middleware.d.ts new file mode 100644 index 00000000..06ba3e23 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/middleware.d.ts @@ -0,0 +1,13 @@ +import { HandlerExecutionContext } from "@smithy/types"; +import { AwsSdkFeatures } from "./feature-ids"; +export { AbsoluteLocation, BuildHandler, BuildHandlerArguments, BuildHandlerOptions, BuildHandlerOutput, BuildMiddleware, DeserializeHandler, DeserializeHandlerArguments, DeserializeHandlerOptions, DeserializeHandlerOutput, DeserializeMiddleware, FinalizeHandler, FinalizeHandlerArguments, FinalizeHandlerOutput, FinalizeRequestHandlerOptions, FinalizeRequestMiddleware, Handler, HandlerExecutionContext, HandlerOptions, InitializeHandler, InitializeHandlerArguments, InitializeHandlerOptions, InitializeHandlerOutput, InitializeMiddleware, MiddlewareStack, MiddlewareType, Pluggable, Priority, Relation, RelativeLocation, RelativeMiddlewareOptions, SerializeHandler, SerializeHandlerArguments, SerializeHandlerOptions, SerializeHandlerOutput, SerializeMiddleware, Step, Terminalware, } from "@smithy/types"; +/** + * @internal + * Contains reserved keys for AWS SDK internal usage of the + * handler execution context object. 
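Aside (not part of the patch): `LogLevel` and `LoggerOptions` above only declare shapes; any level filtering is left to the consumer. A minimal sketch of one way to honor `logLevel` over a plain `Logger`, assuming the helper name `filteredLogger` and the `"warn"` default are illustrative choices:

```ts
import type { Logger } from "@smithy/types";
import type { LoggerOptions, LogLevel } from "@aws-sdk/types";

// Level order mirrors the LogLevel union, from most to least verbose.
const levels: LogLevel[] = ["all", "trace", "debug", "log", "info", "warn", "error", "off"];

// Hypothetical helper: wraps the configured logger, dropping entries whose
// level sorts below the configured logLevel.
function filteredLogger({ logger = console, logLevel = "warn" }: LoggerOptions = {}): Logger {
  const threshold = levels.indexOf(logLevel);
  const emit =
    (level: LogLevel, fn?: (...content: any[]) => void) =>
    (...content: any[]): void => {
      // Forward only when this entry's level is at or above the threshold.
      if (fn && levels.indexOf(level) >= threshold) fn(...content);
    };
  return {
    trace: emit("trace", logger.trace?.bind(logger)),
    debug: emit("debug", logger.debug.bind(logger)),
    info: emit("info", logger.info.bind(logger)),
    warn: emit("warn", logger.warn.bind(logger)),
    error: emit("error", logger.error.bind(logger)),
  };
}

// Usage: matching the doc example above, only warn/error entries get through.
const log = filteredLogger({ logLevel: "warn" });
log.info("dropped");
log.error("printed");
```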
+ */ +export interface AwsHandlerExecutionContext extends HandlerExecutionContext { + __aws_sdk_context?: { + features?: AwsSdkFeatures; + }; +} diff --git a/node_modules/@aws-sdk/types/dist-types/pagination.d.ts b/node_modules/@aws-sdk/types/dist-types/pagination.d.ts new file mode 100644 index 00000000..af791b0f --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/pagination.d.ts @@ -0,0 +1 @@ +export { PaginationConfiguration, Paginator } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/profile.d.ts b/node_modules/@aws-sdk/types/dist-types/profile.d.ts new file mode 100644 index 00000000..9916f3bf --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/profile.d.ts @@ -0,0 +1 @@ +export { IniSection, Profile, ParsedIniData, SharedConfigFiles } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/request.d.ts b/node_modules/@aws-sdk/types/dist-types/request.d.ts new file mode 100644 index 00000000..95405d1b --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/request.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export interface Request { + destination: URL; + body?: any; +} diff --git a/node_modules/@aws-sdk/types/dist-types/response.d.ts b/node_modules/@aws-sdk/types/dist-types/response.d.ts new file mode 100644 index 00000000..8d993504 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/response.d.ts @@ -0,0 +1,7 @@ +export { MetadataBearer, ResponseMetadata } from "@smithy/types"; +/** + * @internal + */ +export interface Response { + body: any; +} diff --git a/node_modules/@aws-sdk/types/dist-types/retry.d.ts b/node_modules/@aws-sdk/types/dist-types/retry.d.ts new file mode 100644 index 00000000..4b7eb98b --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/retry.d.ts @@ -0,0 +1 @@ +export { ExponentialBackoffJitterType, ExponentialBackoffStrategyOptions, RetryBackoffStrategy, RetryErrorInfo, RetryErrorType, RetryStrategyOptions, RetryStrategyV2, RetryToken, StandardRetryBackoffStrategy, StandardRetryToken, } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/serde.d.ts b/node_modules/@aws-sdk/types/dist-types/serde.d.ts new file mode 100644 index 00000000..c4cab796 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/serde.d.ts @@ -0,0 +1,24 @@ +export { EndpointBearer, StreamCollector, SerdeContext, ResponseDeserializer, RequestSerializer, SdkStreamMixin, SdkStream, WithSdkStreamMixin, SdkStreamMixinInjector, SdkStreamSerdeContext, } from "@smithy/types"; +/** + * @public + * + * Declare DOM interfaces in case dom.d.ts is not added to the tsconfig lib, causing + * interfaces to not be defined. For developers with dom.d.ts added, the interfaces will + * be merged correctly. + * + * This is also required for any clients with streaming interfaces where the corresponding + * types are also referred. The type is only declared here once since this `@aws-sdk/types` + * is depended by all `@aws-sdk` packages. 
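Aside (not part of the patch): the reserved `__aws_sdk_context` bag on `AwsHandlerExecutionContext` above is where the SDK records which `AwsSdkFeatures` codes an operation exercised. A sketch of how a middleware step might set one of those codes; the function name is hypothetical:

```ts
import type { AwsHandlerExecutionContext } from "@aws-sdk/types";

// Hypothetical middleware helper: marks that pagination was used for this
// operation, using the reserved context bag declared above.
function markPaginatorFeature(context: AwsHandlerExecutionContext): void {
  const bag = (context.__aws_sdk_context ??= {});
  (bag.features ??= {}).PAGINATOR = "C"; // "C" is the PAGINATOR feature code
}
```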
+ */ +declare global { + /** + * @public + */ + export interface ReadableStream { + } + /** + * @public + */ + export interface Blob { + } +} diff --git a/node_modules/@aws-sdk/types/dist-types/shapes.d.ts b/node_modules/@aws-sdk/types/dist-types/shapes.d.ts new file mode 100644 index 00000000..bc19cc78 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/shapes.d.ts @@ -0,0 +1 @@ +export { DocumentType, RetryableTrait, SmithyException, SdkError } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/signature.d.ts b/node_modules/@aws-sdk/types/dist-types/signature.d.ts new file mode 100644 index 00000000..23cbe97e --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/signature.d.ts @@ -0,0 +1 @@ +export { DateInput, EventSigner, EventSigningArguments, FormattedEvent, MessageSigner, RequestSigningArguments, RequestPresigner, RequestPresigningArguments, RequestSigner, SignableMessage, SignedMessage, SigningArguments, StringSigner, } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/stream.d.ts b/node_modules/@aws-sdk/types/dist-types/stream.d.ts new file mode 100644 index 00000000..9092844a --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/stream.d.ts @@ -0,0 +1 @@ +export { GetAwsChunkedEncodingStream, GetAwsChunkedEncodingStreamOptions } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/token.d.ts b/node_modules/@aws-sdk/types/dist-types/token.d.ts new file mode 100644 index 00000000..a68d58f8 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/token.d.ts @@ -0,0 +1,17 @@ +import { TokenIdentity } from "./identity"; +import { Provider } from "./util"; +/** + * @public + * + * An object representing temporary or permanent AWS token. + * + * @deprecated Use {@link TokenIdentity} + */ +export interface Token extends TokenIdentity { +} +/** + * @public + * + * @deprecated Use {@link TokenIdentityProvider} + */ +export type TokenProvider = Provider<Token>; diff --git a/node_modules/@aws-sdk/types/dist-types/transfer.d.ts b/node_modules/@aws-sdk/types/dist-types/transfer.d.ts new file mode 100644 index 00000000..ba781903 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/transfer.d.ts @@ -0,0 +1 @@ +export { RequestContext, RequestHandler, RequestHandlerMetadata, RequestHandlerOutput, RequestHandlerProtocol, } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/abort.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/abort.d.ts new file mode 100644 index 00000000..dad6079f --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/abort.d.ts @@ -0,0 +1 @@ +export { AbortController, AbortHandler, AbortSignal } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/auth.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/auth.d.ts new file mode 100644 index 00000000..8a02dbc8 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/auth.d.ts @@ -0,0 +1,5 @@ +export { + AuthScheme, + HttpAuthDefinition, + HttpAuthLocation, +} from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/blob/blob-types.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/blob/blob-types.d.ts new file mode 100644 index 00000000..df39efe0 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/blob/blob-types.d.ts @@ -0,0 +1,2 @@ +import { BlobTypes } from "@smithy/types"; +export { BlobTypes }; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/checksum.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/checksum.d.ts new
file mode 100644 index 00000000..f805d729 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/checksum.d.ts @@ -0,0 +1 @@ +export { Checksum, ChecksumConstructor } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/client.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/client.d.ts new file mode 100644 index 00000000..d6b3dcf9 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/client.d.ts @@ -0,0 +1 @@ +export { Client } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/command.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/command.d.ts new file mode 100644 index 00000000..38872670 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/command.d.ts @@ -0,0 +1 @@ +export { Command } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/connection.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/connection.d.ts new file mode 100644 index 00000000..36ebd004 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/connection.d.ts @@ -0,0 +1,6 @@ +export { + ConnectConfiguration, + ConnectionManager, + ConnectionManagerConfiguration, + ConnectionPool, +} from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/credentials.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/credentials.d.ts new file mode 100644 index 00000000..6c91a353 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/credentials.d.ts @@ -0,0 +1,13 @@ +import { Logger } from "@smithy/types"; +import { AwsCredentialIdentity } from "./identity"; +import { Provider } from "./util"; +export interface Credentials extends AwsCredentialIdentity {} +export type CredentialProvider = Provider<Credentials>; +export type CredentialProviderOptions = { + logger?: Logger; + parentClientConfig?: { + region?: string | Provider<string>; + profile?: string; + [key: string]: unknown; + }; +}; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/crypto.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/crypto.d.ts new file mode 100644 index 00000000..dfe61bfe --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/crypto.d.ts @@ -0,0 +1,7 @@ +export { + Hash, + HashConstructor, + StreamHasher, + randomValues, + SourceData, +} from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/dns.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/dns.d.ts new file mode 100644 index 00000000..d8999492 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/dns.d.ts @@ -0,0 +1,19 @@ +export declare enum HostAddressType { + AAAA = "AAAA", + A = "A", +} +export interface HostAddress { + addressType: HostAddressType; + address: string; + hostName: string; + service?: string; +} +export interface HostResolverArguments { + hostName: string; + service?: string; +} +export interface HostResolver { + resolveAddress(args: HostResolverArguments): Promise<HostAddress[]>; + reportFailureOnAddress(addr: HostAddress): void; + purgeCache(args?: HostResolverArguments): void; +} diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/encode.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/encode.d.ts new file mode 100644 index 00000000..76966f9b --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/encode.d.ts @@ -0,0 +1,6 @@ +export { + MessageDecoder, + MessageEncoder, + AvailableMessage, + AvailableMessages, +} from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/endpoint.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/endpoint.d.ts new file
mode 100644 index 00000000..ff3c7ded --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/endpoint.d.ts @@ -0,0 +1,9 @@ +export { + EndpointARN, + EndpointPartition, + EndpointURLScheme, + EndpointURL, + EndpointObjectProperty, + EndpointV2, + EndpointParameters, +} from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/eventStream.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/eventStream.d.ts new file mode 100644 index 00000000..e4c04a9b --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/eventStream.d.ts @@ -0,0 +1,24 @@ +export { + Message, + MessageHeaders, + BooleanHeaderValue, + ByteHeaderValue, + ShortHeaderValue, + IntegerHeaderValue, + LongHeaderValue, + BinaryHeaderValue, + StringHeaderValue, + TimestampHeaderValue, + UuidHeaderValue, + MessageHeaderValue, + Int64, + EventStreamSerdeContext, + EventStreamMarshaller, + EventStreamMarshallerDeserFn, + EventStreamMarshallerSerFn, + EventStreamPayloadHandler, + EventStreamPayloadHandlerProvider, + EventStreamRequestSigner, + EventStreamSerdeProvider, + EventStreamSignerProvider, +} from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/extensions/index.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/extensions/index.d.ts new file mode 100644 index 00000000..accf5ec7 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/extensions/index.d.ts @@ -0,0 +1,5 @@ +import { Provider } from "@smithy/types"; +export interface AwsRegionExtensionConfiguration { + setRegion(region: Provider<string>): void; + region(): Provider<string>; +} diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/feature-ids.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/feature-ids.d.ts new file mode 100644 index 00000000..6d575096 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/feature-ids.d.ts @@ -0,0 +1,54 @@ +export type AwsSdkFeatures = Partial<{ + RESOURCE_MODEL: "A"; + WAITER: "B"; + PAGINATOR: "C"; + RETRY_MODE_LEGACY: "D"; + RETRY_MODE_STANDARD: "E"; + RETRY_MODE_ADAPTIVE: "F"; + S3_EXPRESS_BUCKET: "J"; + S3_ACCESS_GRANTS: "K"; + GZIP_REQUEST_COMPRESSION: "L"; + PROTOCOL_RPC_V2_CBOR: "M"; + ENDPOINT_OVERRIDE: "N"; + ACCOUNT_ID_ENDPOINT: "O"; + ACCOUNT_ID_MODE_PREFERRED: "P"; + ACCOUNT_ID_MODE_DISABLED: "Q"; + ACCOUNT_ID_MODE_REQUIRED: "R"; + SIGV4A_SIGNING: "S"; + FLEXIBLE_CHECKSUMS_REQ_CRC32: "U"; + FLEXIBLE_CHECKSUMS_REQ_CRC32C: "V"; + FLEXIBLE_CHECKSUMS_REQ_CRC64: "W"; + FLEXIBLE_CHECKSUMS_REQ_SHA1: "X"; + FLEXIBLE_CHECKSUMS_REQ_SHA256: "Y"; + FLEXIBLE_CHECKSUMS_REQ_WHEN_SUPPORTED: "Z"; + FLEXIBLE_CHECKSUMS_REQ_WHEN_REQUIRED: "a"; + FLEXIBLE_CHECKSUMS_RES_WHEN_SUPPORTED: "b"; + FLEXIBLE_CHECKSUMS_RES_WHEN_REQUIRED: "c"; + DDB_MAPPER: "d"; +}> & + AwsSdkCredentialsFeatures; +export type AwsSdkCredentialsFeatures = Partial<{ + RESOLVED_ACCOUNT_ID: "T"; + CREDENTIALS_CODE: "e"; + CREDENTIALS_ENV_VARS: "g"; + CREDENTIALS_ENV_VARS_STS_WEB_ID_TOKEN: "h"; + CREDENTIALS_STS_ASSUME_ROLE: "i"; + CREDENTIALS_STS_ASSUME_ROLE_SAML: "j"; + CREDENTIALS_STS_ASSUME_ROLE_WEB_ID: "k"; + CREDENTIALS_STS_FEDERATION_TOKEN: "l"; + CREDENTIALS_STS_SESSION_TOKEN: "m"; + CREDENTIALS_PROFILE: "n"; + CREDENTIALS_PROFILE_SOURCE_PROFILE: "o"; + CREDENTIALS_PROFILE_NAMED_PROVIDER: "p"; + CREDENTIALS_PROFILE_STS_WEB_ID_TOKEN: "q"; + CREDENTIALS_PROFILE_SSO: "r"; + CREDENTIALS_SSO: "s"; + CREDENTIALS_PROFILE_SSO_LEGACY: "t"; + CREDENTIALS_SSO_LEGACY: "u"; + CREDENTIALS_PROFILE_PROCESS: "v"; + CREDENTIALS_PROCESS: "w"; + CREDENTIALS_BOTO2_CONFIG_FILE: "x"; + CREDENTIALS_AWS_SDK_STORE:
"y"; + CREDENTIALS_HTTP: "z"; + CREDENTIALS_IMDS: "0"; +}>; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/function.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/function.d.ts new file mode 100644 index 00000000..d6efac50 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/function.d.ts @@ -0,0 +1,7 @@ +export type MergeFunctions<F1, F2> = F1 extends (arg: infer A1) => infer R1 + ? F2 extends (arg: infer A2) => infer R2 + ? R1 extends Promise<any> + ? (arg?: A1 & A2) => Promise<Awaited<R1> & Awaited<R2>> + : (arg?: A1 & A2) => R1 & R2 + : never + : never; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/http.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/http.d.ts new file mode 100644 index 00000000..d8e0eab8 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/http.d.ts @@ -0,0 +1,17 @@ +import { HttpResponse } from "@smithy/types"; +export { + Endpoint, + HeaderBag, + HttpHandlerOptions, + HttpMessage, + HttpRequest, + HttpResponse, + QueryParameterBag, +} from "@smithy/types"; +export interface Headers extends Map<string, string> { + withHeader(headerName: string, headerValue: string): Headers; + withoutHeader(headerName: string): Headers; +} +export interface ResolvedHttpResponse extends HttpResponse { + body: string; +} diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AnonymousIdentity.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AnonymousIdentity.d.ts new file mode 100644 index 00000000..5b175f60 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AnonymousIdentity.d.ts @@ -0,0 +1,2 @@ +import { Identity } from "./Identity"; +export interface AnonymousIdentity extends Identity {} diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AwsCredentialIdentity.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AwsCredentialIdentity.d.ts new file mode 100644 index 00000000..aaec3583 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AwsCredentialIdentity.d.ts @@ -0,0 +1,30 @@ +import { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + Logger, + RequestHandler, +} from "@smithy/types"; +import { AwsSdkCredentialsFeatures } from "../feature-ids"; +export { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + IdentityProvider, +} from "@smithy/types"; +export interface AwsIdentityProperties { + callerClientConfig?: { + credentials?: AwsCredentialIdentity | AwsCredentialIdentityProvider; + credentialDefaultProvider?: (input?: any) => AwsCredentialIdentityProvider; + logger?: Logger; + profile?: string; + region(): Promise<string>; + requestHandler?: RequestHandler; + }; +} +export type RuntimeConfigIdentityProvider<T> = ( + awsIdentityProperties?: AwsIdentityProperties +) => Promise<T>; +export type RuntimeConfigAwsCredentialIdentityProvider = + RuntimeConfigIdentityProvider<AwsCredentialIdentity>; +export type AttributedAwsCredentialIdentity = AwsCredentialIdentity & { + $source?: AwsSdkCredentialsFeatures; +}; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/Identity.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/Identity.d.ts new file mode 100644 index 00000000..4175fd34 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/Identity.d.ts @@ -0,0 +1 @@ +export { Identity, IdentityProvider } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/LoginIdentity.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/LoginIdentity.d.ts new file mode 100644 index 00000000..3258bbb2 --- /dev/null +++
b/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/LoginIdentity.d.ts @@ -0,0 +1,6 @@ +import { Identity, IdentityProvider } from "./Identity"; +export interface LoginIdentity extends Identity { + readonly username: string; + readonly password: string; +} +export type LoginIdentityProvider = IdentityProvider<LoginIdentity>; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/TokenIdentity.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/TokenIdentity.d.ts new file mode 100644 index 00000000..66301bc7 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/TokenIdentity.d.ts @@ -0,0 +1 @@ +export { TokenIdentity, TokenIdentityProvider } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/index.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/index.d.ts new file mode 100644 index 00000000..863e78e8 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/index.d.ts @@ -0,0 +1,5 @@ +export * from "./AnonymousIdentity"; +export * from "./AwsCredentialIdentity"; +export * from "./Identity"; +export * from "./LoginIdentity"; +export * from "./TokenIdentity"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..a7f99d93 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/index.d.ts @@ -0,0 +1,34 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./credentials"; +export * from "./crypto"; +export * from "./dns"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./function"; +export * from "./http"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./request"; +export * from "./response"; +export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./token"; +export * from "./transfer"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/logger.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/logger.d.ts new file mode 100644 index 00000000..c7149157 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/logger.d.ts @@ -0,0 +1,15 @@ +import { Logger } from "@smithy/types"; +export { Logger } from "@smithy/types"; +export type LogLevel = + | "all" + | "trace" + | "debug" + | "log" + | "info" + | "warn" + | "error" + | "off"; +export interface LoggerOptions { + logger?: Logger; + logLevel?: LogLevel; +} diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/middleware.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/middleware.d.ts new file mode 100644 index 00000000..e101e9bb --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/middleware.d.ts @@ -0,0 +1,47 @@ +import { HandlerExecutionContext } from "@smithy/types"; +import { AwsSdkFeatures } from "./feature-ids"; +export { + AbsoluteLocation, + BuildHandler, + BuildHandlerArguments, + BuildHandlerOptions, + BuildHandlerOutput, + BuildMiddleware, + DeserializeHandler, + DeserializeHandlerArguments, + DeserializeHandlerOptions, +
DeserializeHandlerOutput, + DeserializeMiddleware, + FinalizeHandler, + FinalizeHandlerArguments, + FinalizeHandlerOutput, + FinalizeRequestHandlerOptions, + FinalizeRequestMiddleware, + Handler, + HandlerExecutionContext, + HandlerOptions, + InitializeHandler, + InitializeHandlerArguments, + InitializeHandlerOptions, + InitializeHandlerOutput, + InitializeMiddleware, + MiddlewareStack, + MiddlewareType, + Pluggable, + Priority, + Relation, + RelativeLocation, + RelativeMiddlewareOptions, + SerializeHandler, + SerializeHandlerArguments, + SerializeHandlerOptions, + SerializeHandlerOutput, + SerializeMiddleware, + Step, + Terminalware, +} from "@smithy/types"; +export interface AwsHandlerExecutionContext extends HandlerExecutionContext { + __aws_sdk_context?: { + features?: AwsSdkFeatures; + }; +} diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/pagination.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/pagination.d.ts new file mode 100644 index 00000000..af791b0f --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/pagination.d.ts @@ -0,0 +1 @@ +export { PaginationConfiguration, Paginator } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/profile.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/profile.d.ts new file mode 100644 index 00000000..b3813d83 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/profile.d.ts @@ -0,0 +1,6 @@ +export { + IniSection, + Profile, + ParsedIniData, + SharedConfigFiles, +} from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/request.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/request.d.ts new file mode 100644 index 00000000..5c6e7938 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/request.d.ts @@ -0,0 +1,4 @@ +export interface Request { + destination: URL; + body?: any; +} diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/response.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/response.d.ts new file mode 100644 index 00000000..4e5fcd0e --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/response.d.ts @@ -0,0 +1,4 @@ +export { MetadataBearer, ResponseMetadata } from "@smithy/types"; +export interface Response { + body: any; +} diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/retry.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/retry.d.ts new file mode 100644 index 00000000..8fc946ad --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/retry.d.ts @@ -0,0 +1,12 @@ +export { + ExponentialBackoffJitterType, + ExponentialBackoffStrategyOptions, + RetryBackoffStrategy, + RetryErrorInfo, + RetryErrorType, + RetryStrategyOptions, + RetryStrategyV2, + RetryToken, + StandardRetryBackoffStrategy, + StandardRetryToken, +} from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/serde.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/serde.d.ts new file mode 100644 index 00000000..a7ed76f3 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/serde.d.ts @@ -0,0 +1,16 @@ +export { + EndpointBearer, + StreamCollector, + SerdeContext, + ResponseDeserializer, + RequestSerializer, + SdkStreamMixin, + SdkStream, + WithSdkStreamMixin, + SdkStreamMixinInjector, + SdkStreamSerdeContext, +} from "@smithy/types"; +declare global { + export interface ReadableStream {} + export interface Blob {} +} diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/shapes.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/shapes.d.ts new file mode 100644 index 00000000..d1efa9a0 --- 
/dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/shapes.d.ts @@ -0,0 +1,6 @@ +export { + DocumentType, + RetryableTrait, + SmithyException, + SdkError, +} from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/signature.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/signature.d.ts new file mode 100644 index 00000000..cbabd757 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/signature.d.ts @@ -0,0 +1,15 @@ +export { + DateInput, + EventSigner, + EventSigningArguments, + FormattedEvent, + MessageSigner, + RequestSigningArguments, + RequestPresigner, + RequestPresigningArguments, + RequestSigner, + SignableMessage, + SignedMessage, + SigningArguments, + StringSigner, +} from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/stream.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/stream.d.ts new file mode 100644 index 00000000..1b794136 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/stream.d.ts @@ -0,0 +1,4 @@ +export { + GetAwsChunkedEncodingStream, + GetAwsChunkedEncodingStreamOptions, +} from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/token.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/token.d.ts new file mode 100644 index 00000000..c33e5066 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/token.d.ts @@ -0,0 +1,4 @@ +import { TokenIdentity } from "./identity"; +import { Provider } from "./util"; +export interface Token extends TokenIdentity {} +export type TokenProvider = Provider<Token>; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/transfer.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/transfer.d.ts new file mode 100644 index 00000000..04a7f87a --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/transfer.d.ts @@ -0,0 +1,7 @@ +export { + RequestContext, + RequestHandler, + RequestHandlerMetadata, + RequestHandlerOutput, + RequestHandlerProtocol, +} from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/uri.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/uri.d.ts new file mode 100644 index 00000000..297dfe46 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/uri.d.ts @@ -0,0 +1 @@ +export { URI } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/util.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/util.d.ts new file mode 100644 index 00000000..e7e43e62 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/util.d.ts @@ -0,0 +1,14 @@ +export { + Encoder, + Decoder, + Provider, + UserAgentPair, + UserAgent, + UrlParser, + MemoizedProvider, + BodyLengthCalculator, + RegionInfo, + RegionInfoProviderOptions, + RegionInfoProvider, + RetryStrategy, +} from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/ts3.4/waiter.d.ts b/node_modules/@aws-sdk/types/dist-types/ts3.4/waiter.d.ts new file mode 100644 index 00000000..bb98020d --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/ts3.4/waiter.d.ts @@ -0,0 +1 @@ +export { WaiterConfiguration } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/uri.d.ts b/node_modules/@aws-sdk/types/dist-types/uri.d.ts new file mode 100644 index 00000000..297dfe46 --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/uri.d.ts @@ -0,0 +1 @@ +export { URI } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/util.d.ts b/node_modules/@aws-sdk/types/dist-types/util.d.ts new file mode 100644 index 00000000..fd059b64 --- /dev/null +++
b/node_modules/@aws-sdk/types/dist-types/util.d.ts @@ -0,0 +1 @@ +export { Encoder, Decoder, Provider, UserAgentPair, UserAgent, UrlParser, MemoizedProvider, BodyLengthCalculator, RegionInfo, RegionInfoProviderOptions, RegionInfoProvider, RetryStrategy, } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/dist-types/waiter.d.ts b/node_modules/@aws-sdk/types/dist-types/waiter.d.ts new file mode 100644 index 00000000..bb98020d --- /dev/null +++ b/node_modules/@aws-sdk/types/dist-types/waiter.d.ts @@ -0,0 +1 @@ +export { WaiterConfiguration } from "@smithy/types"; diff --git a/node_modules/@aws-sdk/types/package.json b/node_modules/@aws-sdk/types/package.json new file mode 100755 index 00000000..eaf5c446 --- /dev/null +++ b/node_modules/@aws-sdk/types/package.json @@ -0,0 +1,56 @@ +{ + "name": "@aws-sdk/types", + "version": "3.775.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "description": "Types for the AWS SDK", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline types", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "tsc -p tsconfig.test.json" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/types", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/types" + }, + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "browser": {}, + "react-native": {} +} diff --git a/node_modules/@aws-sdk/util-arn-parser/LICENSE b/node_modules/@aws-sdk/util-arn-parser/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@aws-sdk/util-arn-parser/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/node_modules/@aws-sdk/util-arn-parser/README.md b/node_modules/@aws-sdk/util-arn-parser/README.md new file mode 100644 index 00000000..450cd867 --- /dev/null +++ b/node_modules/@aws-sdk/util-arn-parser/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/util-arn-parser + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-arn-parser/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-arn-parser) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-arn-parser.svg)](https://www.npmjs.com/package/@aws-sdk/util-arn-parser) diff --git a/node_modules/@aws-sdk/util-arn-parser/dist-cjs/index.js b/node_modules/@aws-sdk/util-arn-parser/dist-cjs/index.js new file mode 100644 index 00000000..d5fbb9f0 --- /dev/null +++ b/node_modules/@aws-sdk/util-arn-parser/dist-cjs/index.js @@ -0,0 +1,65 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + build: () => build, + parse: () => parse, + validate: () => validate +}); +module.exports = __toCommonJS(src_exports); +var validate = /* @__PURE__ */ __name((str) => typeof str === "string" && str.indexOf("arn:") === 0 && str.split(":").length >= 6, "validate"); +var parse = /* @__PURE__ */ __name((arn) => { + const segments = arn.split(":"); + if (segments.length < 6 || segments[0] !== "arn") + throw new Error("Malformed ARN"); + const [ + , + //Skip "arn" literal + partition, + service, + region, + accountId, + ...resource + ] = segments; + return { + partition, + service, + region, + accountId, + resource: resource.join(":") + }; +}, "parse"); +var build = /* @__PURE__ */ __name((arnObject) => { + const { partition = "aws", service, region, accountId, resource } = arnObject; + if ([service, region, accountId, resource].some((segment) => typeof segment !== "string")) { + throw new Error("Input ARN object is invalid"); + } + return `arn:${partition}:${service}:${region}:${accountId}:${resource}`; +}, "build"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + validate, + parse, + build +}); + diff --git a/node_modules/@aws-sdk/util-arn-parser/dist-es/index.js b/node_modules/@aws-sdk/util-arn-parser/dist-es/index.js new file mode 100644 index 00000000..1f24b915 --- /dev/null +++ b/node_modules/@aws-sdk/util-arn-parser/dist-es/index.js @@ -0,0 +1,21 @@ +export const validate = (str) => typeof str === "string" && str.indexOf("arn:") === 0 && str.split(":").length >= 6; +export const parse = (arn) => { + const segments = arn.split(":"); + if (segments.length < 6 || segments[0] !== "arn") + throw new Error("Malformed ARN"); + const [, partition, service, region, accountId, ...resource] = 
segments; + return { + partition, + service, + region, + accountId, + resource: resource.join(":"), + }; +}; +export const build = (arnObject) => { + const { partition = "aws", service, region, accountId, resource } = arnObject; + if ([service, region, accountId, resource].some((segment) => typeof segment !== "string")) { + throw new Error("Input ARN object is invalid"); + } + return `arn:${partition}:${service}:${region}:${accountId}:${resource}`; +}; diff --git a/node_modules/@aws-sdk/util-arn-parser/dist-types/index.d.ts b/node_modules/@aws-sdk/util-arn-parser/dist-types/index.d.ts new file mode 100644 index 00000000..3bcc3e56 --- /dev/null +++ b/node_modules/@aws-sdk/util-arn-parser/dist-types/index.d.ts @@ -0,0 +1,23 @@ +export interface ARN { + partition: string; + service: string; + region: string; + accountId: string; + resource: string; +} +/** + * Validate whether a string is an ARN. + */ +export declare const validate: (str: any) => boolean; +/** + * Parse an ARN string into structure with partition, service, region, accountId and resource values + */ +export declare const parse: (arn: string) => ARN; +type buildOptions = Omit<ARN, "partition"> & { + partition?: string; +}; +/** + * Build an ARN with service, partition, region, accountId, and resources strings + */ +export declare const build: (arnObject: buildOptions) => string; +export {}; diff --git a/node_modules/@aws-sdk/util-arn-parser/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/util-arn-parser/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..56c943c3 --- /dev/null +++ b/node_modules/@aws-sdk/util-arn-parser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,14 @@ +export interface ARN { + partition: string; + service: string; + region: string; + accountId: string; + resource: string; +} +export declare const validate: (str: any) => boolean; +export declare const parse: (arn: string) => ARN; +type buildOptions = Pick<ARN, Exclude<keyof ARN, "partition">> & { + partition?: string; +}; +export declare const build: (arnObject: buildOptions) => string; +export {}; diff --git a/node_modules/@aws-sdk/util-arn-parser/package.json b/node_modules/@aws-sdk/util-arn-parser/package.json new file mode 100644 index 00000000..6d137969 --- /dev/null +++ b/node_modules/@aws-sdk/util-arn-parser/package.json @@ -0,0 +1,54 @@ +{ + "name": "@aws-sdk/util-arn-parser", + "version": "3.723.0", + "description": "A parser to Amazon Resource Names", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-arn-parser", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + },
"files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-arn-parser", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-arn-parser" + } +} diff --git a/node_modules/@aws-sdk/util-endpoints/LICENSE b/node_modules/@aws-sdk/util-endpoints/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@aws-sdk/util-endpoints/README.md b/node_modules/@aws-sdk/util-endpoints/README.md new file mode 100644 index 00000000..641f54a2 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/README.md @@ -0,0 +1,6 @@ +# @aws-sdk/util-endpoints + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-endpoints/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-endpoints) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-endpoints.svg)](https://www.npmjs.com/package/@aws-sdk/util-endpoints) + +> An internal package diff --git a/node_modules/@aws-sdk/util-endpoints/dist-cjs/index.js b/node_modules/@aws-sdk/util-endpoints/dist-cjs/index.js new file mode 100644 index 00000000..ee0a9321 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-cjs/index.js @@ -0,0 +1,450 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + ConditionObject: () => import_util_endpoints.ConditionObject, + DeprecatedObject: () => import_util_endpoints.DeprecatedObject, + EndpointError: () => import_util_endpoints.EndpointError, + EndpointObject: () => import_util_endpoints.EndpointObject, + EndpointObjectHeaders: () => import_util_endpoints.EndpointObjectHeaders, + EndpointObjectProperties: () => import_util_endpoints.EndpointObjectProperties, + EndpointParams: () => import_util_endpoints.EndpointParams, + EndpointResolverOptions: () => import_util_endpoints.EndpointResolverOptions, + EndpointRuleObject: () => import_util_endpoints.EndpointRuleObject, + ErrorRuleObject: () => import_util_endpoints.ErrorRuleObject, + EvaluateOptions: () => import_util_endpoints.EvaluateOptions, + Expression: () => import_util_endpoints.Expression, + FunctionArgv: () => import_util_endpoints.FunctionArgv, + FunctionObject: () => import_util_endpoints.FunctionObject, 
+ FunctionReturn: () => import_util_endpoints.FunctionReturn, + ParameterObject: () => import_util_endpoints.ParameterObject, + ReferenceObject: () => import_util_endpoints.ReferenceObject, + ReferenceRecord: () => import_util_endpoints.ReferenceRecord, + RuleSetObject: () => import_util_endpoints.RuleSetObject, + RuleSetRules: () => import_util_endpoints.RuleSetRules, + TreeRuleObject: () => import_util_endpoints.TreeRuleObject, + awsEndpointFunctions: () => awsEndpointFunctions, + getUserAgentPrefix: () => getUserAgentPrefix, + isIpAddress: () => import_util_endpoints.isIpAddress, + partition: () => partition, + resolveEndpoint: () => import_util_endpoints.resolveEndpoint, + setPartitionInfo: () => setPartitionInfo, + useDefaultPartitionInfo: () => useDefaultPartitionInfo +}); +module.exports = __toCommonJS(index_exports); + +// src/aws.ts + + +// src/lib/aws/isVirtualHostableS3Bucket.ts + + +// src/lib/isIpAddress.ts +var import_util_endpoints = require("@smithy/util-endpoints"); + +// src/lib/aws/isVirtualHostableS3Bucket.ts +var isVirtualHostableS3Bucket = /* @__PURE__ */ __name((value, allowSubDomains = false) => { + if (allowSubDomains) { + for (const label of value.split(".")) { + if (!isVirtualHostableS3Bucket(label)) { + return false; + } + } + return true; + } + if (!(0, import_util_endpoints.isValidHostLabel)(value)) { + return false; + } + if (value.length < 3 || value.length > 63) { + return false; + } + if (value !== value.toLowerCase()) { + return false; + } + if ((0, import_util_endpoints.isIpAddress)(value)) { + return false; + } + return true; +}, "isVirtualHostableS3Bucket"); + +// src/lib/aws/parseArn.ts +var ARN_DELIMITER = ":"; +var RESOURCE_DELIMITER = "/"; +var parseArn = /* @__PURE__ */ __name((value) => { + const segments = value.split(ARN_DELIMITER); + if (segments.length < 6) return null; + const [arn, partition2, service, region, accountId, ...resourcePath] = segments; + if (arn !== "arn" || partition2 === "" || service === "" || resourcePath.join(ARN_DELIMITER) === "") return null; + const resourceId = resourcePath.map((resource) => resource.split(RESOURCE_DELIMITER)).flat(); + return { + partition: partition2, + service, + region, + accountId, + resourceId + }; +}, "parseArn"); + +// src/lib/aws/partitions.json +var partitions_default = { + partitions: [{ + id: "aws", + outputs: { + dnsSuffix: "amazonaws.com", + dualStackDnsSuffix: "api.aws", + implicitGlobalRegion: "us-east-1", + name: "aws", + supportsDualStack: true, + supportsFIPS: true + }, + regionRegex: "^(us|eu|ap|sa|ca|me|af|il|mx)\\-\\w+\\-\\d+$", + regions: { + "af-south-1": { + description: "Africa (Cape Town)" + }, + "ap-east-1": { + description: "Asia Pacific (Hong Kong)" + }, + "ap-northeast-1": { + description: "Asia Pacific (Tokyo)" + }, + "ap-northeast-2": { + description: "Asia Pacific (Seoul)" + }, + "ap-northeast-3": { + description: "Asia Pacific (Osaka)" + }, + "ap-south-1": { + description: "Asia Pacific (Mumbai)" + }, + "ap-south-2": { + description: "Asia Pacific (Hyderabad)" + }, + "ap-southeast-1": { + description: "Asia Pacific (Singapore)" + }, + "ap-southeast-2": { + description: "Asia Pacific (Sydney)" + }, + "ap-southeast-3": { + description: "Asia Pacific (Jakarta)" + }, + "ap-southeast-4": { + description: "Asia Pacific (Melbourne)" + }, + "ap-southeast-5": { + description: "Asia Pacific (Malaysia)" + }, + "ap-southeast-7": { + description: "Asia Pacific (Thailand)" + }, + "aws-global": { + description: "AWS Standard global region" + }, + "ca-central-1": { + description: 
"Canada (Central)" + }, + "ca-west-1": { + description: "Canada West (Calgary)" + }, + "eu-central-1": { + description: "Europe (Frankfurt)" + }, + "eu-central-2": { + description: "Europe (Zurich)" + }, + "eu-north-1": { + description: "Europe (Stockholm)" + }, + "eu-south-1": { + description: "Europe (Milan)" + }, + "eu-south-2": { + description: "Europe (Spain)" + }, + "eu-west-1": { + description: "Europe (Ireland)" + }, + "eu-west-2": { + description: "Europe (London)" + }, + "eu-west-3": { + description: "Europe (Paris)" + }, + "il-central-1": { + description: "Israel (Tel Aviv)" + }, + "me-central-1": { + description: "Middle East (UAE)" + }, + "me-south-1": { + description: "Middle East (Bahrain)" + }, + "mx-central-1": { + description: "Mexico (Central)" + }, + "sa-east-1": { + description: "South America (Sao Paulo)" + }, + "us-east-1": { + description: "US East (N. Virginia)" + }, + "us-east-2": { + description: "US East (Ohio)" + }, + "us-west-1": { + description: "US West (N. California)" + }, + "us-west-2": { + description: "US West (Oregon)" + } + } + }, { + id: "aws-cn", + outputs: { + dnsSuffix: "amazonaws.com.cn", + dualStackDnsSuffix: "api.amazonwebservices.com.cn", + implicitGlobalRegion: "cn-northwest-1", + name: "aws-cn", + supportsDualStack: true, + supportsFIPS: true + }, + regionRegex: "^cn\\-\\w+\\-\\d+$", + regions: { + "aws-cn-global": { + description: "AWS China global region" + }, + "cn-north-1": { + description: "China (Beijing)" + }, + "cn-northwest-1": { + description: "China (Ningxia)" + } + } + }, { + id: "aws-us-gov", + outputs: { + dnsSuffix: "amazonaws.com", + dualStackDnsSuffix: "api.aws", + implicitGlobalRegion: "us-gov-west-1", + name: "aws-us-gov", + supportsDualStack: true, + supportsFIPS: true + }, + regionRegex: "^us\\-gov\\-\\w+\\-\\d+$", + regions: { + "aws-us-gov-global": { + description: "AWS GovCloud (US) global region" + }, + "us-gov-east-1": { + description: "AWS GovCloud (US-East)" + }, + "us-gov-west-1": { + description: "AWS GovCloud (US-West)" + } + } + }, { + id: "aws-iso", + outputs: { + dnsSuffix: "c2s.ic.gov", + dualStackDnsSuffix: "c2s.ic.gov", + implicitGlobalRegion: "us-iso-east-1", + name: "aws-iso", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^us\\-iso\\-\\w+\\-\\d+$", + regions: { + "aws-iso-global": { + description: "AWS ISO (US) global region" + }, + "us-iso-east-1": { + description: "US ISO East" + }, + "us-iso-west-1": { + description: "US ISO WEST" + } + } + }, { + id: "aws-iso-b", + outputs: { + dnsSuffix: "sc2s.sgov.gov", + dualStackDnsSuffix: "sc2s.sgov.gov", + implicitGlobalRegion: "us-isob-east-1", + name: "aws-iso-b", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^us\\-isob\\-\\w+\\-\\d+$", + regions: { + "aws-iso-b-global": { + description: "AWS ISOB (US) global region" + }, + "us-isob-east-1": { + description: "US ISOB East (Ohio)" + } + } + }, { + id: "aws-iso-e", + outputs: { + dnsSuffix: "cloud.adc-e.uk", + dualStackDnsSuffix: "cloud.adc-e.uk", + implicitGlobalRegion: "eu-isoe-west-1", + name: "aws-iso-e", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^eu\\-isoe\\-\\w+\\-\\d+$", + regions: { + "aws-iso-e-global": { + description: "AWS ISOE (Europe) global region" + }, + "eu-isoe-west-1": { + description: "EU ISOE West" + } + } + }, { + id: "aws-iso-f", + outputs: { + dnsSuffix: "csp.hci.ic.gov", + dualStackDnsSuffix: "csp.hci.ic.gov", + implicitGlobalRegion: "us-isof-south-1", + name: "aws-iso-f", + supportsDualStack: false, + 
supportsFIPS: true + }, + regionRegex: "^us\\-isof\\-\\w+\\-\\d+$", + regions: { + "aws-iso-f-global": { + description: "AWS ISOF global region" + }, + "us-isof-east-1": { + description: "US ISOF EAST" + }, + "us-isof-south-1": { + description: "US ISOF SOUTH" + } + } + }, { + id: "aws-eusc", + outputs: { + dnsSuffix: "amazonaws.eu", + dualStackDnsSuffix: "amazonaws.eu", + implicitGlobalRegion: "eusc-de-east-1", + name: "aws-eusc", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^eusc\\-(de)\\-\\w+\\-\\d+$", + regions: { + "eusc-de-east-1": { + description: "EU (Germany)" + } + } + }], + version: "1.1" +}; + +// src/lib/aws/partition.ts +var selectedPartitionsInfo = partitions_default; +var selectedUserAgentPrefix = ""; +var partition = /* @__PURE__ */ __name((value) => { + const { partitions } = selectedPartitionsInfo; + for (const partition2 of partitions) { + const { regions, outputs } = partition2; + for (const [region, regionData] of Object.entries(regions)) { + if (region === value) { + return { + ...outputs, + ...regionData + }; + } + } + } + for (const partition2 of partitions) { + const { regionRegex, outputs } = partition2; + if (new RegExp(regionRegex).test(value)) { + return { + ...outputs + }; + } + } + const DEFAULT_PARTITION = partitions.find((partition2) => partition2.id === "aws"); + if (!DEFAULT_PARTITION) { + throw new Error( + "Provided region was not found in the partition array or regex, and default partition with id 'aws' doesn't exist." + ); + } + return { + ...DEFAULT_PARTITION.outputs + }; +}, "partition"); +var setPartitionInfo = /* @__PURE__ */ __name((partitionsInfo, userAgentPrefix = "") => { + selectedPartitionsInfo = partitionsInfo; + selectedUserAgentPrefix = userAgentPrefix; +}, "setPartitionInfo"); +var useDefaultPartitionInfo = /* @__PURE__ */ __name(() => { + setPartitionInfo(partitions_default, ""); +}, "useDefaultPartitionInfo"); +var getUserAgentPrefix = /* @__PURE__ */ __name(() => selectedUserAgentPrefix, "getUserAgentPrefix"); + +// src/aws.ts +var awsEndpointFunctions = { + isVirtualHostableS3Bucket, + parseArn, + partition +}; +import_util_endpoints.customEndpointFunctions.aws = awsEndpointFunctions; + +// src/resolveEndpoint.ts + + +// src/types/EndpointError.ts + + +// src/types/EndpointRuleObject.ts + + +// src/types/ErrorRuleObject.ts + + +// src/types/RuleSetObject.ts + + +// src/types/TreeRuleObject.ts + + +// src/types/shared.ts + +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + awsEndpointFunctions, + partition, + setPartitionInfo, + useDefaultPartitionInfo, + getUserAgentPrefix, + isIpAddress, + resolveEndpoint, + EndpointError +}); + diff --git a/node_modules/@aws-sdk/util-endpoints/dist-cjs/lib/aws/partitions.json b/node_modules/@aws-sdk/util-endpoints/dist-cjs/lib/aws/partitions.json new file mode 100644 index 00000000..a11705a0 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-cjs/lib/aws/partitions.json @@ -0,0 +1,258 @@ +{ + "partitions": [{ + "id": "aws", + "outputs": { + "dnsSuffix": "amazonaws.com", + "dualStackDnsSuffix": "api.aws", + "implicitGlobalRegion": "us-east-1", + "name": "aws", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^(us|eu|ap|sa|ca|me|af|il|mx)\\-\\w+\\-\\d+$", + "regions": { + "af-south-1": { + "description": "Africa (Cape Town)" + }, + "ap-east-1": { + "description": "Asia Pacific (Hong Kong)" + }, + "ap-northeast-1": { + "description": "Asia Pacific (Tokyo)" + }, + "ap-northeast-2": { + "description": 
"Asia Pacific (Seoul)" + }, + "ap-northeast-3": { + "description": "Asia Pacific (Osaka)" + }, + "ap-south-1": { + "description": "Asia Pacific (Mumbai)" + }, + "ap-south-2": { + "description": "Asia Pacific (Hyderabad)" + }, + "ap-southeast-1": { + "description": "Asia Pacific (Singapore)" + }, + "ap-southeast-2": { + "description": "Asia Pacific (Sydney)" + }, + "ap-southeast-3": { + "description": "Asia Pacific (Jakarta)" + }, + "ap-southeast-4": { + "description": "Asia Pacific (Melbourne)" + }, + "ap-southeast-5": { + "description": "Asia Pacific (Malaysia)" + }, + "ap-southeast-7": { + "description": "Asia Pacific (Thailand)" + }, + "aws-global": { + "description": "AWS Standard global region" + }, + "ca-central-1": { + "description": "Canada (Central)" + }, + "ca-west-1": { + "description": "Canada West (Calgary)" + }, + "eu-central-1": { + "description": "Europe (Frankfurt)" + }, + "eu-central-2": { + "description": "Europe (Zurich)" + }, + "eu-north-1": { + "description": "Europe (Stockholm)" + }, + "eu-south-1": { + "description": "Europe (Milan)" + }, + "eu-south-2": { + "description": "Europe (Spain)" + }, + "eu-west-1": { + "description": "Europe (Ireland)" + }, + "eu-west-2": { + "description": "Europe (London)" + }, + "eu-west-3": { + "description": "Europe (Paris)" + }, + "il-central-1": { + "description": "Israel (Tel Aviv)" + }, + "me-central-1": { + "description": "Middle East (UAE)" + }, + "me-south-1": { + "description": "Middle East (Bahrain)" + }, + "mx-central-1": { + "description": "Mexico (Central)" + }, + "sa-east-1": { + "description": "South America (Sao Paulo)" + }, + "us-east-1": { + "description": "US East (N. Virginia)" + }, + "us-east-2": { + "description": "US East (Ohio)" + }, + "us-west-1": { + "description": "US West (N. 
California)" + }, + "us-west-2": { + "description": "US West (Oregon)" + } + } + }, { + "id": "aws-cn", + "outputs": { + "dnsSuffix": "amazonaws.com.cn", + "dualStackDnsSuffix": "api.amazonwebservices.com.cn", + "implicitGlobalRegion": "cn-northwest-1", + "name": "aws-cn", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^cn\\-\\w+\\-\\d+$", + "regions": { + "aws-cn-global": { + "description": "AWS China global region" + }, + "cn-north-1": { + "description": "China (Beijing)" + }, + "cn-northwest-1": { + "description": "China (Ningxia)" + } + } + }, { + "id": "aws-us-gov", + "outputs": { + "dnsSuffix": "amazonaws.com", + "dualStackDnsSuffix": "api.aws", + "implicitGlobalRegion": "us-gov-west-1", + "name": "aws-us-gov", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^us\\-gov\\-\\w+\\-\\d+$", + "regions": { + "aws-us-gov-global": { + "description": "AWS GovCloud (US) global region" + }, + "us-gov-east-1": { + "description": "AWS GovCloud (US-East)" + }, + "us-gov-west-1": { + "description": "AWS GovCloud (US-West)" + } + } + }, { + "id": "aws-iso", + "outputs": { + "dnsSuffix": "c2s.ic.gov", + "dualStackDnsSuffix": "c2s.ic.gov", + "implicitGlobalRegion": "us-iso-east-1", + "name": "aws-iso", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-iso\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-global": { + "description": "AWS ISO (US) global region" + }, + "us-iso-east-1": { + "description": "US ISO East" + }, + "us-iso-west-1": { + "description": "US ISO WEST" + } + } + }, { + "id": "aws-iso-b", + "outputs": { + "dnsSuffix": "sc2s.sgov.gov", + "dualStackDnsSuffix": "sc2s.sgov.gov", + "implicitGlobalRegion": "us-isob-east-1", + "name": "aws-iso-b", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-isob\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-b-global": { + "description": "AWS ISOB (US) global region" + }, + "us-isob-east-1": { + "description": "US ISOB East (Ohio)" + } + } + }, { + "id": "aws-iso-e", + "outputs": { + "dnsSuffix": "cloud.adc-e.uk", + "dualStackDnsSuffix": "cloud.adc-e.uk", + "implicitGlobalRegion": "eu-isoe-west-1", + "name": "aws-iso-e", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^eu\\-isoe\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-e-global": { + "description": "AWS ISOE (Europe) global region" + }, + "eu-isoe-west-1": { + "description": "EU ISOE West" + } + } + }, { + "id": "aws-iso-f", + "outputs": { + "dnsSuffix": "csp.hci.ic.gov", + "dualStackDnsSuffix": "csp.hci.ic.gov", + "implicitGlobalRegion": "us-isof-south-1", + "name": "aws-iso-f", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-isof\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-f-global": { + "description": "AWS ISOF global region" + }, + "us-isof-east-1": { + "description": "US ISOF EAST" + }, + "us-isof-south-1": { + "description": "US ISOF SOUTH" + } + } + }, { + "id": "aws-eusc", + "outputs": { + "dnsSuffix": "amazonaws.eu", + "dualStackDnsSuffix": "amazonaws.eu", + "implicitGlobalRegion": "eusc-de-east-1", + "name": "aws-eusc", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^eusc\\-(de)\\-\\w+\\-\\d+$", + "regions": { + "eusc-de-east-1": { + "description": "EU (Germany)" + } + } + }], + "version": "1.1" +} diff --git a/node_modules/@aws-sdk/util-endpoints/dist-es/aws.js b/node_modules/@aws-sdk/util-endpoints/dist-es/aws.js new file mode 100644 index 00000000..49a408e6 --- /dev/null +++ 
b/node_modules/@aws-sdk/util-endpoints/dist-es/aws.js @@ -0,0 +1,10 @@ +import { customEndpointFunctions } from "@smithy/util-endpoints"; +import { isVirtualHostableS3Bucket } from "./lib/aws/isVirtualHostableS3Bucket"; +import { parseArn } from "./lib/aws/parseArn"; +import { partition } from "./lib/aws/partition"; +export const awsEndpointFunctions = { + isVirtualHostableS3Bucket: isVirtualHostableS3Bucket, + parseArn: parseArn, + partition: partition, +}; +customEndpointFunctions.aws = awsEndpointFunctions; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-es/index.js b/node_modules/@aws-sdk/util-endpoints/dist-es/index.js new file mode 100644 index 00000000..d046d907 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-es/index.js @@ -0,0 +1,5 @@ +export * from "./aws"; +export * from "./lib/aws/partition"; +export * from "./lib/isIpAddress"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/index.js b/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/index.js new file mode 100644 index 00000000..03be049d --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/index.js @@ -0,0 +1,3 @@ +export * from "./isVirtualHostableS3Bucket"; +export * from "./parseArn"; +export * from "./partition"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/isVirtualHostableS3Bucket.js b/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/isVirtualHostableS3Bucket.js new file mode 100644 index 00000000..f2bacc0b --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/isVirtualHostableS3Bucket.js @@ -0,0 +1,25 @@ +import { isValidHostLabel } from "@smithy/util-endpoints"; +import { isIpAddress } from "../isIpAddress"; +export const isVirtualHostableS3Bucket = (value, allowSubDomains = false) => { + if (allowSubDomains) { + for (const label of value.split(".")) { + if (!isVirtualHostableS3Bucket(label)) { + return false; + } + } + return true; + } + if (!isValidHostLabel(value)) { + return false; + } + if (value.length < 3 || value.length > 63) { + return false; + } + if (value !== value.toLowerCase()) { + return false; + } + if (isIpAddress(value)) { + return false; + } + return true; +}; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/parseArn.js b/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/parseArn.js new file mode 100644 index 00000000..6b128875 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/parseArn.js @@ -0,0 +1,18 @@ +const ARN_DELIMITER = ":"; +const RESOURCE_DELIMITER = "/"; +export const parseArn = (value) => { + const segments = value.split(ARN_DELIMITER); + if (segments.length < 6) + return null; + const [arn, partition, service, region, accountId, ...resourcePath] = segments; + if (arn !== "arn" || partition === "" || service === "" || resourcePath.join(ARN_DELIMITER) === "") + return null; + const resourceId = resourcePath.map((resource) => resource.split(RESOURCE_DELIMITER)).flat(); + return { + partition, + service, + region, + accountId, + resourceId, + }; +}; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partition.js b/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partition.js new file mode 100644 index 00000000..8d39d812 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partition.js @@ -0,0 +1,41 @@ +import partitionsInfo from "./partitions.json"; +let selectedPartitionsInfo = partitionsInfo; +let selectedUserAgentPrefix = ""; +export const 
partition = (value) => { + const { partitions } = selectedPartitionsInfo; + for (const partition of partitions) { + const { regions, outputs } = partition; + for (const [region, regionData] of Object.entries(regions)) { + if (region === value) { + return { + ...outputs, + ...regionData, + }; + } + } + } + for (const partition of partitions) { + const { regionRegex, outputs } = partition; + if (new RegExp(regionRegex).test(value)) { + return { + ...outputs, + }; + } + } + const DEFAULT_PARTITION = partitions.find((partition) => partition.id === "aws"); + if (!DEFAULT_PARTITION) { + throw new Error("Provided region was not found in the partition array or regex," + + " and default partition with id 'aws' doesn't exist."); + } + return { + ...DEFAULT_PARTITION.outputs, + }; +}; +export const setPartitionInfo = (partitionsInfo, userAgentPrefix = "") => { + selectedPartitionsInfo = partitionsInfo; + selectedUserAgentPrefix = userAgentPrefix; +}; +export const useDefaultPartitionInfo = () => { + setPartitionInfo(partitionsInfo, ""); +}; +export const getUserAgentPrefix = () => selectedUserAgentPrefix; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partitions.json b/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partitions.json new file mode 100644 index 00000000..a11705a0 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partitions.json @@ -0,0 +1,258 @@ +{ + "partitions": [{ + "id": "aws", + "outputs": { + "dnsSuffix": "amazonaws.com", + "dualStackDnsSuffix": "api.aws", + "implicitGlobalRegion": "us-east-1", + "name": "aws", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^(us|eu|ap|sa|ca|me|af|il|mx)\\-\\w+\\-\\d+$", + "regions": { + "af-south-1": { + "description": "Africa (Cape Town)" + }, + "ap-east-1": { + "description": "Asia Pacific (Hong Kong)" + }, + "ap-northeast-1": { + "description": "Asia Pacific (Tokyo)" + }, + "ap-northeast-2": { + "description": "Asia Pacific (Seoul)" + }, + "ap-northeast-3": { + "description": "Asia Pacific (Osaka)" + }, + "ap-south-1": { + "description": "Asia Pacific (Mumbai)" + }, + "ap-south-2": { + "description": "Asia Pacific (Hyderabad)" + }, + "ap-southeast-1": { + "description": "Asia Pacific (Singapore)" + }, + "ap-southeast-2": { + "description": "Asia Pacific (Sydney)" + }, + "ap-southeast-3": { + "description": "Asia Pacific (Jakarta)" + }, + "ap-southeast-4": { + "description": "Asia Pacific (Melbourne)" + }, + "ap-southeast-5": { + "description": "Asia Pacific (Malaysia)" + }, + "ap-southeast-7": { + "description": "Asia Pacific (Thailand)" + }, + "aws-global": { + "description": "AWS Standard global region" + }, + "ca-central-1": { + "description": "Canada (Central)" + }, + "ca-west-1": { + "description": "Canada West (Calgary)" + }, + "eu-central-1": { + "description": "Europe (Frankfurt)" + }, + "eu-central-2": { + "description": "Europe (Zurich)" + }, + "eu-north-1": { + "description": "Europe (Stockholm)" + }, + "eu-south-1": { + "description": "Europe (Milan)" + }, + "eu-south-2": { + "description": "Europe (Spain)" + }, + "eu-west-1": { + "description": "Europe (Ireland)" + }, + "eu-west-2": { + "description": "Europe (London)" + }, + "eu-west-3": { + "description": "Europe (Paris)" + }, + "il-central-1": { + "description": "Israel (Tel Aviv)" + }, + "me-central-1": { + "description": "Middle East (UAE)" + }, + "me-south-1": { + "description": "Middle East (Bahrain)" + }, + "mx-central-1": { + "description": "Mexico (Central)" + }, + "sa-east-1": { + "description": 
"South America (Sao Paulo)" + }, + "us-east-1": { + "description": "US East (N. Virginia)" + }, + "us-east-2": { + "description": "US East (Ohio)" + }, + "us-west-1": { + "description": "US West (N. California)" + }, + "us-west-2": { + "description": "US West (Oregon)" + } + } + }, { + "id": "aws-cn", + "outputs": { + "dnsSuffix": "amazonaws.com.cn", + "dualStackDnsSuffix": "api.amazonwebservices.com.cn", + "implicitGlobalRegion": "cn-northwest-1", + "name": "aws-cn", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^cn\\-\\w+\\-\\d+$", + "regions": { + "aws-cn-global": { + "description": "AWS China global region" + }, + "cn-north-1": { + "description": "China (Beijing)" + }, + "cn-northwest-1": { + "description": "China (Ningxia)" + } + } + }, { + "id": "aws-us-gov", + "outputs": { + "dnsSuffix": "amazonaws.com", + "dualStackDnsSuffix": "api.aws", + "implicitGlobalRegion": "us-gov-west-1", + "name": "aws-us-gov", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^us\\-gov\\-\\w+\\-\\d+$", + "regions": { + "aws-us-gov-global": { + "description": "AWS GovCloud (US) global region" + }, + "us-gov-east-1": { + "description": "AWS GovCloud (US-East)" + }, + "us-gov-west-1": { + "description": "AWS GovCloud (US-West)" + } + } + }, { + "id": "aws-iso", + "outputs": { + "dnsSuffix": "c2s.ic.gov", + "dualStackDnsSuffix": "c2s.ic.gov", + "implicitGlobalRegion": "us-iso-east-1", + "name": "aws-iso", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-iso\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-global": { + "description": "AWS ISO (US) global region" + }, + "us-iso-east-1": { + "description": "US ISO East" + }, + "us-iso-west-1": { + "description": "US ISO WEST" + } + } + }, { + "id": "aws-iso-b", + "outputs": { + "dnsSuffix": "sc2s.sgov.gov", + "dualStackDnsSuffix": "sc2s.sgov.gov", + "implicitGlobalRegion": "us-isob-east-1", + "name": "aws-iso-b", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-isob\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-b-global": { + "description": "AWS ISOB (US) global region" + }, + "us-isob-east-1": { + "description": "US ISOB East (Ohio)" + } + } + }, { + "id": "aws-iso-e", + "outputs": { + "dnsSuffix": "cloud.adc-e.uk", + "dualStackDnsSuffix": "cloud.adc-e.uk", + "implicitGlobalRegion": "eu-isoe-west-1", + "name": "aws-iso-e", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^eu\\-isoe\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-e-global": { + "description": "AWS ISOE (Europe) global region" + }, + "eu-isoe-west-1": { + "description": "EU ISOE West" + } + } + }, { + "id": "aws-iso-f", + "outputs": { + "dnsSuffix": "csp.hci.ic.gov", + "dualStackDnsSuffix": "csp.hci.ic.gov", + "implicitGlobalRegion": "us-isof-south-1", + "name": "aws-iso-f", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-isof\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-f-global": { + "description": "AWS ISOF global region" + }, + "us-isof-east-1": { + "description": "US ISOF EAST" + }, + "us-isof-south-1": { + "description": "US ISOF SOUTH" + } + } + }, { + "id": "aws-eusc", + "outputs": { + "dnsSuffix": "amazonaws.eu", + "dualStackDnsSuffix": "amazonaws.eu", + "implicitGlobalRegion": "eusc-de-east-1", + "name": "aws-eusc", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^eusc\\-(de)\\-\\w+\\-\\d+$", + "regions": { + "eusc-de-east-1": { + "description": "EU (Germany)" + } + } + }], + "version": "1.1" +} 
diff --git a/node_modules/@aws-sdk/util-endpoints/dist-es/lib/isIpAddress.js b/node_modules/@aws-sdk/util-endpoints/dist-es/lib/isIpAddress.js new file mode 100644 index 00000000..59bfcd8c --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-es/lib/isIpAddress.js @@ -0,0 +1 @@ +export { isIpAddress } from "@smithy/util-endpoints"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-es/resolveEndpoint.js b/node_modules/@aws-sdk/util-endpoints/dist-es/resolveEndpoint.js new file mode 100644 index 00000000..e2453f7f --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-es/resolveEndpoint.js @@ -0,0 +1 @@ +export { resolveEndpoint } from "@smithy/util-endpoints"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointError.js b/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointError.js new file mode 100644 index 00000000..521e688b --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointError.js @@ -0,0 +1 @@ +export { EndpointError } from "@smithy/util-endpoints"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointRuleObject.js b/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointRuleObject.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-es/types/ErrorRuleObject.js b/node_modules/@aws-sdk/util-endpoints/dist-es/types/ErrorRuleObject.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-es/types/ErrorRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-es/types/RuleSetObject.js b/node_modules/@aws-sdk/util-endpoints/dist-es/types/RuleSetObject.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-es/types/RuleSetObject.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-es/types/TreeRuleObject.js b/node_modules/@aws-sdk/util-endpoints/dist-es/types/TreeRuleObject.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-es/types/TreeRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-es/types/index.js b/node_modules/@aws-sdk/util-endpoints/dist-es/types/index.js new file mode 100644 index 00000000..daba5019 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-es/types/index.js @@ -0,0 +1,6 @@ +export * from "./EndpointError"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-es/types/shared.js b/node_modules/@aws-sdk/util-endpoints/dist-es/types/shared.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-es/types/shared.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/aws.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/aws.d.ts new file mode 100644 index 00000000..13c64a97 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/aws.d.ts @@ -0,0 +1,2 @@ +import { EndpointFunctions } from "@smithy/util-endpoints"; +export declare const awsEndpointFunctions: EndpointFunctions; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/index.d.ts 
b/node_modules/@aws-sdk/util-endpoints/dist-types/index.d.ts new file mode 100644 index 00000000..d046d907 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/index.d.ts @@ -0,0 +1,5 @@ +export * from "./aws"; +export * from "./lib/aws/partition"; +export * from "./lib/isIpAddress"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/index.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/index.d.ts new file mode 100644 index 00000000..03be049d --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/index.d.ts @@ -0,0 +1,3 @@ +export * from "./isVirtualHostableS3Bucket"; +export * from "./parseArn"; +export * from "./partition"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/isVirtualHostableS3Bucket.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/isVirtualHostableS3Bucket.d.ts new file mode 100644 index 00000000..25d46e4b --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/isVirtualHostableS3Bucket.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates whether a string is a DNS compatible bucket name and can be used with + * virtual hosted style addressing. + */ +export declare const isVirtualHostableS3Bucket: (value: string, allowSubDomains?: boolean) => boolean; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/parseArn.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/parseArn.d.ts new file mode 100644 index 00000000..fa5af83b --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/parseArn.d.ts @@ -0,0 +1,7 @@ +import { EndpointARN } from "@smithy/types"; +/** + * Evaluates a single string argument value, and returns an object containing + * details about the parsed ARN. + * If the input was not a valid ARN, the function returns null. + */ +export declare const parseArn: (value: string) => EndpointARN | null; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/partition.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/partition.d.ts new file mode 100644 index 00000000..96d14e41 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/partition.d.ts @@ -0,0 +1,38 @@ +import { EndpointPartition } from "@smithy/types"; +export type PartitionsInfo = { + partitions: Array<{ + id: string; + outputs: { + dnsSuffix: string; + dualStackDnsSuffix: string; + name: string; + supportsDualStack: boolean; + supportsFIPS: boolean; + }; + regionRegex: string; + regions: Record<string, { description?: string } | undefined>; + }>; +}; +/** + * Evaluates a single string argument value as a region, and matches the + * string value to an AWS partition. + * The matcher MUST always return a successful object describing the partition + * that the region has been determined to be a part of. + */ +export declare const partition: (value: string) => EndpointPartition; +/** + * Set custom partitions.json data. + * @internal + */ +export declare const setPartitionInfo: (partitionsInfo: PartitionsInfo, userAgentPrefix?: string) => void; +/** + * Reset to the default partitions.json data.
+ * @internal + */ +export declare const useDefaultPartitionInfo: () => void; +/** + * @internal + */ +export declare const getUserAgentPrefix: () => string; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/lib/isIpAddress.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/lib/isIpAddress.d.ts new file mode 100644 index 00000000..59bfcd8c --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/lib/isIpAddress.d.ts @@ -0,0 +1 @@ +export { isIpAddress } from "@smithy/util-endpoints"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/resolveEndpoint.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/resolveEndpoint.d.ts new file mode 100644 index 00000000..e2453f7f --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/resolveEndpoint.d.ts @@ -0,0 +1 @@ +export { resolveEndpoint } from "@smithy/util-endpoints"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/aws.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/aws.d.ts new file mode 100644 index 00000000..13c64a97 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/aws.d.ts @@ -0,0 +1,2 @@ +import { EndpointFunctions } from "@smithy/util-endpoints"; +export declare const awsEndpointFunctions: EndpointFunctions; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..d046d907 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +export * from "./aws"; +export * from "./lib/aws/partition"; +export * from "./lib/isIpAddress"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/index.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/index.d.ts new file mode 100644 index 00000000..03be049d --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/index.d.ts @@ -0,0 +1,3 @@ +export * from "./isVirtualHostableS3Bucket"; +export * from "./parseArn"; +export * from "./partition"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/isVirtualHostableS3Bucket.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/isVirtualHostableS3Bucket.d.ts new file mode 100644 index 00000000..5ef32963 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/isVirtualHostableS3Bucket.d.ts @@ -0,0 +1,4 @@ +export declare const isVirtualHostableS3Bucket: ( + value: string, + allowSubDomains?: boolean +) => boolean; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/parseArn.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/parseArn.d.ts new file mode 100644 index 00000000..690d4595 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/parseArn.d.ts @@ -0,0 +1,2 @@ +import { EndpointARN } from "@smithy/types"; +export declare const parseArn: (value: string) => EndpointARN | null; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/partition.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/partition.d.ts new file mode 100644 index 00000000..0683113c --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/partition.d.ts @@ -0,0 +1,28 @@ +import { EndpointPartition } from "@smithy/types"; +export type PartitionsInfo = { + partitions: Array<{ + id: string; + outputs: { + 
dnsSuffix: string; + dualStackDnsSuffix: string; + name: string; + supportsDualStack: boolean; + supportsFIPS: boolean; + }; + regionRegex: string; + regions: Record< + string, + | { + description?: string; + } + | undefined + >; + }>; +}; +export declare const partition: (value: string) => EndpointPartition; +export declare const setPartitionInfo: ( + partitionsInfo: PartitionsInfo, + userAgentPrefix?: string +) => void; +export declare const useDefaultPartitionInfo: () => void; +export declare const getUserAgentPrefix: () => string; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts new file mode 100644 index 00000000..59bfcd8c --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts @@ -0,0 +1 @@ +export { isIpAddress } from "@smithy/util-endpoints"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts new file mode 100644 index 00000000..e2453f7f --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts @@ -0,0 +1 @@ +export { resolveEndpoint } from "@smithy/util-endpoints"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts new file mode 100644 index 00000000..521e688b --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts @@ -0,0 +1 @@ +export { EndpointError } from "@smithy/util-endpoints"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts new file mode 100644 index 00000000..b48af7fa --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts @@ -0,0 +1,6 @@ +export { + EndpointObjectProperties, + EndpointObjectHeaders, + EndpointObject, + EndpointRuleObject, +} from "@smithy/util-endpoints"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts new file mode 100644 index 00000000..e7b8881b --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts @@ -0,0 +1 @@ +export { ErrorRuleObject } from "@smithy/util-endpoints"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts new file mode 100644 index 00000000..2a489c67 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts @@ -0,0 +1,5 @@ +export { + DeprecatedObject, + ParameterObject, + RuleSetObject, +} from "@smithy/util-endpoints"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts new file mode 100644 index 00000000..716ddcfc --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts @@ -0,0 +1 @@ +export { RuleSetRules, TreeRuleObject } from "@smithy/util-endpoints"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/index.d.ts 
b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/index.d.ts new file mode 100644 index 00000000..daba5019 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/index.d.ts @@ -0,0 +1,6 @@ +export * from "./EndpointError"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/shared.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/shared.d.ts new file mode 100644 index 00000000..cfd2248a --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/shared.d.ts @@ -0,0 +1,12 @@ +export { + ReferenceObject, + FunctionObject, + FunctionArgv, + FunctionReturn, + ConditionObject, + Expression, + EndpointParams, + EndpointResolverOptions, + ReferenceRecord, + EvaluateOptions, +} from "@smithy/util-endpoints"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointError.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointError.d.ts new file mode 100644 index 00000000..521e688b --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointError.d.ts @@ -0,0 +1 @@ +export { EndpointError } from "@smithy/util-endpoints"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointRuleObject.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointRuleObject.d.ts new file mode 100644 index 00000000..ef666fe0 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointRuleObject.d.ts @@ -0,0 +1 @@ +export { EndpointObjectProperties, EndpointObjectHeaders, EndpointObject, EndpointRuleObject, } from "@smithy/util-endpoints"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/types/ErrorRuleObject.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/types/ErrorRuleObject.d.ts new file mode 100644 index 00000000..e7b8881b --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/types/ErrorRuleObject.d.ts @@ -0,0 +1 @@ +export { ErrorRuleObject } from "@smithy/util-endpoints"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/types/RuleSetObject.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/types/RuleSetObject.d.ts new file mode 100644 index 00000000..c052af07 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/types/RuleSetObject.d.ts @@ -0,0 +1 @@ +export { DeprecatedObject, ParameterObject, RuleSetObject } from "@smithy/util-endpoints"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/types/TreeRuleObject.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/types/TreeRuleObject.d.ts new file mode 100644 index 00000000..716ddcfc --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/types/TreeRuleObject.d.ts @@ -0,0 +1 @@ +export { RuleSetRules, TreeRuleObject } from "@smithy/util-endpoints"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/types/index.d.ts b/node_modules/@aws-sdk/util-endpoints/dist-types/types/index.d.ts new file mode 100644 index 00000000..daba5019 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/types/index.d.ts @@ -0,0 +1,6 @@ +export * from "./EndpointError"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/node_modules/@aws-sdk/util-endpoints/dist-types/types/shared.d.ts 
b/node_modules/@aws-sdk/util-endpoints/dist-types/types/shared.d.ts new file mode 100644 index 00000000..af7cc53b --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/dist-types/types/shared.d.ts @@ -0,0 +1 @@ +export { ReferenceObject, FunctionObject, FunctionArgv, FunctionReturn, ConditionObject, Expression, EndpointParams, EndpointResolverOptions, ReferenceRecord, EvaluateOptions, } from "@smithy/util-endpoints"; diff --git a/node_modules/@aws-sdk/util-endpoints/package.json b/node_modules/@aws-sdk/util-endpoints/package.json new file mode 100644 index 00000000..36d8d2a6 --- /dev/null +++ b/node_modules/@aws-sdk/util-endpoints/package.json @@ -0,0 +1,58 @@ +{ + "name": "@aws-sdk/util-endpoints", + "version": "3.787.0", + "description": "Utilities to help with endpoint resolution", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-endpoints", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "@smithy/util-endpoints": "^3.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-endpoints", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-endpoints" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + } +} diff --git a/node_modules/@aws-sdk/util-format-url/LICENSE b/node_modules/@aws-sdk/util-format-url/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@aws-sdk/util-format-url/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
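The setPartitionInfo / useDefaultPartitionInfo pair declared in partition.d.ts above is marked @internal, but it is the hook for substituting alternative partitions.json data (for example in tests). A hedged sketch; the "example" partition below is invented to match the declared PartitionsInfo shape, not taken from this diff:

import { setPartitionInfo, useDefaultPartitionInfo, partition } from "@aws-sdk/util-endpoints";

setPartitionInfo({
  partitions: [{
    id: "example",
    outputs: {
      dnsSuffix: "example.internal",
      dualStackDnsSuffix: "example.internal",
      name: "example",
      supportsDualStack: false,
      supportsFIPS: false,
    },
    regionRegex: "^example\\-\\w+\\-\\d+$",
    regions: { "example-east-1": { description: "Example East" } },
  }],
}, "custom-ua-prefix/");

partition("example-east-1").dnsSuffix; // "example.internal"
// Caveat from partition.js above: with no partition whose id is "aws" in the
// custom data, any region that matches neither a name nor a regex now throws.
useDefaultPartitionInfo(); // restore the bundled partitions.json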
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/node_modules/@aws-sdk/util-format-url/README.md b/node_modules/@aws-sdk/util-format-url/README.md new file mode 100644 index 00000000..f83f1445 --- /dev/null +++ b/node_modules/@aws-sdk/util-format-url/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/util-format-url + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-format-url/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-format-url) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-format-url.svg)](https://www.npmjs.com/package/@aws-sdk/util-format-url) diff --git a/node_modules/@aws-sdk/util-format-url/dist-cjs/index.js b/node_modules/@aws-sdk/util-format-url/dist-cjs/index.js new file mode 100644 index 00000000..7d530262 --- /dev/null +++ b/node_modules/@aws-sdk/util-format-url/dist-cjs/index.js @@ -0,0 +1,62 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + formatUrl: () => formatUrl +}); +module.exports = __toCommonJS(index_exports); +var import_querystring_builder = require("@smithy/querystring-builder"); +function formatUrl(request) { + const { port, query } = request; + let { protocol, path, hostname } = request; + if (protocol && protocol.slice(-1) !== ":") { + protocol += ":"; + } + if (port) { + hostname += `:${port}`; + } + if (path && path.charAt(0) !== "/") { + path = `/${path}`; + } + let queryString = query ? (0, import_querystring_builder.buildQueryString)(query) : ""; + if (queryString && queryString[0] !== "?") { + queryString = `?${queryString}`; + } + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? 
""; + auth = `${username}:${password}@`; + } + let fragment = ""; + if (request.fragment) { + fragment = `#${request.fragment}`; + } + return `${protocol}//${auth}${hostname}${path}${queryString}${fragment}`; +} +__name(formatUrl, "formatUrl"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + formatUrl +}); + diff --git a/node_modules/@aws-sdk/util-format-url/dist-es/index.js b/node_modules/@aws-sdk/util-format-url/dist-es/index.js new file mode 100644 index 00000000..e540cd46 --- /dev/null +++ b/node_modules/@aws-sdk/util-format-url/dist-es/index.js @@ -0,0 +1,29 @@ +import { buildQueryString } from "@smithy/querystring-builder"; +export function formatUrl(request) { + const { port, query } = request; + let { protocol, path, hostname } = request; + if (protocol && protocol.slice(-1) !== ":") { + protocol += ":"; + } + if (port) { + hostname += `:${port}`; + } + if (path && path.charAt(0) !== "/") { + path = `/${path}`; + } + let queryString = query ? buildQueryString(query) : ""; + if (queryString && queryString[0] !== "?") { + queryString = `?${queryString}`; + } + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}@`; + } + let fragment = ""; + if (request.fragment) { + fragment = `#${request.fragment}`; + } + return `${protocol}//${auth}${hostname}${path}${queryString}${fragment}`; +} diff --git a/node_modules/@aws-sdk/util-format-url/dist-types/index.d.ts b/node_modules/@aws-sdk/util-format-url/dist-types/index.d.ts new file mode 100644 index 00000000..e0e1e1c1 --- /dev/null +++ b/node_modules/@aws-sdk/util-format-url/dist-types/index.d.ts @@ -0,0 +1,2 @@ +import { HttpRequest } from "@smithy/types"; +export declare function formatUrl(request: Omit): string; diff --git a/node_modules/@aws-sdk/util-format-url/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/util-format-url/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..b09d4644 --- /dev/null +++ b/node_modules/@aws-sdk/util-format-url/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +import { HttpRequest } from "@smithy/types"; +export declare function formatUrl( + request: Pick> +): string; diff --git a/node_modules/@aws-sdk/util-format-url/package.json b/node_modules/@aws-sdk/util-format-url/package.json new file mode 100644 index 00000000..af9bcb4e --- /dev/null +++ b/node_modules/@aws-sdk/util-format-url/package.json @@ -0,0 +1,55 @@ +{ + "name": "@aws-sdk/util-format-url", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-format-url", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + 
"node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-format-url", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-format-url" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + } +} diff --git a/node_modules/@aws-sdk/util-locate-window/LICENSE b/node_modules/@aws-sdk/util-locate-window/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@aws-sdk/util-locate-window/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@aws-sdk/util-locate-window/README.md b/node_modules/@aws-sdk/util-locate-window/README.md new file mode 100644 index 00000000..cac53d3f --- /dev/null +++ b/node_modules/@aws-sdk/util-locate-window/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/util-locate-window + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-locate-window/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-locate-window) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-locate-window.svg)](https://www.npmjs.com/package/@aws-sdk/util-locate-window) diff --git a/node_modules/@aws-sdk/util-locate-window/dist-cjs/index.js b/node_modules/@aws-sdk/util-locate-window/dist-cjs/index.js new file mode 100644 index 00000000..95a64233 --- /dev/null +++ b/node_modules/@aws-sdk/util-locate-window/dist-cjs/index.js @@ -0,0 +1,42 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + locateWindow: () => locateWindow +}); +module.exports = __toCommonJS(src_exports); +var fallbackWindow = {}; +function locateWindow() { + if (typeof window !== "undefined") { + return window; + } else if (typeof self !== "undefined") { + return self; + } + return fallbackWindow; +} +__name(locateWindow, "locateWindow"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + locateWindow +}); + diff --git a/node_modules/@aws-sdk/util-locate-window/dist-es/index.js 
b/node_modules/@aws-sdk/util-locate-window/dist-es/index.js new file mode 100644 index 00000000..a51e6442 --- /dev/null +++ b/node_modules/@aws-sdk/util-locate-window/dist-es/index.js @@ -0,0 +1,10 @@ +const fallbackWindow = {}; +export function locateWindow() { + if (typeof window !== "undefined") { + return window; + } + else if (typeof self !== "undefined") { + return self; + } + return fallbackWindow; +} diff --git a/node_modules/@aws-sdk/util-locate-window/dist-types/index.d.ts b/node_modules/@aws-sdk/util-locate-window/dist-types/index.d.ts new file mode 100644 index 00000000..2b02d7f4 --- /dev/null +++ b/node_modules/@aws-sdk/util-locate-window/dist-types/index.d.ts @@ -0,0 +1,6 @@ +/** + * Locates the global scope for a browser or browser-like environment. If + * neither `window` nor `self` is defined by the environment, the same object + * will be returned on each invocation. + */ +export declare function locateWindow(): Window; diff --git a/node_modules/@aws-sdk/util-locate-window/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/util-locate-window/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..a5bbba31 --- /dev/null +++ b/node_modules/@aws-sdk/util-locate-window/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export declare function locateWindow(): Window; diff --git a/node_modules/@aws-sdk/util-locate-window/package.json b/node_modules/@aws-sdk/util-locate-window/package.json new file mode 100644 index 00000000..2835b094 --- /dev/null +++ b/node_modules/@aws-sdk/util-locate-window/package.json @@ -0,0 +1,53 @@ +{ + "name": "@aws-sdk/util-locate-window", + "version": "3.723.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-locate-window", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-locate-window", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-locate-window" + } +} diff --git a/node_modules/@aws-sdk/util-user-agent-browser/LICENSE b/node_modules/@aws-sdk/util-user-agent-browser/LICENSE new file mode 100644 index 00000000..dd65ae06 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-browser/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
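locateWindow, vendored just above, exists so browser-crypto code can probe the global scope without a bare `window` reference that would throw under Node. A brief sketch of the three branches:

import { locateWindow } from "@aws-sdk/util-locate-window";

// Browser: returns `window`. Web worker: returns `self`.
// Node (neither defined): returns the same fallback object on every call,
// so feature probes like the one below stay safe everywhere.
const globalScope = locateWindow();
const subtle = globalScope.crypto && globalScope.crypto.subtle; // undefined under plain Node's fallback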
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
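Editor's aside — a minimal TypeScript sketch of how the locateWindow helper vendored earlier in this diff is typically consumed. Only the window/self/fallback behavior comes from the code above; the getRandomValues guard, the variable names, and the byte length are illustrative assumptions, not part of this diff:

import { locateWindow } from "@aws-sdk/util-locate-window";

// Resolves to `window` in browsers, `self` in web workers, and a stable
// fallback object everywhere else, so repeated calls return the same object.
const scope = locateWindow();

// Illustrative only (not from this diff): gate a browser-only API behind
// the resolved scope instead of touching `window` directly.
if (typeof scope.crypto !== "undefined") {
  const bytes = new Uint8Array(16);
  scope.crypto.getRandomValues(bytes);
}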
diff --git a/node_modules/@aws-sdk/util-user-agent-browser/README.md b/node_modules/@aws-sdk/util-user-agent-browser/README.md new file mode 100644 index 00000000..f2b6c628 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-browser/README.md @@ -0,0 +1,10 @@ +# @aws-sdk/util-user-agent-browser + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-user-agent-browser/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-user-agent-browser) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-user-agent-browser.svg)](https://www.npmjs.com/package/@aws-sdk/util-user-agent-browser) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/configurations.js b/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/configurations.js new file mode 100644 index 00000000..c8ad2e54 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/configurations.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.js b/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.js new file mode 100644 index 00000000..aaf7621d --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultUserAgent = exports.createDefaultUserAgentProvider = void 0; +const tslib_1 = require("tslib"); +const bowser_1 = tslib_1.__importDefault(require("bowser")); +const createDefaultUserAgentProvider = ({ serviceId, clientVersion }) => async (config) => { + const parsedUA = typeof window !== "undefined" && window?.navigator?.userAgent + ? bowser_1.default.parse(window.navigator.userAgent) + : undefined; + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.1"], + [`os/${parsedUA?.os?.name || "other"}`, parsedUA?.os?.version], + ["lang/js"], + ["md/browser", `${parsedUA?.browser?.name ?? "unknown"}_${parsedUA?.browser?.version ?? 
"unknown"}`], + ]; + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + const appId = await config?.userAgentAppId?.(); + if (appId) { + sections.push([`app/${appId}`]); + } + return sections; +}; +exports.createDefaultUserAgentProvider = createDefaultUserAgentProvider; +exports.defaultUserAgent = exports.createDefaultUserAgentProvider; diff --git a/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.native.js b/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.native.js new file mode 100644 index 00000000..4d06e369 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.native.js @@ -0,0 +1,22 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultUserAgent = exports.createDefaultUserAgentProvider = void 0; +const createDefaultUserAgentProvider = ({ serviceId, clientVersion }) => async (config) => { + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.1"], + ["os/other"], + ["lang/js"], + ["md/rn"], + ]; + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + const appId = await config?.userAgentAppId?.(); + if (appId) { + sections.push([`app/${appId}`]); + } + return sections; +}; +exports.createDefaultUserAgentProvider = createDefaultUserAgentProvider; +exports.defaultUserAgent = exports.createDefaultUserAgentProvider; diff --git a/node_modules/@aws-sdk/util-user-agent-browser/dist-es/configurations.js b/node_modules/@aws-sdk/util-user-agent-browser/dist-es/configurations.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-browser/dist-es/configurations.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.js b/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.js new file mode 100644 index 00000000..1584d7e3 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.js @@ -0,0 +1,22 @@ +import bowser from "bowser"; +export const createDefaultUserAgentProvider = ({ serviceId, clientVersion }) => async (config) => { + const parsedUA = typeof window !== "undefined" && window?.navigator?.userAgent + ? bowser.parse(window.navigator.userAgent) + : undefined; + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.1"], + [`os/${parsedUA?.os?.name || "other"}`, parsedUA?.os?.version], + ["lang/js"], + ["md/browser", `${parsedUA?.browser?.name ?? "unknown"}_${parsedUA?.browser?.version ?? 
"unknown"}`], + ]; + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + const appId = await config?.userAgentAppId?.(); + if (appId) { + sections.push([`app/${appId}`]); + } + return sections; +}; +export const defaultUserAgent = createDefaultUserAgentProvider; diff --git a/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.native.js b/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.native.js new file mode 100644 index 00000000..04c7ae57 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.native.js @@ -0,0 +1,18 @@ +export const createDefaultUserAgentProvider = ({ serviceId, clientVersion }) => async (config) => { + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.1"], + ["os/other"], + ["lang/js"], + ["md/rn"], + ]; + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + const appId = await config?.userAgentAppId?.(); + if (appId) { + sections.push([`app/${appId}`]); + } + return sections; +}; +export const defaultUserAgent = createDefaultUserAgentProvider; diff --git a/node_modules/@aws-sdk/util-user-agent-browser/dist-types/configurations.d.ts b/node_modules/@aws-sdk/util-user-agent-browser/dist-types/configurations.d.ts new file mode 100644 index 00000000..00537a90 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-browser/dist-types/configurations.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export interface DefaultUserAgentOptions { + serviceId?: string; + clientVersion: string; +} diff --git a/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.d.ts b/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.d.ts new file mode 100644 index 00000000..fb107d45 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.d.ts @@ -0,0 +1,17 @@ +import { Provider, UserAgent } from "@smithy/types"; +import { DefaultUserAgentOptions } from "./configurations"; +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +/** + * @internal + * + * Default provider to the user agent in browsers. It's a best effort to infer + * the device information. It uses bowser library to detect the browser and version + */ +export declare const createDefaultUserAgentProvider: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; +/** + * @internal + * @deprecated use createDefaultUserAgentProvider + */ +export declare const defaultUserAgent: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; diff --git a/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.native.d.ts b/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.native.d.ts new file mode 100644 index 00000000..5b4926b7 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.native.d.ts @@ -0,0 +1,17 @@ +import { Provider, UserAgent } from "@smithy/types"; +import { DefaultUserAgentOptions } from "./configurations"; +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +/** + * @internal + * + * Default provider to the user agent in ReactNative. It's a best effort to infer + * the device information. 
It uses bowser library to detect the browser and virsion + */ +export declare const createDefaultUserAgentProvider: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; +/** + * @internal + * @deprecated use createDefaultUserAgentProvider + */ +export declare const defaultUserAgent: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; diff --git a/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/configurations.d.ts b/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/configurations.d.ts new file mode 100644 index 00000000..1428231d --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/configurations.d.ts @@ -0,0 +1,4 @@ +export interface DefaultUserAgentOptions { + serviceId?: string; + clientVersion: string; +} diff --git a/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..32e643a3 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,17 @@ +import { Provider, UserAgent } from "@smithy/types"; +import { DefaultUserAgentOptions } from "./configurations"; +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +export declare const createDefaultUserAgentProvider: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; +export declare const defaultUserAgent: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; diff --git a/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.native.d.ts b/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.native.d.ts new file mode 100644 index 00000000..32e643a3 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.native.d.ts @@ -0,0 +1,17 @@ +import { Provider, UserAgent } from "@smithy/types"; +import { DefaultUserAgentOptions } from "./configurations"; +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +export declare const createDefaultUserAgentProvider: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; +export declare const defaultUserAgent: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; diff --git a/node_modules/@aws-sdk/util-user-agent-browser/package.json b/node_modules/@aws-sdk/util-user-agent-browser/package.json new file mode 100644 index 00000000..4065f6db --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-browser/package.json @@ -0,0 +1,54 @@ +{ + "name": "@aws-sdk/util-user-agent-browser", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-user-agent-browser", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "browser": "./dist-es/index.js", + "types": 
"./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "react-native": "dist-es/index.native.js", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-user-agent-browser", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-user-agent-browser" + } +} diff --git a/node_modules/@aws-sdk/util-user-agent-node/LICENSE b/node_modules/@aws-sdk/util-user-agent-node/LICENSE new file mode 100644 index 00000000..dd65ae06 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-node/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@aws-sdk/util-user-agent-node/README.md b/node_modules/@aws-sdk/util-user-agent-node/README.md new file mode 100644 index 00000000..fccfbb54 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-node/README.md @@ -0,0 +1,10 @@ +# @aws-sdk/util-user-agent-node + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-user-agent-node/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-user-agent-node) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-user-agent-node.svg)](https://www.npmjs.com/package/@aws-sdk/util-user-agent-node) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
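Editor's aside — before the Node variant below, a hedged sketch of how the browser user-agent provider just vendored is called. The section-building logic mirrors the dist-es source above; the serviceId, clientVersion, app id, and the sample output are made-up illustrations:

import { createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-browser";

async function demo() {
  // Values are illustrative; real clients pass their own service id and version.
  const provider = createDefaultUserAgentProvider({ serviceId: "S3", clientVersion: "3.775.0" });
  const sections = await provider({ userAgentAppId: async () => "my-app" });
  // Yields [name, value?] pairs, roughly:
  // [["aws-sdk-js","3.775.0"],["ua","2.1"],["os/macOS","10.15"],["lang/js"],
  //  ["md/browser","Chrome_120.0.0"],["api/S3","3.775.0"],["app/my-app"]]
  console.log(sections.map(([name, value]) => (value ? `${name}/${value}` : name)).join(" "));
}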
diff --git a/node_modules/@aws-sdk/util-user-agent-node/dist-cjs/index.js b/node_modules/@aws-sdk/util-user-agent-node/dist-cjs/index.js new file mode 100644 index 00000000..083dccb6 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-node/dist-cjs/index.js @@ -0,0 +1,102 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + NODE_APP_ID_CONFIG_OPTIONS: () => NODE_APP_ID_CONFIG_OPTIONS, + UA_APP_ID_ENV_NAME: () => UA_APP_ID_ENV_NAME, + UA_APP_ID_INI_NAME: () => UA_APP_ID_INI_NAME, + createDefaultUserAgentProvider: () => createDefaultUserAgentProvider, + crtAvailability: () => crtAvailability, + defaultUserAgent: () => defaultUserAgent +}); +module.exports = __toCommonJS(index_exports); + +// src/defaultUserAgent.ts +var import_os = require("os"); +var import_process = require("process"); + +// src/crt-availability.ts +var crtAvailability = { + isCrtAvailable: false +}; + +// src/is-crt-available.ts +var isCrtAvailable = /* @__PURE__ */ __name(() => { + if (crtAvailability.isCrtAvailable) { + return ["md/crt-avail"]; + } + return null; +}, "isCrtAvailable"); + +// src/defaultUserAgent.ts +var createDefaultUserAgentProvider = /* @__PURE__ */ __name(({ serviceId, clientVersion }) => { + return async (config) => { + const sections = [ + // sdk-metadata + ["aws-sdk-js", clientVersion], + // ua-metadata + ["ua", "2.1"], + // os-metadata + [`os/${(0, import_os.platform)()}`, (0, import_os.release)()], + // language-metadata + // ECMAScript edition doesn't matter in JS, so no version needed. + ["lang/js"], + ["md/nodejs", `${import_process.versions.node}`] + ]; + const crtAvailable = isCrtAvailable(); + if (crtAvailable) { + sections.push(crtAvailable); + } + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + if (import_process.env.AWS_EXECUTION_ENV) { + sections.push([`exec-env/${import_process.env.AWS_EXECUTION_ENV}`]); + } + const appId = await config?.userAgentAppId?.(); + const resolvedUserAgent = appId ? 
[...sections, [`app/${appId}`]] : [...sections]; + return resolvedUserAgent; + }; +}, "createDefaultUserAgentProvider"); +var defaultUserAgent = createDefaultUserAgentProvider; + +// src/nodeAppIdConfigOptions.ts +var import_middleware_user_agent = require("@aws-sdk/middleware-user-agent"); +var UA_APP_ID_ENV_NAME = "AWS_SDK_UA_APP_ID"; +var UA_APP_ID_INI_NAME = "sdk_ua_app_id"; +var UA_APP_ID_INI_NAME_DEPRECATED = "sdk-ua-app-id"; +var NODE_APP_ID_CONFIG_OPTIONS = { + environmentVariableSelector: /* @__PURE__ */ __name((env2) => env2[UA_APP_ID_ENV_NAME], "environmentVariableSelector"), + configFileSelector: /* @__PURE__ */ __name((profile) => profile[UA_APP_ID_INI_NAME] ?? profile[UA_APP_ID_INI_NAME_DEPRECATED], "configFileSelector"), + default: import_middleware_user_agent.DEFAULT_UA_APP_ID +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + crtAvailability, + createDefaultUserAgentProvider, + defaultUserAgent, + UA_APP_ID_ENV_NAME, + UA_APP_ID_INI_NAME, + NODE_APP_ID_CONFIG_OPTIONS +}); + diff --git a/node_modules/@aws-sdk/util-user-agent-node/dist-es/crt-availability.js b/node_modules/@aws-sdk/util-user-agent-node/dist-es/crt-availability.js new file mode 100644 index 00000000..99ebeb97 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-node/dist-es/crt-availability.js @@ -0,0 +1,3 @@ +export const crtAvailability = { + isCrtAvailable: false, +}; diff --git a/node_modules/@aws-sdk/util-user-agent-node/dist-es/defaultUserAgent.js b/node_modules/@aws-sdk/util-user-agent-node/dist-es/defaultUserAgent.js new file mode 100644 index 00000000..d92681d9 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-node/dist-es/defaultUserAgent.js @@ -0,0 +1,29 @@ +import { platform, release } from "os"; +import { env, versions } from "process"; +import { isCrtAvailable } from "./is-crt-available"; +export { crtAvailability } from "./crt-availability"; +export const createDefaultUserAgentProvider = ({ serviceId, clientVersion }) => { + return async (config) => { + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.1"], + [`os/${platform()}`, release()], + ["lang/js"], + ["md/nodejs", `${versions.node}`], + ]; + const crtAvailable = isCrtAvailable(); + if (crtAvailable) { + sections.push(crtAvailable); + } + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + if (env.AWS_EXECUTION_ENV) { + sections.push([`exec-env/${env.AWS_EXECUTION_ENV}`]); + } + const appId = await config?.userAgentAppId?.(); + const resolvedUserAgent = appId ? 
[...sections, [`app/${appId}`]] : [...sections]; + return resolvedUserAgent; + }; +}; +export const defaultUserAgent = createDefaultUserAgentProvider; diff --git a/node_modules/@aws-sdk/util-user-agent-node/dist-es/index.js b/node_modules/@aws-sdk/util-user-agent-node/dist-es/index.js new file mode 100644 index 00000000..cbf37f23 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-node/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./defaultUserAgent"; +export * from "./nodeAppIdConfigOptions"; diff --git a/node_modules/@aws-sdk/util-user-agent-node/dist-es/is-crt-available.js b/node_modules/@aws-sdk/util-user-agent-node/dist-es/is-crt-available.js new file mode 100644 index 00000000..e9f8b0de --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-node/dist-es/is-crt-available.js @@ -0,0 +1,7 @@ +import { crtAvailability } from "./crt-availability"; +export const isCrtAvailable = () => { + if (crtAvailability.isCrtAvailable) { + return ["md/crt-avail"]; + } + return null; +}; diff --git a/node_modules/@aws-sdk/util-user-agent-node/dist-es/nodeAppIdConfigOptions.js b/node_modules/@aws-sdk/util-user-agent-node/dist-es/nodeAppIdConfigOptions.js new file mode 100644 index 00000000..f270db9b --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-node/dist-es/nodeAppIdConfigOptions.js @@ -0,0 +1,9 @@ +import { DEFAULT_UA_APP_ID } from "@aws-sdk/middleware-user-agent"; +export const UA_APP_ID_ENV_NAME = "AWS_SDK_UA_APP_ID"; +export const UA_APP_ID_INI_NAME = "sdk_ua_app_id"; +const UA_APP_ID_INI_NAME_DEPRECATED = "sdk-ua-app-id"; +export const NODE_APP_ID_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[UA_APP_ID_ENV_NAME], + configFileSelector: (profile) => profile[UA_APP_ID_INI_NAME] ?? profile[UA_APP_ID_INI_NAME_DEPRECATED], + default: DEFAULT_UA_APP_ID, +}; diff --git a/node_modules/@aws-sdk/util-user-agent-node/dist-types/crt-availability.d.ts b/node_modules/@aws-sdk/util-user-agent-node/dist-types/crt-availability.d.ts new file mode 100644 index 00000000..c2033a05 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-node/dist-types/crt-availability.d.ts @@ -0,0 +1,9 @@ +/** + * @internal + * + * If \@aws-sdk/signature-v4-crt is installed and loaded, it will register + * this value to true. + */ +export declare const crtAvailability: { + isCrtAvailable: boolean; +}; diff --git a/node_modules/@aws-sdk/util-user-agent-node/dist-types/defaultUserAgent.d.ts b/node_modules/@aws-sdk/util-user-agent-node/dist-types/defaultUserAgent.d.ts new file mode 100644 index 00000000..28537a65 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-node/dist-types/defaultUserAgent.d.ts @@ -0,0 +1,23 @@ +import { Provider, UserAgent } from "@smithy/types"; +export { crtAvailability } from "./crt-availability"; +export interface DefaultUserAgentOptions { + serviceId?: string; + clientVersion: string; +} +export interface PreviouslyResolved { + userAgentAppId: Provider<string | undefined>; +} +/** + * @internal + * + * Collect metrics from runtime to put into user agent. + */ +export declare const createDefaultUserAgentProvider: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise<UserAgent>; +/** + * + * @internal + * + * @deprecated use createDefaultUserAgentProvider + * + */ +export declare const defaultUserAgent: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise<UserAgent>; diff --git a/node_modules/@aws-sdk/util-user-agent-node/dist-types/index.d.ts b/node_modules/@aws-sdk/util-user-agent-node/dist-types/index.d.ts new file mode 100644 index 00000000..cbf37f23 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-node/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./defaultUserAgent"; +export * from "./nodeAppIdConfigOptions"; diff --git a/node_modules/@aws-sdk/util-user-agent-node/dist-types/is-crt-available.d.ts b/node_modules/@aws-sdk/util-user-agent-node/dist-types/is-crt-available.d.ts new file mode 100644 index 00000000..675ffa8e --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-node/dist-types/is-crt-available.d.ts @@ -0,0 +1,5 @@ +import { UserAgentPair } from "@smithy/types"; +/** + * @internal + */ +export declare const isCrtAvailable: () => UserAgentPair | null; diff --git a/node_modules/@aws-sdk/util-user-agent-node/dist-types/nodeAppIdConfigOptions.d.ts b/node_modules/@aws-sdk/util-user-agent-node/dist-types/nodeAppIdConfigOptions.d.ts new file mode 100644 index 00000000..92a8edc4 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-node/dist-types/nodeAppIdConfigOptions.d.ts @@ -0,0 +1,13 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const UA_APP_ID_ENV_NAME = "AWS_SDK_UA_APP_ID"; +/** + * @internal + */ +export declare const UA_APP_ID_INI_NAME = "sdk_ua_app_id"; +/** + * @internal + */ +export declare const NODE_APP_ID_CONFIG_OPTIONS: LoadedConfigSelectors<string | undefined>; diff --git a/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/crt-availability.d.ts b/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/crt-availability.d.ts new file mode 100644 index 00000000..9dccfb03 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/crt-availability.d.ts @@ -0,0 +1,3 @@ +export declare const crtAvailability: { + isCrtAvailable: boolean; +}; diff --git a/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/defaultUserAgent.d.ts b/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/defaultUserAgent.d.ts new file mode 100644 index 00000000..6e4884f3 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/defaultUserAgent.d.ts @@ -0,0 +1,21 @@ +import { Provider, UserAgent } from "@smithy/types"; +export { crtAvailability } from "./crt-availability"; +export interface DefaultUserAgentOptions { + serviceId?: string; + clientVersion: string; +} +export interface PreviouslyResolved { + userAgentAppId: Provider<string | undefined>; +} +export declare const createDefaultUserAgentProvider: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise<UserAgent>; +export declare const defaultUserAgent: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise<UserAgent>; diff --git a/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..cbf37f23 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/index.d.ts @@
-0,0 +1,2 @@ +export * from "./defaultUserAgent"; +export * from "./nodeAppIdConfigOptions"; diff --git a/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/is-crt-available.d.ts b/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/is-crt-available.d.ts new file mode 100644 index 00000000..d28355c1 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/is-crt-available.d.ts @@ -0,0 +1,2 @@ +import { UserAgentPair } from "@smithy/types"; +export declare const isCrtAvailable: () => UserAgentPair | null; diff --git a/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/nodeAppIdConfigOptions.d.ts b/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/nodeAppIdConfigOptions.d.ts new file mode 100644 index 00000000..b9fa1235 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/nodeAppIdConfigOptions.d.ts @@ -0,0 +1,6 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const UA_APP_ID_ENV_NAME = "AWS_SDK_UA_APP_ID"; +export declare const UA_APP_ID_INI_NAME = "sdk_ua_app_id"; +export declare const NODE_APP_ID_CONFIG_OPTIONS: LoadedConfigSelectors< + string | undefined +>; diff --git a/node_modules/@aws-sdk/util-user-agent-node/package.json b/node_modules/@aws-sdk/util-user-agent-node/package.json new file mode 100644 index 00000000..14742a50 --- /dev/null +++ b/node_modules/@aws-sdk/util-user-agent-node/package.json @@ -0,0 +1,65 @@ +{ + "name": "@aws-sdk/util-user-agent-node", + "version": "3.799.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-user-agent-node", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-user-agent-node", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-user-agent-node" + } +} diff --git a/node_modules/@aws-sdk/xml-builder/LICENSE b/node_modules/@aws-sdk/xml-builder/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS 
AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. 
+ + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
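For orientation, the @aws-sdk/xml-builder package vendored below exposes two small serializers, XmlNode and XmlText, whose toString() renders escaped XML. A minimal usage sketch, not part of the diff itself; the tag, attribute, and value names here are hypothetical:

import { XmlNode, XmlText } from "@aws-sdk/xml-builder";

// Builds <Tagging><TagSet><Tag><Key>env</Key><Value>dev &amp; test</Value></Tag></TagSet></Tagging>
const tag = new XmlNode("Tag")
  .addChildNode(XmlNode.of("Key", "env"))                                      // of() wraps the text in an XmlText child
  .addChildNode(new XmlNode("Value").addChildNode(new XmlText("dev & test"))); // "&" is escaped to "&amp;"
const tagging = new XmlNode("Tagging").addChildNode(new XmlNode("TagSet").addChildNode(tag));
console.log(tagging.toString());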
\ No newline at end of file diff --git a/node_modules/@aws-sdk/xml-builder/README.md b/node_modules/@aws-sdk/xml-builder/README.md new file mode 100644 index 00000000..0aabbc14 --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/README.md @@ -0,0 +1,10 @@ +# @aws-sdk/xml-builder
+
+[![NPM version](https://img.shields.io/npm/v/@aws-sdk/xml-builder/latest.svg)](https://www.npmjs.com/package/@aws-sdk/xml-builder)
+[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/xml-builder.svg)](https://www.npmjs.com/package/@aws-sdk/xml-builder)
+
+> An internal package
+
+## Usage
+
+You probably shouldn't, at least directly. diff --git a/node_modules/@aws-sdk/xml-builder/dist-cjs/index.js b/node_modules/@aws-sdk/xml-builder/dist-cjs/index.js new file mode 100644 index 00000000..9edb68b8 --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/dist-cjs/index.js @@ -0,0 +1,173 @@ +"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// src/index.ts
+var index_exports = {};
+__export(index_exports, {
+  XmlNode: () => XmlNode,
+  XmlText: () => XmlText
+});
+module.exports = __toCommonJS(index_exports);
+
+// src/escape-attribute.ts
+function escapeAttribute(value) {
+  return value.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;").replace(/"/g, "&quot;");
+}
+__name(escapeAttribute, "escapeAttribute");
+
+// src/escape-element.ts
+function escapeElement(value) {
+  return value.replace(/&/g, "&amp;").replace(/"/g, "&quot;").replace(/'/g, "&apos;").replace(/</g, "&lt;").replace(/>/g, "&gt;").replace(/\r/g, "&#x0D;").replace(/\n/g, "&#x0A;").replace(/\u0085/g, "&#x85;").replace(/\u2028/, "&#x2028;");
+}
+__name(escapeElement, "escapeElement");
+
+// src/XmlText.ts
+var XmlText = class {
+  constructor(value) {
+    this.value = value;
+  }
+  static {
+    __name(this, "XmlText");
+  }
+  toString() {
+    return escapeElement("" + this.value);
+  }
+};
+
+// src/XmlNode.ts
+var XmlNode = class _XmlNode {
+  constructor(name, children = []) {
+    this.name = name;
+    this.children = children;
+  }
+  static {
+    __name(this, "XmlNode");
+  }
+  attributes = {};
+  static of(name, childText, withName) {
+    const node = new _XmlNode(name);
+    if (childText !== void 0) {
+      node.addChildNode(new XmlText(childText));
+    }
+    if (withName !== void 0) {
+      node.withName(withName);
+    }
+    return node;
+  }
+  withName(name) {
+    this.name = name;
+    return this;
+  }
+  addAttribute(name, value) {
+    this.attributes[name] = value;
+    return this;
+  }
+  addChildNode(child) {
+    this.children.push(child);
+    return this;
+  }
+  removeAttribute(name) {
+    delete this.attributes[name];
+    return this;
+  }
+  /**
+   * @internal
+   * Alias of {@link XmlNode#withName(string)} for codegen brevity.
+   */
+  n(name) {
+    this.name = name;
+    return this;
+  }
+  /**
+   * @internal
+   * Alias of {@link XmlNode#addChildNode(string)} for codegen brevity.
+   */
+  c(child) {
+    this.children.push(child);
+    return this;
+  }
+  /**
+   * @internal
+   * Checked version of {@link XmlNode#addAttribute(string)} for codegen brevity.
+   */
+  a(name, value) {
+    if (value != null) {
+      this.attributes[name] = value;
+    }
+    return this;
+  }
+  /**
+   * Create a child node.
+   * Used in serialization of string fields.
+   * @internal
+   */
+  cc(input, field, withName = field) {
+    if (input[field] != null) {
+      const node = _XmlNode.of(field, input[field]).withName(withName);
+      this.c(node);
+    }
+  }
+  /**
+   * Creates list child nodes.
+   * @internal
+   */
+  l(input, listName, memberName, valueProvider) {
+    if (input[listName] != null) {
+      const nodes = valueProvider();
+      nodes.map((node) => {
+        node.withName(memberName);
+        this.c(node);
+      });
+    }
+  }
+  /**
+   * Creates list child nodes with container.
+   * @internal
+   */
+  lc(input, listName, memberName, valueProvider) {
+    if (input[listName] != null) {
+      const nodes = valueProvider();
+      const containerNode = new _XmlNode(memberName);
+      nodes.map((node) => {
+        containerNode.c(node);
+      });
+      this.c(containerNode);
+    }
+  }
+  toString() {
+    const hasChildren = Boolean(this.children.length);
+    let xmlText = `<${this.name}`;
+    const attributes = this.attributes;
+    for (const attributeName of Object.keys(attributes)) {
+      const attribute = attributes[attributeName];
+      if (attribute != null) {
+        xmlText += ` ${attributeName}="${escapeAttribute("" + attribute)}"`;
+      }
+    }
+    return xmlText += !hasChildren ? "/>" : `>${this.children.map((c) => c.toString()).join("")}</${this.name}>`;
+  }
+};
+// Annotate the CommonJS export names for ESM import in node:

+0 && (module.exports = {
+  XmlNode,
+  XmlText
+});
+
 diff --git a/node_modules/@aws-sdk/xml-builder/dist-es/XmlNode.js b/node_modules/@aws-sdk/xml-builder/dist-es/XmlNode.js new file mode 100644 index 00000000..4c8d9971 --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/dist-es/XmlNode.js @@ -0,0 +1,88 @@ +import { escapeAttribute } from "./escape-attribute";
+import { XmlText } from "./XmlText";
+export class XmlNode {
+    name;
+    children;
+    attributes = {};
+    static of(name, childText, withName) {
+        const node = new XmlNode(name);
+        if (childText !== undefined) {
+            node.addChildNode(new XmlText(childText));
+        }
+        if (withName !== undefined) {
+            node.withName(withName);
+        }
+        return node;
+    }
+    constructor(name, children = []) {
+        this.name = name;
+        this.children = children;
+    }
+    withName(name) {
+        this.name = name;
+        return this;
+    }
+    addAttribute(name, value) {
+        this.attributes[name] = value;
+        return this;
+    }
+    addChildNode(child) {
+        this.children.push(child);
+        return this;
+    }
+    removeAttribute(name) {
+        delete this.attributes[name];
+        return this;
+    }
+    n(name) {
+        this.name = name;
+        return this;
+    }
+    c(child) {
+        this.children.push(child);
+        return this;
+    }
+    a(name, value) {
+        if (value != null) {
+            this.attributes[name] = value;
+        }
+        return this;
+    }
+    cc(input, field, withName = field) {
+        if (input[field] != null) {
+            const node = XmlNode.of(field, input[field]).withName(withName);
+            this.c(node);
+        }
+    }
+    l(input, listName, memberName, valueProvider) {
+        if (input[listName] != null) {
+            const nodes = valueProvider();
+            nodes.map((node) => {
+                node.withName(memberName);
+                this.c(node);
+            });
+        }
+    }
+    lc(input, listName, memberName, valueProvider) {
+        if (input[listName] != null) {
+            const nodes = valueProvider();
+            const containerNode = new XmlNode(memberName);
+            nodes.map((node) => {
+                containerNode.c(node);
+            });
+            this.c(containerNode);
+        }
+    }
+    toString() {
+        const hasChildren = Boolean(this.children.length);
+        let xmlText = `<${this.name}`;
+        const attributes = this.attributes;
+        for (const attributeName of Object.keys(attributes)) {
+            const attribute = attributes[attributeName];
+            if (attribute != null) {
+                xmlText += ` ${attributeName}="${escapeAttribute("" + attribute)}"`;
+            }
+        }
+        return (xmlText += !hasChildren ? "/>" : `>${this.children.map((c) => c.toString()).join("")}</${this.name}>`);
+    }
+} diff --git a/node_modules/@aws-sdk/xml-builder/dist-es/XmlText.js b/node_modules/@aws-sdk/xml-builder/dist-es/XmlText.js new file mode 100644 index 00000000..e019b3fb --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/dist-es/XmlText.js @@ -0,0 +1,10 @@ +import { escapeElement } from "./escape-element";
+export class XmlText {
+    value;
+    constructor(value) {
+        this.value = value;
+    }
+    toString() {
+        return escapeElement("" + this.value);
+    }
+} diff --git a/node_modules/@aws-sdk/xml-builder/dist-es/escape-attribute.js b/node_modules/@aws-sdk/xml-builder/dist-es/escape-attribute.js new file mode 100644 index 00000000..b54adab4 --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/dist-es/escape-attribute.js @@ -0,0 +1,3 @@ +export function escapeAttribute(value) {
+    return value.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;").replace(/"/g, "&quot;");
+} diff --git a/node_modules/@aws-sdk/xml-builder/dist-es/escape-element.js b/node_modules/@aws-sdk/xml-builder/dist-es/escape-element.js new file mode 100644 index 00000000..0746be12 --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/dist-es/escape-element.js @@ -0,0 +1,12 @@ +export function escapeElement(value) {
+    return value
+        .replace(/&/g, "&amp;")
+        .replace(/"/g, "&quot;")
+        .replace(/'/g, "&apos;")
+        .replace(/</g, "&lt;")
+        .replace(/>/g, "&gt;")
+        .replace(/\r/g, "&#x0D;")
+        .replace(/\n/g, "&#x0A;")
+        .replace(/\u0085/g, "&#x85;")
+        .replace(/\u2028/, "&#x2028;");
+} diff --git a/node_modules/@aws-sdk/xml-builder/dist-es/index.js b/node_modules/@aws-sdk/xml-builder/dist-es/index.js new file mode 100644 index 00000000..330ddc5b --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./XmlNode"; +export * from "./XmlText"; diff --git a/node_modules/@aws-sdk/xml-builder/dist-es/stringable.js b/node_modules/@aws-sdk/xml-builder/dist-es/stringable.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/dist-es/stringable.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@aws-sdk/xml-builder/dist-types/XmlNode.d.ts b/node_modules/@aws-sdk/xml-builder/dist-types/XmlNode.d.ts new file mode 100644 index 00000000..6aaebd0a --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/dist-types/XmlNode.d.ts @@ -0,0 +1,49 @@ +import { Stringable } from "./stringable";
+/**
+ * @internal
+ *
+ * Represents an XML node.
+ */
+export declare class XmlNode {
+    private name;
+    readonly children: Stringable[];
+    private attributes;
+    static of(name: string, childText?: string, withName?: string): XmlNode;
+    constructor(name: string, children?: Stringable[]);
+    withName(name: string): XmlNode;
+    addAttribute(name: string, value: any): XmlNode;
+    addChildNode(child: Stringable): XmlNode;
+    removeAttribute(name: string): XmlNode;
+    /**
+     * @internal
+     * Alias of {@link XmlNode#withName(string)} for codegen brevity.
+     */
+    n(name: string): XmlNode;
+    /**
+     * @internal
+     * Alias of {@link XmlNode#addChildNode(string)} for codegen brevity.
+     */
+    c(child: Stringable): XmlNode;
+    /**
+     * @internal
+     * Checked version of {@link XmlNode#addAttribute(string)} for codegen brevity.
+     */
+    a(name: string, value: any): XmlNode;
+    /**
+     * Create a child node.
+     * Used in serialization of string fields.
+     * @internal
+     */
+    cc(input: any, field: string, withName?: string): void;
+    /**
+     * Creates list child nodes.
+     * @internal
+     */
+    l(input: any, listName: string, memberName: string, valueProvider: Function): void;
+    /**
+     * Creates list child nodes with container.
+     * @internal
+     */
+    lc(input: any, listName: string, memberName: string, valueProvider: Function): void;
+    toString(): string;
+} diff --git a/node_modules/@aws-sdk/xml-builder/dist-types/XmlText.d.ts b/node_modules/@aws-sdk/xml-builder/dist-types/XmlText.d.ts new file mode 100644 index 00000000..be3de835 --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/dist-types/XmlText.d.ts @@ -0,0 +1,11 @@ +import { Stringable } from "./stringable";
+/**
+ * @internal
+ *
+ * Represents an XML text value.
+ */
+export declare class XmlText implements Stringable {
+    private value;
+    constructor(value: string);
+    toString(): string;
+} diff --git a/node_modules/@aws-sdk/xml-builder/dist-types/escape-attribute.d.ts b/node_modules/@aws-sdk/xml-builder/dist-types/escape-attribute.d.ts new file mode 100644 index 00000000..4a08e7fa --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/dist-types/escape-attribute.d.ts @@ -0,0 +1,6 @@ +/**
+ * @internal
+ *
+ * Escapes characters that can not be in an XML attribute.
+ */ +export declare function escapeAttribute(value: string): string; diff --git a/node_modules/@aws-sdk/xml-builder/dist-types/escape-element.d.ts b/node_modules/@aws-sdk/xml-builder/dist-types/escape-element.d.ts new file mode 100644 index 00000000..d43e10ec --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/dist-types/escape-element.d.ts @@ -0,0 +1,6 @@ +/** + * @internal + * + * Escapes characters that can not be in an XML element. + */ +export declare function escapeElement(value: string): string; diff --git a/node_modules/@aws-sdk/xml-builder/dist-types/index.d.ts b/node_modules/@aws-sdk/xml-builder/dist-types/index.d.ts new file mode 100644 index 00000000..ed993382 --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./XmlNode"; +/** + * @internal + */ +export * from "./XmlText"; diff --git a/node_modules/@aws-sdk/xml-builder/dist-types/stringable.d.ts b/node_modules/@aws-sdk/xml-builder/dist-types/stringable.d.ts new file mode 100644 index 00000000..08f42d1b --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/dist-types/stringable.d.ts @@ -0,0 +1,6 @@ +/** + * @internal + */ +export interface Stringable { + toString(): string; +} diff --git a/node_modules/@aws-sdk/xml-builder/dist-types/ts3.4/XmlNode.d.ts b/node_modules/@aws-sdk/xml-builder/dist-types/ts3.4/XmlNode.d.ts new file mode 100644 index 00000000..164d6c10 --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/dist-types/ts3.4/XmlNode.d.ts @@ -0,0 +1,29 @@ +import { Stringable } from "./stringable"; +export declare class XmlNode { + private name; + readonly children: Stringable[]; + private attributes; + static of(name: string, childText?: string, withName?: string): XmlNode; + constructor(name: string, children?: Stringable[]); + withName(name: string): XmlNode; + addAttribute(name: string, value: any): XmlNode; + addChildNode(child: Stringable): XmlNode; + removeAttribute(name: string): XmlNode; + n(name: string): XmlNode; + c(child: Stringable): XmlNode; + a(name: string, value: any): XmlNode; + cc(input: any, field: string, withName?: string): void; + l( + input: any, + listName: string, + memberName: string, + valueProvider: Function + ): void; + lc( + input: any, + listName: string, + memberName: string, + valueProvider: Function + ): void; + toString(): string; +} diff --git a/node_modules/@aws-sdk/xml-builder/dist-types/ts3.4/XmlText.d.ts b/node_modules/@aws-sdk/xml-builder/dist-types/ts3.4/XmlText.d.ts new file mode 100644 index 00000000..f53373c1 --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/dist-types/ts3.4/XmlText.d.ts @@ -0,0 +1,6 @@ +import { Stringable } from "./stringable"; +export declare class XmlText implements Stringable { + private value; + constructor(value: string); + toString(): string; +} diff --git a/node_modules/@aws-sdk/xml-builder/dist-types/ts3.4/escape-attribute.d.ts b/node_modules/@aws-sdk/xml-builder/dist-types/ts3.4/escape-attribute.d.ts new file mode 100644 index 00000000..f9f9a95f --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/dist-types/ts3.4/escape-attribute.d.ts @@ -0,0 +1 @@ +export declare function escapeAttribute(value: string): string; diff --git a/node_modules/@aws-sdk/xml-builder/dist-types/ts3.4/escape-element.d.ts b/node_modules/@aws-sdk/xml-builder/dist-types/ts3.4/escape-element.d.ts new file mode 100644 index 00000000..b09ba89e --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/dist-types/ts3.4/escape-element.d.ts @@ -0,0 +1 @@ +export declare function escapeElement(value: 
string): string; diff --git a/node_modules/@aws-sdk/xml-builder/dist-types/ts3.4/index.d.ts b/node_modules/@aws-sdk/xml-builder/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..330ddc5b --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./XmlNode"; +export * from "./XmlText"; diff --git a/node_modules/@aws-sdk/xml-builder/dist-types/ts3.4/stringable.d.ts b/node_modules/@aws-sdk/xml-builder/dist-types/ts3.4/stringable.d.ts new file mode 100644 index 00000000..ba9b1f72 --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/dist-types/ts3.4/stringable.d.ts @@ -0,0 +1,3 @@ +export interface Stringable { + toString(): string; +} diff --git a/node_modules/@aws-sdk/xml-builder/package.json b/node_modules/@aws-sdk/xml-builder/package.json new file mode 100644 index 00000000..697bc5f3 --- /dev/null +++ b/node_modules/@aws-sdk/xml-builder/package.json @@ -0,0 +1,54 @@ +{ + "name": "@aws-sdk/xml-builder", + "version": "3.775.0", + "description": "XML builder for the AWS SDK", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline xml-builder", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/xml-builder", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/xml-builder" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + } +} diff --git a/node_modules/@smithy/abort-controller/LICENSE b/node_modules/@smithy/abort-controller/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/abort-controller/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
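For orientation, the @smithy/abort-controller package vendored below is a deprecated ponyfill of the native AbortController for Node.js versions before 15.x. A minimal usage sketch of the API it ships, not part of the diff itself:

import { AbortController } from "@smithy/abort-controller";

const controller = new AbortController();
// The handler receives the signal itself and is invoked at most once.
controller.signal.onabort = (signal) => console.log("aborted:", signal.aborted);

console.log(controller.signal.aborted); // false
controller.abort();                     // sets the flag, then fires onabort
console.log(controller.signal.aborted); // true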
\ No newline at end of file diff --git a/node_modules/@smithy/abort-controller/README.md b/node_modules/@smithy/abort-controller/README.md new file mode 100644 index 00000000..175bc37f --- /dev/null +++ b/node_modules/@smithy/abort-controller/README.md @@ -0,0 +1,4 @@ +# @smithy/abort-controller + +[![NPM version](https://img.shields.io/npm/v/@smithy/abort-controller/latest.svg)](https://www.npmjs.com/package/@smithy/abort-controller) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/abort-controller.svg)](https://www.npmjs.com/package/@smithy/abort-controller) diff --git a/node_modules/@smithy/abort-controller/dist-cjs/AbortController.js b/node_modules/@smithy/abort-controller/dist-cjs/AbortController.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/abort-controller/dist-cjs/AbortController.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/abort-controller/dist-cjs/AbortSignal.js b/node_modules/@smithy/abort-controller/dist-cjs/AbortSignal.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/abort-controller/dist-cjs/AbortSignal.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/abort-controller/dist-cjs/index.js b/node_modules/@smithy/abort-controller/dist-cjs/index.js new file mode 100644 index 00000000..e2f7caa5 --- /dev/null +++ b/node_modules/@smithy/abort-controller/dist-cjs/index.js @@ -0,0 +1,84 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AbortController: () => AbortController, + AbortHandler: () => import_types.AbortHandler, + AbortSignal: () => AbortSignal, + IAbortController: () => import_types.AbortController, + IAbortSignal: () => import_types.AbortSignal +}); +module.exports = __toCommonJS(src_exports); + +// src/AbortController.ts + + +// src/AbortSignal.ts +var import_types = require("@smithy/types"); +var AbortSignal = class { + constructor() { + this.onabort = null; + this._aborted = false; + Object.defineProperty(this, "_aborted", { + value: false, + writable: true + }); + } + static { + __name(this, "AbortSignal"); + } + /** + * Whether the associated operation has already been cancelled. 
+ */ + get aborted() { + return this._aborted; + } + /** + * @internal + */ + abort() { + this._aborted = true; + if (this.onabort) { + this.onabort(this); + this.onabort = null; + } + } +}; + +// src/AbortController.ts +var AbortController = class { + constructor() { + this.signal = new AbortSignal(); + } + static { + __name(this, "AbortController"); + } + abort() { + this.signal.abort(); + } +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + AbortController, + AbortSignal +}); + diff --git a/node_modules/@smithy/abort-controller/dist-es/AbortController.js b/node_modules/@smithy/abort-controller/dist-es/AbortController.js new file mode 100644 index 00000000..696f1371 --- /dev/null +++ b/node_modules/@smithy/abort-controller/dist-es/AbortController.js @@ -0,0 +1,9 @@ +import { AbortSignal } from "./AbortSignal"; +export class AbortController { + constructor() { + this.signal = new AbortSignal(); + } + abort() { + this.signal.abort(); + } +} diff --git a/node_modules/@smithy/abort-controller/dist-es/AbortSignal.js b/node_modules/@smithy/abort-controller/dist-es/AbortSignal.js new file mode 100644 index 00000000..9fc08134 --- /dev/null +++ b/node_modules/@smithy/abort-controller/dist-es/AbortSignal.js @@ -0,0 +1,20 @@ +export class AbortSignal { + constructor() { + this.onabort = null; + this._aborted = false; + Object.defineProperty(this, "_aborted", { + value: false, + writable: true, + }); + } + get aborted() { + return this._aborted; + } + abort() { + this._aborted = true; + if (this.onabort) { + this.onabort(this); + this.onabort = null; + } + } +} diff --git a/node_modules/@smithy/abort-controller/dist-es/index.js b/node_modules/@smithy/abort-controller/dist-es/index.js new file mode 100644 index 00000000..a0f47f72 --- /dev/null +++ b/node_modules/@smithy/abort-controller/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./AbortController"; +export * from "./AbortSignal"; diff --git a/node_modules/@smithy/abort-controller/dist-types/AbortController.d.ts b/node_modules/@smithy/abort-controller/dist-types/AbortController.d.ts new file mode 100644 index 00000000..007f0f64 --- /dev/null +++ b/node_modules/@smithy/abort-controller/dist-types/AbortController.d.ts @@ -0,0 +1,16 @@ +import { AbortController as DeprecatedAbortController } from "@smithy/types"; +import { AbortSignal } from "./AbortSignal"; +/** + * @public + */ +export { DeprecatedAbortController as IAbortController }; +/** + * @deprecated This implementation was added as Node.js didn't support AbortController prior to 15.x + * Use native implementation in browsers or Node.js \>=15.4.0. + * + * @public + */ +export declare class AbortController implements DeprecatedAbortController { + readonly signal: AbortSignal; + abort(): void; +} diff --git a/node_modules/@smithy/abort-controller/dist-types/AbortSignal.d.ts b/node_modules/@smithy/abort-controller/dist-types/AbortSignal.d.ts new file mode 100644 index 00000000..a97c3dc1 --- /dev/null +++ b/node_modules/@smithy/abort-controller/dist-types/AbortSignal.d.ts @@ -0,0 +1,21 @@ +import { AbortHandler, AbortSignal as DeprecatedAbortSignal } from "@smithy/types"; +/** + * @public + */ +export { AbortHandler, DeprecatedAbortSignal as IAbortSignal }; +/** + * @public + */ +export declare class AbortSignal implements DeprecatedAbortSignal { + onabort: AbortHandler | null; + private _aborted; + constructor(); + /** + * Whether the associated operation has already been cancelled. 
+ */ + get aborted(): boolean; + /** + * @internal + */ + abort(): void; +} diff --git a/node_modules/@smithy/abort-controller/dist-types/index.d.ts b/node_modules/@smithy/abort-controller/dist-types/index.d.ts new file mode 100644 index 00000000..8788e2f1 --- /dev/null +++ b/node_modules/@smithy/abort-controller/dist-types/index.d.ts @@ -0,0 +1,9 @@ +/** + * This implementation was added as Node.js didn't support AbortController prior to 15.x + * Use native implementation in browsers or Node.js \>=15.4.0. + * + * @deprecated Use standard implementations in [Browsers](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) and [Node.js](https://nodejs.org/docs/latest/api/globals.html#class-abortcontroller) + * @packageDocumentation + */ +export * from "./AbortController"; +export * from "./AbortSignal"; diff --git a/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortController.d.ts b/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortController.d.ts new file mode 100644 index 00000000..89457d4c --- /dev/null +++ b/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortController.d.ts @@ -0,0 +1,16 @@ +import { AbortController as DeprecatedAbortController } from "@smithy/types"; +import { AbortSignal } from "./AbortSignal"; +/** + * @public + */ +export { DeprecatedAbortController as IAbortController }; +/** + * @deprecated This implementation was added as Node.js didn't support AbortController prior to 15.x + * Use native implementation in browsers or Node.js \>=15.4.0. + * + * @public + */ +export declare class AbortController implements DeprecatedAbortController { + readonly signal: AbortSignal; + abort(): void; +} diff --git a/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortSignal.d.ts b/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortSignal.d.ts new file mode 100644 index 00000000..92130a38 --- /dev/null +++ b/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortSignal.d.ts @@ -0,0 +1,21 @@ +import { AbortHandler, AbortSignal as DeprecatedAbortSignal } from "@smithy/types"; +/** + * @public + */ +export { AbortHandler, DeprecatedAbortSignal as IAbortSignal }; +/** + * @public + */ +export declare class AbortSignal implements DeprecatedAbortSignal { + onabort: AbortHandler | null; + private _aborted; + constructor(); + /* + * Whether the associated operation has already been cancelled. + */ + readonly aborted: boolean; + /** + * @internal + */ + abort(): void; +} diff --git a/node_modules/@smithy/abort-controller/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/abort-controller/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..5a907b0d --- /dev/null +++ b/node_modules/@smithy/abort-controller/dist-types/ts3.4/index.d.ts @@ -0,0 +1,9 @@ +/** + * This implementation was added as Node.js didn't support AbortController prior to 15.x + * Use native implementation in browsers or Node.js \>=15.4.0. 
+ * + * @deprecated Use standard implementations in [Browsers](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) and [Node.js](https://nodejs.org/docs/latest/api/globals.html#class-abortcontroller) + * @packageDocumentation + */ +export * from "./AbortController"; +export * from "./AbortSignal"; diff --git a/node_modules/@smithy/abort-controller/package.json b/node_modules/@smithy/abort-controller/package.json new file mode 100644 index 00000000..b7e57692 --- /dev/null +++ b/node_modules/@smithy/abort-controller/package.json @@ -0,0 +1,62 @@ +{ + "name": "@smithy/abort-controller", + "version": "4.0.2", + "description": "A simple abort controller library", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline abort-controller", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/abort-controller", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/abort-controller" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/chunked-blob-reader-native/LICENSE b/node_modules/@smithy/chunked-blob-reader-native/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/chunked-blob-reader-native/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
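For orientation, the @smithy/chunked-blob-reader-native package vendored below exports a single blobReader helper that pulls a browser Blob through FileReader in base64-decoded slices. A minimal browser-side sketch (the Blob content and chunk size are hypothetical, and top-level await assumes an ESM context):

import { blobReader } from "@smithy/chunked-blob-reader-native";

// Consume a Blob incrementally; each chunk arrives as a Uint8Array.
const blob = new Blob(["hello, world"]);
let total = 0;
await blobReader(
  blob,
  (chunk) => { total += chunk.byteLength; },
  64 * 1024 // chunkSize is optional and defaults to 1 MiB
);
console.log(`read ${total} bytes`);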
\ No newline at end of file diff --git a/node_modules/@smithy/chunked-blob-reader-native/README.md b/node_modules/@smithy/chunked-blob-reader-native/README.md new file mode 100644 index 00000000..4ca7fc76 --- /dev/null +++ b/node_modules/@smithy/chunked-blob-reader-native/README.md @@ -0,0 +1,10 @@ +# @smithy/chunked-blob-reader-native + +[![NPM version](https://img.shields.io/npm/v/@smithy/chunked-blob-reader-native/latest.svg)](https://www.npmjs.com/package/@smithy/chunked-blob-reader-native) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/chunked-blob-reader-native.svg)](https://www.npmjs.com/package/@smithy/chunked-blob-reader-native) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@smithy/chunked-blob-reader-native/dist-cjs/index.js b/node_modules/@smithy/chunked-blob-reader-native/dist-cjs/index.js new file mode 100644 index 00000000..394cd39c --- /dev/null +++ b/node_modules/@smithy/chunked-blob-reader-native/dist-cjs/index.js @@ -0,0 +1,59 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + blobReader: () => blobReader +}); +module.exports = __toCommonJS(src_exports); +var import_util_base64 = require("@smithy/util-base64"); +function blobReader(blob, onChunk, chunkSize = 1024 * 1024) { + return new Promise((resolve, reject) => { + const fileReader = new FileReader(); + fileReader.onerror = reject; + fileReader.onabort = reject; + const size = blob.size; + let totalBytesRead = 0; + const read = /* @__PURE__ */ __name(() => { + if (totalBytesRead >= size) { + resolve(); + return; + } + fileReader.readAsDataURL(blob.slice(totalBytesRead, Math.min(size, totalBytesRead + chunkSize))); + }, "read"); + fileReader.onload = (event) => { + const result = event.target.result; + const dataOffset = result.indexOf(",") + 1; + const data = result.substring(dataOffset); + const decoded = (0, import_util_base64.fromBase64)(data); + onChunk(decoded); + totalBytesRead += decoded.byteLength; + read(); + }; + read(); + }); +} +__name(blobReader, "blobReader"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + blobReader +}); + diff --git a/node_modules/@smithy/chunked-blob-reader-native/dist-es/index.js b/node_modules/@smithy/chunked-blob-reader-native/dist-es/index.js new file mode 100644 index 00000000..370e2e02 --- /dev/null +++ b/node_modules/@smithy/chunked-blob-reader-native/dist-es/index.js @@ -0,0 +1,27 @@ +import { fromBase64 } from "@smithy/util-base64"; +export function blobReader(blob, onChunk, chunkSize = 1024 * 1024) { + return new Promise((resolve, reject) => { + const fileReader = new FileReader(); + 
fileReader.onerror = reject; + fileReader.onabort = reject; + const size = blob.size; + let totalBytesRead = 0; + const read = () => { + if (totalBytesRead >= size) { + resolve(); + return; + } + fileReader.readAsDataURL(blob.slice(totalBytesRead, Math.min(size, totalBytesRead + chunkSize))); + }; + fileReader.onload = (event) => { + const result = event.target.result; + const dataOffset = result.indexOf(",") + 1; + const data = result.substring(dataOffset); + const decoded = fromBase64(data); + onChunk(decoded); + totalBytesRead += decoded.byteLength; + read(); + }; + read(); + }); +} diff --git a/node_modules/@smithy/chunked-blob-reader-native/dist-types/index.d.ts b/node_modules/@smithy/chunked-blob-reader-native/dist-types/index.d.ts new file mode 100644 index 00000000..f83a42b3 --- /dev/null +++ b/node_modules/@smithy/chunked-blob-reader-native/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare function blobReader(blob: Blob, onChunk: (chunk: Uint8Array) => void, chunkSize?: number): Promise<void>; diff --git a/node_modules/@smithy/chunked-blob-reader-native/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/chunked-blob-reader-native/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..10b71d05 --- /dev/null +++ b/node_modules/@smithy/chunked-blob-reader-native/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare function blobReader(blob: Blob, onChunk: (chunk: Uint8Array) => void, chunkSize?: number): Promise<void>; diff --git a/node_modules/@smithy/chunked-blob-reader-native/package.json b/node_modules/@smithy/chunked-blob-reader-native/package.json new file mode 100644 index 00000000..43c93d68 --- /dev/null +++ b/node_modules/@smithy/chunked-blob-reader-native/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/chunked-blob-reader-native", + "version": "4.0.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline chunked-blob-reader-native", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-base64": "^4.0.0", + "tslib": "^2.6.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/chunked-blob-reader-native", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/chunked-blob-reader-native" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + 
"typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/chunked-blob-reader/LICENSE b/node_modules/@smithy/chunked-blob-reader/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/chunked-blob-reader/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@smithy/chunked-blob-reader/README.md b/node_modules/@smithy/chunked-blob-reader/README.md new file mode 100644 index 00000000..d6c74ac0 --- /dev/null +++ b/node_modules/@smithy/chunked-blob-reader/README.md @@ -0,0 +1,10 @@ +# @smithy/chunked-blob-reader + +[![NPM version](https://img.shields.io/npm/v/@smithy/chunked-blob-reader/latest.svg)](https://www.npmjs.com/package/@smithy/chunked-blob-reader) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/chunked-blob-reader.svg)](https://www.npmjs.com/package/@smithy/chunked-blob-reader) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@smithy/chunked-blob-reader/dist-cjs/index.js b/node_modules/@smithy/chunked-blob-reader/dist-cjs/index.js new file mode 100644 index 00000000..b5fbe56c --- /dev/null +++ b/node_modules/@smithy/chunked-blob-reader/dist-cjs/index.js @@ -0,0 +1,41 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + blobReader: () => blobReader +}); +module.exports = __toCommonJS(src_exports); +async function blobReader(blob, onChunk, chunkSize = 1024 * 1024) { + const size = blob.size; + let totalBytesRead = 0; + while (totalBytesRead < size) { + const slice = blob.slice(totalBytesRead, Math.min(size, totalBytesRead + chunkSize)); + onChunk(new Uint8Array(await slice.arrayBuffer())); + totalBytesRead += slice.size; + } +} +__name(blobReader, "blobReader"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + blobReader +}); + diff --git a/node_modules/@smithy/chunked-blob-reader/dist-es/index.js b/node_modules/@smithy/chunked-blob-reader/dist-es/index.js new file mode 100644 index 00000000..1831593f --- /dev/null +++ b/node_modules/@smithy/chunked-blob-reader/dist-es/index.js @@ -0,0 +1,9 @@ 
+export async function blobReader(blob, onChunk, chunkSize = 1024 * 1024) { + const size = blob.size; + let totalBytesRead = 0; + while (totalBytesRead < size) { + const slice = blob.slice(totalBytesRead, Math.min(size, totalBytesRead + chunkSize)); + onChunk(new Uint8Array(await slice.arrayBuffer())); + totalBytesRead += slice.size; + } +} diff --git a/node_modules/@smithy/chunked-blob-reader/dist-types/index.d.ts b/node_modules/@smithy/chunked-blob-reader/dist-types/index.d.ts new file mode 100644 index 00000000..908c4f3a --- /dev/null +++ b/node_modules/@smithy/chunked-blob-reader/dist-types/index.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Reads the blob data into the onChunk consumer. + */ +export declare function blobReader(blob: Blob, onChunk: (chunk: Uint8Array) => void, chunkSize?: number): Promise<void>; diff --git a/node_modules/@smithy/chunked-blob-reader/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/chunked-blob-reader/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..e2089713 --- /dev/null +++ b/node_modules/@smithy/chunked-blob-reader/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Reads the blob data into the onChunk consumer. + */ +export declare function blobReader(blob: Blob, onChunk: (chunk: Uint8Array) => void, chunkSize?: number): Promise<void>; diff --git a/node_modules/@smithy/chunked-blob-reader/package.json b/node_modules/@smithy/chunked-blob-reader/package.json new file mode 100644 index 00000000..064b1fce --- /dev/null +++ b/node_modules/@smithy/chunked-blob-reader/package.json @@ -0,0 +1,59 @@ +{ + "name": "@smithy/chunked-blob-reader", + "version": "5.0.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline chunked-blob-reader", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/chunked-blob-reader", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/chunked-blob-reader" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file
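An aside for orientation: the two packages above expose the same API, blobReader(blob, onChunk, chunkSize?) returning Promise<void>. @smithy/chunked-blob-reader-native decodes each chunk through FileReader and base64 for runtimes without Blob#arrayBuffer, while @smithy/chunked-blob-reader slices the blob directly. A minimal usage sketch (not part of the diff), assuming a browser-like runtime with Blob and WebCrypto; sha256OfBlob is a hypothetical helper, not an API of either package:

import { blobReader } from "@smithy/chunked-blob-reader";

// Reads a Blob in 512 KiB chunks (the default is 1 MiB), concatenates them,
// and hashes the result. Chunks are delivered sequentially and in order.
async function sha256OfBlob(blob: Blob): Promise<ArrayBuffer> {
  const chunks: Uint8Array[] = [];
  let total = 0;
  await blobReader(blob, (chunk) => {
    chunks.push(chunk);
    total += chunk.byteLength;
  }, 512 * 1024);
  const joined = new Uint8Array(total);
  let offset = 0;
  for (const chunk of chunks) {
    joined.set(chunk, offset);
    offset += chunk.byteLength;
  }
  return crypto.subtle.digest("SHA-256", joined);
}

Because the signatures match, swapping the import to @smithy/chunked-blob-reader-native leaves the call site unchanged.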
diff --git a/node_modules/@smithy/config-resolver/LICENSE b/node_modules/@smithy/config-resolver/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/config-resolver/LICENSE @@ -0,0 +1,201 @@ +[Apache License 2.0 text, byte-identical to the @smithy/chunked-blob-reader LICENSE above (same blob 7b6491ba); 201 lines omitted.] \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/README.md b/node_modules/@smithy/config-resolver/README.md new file mode 100644 index 00000000..2a25da2f --- /dev/null +++ b/node_modules/@smithy/config-resolver/README.md @@ -0,0 +1,10 @@ +# @smithy/config-resolver + +[![NPM version](https://img.shields.io/npm/v/@smithy/config-resolver/latest.svg)](https://www.npmjs.com/package/@smithy/config-resolver) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/config-resolver.svg)](https://www.npmjs.com/package/@smithy/config-resolver) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js b/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseFipsEndpointConfigOptions.js b/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseFipsEndpointConfigOptions.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseFipsEndpointConfigOptions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/index.js b/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveCustomEndpointsConfig.js b/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveCustomEndpointsConfig.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveCustomEndpointsConfig.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveEndpointsConfig.js b/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveEndpointsConfig.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveEndpointsConfig.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/utils/getEndpointFromRegion.js b/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/utils/getEndpointFromRegion.js new file mode 100644 index 00000000..88174128 --- /dev/null +++ 
b/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/utils/getEndpointFromRegion.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/dist-cjs/index.js b/node_modules/@smithy/config-resolver/dist-cjs/index.js new file mode 100644 index 00000000..42f7a4c4 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/index.js @@ -0,0 +1,228 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + CONFIG_USE_DUALSTACK_ENDPOINT: () => CONFIG_USE_DUALSTACK_ENDPOINT, + CONFIG_USE_FIPS_ENDPOINT: () => CONFIG_USE_FIPS_ENDPOINT, + DEFAULT_USE_DUALSTACK_ENDPOINT: () => DEFAULT_USE_DUALSTACK_ENDPOINT, + DEFAULT_USE_FIPS_ENDPOINT: () => DEFAULT_USE_FIPS_ENDPOINT, + ENV_USE_DUALSTACK_ENDPOINT: () => ENV_USE_DUALSTACK_ENDPOINT, + ENV_USE_FIPS_ENDPOINT: () => ENV_USE_FIPS_ENDPOINT, + NODE_REGION_CONFIG_FILE_OPTIONS: () => NODE_REGION_CONFIG_FILE_OPTIONS, + NODE_REGION_CONFIG_OPTIONS: () => NODE_REGION_CONFIG_OPTIONS, + NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS: () => NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, + NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS: () => NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, + REGION_ENV_NAME: () => REGION_ENV_NAME, + REGION_INI_NAME: () => REGION_INI_NAME, + getRegionInfo: () => getRegionInfo, + resolveCustomEndpointsConfig: () => resolveCustomEndpointsConfig, + resolveEndpointsConfig: () => resolveEndpointsConfig, + resolveRegionConfig: () => resolveRegionConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/endpointsConfig/NodeUseDualstackEndpointConfigOptions.ts +var import_util_config_provider = require("@smithy/util-config-provider"); +var ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; +var CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; +var DEFAULT_USE_DUALSTACK_ENDPOINT = false; +var NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => (0, import_util_config_provider.booleanSelector)(env, ENV_USE_DUALSTACK_ENDPOINT, import_util_config_provider.SelectorType.ENV), + configFileSelector: (profile) => (0, import_util_config_provider.booleanSelector)(profile, CONFIG_USE_DUALSTACK_ENDPOINT, import_util_config_provider.SelectorType.CONFIG), + default: false +}; + +// src/endpointsConfig/NodeUseFipsEndpointConfigOptions.ts + +var ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; +var CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; +var DEFAULT_USE_FIPS_ENDPOINT = false; +var NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => (0, import_util_config_provider.booleanSelector)(env, ENV_USE_FIPS_ENDPOINT, 
import_util_config_provider.SelectorType.ENV), + configFileSelector: (profile) => (0, import_util_config_provider.booleanSelector)(profile, CONFIG_USE_FIPS_ENDPOINT, import_util_config_provider.SelectorType.CONFIG), + default: false +}; + +// src/endpointsConfig/resolveCustomEndpointsConfig.ts +var import_util_middleware = require("@smithy/util-middleware"); +var resolveCustomEndpointsConfig = /* @__PURE__ */ __name((input) => { + const { tls, endpoint, urlParser, useDualstackEndpoint } = input; + return Object.assign(input, { + tls: tls ?? true, + endpoint: (0, import_util_middleware.normalizeProvider)(typeof endpoint === "string" ? urlParser(endpoint) : endpoint), + isCustomEndpoint: true, + useDualstackEndpoint: (0, import_util_middleware.normalizeProvider)(useDualstackEndpoint ?? false) + }); +}, "resolveCustomEndpointsConfig"); + +// src/endpointsConfig/resolveEndpointsConfig.ts + + +// src/endpointsConfig/utils/getEndpointFromRegion.ts +var getEndpointFromRegion = /* @__PURE__ */ __name(async (input) => { + const { tls = true } = input; + const region = await input.region(); + const dnsHostRegex = new RegExp(/^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9])$/); + if (!dnsHostRegex.test(region)) { + throw new Error("Invalid region in client config"); + } + const useDualstackEndpoint = await input.useDualstackEndpoint(); + const useFipsEndpoint = await input.useFipsEndpoint(); + const { hostname } = await input.regionInfoProvider(region, { useDualstackEndpoint, useFipsEndpoint }) ?? {}; + if (!hostname) { + throw new Error("Cannot resolve hostname from client config"); + } + return input.urlParser(`${tls ? "https:" : "http:"}//${hostname}`); +}, "getEndpointFromRegion"); + +// src/endpointsConfig/resolveEndpointsConfig.ts +var resolveEndpointsConfig = /* @__PURE__ */ __name((input) => { + const useDualstackEndpoint = (0, import_util_middleware.normalizeProvider)(input.useDualstackEndpoint ?? false); + const { endpoint, useFipsEndpoint, urlParser, tls } = input; + return Object.assign(input, { + tls: tls ?? true, + endpoint: endpoint ? (0, import_util_middleware.normalizeProvider)(typeof endpoint === "string" ? urlParser(endpoint) : endpoint) : () => getEndpointFromRegion({ ...input, useDualstackEndpoint, useFipsEndpoint }), + isCustomEndpoint: !!endpoint, + useDualstackEndpoint + }); +}, "resolveEndpointsConfig"); + +// src/regionConfig/config.ts +var REGION_ENV_NAME = "AWS_REGION"; +var REGION_INI_NAME = "region"; +var NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[REGION_ENV_NAME], + configFileSelector: (profile) => profile[REGION_INI_NAME], + default: () => { + throw new Error("Region is missing"); + } +}; +var NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials" +}; + +// src/regionConfig/isFipsRegion.ts +var isFipsRegion = /* @__PURE__ */ __name((region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")), "isFipsRegion"); + +// src/regionConfig/getRealRegion.ts +var getRealRegion = /* @__PURE__ */ __name((region) => isFipsRegion(region) ? ["fips-aws-global", "aws-fips"].includes(region) ? 
"us-east-1" : region.replace(/fips-(dkr-|prod-)?|-fips/, "") : region, "getRealRegion"); + +// src/regionConfig/resolveRegionConfig.ts +var resolveRegionConfig = /* @__PURE__ */ __name((input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return Object.assign(input, { + region: async () => { + if (typeof region === "string") { + return getRealRegion(region); + } + const providedRegion = await region(); + return getRealRegion(providedRegion); + }, + useFipsEndpoint: async () => { + const providedRegion = typeof region === "string" ? region : await region(); + if (isFipsRegion(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); + } + }); +}, "resolveRegionConfig"); + +// src/regionInfo/getHostnameFromVariants.ts +var getHostnameFromVariants = /* @__PURE__ */ __name((variants = [], { useFipsEndpoint, useDualstackEndpoint }) => variants.find( + ({ tags }) => useFipsEndpoint === tags.includes("fips") && useDualstackEndpoint === tags.includes("dualstack") +)?.hostname, "getHostnameFromVariants"); + +// src/regionInfo/getResolvedHostname.ts +var getResolvedHostname = /* @__PURE__ */ __name((resolvedRegion, { regionHostname, partitionHostname }) => regionHostname ? regionHostname : partitionHostname ? partitionHostname.replace("{region}", resolvedRegion) : void 0, "getResolvedHostname"); + +// src/regionInfo/getResolvedPartition.ts +var getResolvedPartition = /* @__PURE__ */ __name((region, { partitionHash }) => Object.keys(partitionHash || {}).find((key) => partitionHash[key].regions.includes(region)) ?? "aws", "getResolvedPartition"); + +// src/regionInfo/getResolvedSigningRegion.ts +var getResolvedSigningRegion = /* @__PURE__ */ __name((hostname, { signingRegion, regionRegex, useFipsEndpoint }) => { + if (signingRegion) { + return signingRegion; + } else if (useFipsEndpoint) { + const regionRegexJs = regionRegex.replace("\\\\", "\\").replace(/^\^/g, "\\.").replace(/\$$/g, "\\."); + const regionRegexmatchArray = hostname.match(regionRegexJs); + if (regionRegexmatchArray) { + return regionRegexmatchArray[0].slice(1, -1); + } + } +}, "getResolvedSigningRegion"); + +// src/regionInfo/getRegionInfo.ts +var getRegionInfo = /* @__PURE__ */ __name((region, { + useFipsEndpoint = false, + useDualstackEndpoint = false, + signingService, + regionHash, + partitionHash +}) => { + const partition = getResolvedPartition(region, { partitionHash }); + const resolvedRegion = region in regionHash ? region : partitionHash[partition]?.endpoint ?? 
region; + const hostnameOptions = { useFipsEndpoint, useDualstackEndpoint }; + const regionHostname = getHostnameFromVariants(regionHash[resolvedRegion]?.variants, hostnameOptions); + const partitionHostname = getHostnameFromVariants(partitionHash[partition]?.variants, hostnameOptions); + const hostname = getResolvedHostname(resolvedRegion, { regionHostname, partitionHostname }); + if (hostname === void 0) { + throw new Error(`Endpoint resolution failed for: ${{ resolvedRegion, useFipsEndpoint, useDualstackEndpoint }}`); + } + const signingRegion = getResolvedSigningRegion(hostname, { + signingRegion: regionHash[resolvedRegion]?.signingRegion, + regionRegex: partitionHash[partition].regionRegex, + useFipsEndpoint + }); + return { + partition, + signingService, + hostname, + ...signingRegion && { signingRegion }, + ...regionHash[resolvedRegion]?.signingService && { + signingService: regionHash[resolvedRegion].signingService + } + }; +}, "getRegionInfo"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + ENV_USE_DUALSTACK_ENDPOINT, + CONFIG_USE_DUALSTACK_ENDPOINT, + DEFAULT_USE_DUALSTACK_ENDPOINT, + NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, + ENV_USE_FIPS_ENDPOINT, + CONFIG_USE_FIPS_ENDPOINT, + DEFAULT_USE_FIPS_ENDPOINT, + NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, + resolveCustomEndpointsConfig, + resolveEndpointsConfig, + REGION_ENV_NAME, + REGION_INI_NAME, + NODE_REGION_CONFIG_OPTIONS, + NODE_REGION_CONFIG_FILE_OPTIONS, + resolveRegionConfig, + getRegionInfo +}); + diff --git a/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/config.js b/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/config.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/config.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/getRealRegion.js b/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/getRealRegion.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/getRealRegion.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/index.js b/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/isFipsRegion.js b/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/isFipsRegion.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/isFipsRegion.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/resolveRegionConfig.js b/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/resolveRegionConfig.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/resolveRegionConfig.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file
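An aside for orientation: getRegionInfo above resolves an endpoint by mapping the region to a partition (getResolvedPartition), picking the variant whose tags exactly match the fips/dualstack flags (getHostnameFromVariants), and substituting {region} into a partition-level hostname template when no region-specific entry exists (getResolvedHostname). A sketch (not part of the diff) with hypothetical endpoint tables; real regionHash/partitionHash values are generated into each AWS SDK client:

import { getRegionInfo, type PartitionHash } from "@smithy/config-resolver";

// Hypothetical endpoint tables -- generated clients supply real ones.
const partitionHash: PartitionHash = {
  aws: {
    regions: ["us-east-1", "eu-west-1"],
    regionRegex: "^(us|eu)\\-\\w+\\-\\d+$",
    variants: [
      { hostname: "example.{region}.amazonaws.com", tags: [] },
      { hostname: "example-fips.{region}.amazonaws.com", tags: ["fips"] },
    ],
  },
};

const info = getRegionInfo("us-east-1", {
  useFipsEndpoint: true, // selects the variant tagged "fips"
  useDualstackEndpoint: false,
  signingService: "example",
  regionHash: {}, // no region-specific overrides in this sketch
  partitionHash,
});
// info.hostname  -> "example-fips.us-east-1.amazonaws.com"
// info.partition -> "aws"; the signing region is parsed back out of the
// hostname via the partition's regionRegex.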
diff --git a/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariant.js b/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariant.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariant.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariantTag.js b/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariantTag.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariantTag.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/PartitionHash.js b/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/PartitionHash.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/PartitionHash.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/RegionHash.js b/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/RegionHash.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/RegionHash.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getHostnameFromVariants.js b/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getHostnameFromVariants.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getHostnameFromVariants.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getRegionInfo.js b/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getRegionInfo.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getRegionInfo.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedHostname.js b/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedHostname.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedHostname.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedPartition.js b/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedPartition.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedPartition.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedSigningRegion.js b/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedSigningRegion.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedSigningRegion.js @@ -0,0 +1 @@ +module.exports = 
require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/index.js b/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js b/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js new file mode 100644 index 00000000..d0615678 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js @@ -0,0 +1,9 @@ +import { booleanSelector, SelectorType } from "@smithy/util-config-provider"; +export const ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; +export const CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; +export const DEFAULT_USE_DUALSTACK_ENDPOINT = false; +export const NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => booleanSelector(env, ENV_USE_DUALSTACK_ENDPOINT, SelectorType.ENV), + configFileSelector: (profile) => booleanSelector(profile, CONFIG_USE_DUALSTACK_ENDPOINT, SelectorType.CONFIG), + default: false, +}; diff --git a/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseFipsEndpointConfigOptions.js b/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseFipsEndpointConfigOptions.js new file mode 100644 index 00000000..8cac1e99 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseFipsEndpointConfigOptions.js @@ -0,0 +1,9 @@ +import { booleanSelector, SelectorType } from "@smithy/util-config-provider"; +export const ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; +export const CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; +export const DEFAULT_USE_FIPS_ENDPOINT = false; +export const NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => booleanSelector(env, ENV_USE_FIPS_ENDPOINT, SelectorType.ENV), + configFileSelector: (profile) => booleanSelector(profile, CONFIG_USE_FIPS_ENDPOINT, SelectorType.CONFIG), + default: false, +}; diff --git a/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/index.js b/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/index.js new file mode 100644 index 00000000..1424c22f --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/index.js @@ -0,0 +1,4 @@ +export * from "./NodeUseDualstackEndpointConfigOptions"; +export * from "./NodeUseFipsEndpointConfigOptions"; +export * from "./resolveCustomEndpointsConfig"; +export * from "./resolveEndpointsConfig"; diff --git a/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveCustomEndpointsConfig.js b/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveCustomEndpointsConfig.js new file mode 100644 index 00000000..7f9a9535 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveCustomEndpointsConfig.js @@ -0,0 +1,10 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +export const resolveCustomEndpointsConfig = (input) => { + const { tls, endpoint, urlParser, useDualstackEndpoint } = input; + return Object.assign(input, { + tls: tls ?? true, + endpoint: normalizeProvider(typeof endpoint === "string" ? 
urlParser(endpoint) : endpoint), + isCustomEndpoint: true, + useDualstackEndpoint: normalizeProvider(useDualstackEndpoint ?? false), + }); +}; diff --git a/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveEndpointsConfig.js b/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveEndpointsConfig.js new file mode 100644 index 00000000..440657d4 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveEndpointsConfig.js @@ -0,0 +1,14 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +import { getEndpointFromRegion } from "./utils/getEndpointFromRegion"; +export const resolveEndpointsConfig = (input) => { + const useDualstackEndpoint = normalizeProvider(input.useDualstackEndpoint ?? false); + const { endpoint, useFipsEndpoint, urlParser, tls } = input; + return Object.assign(input, { + tls: tls ?? true, + endpoint: endpoint + ? normalizeProvider(typeof endpoint === "string" ? urlParser(endpoint) : endpoint) + : () => getEndpointFromRegion({ ...input, useDualstackEndpoint, useFipsEndpoint }), + isCustomEndpoint: !!endpoint, + useDualstackEndpoint, + }); +}; diff --git a/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/utils/getEndpointFromRegion.js b/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/utils/getEndpointFromRegion.js new file mode 100644 index 00000000..5627c32e --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/utils/getEndpointFromRegion.js @@ -0,0 +1,15 @@ +export const getEndpointFromRegion = async (input) => { + const { tls = true } = input; + const region = await input.region(); + const dnsHostRegex = new RegExp(/^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9])$/); + if (!dnsHostRegex.test(region)) { + throw new Error("Invalid region in client config"); + } + const useDualstackEndpoint = await input.useDualstackEndpoint(); + const useFipsEndpoint = await input.useFipsEndpoint(); + const { hostname } = (await input.regionInfoProvider(region, { useDualstackEndpoint, useFipsEndpoint })) ?? {}; + if (!hostname) { + throw new Error("Cannot resolve hostname from client config"); + } + return input.urlParser(`${tls ? 
"https:" : "http:"}//${hostname}`); +}; diff --git a/node_modules/@smithy/config-resolver/dist-es/index.js b/node_modules/@smithy/config-resolver/dist-es/index.js new file mode 100644 index 00000000..61456a77 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./endpointsConfig"; +export * from "./regionConfig"; +export * from "./regionInfo"; diff --git a/node_modules/@smithy/config-resolver/dist-es/regionConfig/config.js b/node_modules/@smithy/config-resolver/dist-es/regionConfig/config.js new file mode 100644 index 00000000..7db98960 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/regionConfig/config.js @@ -0,0 +1,12 @@ +export const REGION_ENV_NAME = "AWS_REGION"; +export const REGION_INI_NAME = "region"; +export const NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[REGION_ENV_NAME], + configFileSelector: (profile) => profile[REGION_INI_NAME], + default: () => { + throw new Error("Region is missing"); + }, +}; +export const NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials", +}; diff --git a/node_modules/@smithy/config-resolver/dist-es/regionConfig/getRealRegion.js b/node_modules/@smithy/config-resolver/dist-es/regionConfig/getRealRegion.js new file mode 100644 index 00000000..8d1246bf --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/regionConfig/getRealRegion.js @@ -0,0 +1,6 @@ +import { isFipsRegion } from "./isFipsRegion"; +export const getRealRegion = (region) => isFipsRegion(region) + ? ["fips-aws-global", "aws-fips"].includes(region) + ? "us-east-1" + : region.replace(/fips-(dkr-|prod-)?|-fips/, "") + : region; diff --git a/node_modules/@smithy/config-resolver/dist-es/regionConfig/index.js b/node_modules/@smithy/config-resolver/dist-es/regionConfig/index.js new file mode 100644 index 00000000..83675f77 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/regionConfig/index.js @@ -0,0 +1,2 @@ +export * from "./config"; +export * from "./resolveRegionConfig"; diff --git a/node_modules/@smithy/config-resolver/dist-es/regionConfig/isFipsRegion.js b/node_modules/@smithy/config-resolver/dist-es/regionConfig/isFipsRegion.js new file mode 100644 index 00000000..d758967d --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/regionConfig/isFipsRegion.js @@ -0,0 +1 @@ +export const isFipsRegion = (region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")); diff --git a/node_modules/@smithy/config-resolver/dist-es/regionConfig/resolveRegionConfig.js b/node_modules/@smithy/config-resolver/dist-es/regionConfig/resolveRegionConfig.js new file mode 100644 index 00000000..f88e00fb --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/regionConfig/resolveRegionConfig.js @@ -0,0 +1,24 @@ +import { getRealRegion } from "./getRealRegion"; +import { isFipsRegion } from "./isFipsRegion"; +export const resolveRegionConfig = (input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return Object.assign(input, { + region: async () => { + if (typeof region === "string") { + return getRealRegion(region); + } + const providedRegion = await region(); + return getRealRegion(providedRegion); + }, + useFipsEndpoint: async () => { + const providedRegion = typeof region === "string" ? region : await region(); + if (isFipsRegion(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? 
Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); + }, + }); +}; diff --git a/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariant.js b/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariant.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariant.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariantTag.js b/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariantTag.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariantTag.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/config-resolver/dist-es/regionInfo/PartitionHash.js b/node_modules/@smithy/config-resolver/dist-es/regionInfo/PartitionHash.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/regionInfo/PartitionHash.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/config-resolver/dist-es/regionInfo/RegionHash.js b/node_modules/@smithy/config-resolver/dist-es/regionInfo/RegionHash.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/regionInfo/RegionHash.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/config-resolver/dist-es/regionInfo/getHostnameFromVariants.js b/node_modules/@smithy/config-resolver/dist-es/regionInfo/getHostnameFromVariants.js new file mode 100644 index 00000000..84fc50e8 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/regionInfo/getHostnameFromVariants.js @@ -0,0 +1 @@ +export const getHostnameFromVariants = (variants = [], { useFipsEndpoint, useDualstackEndpoint }) => variants.find(({ tags }) => useFipsEndpoint === tags.includes("fips") && useDualstackEndpoint === tags.includes("dualstack"))?.hostname; diff --git a/node_modules/@smithy/config-resolver/dist-es/regionInfo/getRegionInfo.js b/node_modules/@smithy/config-resolver/dist-es/regionInfo/getRegionInfo.js new file mode 100644 index 00000000..c39e2f74 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/regionInfo/getRegionInfo.js @@ -0,0 +1,29 @@ +import { getHostnameFromVariants } from "./getHostnameFromVariants"; +import { getResolvedHostname } from "./getResolvedHostname"; +import { getResolvedPartition } from "./getResolvedPartition"; +import { getResolvedSigningRegion } from "./getResolvedSigningRegion"; +export const getRegionInfo = (region, { useFipsEndpoint = false, useDualstackEndpoint = false, signingService, regionHash, partitionHash, }) => { + const partition = getResolvedPartition(region, { partitionHash }); + const resolvedRegion = region in regionHash ? region : partitionHash[partition]?.endpoint ?? 
region; + const hostnameOptions = { useFipsEndpoint, useDualstackEndpoint }; + const regionHostname = getHostnameFromVariants(regionHash[resolvedRegion]?.variants, hostnameOptions); + const partitionHostname = getHostnameFromVariants(partitionHash[partition]?.variants, hostnameOptions); + const hostname = getResolvedHostname(resolvedRegion, { regionHostname, partitionHostname }); + if (hostname === undefined) { + throw new Error(`Endpoint resolution failed for: ${{ resolvedRegion, useFipsEndpoint, useDualstackEndpoint }}`); + } + const signingRegion = getResolvedSigningRegion(hostname, { + signingRegion: regionHash[resolvedRegion]?.signingRegion, + regionRegex: partitionHash[partition].regionRegex, + useFipsEndpoint, + }); + return { + partition, + signingService, + hostname, + ...(signingRegion && { signingRegion }), + ...(regionHash[resolvedRegion]?.signingService && { + signingService: regionHash[resolvedRegion].signingService, + }), + }; +}; diff --git a/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedHostname.js b/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedHostname.js new file mode 100644 index 00000000..35fb9881 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedHostname.js @@ -0,0 +1,5 @@ +export const getResolvedHostname = (resolvedRegion, { regionHostname, partitionHostname }) => regionHostname + ? regionHostname + : partitionHostname + ? partitionHostname.replace("{region}", resolvedRegion) + : undefined; diff --git a/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedPartition.js b/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedPartition.js new file mode 100644 index 00000000..3d7bc557 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedPartition.js @@ -0,0 +1 @@ +export const getResolvedPartition = (region, { partitionHash }) => Object.keys(partitionHash || {}).find((key) => partitionHash[key].regions.includes(region)) ?? 
"aws"; diff --git a/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedSigningRegion.js b/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedSigningRegion.js new file mode 100644 index 00000000..7977e000 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedSigningRegion.js @@ -0,0 +1,12 @@ +export const getResolvedSigningRegion = (hostname, { signingRegion, regionRegex, useFipsEndpoint }) => { + if (signingRegion) { + return signingRegion; + } + else if (useFipsEndpoint) { + const regionRegexJs = regionRegex.replace("\\\\", "\\").replace(/^\^/g, "\\.").replace(/\$$/g, "\\."); + const regionRegexmatchArray = hostname.match(regionRegexJs); + if (regionRegexmatchArray) { + return regionRegexmatchArray[0].slice(1, -1); + } + } +}; diff --git a/node_modules/@smithy/config-resolver/dist-es/regionInfo/index.js b/node_modules/@smithy/config-resolver/dist-es/regionInfo/index.js new file mode 100644 index 00000000..e29686a3 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-es/regionInfo/index.js @@ -0,0 +1,3 @@ +export * from "./PartitionHash"; +export * from "./RegionHash"; +export * from "./getRegionInfo"; diff --git a/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts b/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts new file mode 100644 index 00000000..172d8c1d --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; +/** + * @internal + */ +export declare const DEFAULT_USE_DUALSTACK_ENDPOINT = false; +/** + * @internal + */ +export declare const NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts b/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts new file mode 100644 index 00000000..106bbdbe --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; +/** + * @internal + */ +export declare const DEFAULT_USE_FIPS_ENDPOINT = false; +/** + * @internal + */ +export declare const NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/index.d.ts b/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/index.d.ts new file mode 100644 index 00000000..ea1cf59a --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/index.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + */ +export * from "./NodeUseDualstackEndpointConfigOptions"; +/** + * @internal + */ +export * from "./NodeUseFipsEndpointConfigOptions"; +/** + * @internal + */ +export * from "./resolveCustomEndpointsConfig"; +/** + * @internal + */ +export * from 
"./resolveEndpointsConfig"; diff --git a/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveCustomEndpointsConfig.d.ts b/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveCustomEndpointsConfig.d.ts new file mode 100644 index 00000000..477afbc6 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveCustomEndpointsConfig.d.ts @@ -0,0 +1,32 @@ +import { Endpoint, Provider, UrlParser } from "@smithy/types"; +import { EndpointsInputConfig, EndpointsResolvedConfig } from "./resolveEndpointsConfig"; +/** + * @public + */ +export interface CustomEndpointsInputConfig extends EndpointsInputConfig { + /** + * The fully qualified endpoint of the webservice. + */ + endpoint: string | Endpoint | Provider; +} +/** + * @internal + */ +interface PreviouslyResolved { + urlParser: UrlParser; +} +/** + * @internal + */ +export interface CustomEndpointsResolvedConfig extends EndpointsResolvedConfig { + /** + * Whether the endpoint is specified by caller. + * @internal + */ + isCustomEndpoint: true; +} +/** + * @internal + */ +export declare const resolveCustomEndpointsConfig: (input: T & CustomEndpointsInputConfig & PreviouslyResolved) => T & CustomEndpointsResolvedConfig; +export {}; diff --git a/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveEndpointsConfig.d.ts b/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveEndpointsConfig.d.ts new file mode 100644 index 00000000..4cd1d8fb --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveEndpointsConfig.d.ts @@ -0,0 +1,54 @@ +import { Endpoint, Provider, RegionInfoProvider, UrlParser } from "@smithy/types"; +/** + * @public + */ +export interface EndpointsInputConfig { + /** + * The fully qualified endpoint of the webservice. This is only required when using + * a custom endpoint (for example, when using a local version of S3). + */ + endpoint?: string | Endpoint | Provider; + /** + * Whether TLS is enabled for requests. + */ + tls?: boolean; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | Provider; +} +/** + * @internal + */ +interface PreviouslyResolved { + regionInfoProvider: RegionInfoProvider; + urlParser: UrlParser; + region: Provider; + useFipsEndpoint: Provider; +} +/** + * @internal + */ +export interface EndpointsResolvedConfig extends Required { + /** + * Resolved value for input {@link EndpointsInputConfig.endpoint} + */ + endpoint: Provider; + /** + * Whether the endpoint is specified by caller. + * @internal + */ + isCustomEndpoint?: boolean; + /** + * Resolved value for input {@link EndpointsInputConfig.useDualstackEndpoint} + */ + useDualstackEndpoint: Provider; +} +/** + * @internal + * + * @deprecated endpoints rulesets use \@smithy/middleware-endpoint resolveEndpointConfig. + * All generated clients should migrate to Endpoints 2.0 endpointRuleSet traits. 
diff --git a/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveCustomEndpointsConfig.d.ts b/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveCustomEndpointsConfig.d.ts new file mode 100644 index 00000000..477afbc6 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveCustomEndpointsConfig.d.ts @@ -0,0 +1,32 @@ +import { Endpoint, Provider, UrlParser } from "@smithy/types"; +import { EndpointsInputConfig, EndpointsResolvedConfig } from "./resolveEndpointsConfig"; +/** + * @public + */ +export interface CustomEndpointsInputConfig extends EndpointsInputConfig { + /** + * The fully qualified endpoint of the webservice. + */ + endpoint: string | Endpoint | Provider<Endpoint>; +} +/** + * @internal + */ +interface PreviouslyResolved { + urlParser: UrlParser; +} +/** + * @internal + */ +export interface CustomEndpointsResolvedConfig extends EndpointsResolvedConfig { + /** + * Whether the endpoint is specified by caller. + * @internal + */ + isCustomEndpoint: true; +} +/** + * @internal + */ +export declare const resolveCustomEndpointsConfig: <T>(input: T & CustomEndpointsInputConfig & PreviouslyResolved) => T & CustomEndpointsResolvedConfig; +export {}; diff --git a/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveEndpointsConfig.d.ts b/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveEndpointsConfig.d.ts new file mode 100644 index 00000000..4cd1d8fb --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveEndpointsConfig.d.ts @@ -0,0 +1,54 @@ +import { Endpoint, Provider, RegionInfoProvider, UrlParser } from "@smithy/types"; +/** + * @public + */ +export interface EndpointsInputConfig { + /** + * The fully qualified endpoint of the webservice. This is only required when using + * a custom endpoint (for example, when using a local version of S3). + */ + endpoint?: string | Endpoint | Provider<Endpoint>; + /** + * Whether TLS is enabled for requests. + */ + tls?: boolean; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | Provider<boolean>; +} +/** + * @internal + */ +interface PreviouslyResolved { + regionInfoProvider: RegionInfoProvider; + urlParser: UrlParser; + region: Provider<string>; + useFipsEndpoint: Provider<boolean>; +} +/** + * @internal + */ +export interface EndpointsResolvedConfig extends Required<EndpointsInputConfig> { + /** + * Resolved value for input {@link EndpointsInputConfig.endpoint} + */ + endpoint: Provider<Endpoint>; + /** + * Whether the endpoint is specified by caller. + * @internal + */ + isCustomEndpoint?: boolean; + /** + * Resolved value for input {@link EndpointsInputConfig.useDualstackEndpoint} + */ + useDualstackEndpoint: Provider<boolean>; +} +/** + * @internal + * + * @deprecated endpoints rulesets use \@smithy/middleware-endpoint resolveEndpointConfig. + * All generated clients should migrate to Endpoints 2.0 endpointRuleSet traits. + */ +export declare const resolveEndpointsConfig: <T>(input: T & EndpointsInputConfig & PreviouslyResolved) => T & EndpointsResolvedConfig; +export {}; diff --git a/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/utils/getEndpointFromRegion.d.ts b/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/utils/getEndpointFromRegion.d.ts new file mode 100644 index 00000000..5ded732c --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/utils/getEndpointFromRegion.d.ts @@ -0,0 +1,11 @@ +import { Provider, RegionInfoProvider, UrlParser } from "@smithy/types"; +interface GetEndpointFromRegionOptions { + region: Provider<string>; + tls?: boolean; + regionInfoProvider: RegionInfoProvider; + urlParser: UrlParser; + useDualstackEndpoint: Provider<boolean>; + useFipsEndpoint: Provider<boolean>; +} +export declare const getEndpointFromRegion: (input: GetEndpointFromRegionOptions) => Promise<import("@smithy/types").Endpoint>; +export {}; diff --git a/node_modules/@smithy/config-resolver/dist-types/index.d.ts b/node_modules/@smithy/config-resolver/dist-types/index.d.ts new file mode 100644 index 00000000..fde70860 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/index.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export * from "./endpointsConfig"; +/** + * @internal + */ +export * from "./regionConfig"; +/** + * @internal + */ +export * from "./regionInfo";
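`resolveCustomEndpointsConfig` above is the path taken when a caller supplies an explicit endpoint, for example an S3-compatible server such as MinIO. A hedged sketch (not part of the diff), assuming `parseUrl` from `@smithy/url-parser` as the `urlParser`:

```ts
import { resolveCustomEndpointsConfig } from "@smithy/config-resolver";
import { parseUrl } from "@smithy/url-parser";

// A string endpoint is run through the urlParser and wrapped into a
// Provider<Endpoint>; isCustomEndpoint is pinned to true, so region-based
// endpoint lookup is skipped entirely.
const config = resolveCustomEndpointsConfig({
  endpoint: "http://localhost:9000", // e.g. a local MinIO or other S3-compatible server
  urlParser: parseUrl,
});

config.endpoint().then((endpoint) => {
  // endpoint is roughly { protocol: "http:", hostname: "localhost", port: 9000, path: "/" }
  console.log(config.isCustomEndpoint, endpoint);
});
```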
diff --git a/node_modules/@smithy/config-resolver/dist-types/regionConfig/config.d.ts b/node_modules/@smithy/config-resolver/dist-types/regionConfig/config.d.ts new file mode 100644 index 00000000..d203bb05 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/regionConfig/config.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors, LocalConfigOptions } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const REGION_ENV_NAME = "AWS_REGION"; +/** + * @internal + */ +export declare const REGION_INI_NAME = "region"; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_OPTIONS: LoadedConfigSelectors<string>; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_FILE_OPTIONS: LocalConfigOptions; diff --git a/node_modules/@smithy/config-resolver/dist-types/regionConfig/getRealRegion.d.ts b/node_modules/@smithy/config-resolver/dist-types/regionConfig/getRealRegion.d.ts new file mode 100644 index 00000000..c70fb5b6 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/regionConfig/getRealRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getRealRegion: (region: string) => string; diff --git a/node_modules/@smithy/config-resolver/dist-types/regionConfig/index.d.ts b/node_modules/@smithy/config-resolver/dist-types/regionConfig/index.d.ts new file mode 100644 index 00000000..6dcf5e55 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/regionConfig/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./config"; +/** + * @internal + */ +export * from "./resolveRegionConfig"; diff --git a/node_modules/@smithy/config-resolver/dist-types/regionConfig/isFipsRegion.d.ts b/node_modules/@smithy/config-resolver/dist-types/regionConfig/isFipsRegion.d.ts new file mode 100644 index 00000000..b42cee72 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/regionConfig/isFipsRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isFipsRegion: (region: string) => boolean; diff --git a/node_modules/@smithy/config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts b/node_modules/@smithy/config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts new file mode 100644 index 00000000..c06c9d4e --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts @@ -0,0 +1,34 @@ +import { Provider } from "@smithy/types"; +/** + * @public + */ +export interface RegionInputConfig { + /** + * The AWS region to which this client will send requests + */ + region?: string | Provider<string>; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | Provider<boolean>; +} +interface PreviouslyResolved { +} +/** + * @internal + */ +export interface RegionResolvedConfig { + /** + * Resolved value for input config {@link RegionInputConfig.region} + */ + region: Provider<string>; + /** + * Resolved value for input {@link RegionInputConfig.useFipsEndpoint} + */ + useFipsEndpoint: Provider<boolean>; +} +/** + * @internal + */ +export declare const resolveRegionConfig: <T>(input: T & RegionInputConfig & PreviouslyResolved) => T & RegionResolvedConfig; +export {};
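`resolveRegionConfig` combines `isFipsRegion` and `getRealRegion` (see the dist-es implementation earlier) so a FIPS pseudo-region behaves as a real region plus `useFipsEndpoint: true`. For illustration (not part of the diff):

```ts
import { resolveRegionConfig } from "@smithy/config-resolver";

// A FIPS pseudo-region is normalized to its real region, and useFipsEndpoint
// is inferred as true from the "fips-" prefix.
const resolved = resolveRegionConfig({ region: "fips-us-east-1" });

resolved.region().then(console.log);          // "us-east-1"
resolved.useFipsEndpoint().then(console.log); // true
```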
diff --git a/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariant.d.ts b/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariant.d.ts new file mode 100644 index 00000000..9b68e93c --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariant.d.ts @@ -0,0 +1,10 @@ +import { EndpointVariantTag } from "./EndpointVariantTag"; +/** + * @internal + * + * Provides hostname information for specific host label. + */ +export type EndpointVariant = { + hostname: string; + tags: EndpointVariantTag[]; +}; diff --git a/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariantTag.d.ts b/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariantTag.d.ts new file mode 100644 index 00000000..ca50e1f8 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariantTag.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * The tag which mentions which area variant is providing information for. + * Can be either "fips" or "dualstack". + */ +export type EndpointVariantTag = "fips" | "dualstack"; diff --git a/node_modules/@smithy/config-resolver/dist-types/regionInfo/PartitionHash.d.ts b/node_modules/@smithy/config-resolver/dist-types/regionInfo/PartitionHash.d.ts new file mode 100644 index 00000000..0a5be173 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/regionInfo/PartitionHash.d.ts @@ -0,0 +1,14 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + * + * The hash of partition with the information specific to that partition. + * The information includes the list of regions belonging to that partition, + * and the hostname to be used for the partition. + */ +export type PartitionHash = Record<string, { + regions: string[]; + regionRegex: string; + variants: EndpointVariant[]; + endpoint?: string; +}>; diff --git a/node_modules/@smithy/config-resolver/dist-types/regionInfo/RegionHash.d.ts b/node_modules/@smithy/config-resolver/dist-types/regionInfo/RegionHash.d.ts new file mode 100644 index 00000000..01cd843f --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/regionInfo/RegionHash.d.ts @@ -0,0 +1,12 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + * + * The hash of region with the information specific to that region. + * The information can include hostname, signingService and signingRegion. + */ +export type RegionHash = Record<string, { + variants: EndpointVariant[]; + signingService?: string; + signingRegion?: string; +}>; diff --git a/node_modules/@smithy/config-resolver/dist-types/regionInfo/getHostnameFromVariants.d.ts b/node_modules/@smithy/config-resolver/dist-types/regionInfo/getHostnameFromVariants.d.ts new file mode 100644 index 00000000..47bcf700 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/regionInfo/getHostnameFromVariants.d.ts @@ -0,0 +1,12 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + */ +export interface GetHostnameFromVariantsOptions { + useFipsEndpoint: boolean; + useDualstackEndpoint: boolean; +} +/** + * @internal + */ +export declare const getHostnameFromVariants: (variants: EndpointVariant[] | undefined, { useFipsEndpoint, useDualstackEndpoint }: GetHostnameFromVariantsOptions) => string | undefined; diff --git a/node_modules/@smithy/config-resolver/dist-types/regionInfo/getRegionInfo.d.ts b/node_modules/@smithy/config-resolver/dist-types/regionInfo/getRegionInfo.d.ts new file mode 100644 index 00000000..0aaae082 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/regionInfo/getRegionInfo.d.ts @@ -0,0 +1,17 @@ +import { RegionInfo } from "@smithy/types"; +import { PartitionHash } from "./PartitionHash"; +import { RegionHash } from "./RegionHash"; +/** + * @internal + */ +export interface GetRegionInfoOptions { + useFipsEndpoint?: boolean; + useDualstackEndpoint?: boolean; + signingService: string; + regionHash: RegionHash; + partitionHash: PartitionHash; +} +/** + * @internal + */ +export declare const getRegionInfo: (region: string, { useFipsEndpoint, useDualstackEndpoint, signingService, regionHash, partitionHash, }: GetRegionInfoOptions) => RegionInfo; diff --git a/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedHostname.d.ts b/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedHostname.d.ts new file mode 100644 index 00000000..bf7a2b3c --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedHostname.d.ts @@ -0,0 +1,11 @@ +/** + * @internal + */ +export interface GetResolvedHostnameOptions { + regionHostname?: string; + partitionHostname?: string; +} +/** + * @internal + */ +export declare const getResolvedHostname: (resolvedRegion: string, { regionHostname, partitionHostname }: GetResolvedHostnameOptions) => string | undefined; diff --git a/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedPartition.d.ts b/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedPartition.d.ts new file mode 100644 index 00000000..587b4fc7 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedPartition.d.ts @@ -0,0 +1,11 @@ +import { PartitionHash } from "./PartitionHash"; +/** + * @internal + */ +export interface GetResolvedPartitionOptions { + partitionHash: PartitionHash; +} +/** + * @internal + */ +export declare const getResolvedPartition: (region: string, { partitionHash }: GetResolvedPartitionOptions) => string; diff --git a/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedSigningRegion.d.ts b/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedSigningRegion.d.ts new file mode 100644 index 00000000..3f5f7af0 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedSigningRegion.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export interface GetResolvedSigningRegionOptions { + regionRegex: string; + signingRegion?: string; + useFipsEndpoint: boolean; +} +/** + * @internal + */ +export declare const getResolvedSigningRegion: (hostname: string, { signingRegion, regionRegex, useFipsEndpoint }: GetResolvedSigningRegionOptions) => string | undefined; diff --git a/node_modules/@smithy/config-resolver/dist-types/regionInfo/index.d.ts b/node_modules/@smithy/config-resolver/dist-types/regionInfo/index.d.ts new file mode 100644 index 00000000..64ef0d51 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/regionInfo/index.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export * from "./PartitionHash"; +/** + * @internal + */ +export * from "./RegionHash"; +/** + * @internal + */ +export * from "./getRegionInfo";
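One detail worth illustrating from the types above: when a region has no entry in the `RegionHash`, `getRegionInfo` falls back to the partition-level variant and substitutes the region into its `{region}` placeholder (see `getResolvedHostname`). A sketch with invented hostnames (not part of the diff):

```ts
import { getRegionInfo, PartitionHash } from "@smithy/config-resolver";

// No region-specific entries at all: everything resolves through the
// partition template.
const partitionHash: PartitionHash = {
  aws: {
    regions: ["us-east-1"],
    regionRegex: "^(us|eu)\\-\\w+\\-\\d+$",
    variants: [{ hostname: "example.{region}.amazonaws.com", tags: [] }],
  },
};

const info = getRegionInfo("eu-west-3", {
  signingService: "example",
  regionHash: {},
  partitionHash,
});
// info.partition === "aws"
// info.hostname  === "example.eu-west-3.amazonaws.com"
```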
diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts new file mode 100644 index 00000000..169720ab --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; +/** + * @internal + */ +export declare const DEFAULT_USE_DUALSTACK_ENDPOINT = false; +/** + * @internal + */ +export declare const NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors<boolean>; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts new file mode 100644 index 00000000..b17417e7 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; +/** + * @internal + */ +export declare const DEFAULT_USE_FIPS_ENDPOINT = false; +/** + * @internal + */ +export declare const NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors<boolean>; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/index.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/index.d.ts new file mode 100644 index 00000000..cbabe5b8 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/index.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + */ +export * from "./NodeUseDualstackEndpointConfigOptions"; +/** + * @internal + */ +export * from "./NodeUseFipsEndpointConfigOptions"; +/** + * @internal + */ +export * from "./resolveCustomEndpointsConfig"; +/** + * @internal + */ +export * from "./resolveEndpointsConfig"; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveCustomEndpointsConfig.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveCustomEndpointsConfig.d.ts new file mode 100644 index 00000000..f49306e3 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveCustomEndpointsConfig.d.ts @@ -0,0 +1,32 @@ +import { Endpoint, Provider, UrlParser } from "@smithy/types"; +import { EndpointsInputConfig, EndpointsResolvedConfig } from "./resolveEndpointsConfig"; +/** + * @public + */ +export interface CustomEndpointsInputConfig extends EndpointsInputConfig { + /** + * The fully qualified endpoint of the webservice. + */ + endpoint: string | Endpoint | Provider<Endpoint>; +} +/** + * @internal + */ +interface PreviouslyResolved { + urlParser: UrlParser; +} +/** + * @internal + */ +export interface CustomEndpointsResolvedConfig extends EndpointsResolvedConfig { + /** + * Whether the endpoint is specified by caller. + * @internal + */ + isCustomEndpoint: true; +} +/** + * @internal + */ +export declare const resolveCustomEndpointsConfig: <T>(input: T & CustomEndpointsInputConfig & PreviouslyResolved) => T & CustomEndpointsResolvedConfig; +export {}; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveEndpointsConfig.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveEndpointsConfig.d.ts new file mode 100644 index 00000000..388819db --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveEndpointsConfig.d.ts @@ -0,0 +1,54 @@ +import { Endpoint, Provider, RegionInfoProvider, UrlParser } from "@smithy/types"; +/** + * @public + */ +export interface EndpointsInputConfig { + /** + * The fully qualified endpoint of the webservice. This is only required when using + * a custom endpoint (for example, when using a local version of S3). + */ + endpoint?: string | Endpoint | Provider<Endpoint>; + /** + * Whether TLS is enabled for requests. + */ + tls?: boolean; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | Provider<boolean>; +} +/** + * @internal + */ +interface PreviouslyResolved { + regionInfoProvider: RegionInfoProvider; + urlParser: UrlParser; + region: Provider<string>; + useFipsEndpoint: Provider<boolean>; +} +/** + * @internal + */ +export interface EndpointsResolvedConfig extends Required<EndpointsInputConfig> { + /** + * Resolved value for input {@link EndpointsInputConfig.endpoint} + */ + endpoint: Provider<Endpoint>; + /** + * Whether the endpoint is specified by caller. + * @internal + */ + isCustomEndpoint?: boolean; + /** + * Resolved value for input {@link EndpointsInputConfig.useDualstackEndpoint} + */ + useDualstackEndpoint: Provider<boolean>; +} +/** + * @internal + * + * @deprecated endpoints rulesets use \@smithy/middleware-endpoint resolveEndpointConfig. + * All generated clients should migrate to Endpoints 2.0 endpointRuleSet traits. + */ +export declare const resolveEndpointsConfig: <T>(input: T & EndpointsInputConfig & PreviouslyResolved) => T & EndpointsResolvedConfig; +export {}; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/utils/getEndpointFromRegion.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/utils/getEndpointFromRegion.d.ts new file mode 100644 index 00000000..83d4635e --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/utils/getEndpointFromRegion.d.ts @@ -0,0 +1,11 @@ +import { Provider, RegionInfoProvider, UrlParser } from "@smithy/types"; +interface GetEndpointFromRegionOptions { + region: Provider<string>; + tls?: boolean; + regionInfoProvider: RegionInfoProvider; + urlParser: UrlParser; + useDualstackEndpoint: Provider<boolean>; + useFipsEndpoint: Provider<boolean>; +} +export declare const getEndpointFromRegion: (input: GetEndpointFromRegionOptions) => Promise<import("@smithy/types").Endpoint>; +export {}; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..e205411c --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/index.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export * from "./endpointsConfig"; +/** + * @internal + */ +export * from "./regionConfig"; +/** + * @internal + */ +export * from "./regionInfo"; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/config.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/config.d.ts new file mode 100644 index 00000000..8f3a9b24 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/config.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors, LocalConfigOptions } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const REGION_ENV_NAME = "AWS_REGION"; +/** + * @internal + */ +export declare const REGION_INI_NAME = "region"; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_OPTIONS: LoadedConfigSelectors<string>; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_FILE_OPTIONS: LocalConfigOptions; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts new file mode 100644 index 00000000..6c11d4d3 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getRealRegion: (region: string) => string; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/index.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/index.d.ts new file mode 100644 index 00000000..0e6f55d2 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./config"; +/** + * @internal + */ +export * from "./resolveRegionConfig"; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts new file mode 100644 index 00000000..1ee8bd49 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isFipsRegion: (region: string) => boolean; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts new file mode 100644 index 00000000..7aaf9e1f --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts @@ -0,0 +1,34 @@ +import { Provider } from "@smithy/types"; +/** + * @public + */ +export interface RegionInputConfig { + /** + * The AWS region to which this client will send requests + */ + region?: string | Provider<string>; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | Provider<boolean>; +} +interface PreviouslyResolved { +} +/** + * @internal + */ +export interface RegionResolvedConfig { + /** + * Resolved value for input config {@link RegionInputConfig.region} + */ + region: Provider<string>; + /** + * Resolved value for input {@link RegionInputConfig.useFipsEndpoint} + */ + useFipsEndpoint: Provider<boolean>; +} +/** + * @internal + */ +export declare const resolveRegionConfig: <T>(input: T & RegionInputConfig & PreviouslyResolved) => T & RegionResolvedConfig; +export {}; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariant.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariant.d.ts new file mode 100644 index 00000000..e533cc7a --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariant.d.ts @@ -0,0 +1,10 @@ +import { EndpointVariantTag } from "./EndpointVariantTag"; +/** + * @internal + * + * Provides hostname information for specific host label. + */ +export type EndpointVariant = { + hostname: string; + tags: EndpointVariantTag[]; +}; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariantTag.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariantTag.d.ts new file mode 100644 index 00000000..755bbe52 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariantTag.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * The tag which mentions which area variant is providing information for. + * Can be either "fips" or "dualstack". + */ +export type EndpointVariantTag = "fips" | "dualstack"; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/PartitionHash.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/PartitionHash.d.ts new file mode 100644 index 00000000..6fed65e0 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/PartitionHash.d.ts @@ -0,0 +1,14 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + * + * The hash of partition with the information specific to that partition. + * The information includes the list of regions belonging to that partition, + * and the hostname to be used for the partition. + */ +export type PartitionHash = Record<string, { + regions: string[]; + regionRegex: string; + variants: EndpointVariant[]; + endpoint?: string; +}>; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/RegionHash.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/RegionHash.d.ts new file mode 100644 index 00000000..cd90c70b --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/RegionHash.d.ts @@ -0,0 +1,12 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + * + * The hash of region with the information specific to that region. + * The information can include hostname, signingService and signingRegion. + */ +export type RegionHash = Record<string, { + variants: EndpointVariant[]; + signingService?: string; + signingRegion?: string; +}>; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getHostnameFromVariants.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getHostnameFromVariants.d.ts new file mode 100644 index 00000000..3d61daaf --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getHostnameFromVariants.d.ts @@ -0,0 +1,12 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + */ +export interface GetHostnameFromVariantsOptions { + useFipsEndpoint: boolean; + useDualstackEndpoint: boolean; +} +/** + * @internal + */ +export declare const getHostnameFromVariants: (variants: EndpointVariant[] | undefined, { useFipsEndpoint, useDualstackEndpoint }: GetHostnameFromVariantsOptions) => string | undefined; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getRegionInfo.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getRegionInfo.d.ts new file mode 100644 index 00000000..820a5488 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getRegionInfo.d.ts @@ -0,0 +1,17 @@ +import { RegionInfo } from "@smithy/types"; +import { PartitionHash } from "./PartitionHash"; +import { RegionHash } from "./RegionHash"; +/** + * @internal + */ +export interface GetRegionInfoOptions { + useFipsEndpoint?: boolean; + useDualstackEndpoint?: boolean; + signingService: string; + regionHash: RegionHash; + partitionHash: PartitionHash; +} +/** + * @internal + */ +export declare const getRegionInfo: (region: string, { useFipsEndpoint, useDualstackEndpoint, signingService, regionHash, partitionHash, }: GetRegionInfoOptions) => RegionInfo; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedHostname.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedHostname.d.ts new file mode 100644 index 00000000..6aae405c --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedHostname.d.ts @@ -0,0 +1,11 @@ +/** + * @internal + */ +export interface GetResolvedHostnameOptions { + regionHostname?: string; + partitionHostname?: string; +} +/** + * @internal + */ +export declare const getResolvedHostname: (resolvedRegion: string, { regionHostname, partitionHostname }: GetResolvedHostnameOptions) => string | undefined; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedPartition.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedPartition.d.ts new file mode 100644 index 00000000..355c318e --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedPartition.d.ts @@ -0,0 +1,11 @@ +import { PartitionHash } from "./PartitionHash"; +/** + * @internal + */ +export interface GetResolvedPartitionOptions { + partitionHash: PartitionHash; +} +/** + * @internal + */ +export declare const getResolvedPartition: (region: string, { partitionHash }: GetResolvedPartitionOptions) => string; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedSigningRegion.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedSigningRegion.d.ts new file mode 100644 index 00000000..a7b1db65 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedSigningRegion.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export interface GetResolvedSigningRegionOptions { +
regionRegex: string; + signingRegion?: string; + useFipsEndpoint: boolean; +} +/** + * @internal + */ +export declare const getResolvedSigningRegion: (hostname: string, { signingRegion, regionRegex, useFipsEndpoint }: GetResolvedSigningRegionOptions) => string | undefined; diff --git a/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/index.d.ts b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/index.d.ts new file mode 100644 index 00000000..58263082 --- /dev/null +++ b/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/index.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export * from "./PartitionHash"; +/** + * @internal + */ +export * from "./RegionHash"; +/** + * @internal + */ +export * from "./getRegionInfo"; diff --git a/node_modules/@smithy/config-resolver/package.json b/node_modules/@smithy/config-resolver/package.json new file mode 100644 index 00000000..2c4927f4 --- /dev/null +++ b/node_modules/@smithy/config-resolver/package.json @@ -0,0 +1,64 @@ +{ + "name": "@smithy/config-resolver", + "version": "4.1.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline config-resolver", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "extract:docs": "api-extractor run --local", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/config-resolver", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/config-resolver" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/core/LICENSE b/node_modules/@smithy/core/LICENSE new file mode 100644 index 00000000..e907b586 --- /dev/null +++ b/node_modules/@smithy/core/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/node_modules/@smithy/core/README.md b/node_modules/@smithy/core/README.md new file mode 100644 index 00000000..51f89226 --- /dev/null +++ b/node_modules/@smithy/core/README.md @@ -0,0 +1,45 @@ +# @smithy/core + +[![NPM version](https://img.shields.io/npm/v/@smithy/core/latest.svg)](https://www.npmjs.com/package/@smithy/core) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/core.svg)](https://www.npmjs.com/package/@smithy/core) + +> An internal package. You probably shouldn't use this package, at least directly. + +This package provides common or core functionality for generic Smithy clients. + +You do not need to explicitly install this package, since it will be installed during code generation if used. + +## Development of `@smithy/core` submodules + +Core submodules are organized for distribution via the `package.json` `exports` field. + +`exports` is supported by default by the latest Node.js, webpack, and esbuild. For react-native, it can be +enabled via instructions found at [reactnative.dev/blog](https://reactnative.dev/blog/2023/06/21/package-exports-support), but we also provide a compatibility redirect. + +Think of `@smithy/core` as a mono-package within the monorepo. +It preserves the benefits of modularization, for example to optimize Node.js initialization speed, +while making it easier to have a consistent version of core dependencies, reducing package sprawl when +installing a Smithy runtime client. + +### Guide for submodules + +- Each `index.ts` file corresponding to the pattern `./src/submodules/<submodule>/index.ts` will be + published as a separate `dist-cjs` bundled submodule index using the `Inliner.js` build script. +- create a folder as `./src/submodules/<submodule>` including an `index.ts` file and a `README.md` file. + - The linter will throw an error on missing submodule metadata in `package.json` and the various `tsconfig.json` files, but it will automatically fix them if possible. +- a submodule is equivalent to a standalone `@smithy/` package in that importing it in Node.js will resolve a separate bundle. +- submodules may not relatively import files from other submodules. Instead, directly use the `@scope/pkg/submodule` name as the import. + - The linter will check for this and throw an error. +- To the extent possible, correctly declaring submodule metadata is validated by the linter in `@smithy/core`. + The linter runs during `yarn build` and also as `yarn lint`. + +### When should I create an `@smithy/core/submodule` vs. `@smithy/new-package`? + +Keep in mind that the core package is installed by all downstream clients. + +If the component functionality is upstream of multiple clients, it is +a good candidate for a core submodule. For example, if `middleware-retry` had been written
 after the support for submodules was added, it would have been a submodule. + +If the component's functionality is downstream of a client (rare), or only expected to be used by a very small
 subset of clients, it could be written as a standalone package.
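The submodule layout described in the README above is what the `cbor.js` / `cbor.d.ts` compatibility redirects below exist for. A consumer-side sketch (not part of the diff; the namespace import is illustrative, since the submodule's concrete exports are not shown here):

```ts
// Resolves through the package.json "exports" map to
// ./dist-cjs/submodules/cbor/index.js (CJS) or the ES build.
// The top-level cbor.js / cbor.d.ts files below are fallback
// redirects for tools that do not understand "exports".
import * as cbor from "@smithy/core/cbor";
```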
+ */ +declare module "@smithy/core/cbor" { + export * from "@smithy/core/dist-types/submodules/cbor/index.d"; +} diff --git a/node_modules/@smithy/core/cbor.js b/node_modules/@smithy/core/cbor.js new file mode 100644 index 00000000..710fb798 --- /dev/null +++ b/node_modules/@smithy/core/cbor.js @@ -0,0 +1,6 @@ + +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/cbor/index.js"); diff --git a/node_modules/@smithy/core/dist-cjs/getSmithyContext.js b/node_modules/@smithy/core/dist-cjs/getSmithyContext.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/getSmithyContext.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/core/dist-cjs/index.js b/node_modules/@smithy/core/dist-cjs/index.js new file mode 100644 index 00000000..a3735f62 --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/index.js @@ -0,0 +1,454 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + DefaultIdentityProviderConfig: () => DefaultIdentityProviderConfig, + EXPIRATION_MS: () => EXPIRATION_MS, + HttpApiKeyAuthSigner: () => HttpApiKeyAuthSigner, + HttpBearerAuthSigner: () => HttpBearerAuthSigner, + NoAuthSigner: () => NoAuthSigner, + createIsIdentityExpiredFunction: () => createIsIdentityExpiredFunction, + createPaginator: () => createPaginator, + doesIdentityRequireRefresh: () => doesIdentityRequireRefresh, + getHttpAuthSchemeEndpointRuleSetPlugin: () => getHttpAuthSchemeEndpointRuleSetPlugin, + getHttpAuthSchemePlugin: () => getHttpAuthSchemePlugin, + getHttpSigningPlugin: () => getHttpSigningPlugin, + getSmithyContext: () => getSmithyContext, + httpAuthSchemeEndpointRuleSetMiddlewareOptions: () => httpAuthSchemeEndpointRuleSetMiddlewareOptions, + httpAuthSchemeMiddleware: () => httpAuthSchemeMiddleware, + httpAuthSchemeMiddlewareOptions: () => httpAuthSchemeMiddlewareOptions, + httpSigningMiddleware: () => httpSigningMiddleware, + httpSigningMiddlewareOptions: () => httpSigningMiddlewareOptions, + isIdentityExpired: () => isIdentityExpired, + memoizeIdentityProvider: () => memoizeIdentityProvider, + normalizeProvider: () => normalizeProvider, + requestBuilder: () => import_protocols.requestBuilder, + setFeature: () => setFeature +}); +module.exports = __toCommonJS(src_exports); + +// src/getSmithyContext.ts +var import_types = require("@smithy/types"); +var getSmithyContext = /* @__PURE__ */ __name((context) => context[import_types.SMITHY_CONTEXT_KEY] || (context[import_types.SMITHY_CONTEXT_KEY] = {}), 
"getSmithyContext"); + +// src/middleware-http-auth-scheme/httpAuthSchemeMiddleware.ts +var import_util_middleware = require("@smithy/util-middleware"); + +// src/middleware-http-auth-scheme/resolveAuthOptions.ts +var resolveAuthOptions = /* @__PURE__ */ __name((candidateAuthOptions, authSchemePreference) => { + if (!authSchemePreference || authSchemePreference.length === 0) { + return candidateAuthOptions; + } + const preferredAuthOptions = []; + for (const preferredSchemeName of authSchemePreference) { + for (const candidateAuthOption of candidateAuthOptions) { + const candidateAuthSchemeName = candidateAuthOption.schemeId.split("#")[1]; + if (candidateAuthSchemeName === preferredSchemeName) { + preferredAuthOptions.push(candidateAuthOption); + } + } + } + for (const candidateAuthOption of candidateAuthOptions) { + if (!preferredAuthOptions.find(({ schemeId }) => schemeId === candidateAuthOption.schemeId)) { + preferredAuthOptions.push(candidateAuthOption); + } + } + return preferredAuthOptions; +}, "resolveAuthOptions"); + +// src/middleware-http-auth-scheme/httpAuthSchemeMiddleware.ts +function convertHttpAuthSchemesToMap(httpAuthSchemes) { + const map = /* @__PURE__ */ new Map(); + for (const scheme of httpAuthSchemes) { + map.set(scheme.schemeId, scheme); + } + return map; +} +__name(convertHttpAuthSchemesToMap, "convertHttpAuthSchemesToMap"); +var httpAuthSchemeMiddleware = /* @__PURE__ */ __name((config, mwOptions) => (next, context) => async (args) => { + const options = config.httpAuthSchemeProvider( + await mwOptions.httpAuthSchemeParametersProvider(config, context, args.input) + ); + const authSchemePreference = config.authSchemePreference ? await config.authSchemePreference() : []; + const resolvedOptions = resolveAuthOptions(options, authSchemePreference); + const authSchemes = convertHttpAuthSchemesToMap(config.httpAuthSchemes); + const smithyContext = (0, import_util_middleware.getSmithyContext)(context); + const failureReasons = []; + for (const option of resolvedOptions) { + const scheme = authSchemes.get(option.schemeId); + if (!scheme) { + failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` was not enabled for this service.`); + continue; + } + const identityProvider = scheme.identityProvider(await mwOptions.identityProviderConfigProvider(config)); + if (!identityProvider) { + failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` did not have an IdentityProvider configured.`); + continue; + } + const { identityProperties = {}, signingProperties = {} } = option.propertiesExtractor?.(config, context) || {}; + option.identityProperties = Object.assign(option.identityProperties || {}, identityProperties); + option.signingProperties = Object.assign(option.signingProperties || {}, signingProperties); + smithyContext.selectedHttpAuthScheme = { + httpAuthOption: option, + identity: await identityProvider(option.identityProperties), + signer: scheme.signer + }; + break; + } + if (!smithyContext.selectedHttpAuthScheme) { + throw new Error(failureReasons.join("\n")); + } + return next(args); +}, "httpAuthSchemeMiddleware"); + +// src/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.ts +var httpAuthSchemeEndpointRuleSetMiddlewareOptions = { + step: "serialize", + tags: ["HTTP_AUTH_SCHEME"], + name: "httpAuthSchemeMiddleware", + override: true, + relation: "before", + toMiddleware: "endpointV2Middleware" +}; +var getHttpAuthSchemeEndpointRuleSetPlugin = /* @__PURE__ */ __name((config, { + httpAuthSchemeParametersProvider, + 
identityProviderConfigProvider +}) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo( + httpAuthSchemeMiddleware(config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider + }), + httpAuthSchemeEndpointRuleSetMiddlewareOptions + ); + } +}), "getHttpAuthSchemeEndpointRuleSetPlugin"); + +// src/middleware-http-auth-scheme/getHttpAuthSchemePlugin.ts +var import_middleware_serde = require("@smithy/middleware-serde"); +var httpAuthSchemeMiddlewareOptions = { + step: "serialize", + tags: ["HTTP_AUTH_SCHEME"], + name: "httpAuthSchemeMiddleware", + override: true, + relation: "before", + toMiddleware: import_middleware_serde.serializerMiddlewareOption.name +}; +var getHttpAuthSchemePlugin = /* @__PURE__ */ __name((config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider +}) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo( + httpAuthSchemeMiddleware(config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider + }), + httpAuthSchemeMiddlewareOptions + ); + } +}), "getHttpAuthSchemePlugin"); + +// src/middleware-http-signing/httpSigningMiddleware.ts +var import_protocol_http = require("@smithy/protocol-http"); + +var defaultErrorHandler = /* @__PURE__ */ __name((signingProperties) => (error) => { + throw error; +}, "defaultErrorHandler"); +var defaultSuccessHandler = /* @__PURE__ */ __name((httpResponse, signingProperties) => { +}, "defaultSuccessHandler"); +var httpSigningMiddleware = /* @__PURE__ */ __name((config) => (next, context) => async (args) => { + if (!import_protocol_http.HttpRequest.isInstance(args.request)) { + return next(args); + } + const smithyContext = (0, import_util_middleware.getSmithyContext)(context); + const scheme = smithyContext.selectedHttpAuthScheme; + if (!scheme) { + throw new Error(`No HttpAuthScheme was selected: unable to sign request`); + } + const { + httpAuthOption: { signingProperties = {} }, + identity, + signer + } = scheme; + const output = await next({ + ...args, + request: await signer.sign(args.request, identity, signingProperties) + }).catch((signer.errorHandler || defaultErrorHandler)(signingProperties)); + (signer.successHandler || defaultSuccessHandler)(output.response, signingProperties); + return output; +}, "httpSigningMiddleware"); + +// src/middleware-http-signing/getHttpSigningMiddleware.ts +var httpSigningMiddlewareOptions = { + step: "finalizeRequest", + tags: ["HTTP_SIGNING"], + name: "httpSigningMiddleware", + aliases: ["apiKeyMiddleware", "tokenMiddleware", "awsAuthMiddleware"], + override: true, + relation: "after", + toMiddleware: "retryMiddleware" +}; +var getHttpSigningPlugin = /* @__PURE__ */ __name((config) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(httpSigningMiddleware(config), httpSigningMiddlewareOptions); + } +}), "getHttpSigningPlugin"); + +// src/normalizeProvider.ts +var normalizeProvider = /* @__PURE__ */ __name((input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}, "normalizeProvider"); + +// src/pagination/createPaginator.ts +var makePagedClientRequest = /* @__PURE__ */ __name(async (CommandCtor, client, input, withCommand = (_) => _, ...args) => { + let command = new CommandCtor(input); + command = withCommand(command) ?? 
command; + return await client.send(command, ...args); +}, "makePagedClientRequest"); +function createPaginator(ClientCtor, CommandCtor, inputTokenName, outputTokenName, pageSizeTokenName) { + return /* @__PURE__ */ __name(async function* paginateOperation(config, input, ...additionalArguments) { + const _input = input; + let token = config.startingToken ?? _input[inputTokenName]; + let hasNext = true; + let page; + while (hasNext) { + _input[inputTokenName] = token; + if (pageSizeTokenName) { + _input[pageSizeTokenName] = _input[pageSizeTokenName] ?? config.pageSize; + } + if (config.client instanceof ClientCtor) { + page = await makePagedClientRequest( + CommandCtor, + config.client, + input, + config.withCommand, + ...additionalArguments + ); + } else { + throw new Error(`Invalid client, expected instance of ${ClientCtor.name}`); + } + yield page; + const prevToken = token; + token = get(page, outputTokenName); + hasNext = !!(token && (!config.stopOnSameToken || token !== prevToken)); + } + return void 0; + }, "paginateOperation"); +} +__name(createPaginator, "createPaginator"); +var get = /* @__PURE__ */ __name((fromObject, path) => { + let cursor = fromObject; + const pathComponents = path.split("."); + for (const step of pathComponents) { + if (!cursor || typeof cursor !== "object") { + return void 0; + } + cursor = cursor[step]; + } + return cursor; +}, "get"); + +// src/protocols/requestBuilder.ts +var import_protocols = require("@smithy/core/protocols"); + +// src/setFeature.ts +function setFeature(context, feature, value) { + if (!context.__smithy_context) { + context.__smithy_context = { + features: {} + }; + } else if (!context.__smithy_context.features) { + context.__smithy_context.features = {}; + } + context.__smithy_context.features[feature] = value; +} +__name(setFeature, "setFeature"); + +// src/util-identity-and-auth/DefaultIdentityProviderConfig.ts +var DefaultIdentityProviderConfig = class { + /** + * Creates an IdentityProviderConfig with a record of scheme IDs to identity providers. 
+ * + * @param config scheme IDs and identity providers to configure + */ + constructor(config) { + this.authSchemes = /* @__PURE__ */ new Map(); + for (const [key, value] of Object.entries(config)) { + if (value !== void 0) { + this.authSchemes.set(key, value); + } + } + } + static { + __name(this, "DefaultIdentityProviderConfig"); + } + getIdentityProvider(schemeId) { + return this.authSchemes.get(schemeId); + } +}; + +// src/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.ts + + +var HttpApiKeyAuthSigner = class { + static { + __name(this, "HttpApiKeyAuthSigner"); + } + async sign(httpRequest, identity, signingProperties) { + if (!signingProperties) { + throw new Error( + "request could not be signed with `apiKey` since the `name` and `in` signer properties are missing" + ); + } + if (!signingProperties.name) { + throw new Error("request could not be signed with `apiKey` since the `name` signer property is missing"); + } + if (!signingProperties.in) { + throw new Error("request could not be signed with `apiKey` since the `in` signer property is missing"); + } + if (!identity.apiKey) { + throw new Error("request could not be signed with `apiKey` since the `apiKey` is not defined"); + } + const clonedRequest = import_protocol_http.HttpRequest.clone(httpRequest); + if (signingProperties.in === import_types.HttpApiKeyAuthLocation.QUERY) { + clonedRequest.query[signingProperties.name] = identity.apiKey; + } else if (signingProperties.in === import_types.HttpApiKeyAuthLocation.HEADER) { + clonedRequest.headers[signingProperties.name] = signingProperties.scheme ? `${signingProperties.scheme} ${identity.apiKey}` : identity.apiKey; + } else { + throw new Error( + "request can only be signed with `apiKey` locations `query` or `header`, but found: `" + signingProperties.in + "`" + ); + } + return clonedRequest; + } +}; + +// src/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.ts + +var HttpBearerAuthSigner = class { + static { + __name(this, "HttpBearerAuthSigner"); + } + async sign(httpRequest, identity, signingProperties) { + const clonedRequest = import_protocol_http.HttpRequest.clone(httpRequest); + if (!identity.token) { + throw new Error("request could not be signed with `token` since the `token` is not defined"); + } + clonedRequest.headers["Authorization"] = `Bearer ${identity.token}`; + return clonedRequest; + } +}; + +// src/util-identity-and-auth/httpAuthSchemes/noAuth.ts +var NoAuthSigner = class { + static { + __name(this, "NoAuthSigner"); + } + async sign(httpRequest, identity, signingProperties) { + return httpRequest; + } +}; + +// src/util-identity-and-auth/memoizeIdentityProvider.ts +var createIsIdentityExpiredFunction = /* @__PURE__ */ __name((expirationMs) => (identity) => doesIdentityRequireRefresh(identity) && identity.expiration.getTime() - Date.now() < expirationMs, "createIsIdentityExpiredFunction"); +var EXPIRATION_MS = 3e5; +var isIdentityExpired = createIsIdentityExpiredFunction(EXPIRATION_MS); +var doesIdentityRequireRefresh = /* @__PURE__ */ __name((identity) => identity.expiration !== void 0, "doesIdentityRequireRefresh"); +var memoizeIdentityProvider = /* @__PURE__ */ __name((provider, isExpired, requiresRefresh) => { + if (provider === void 0) { + return void 0; + } + const normalizedProvider = typeof provider !== "function" ? 
async () => Promise.resolve(provider) : provider; + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = /* @__PURE__ */ __name(async (options) => { + if (!pending) { + pending = normalizedProvider(options); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } finally { + pending = void 0; + } + return resolved; + }, "coalesceProvider"); + if (isExpired === void 0) { + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(options); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(options); + } + if (isConstant) { + return resolved; + } + if (!requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(options); + return resolved; + } + return resolved; + }; +}, "memoizeIdentityProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + createPaginator, + getSmithyContext, + httpAuthSchemeMiddleware, + httpAuthSchemeEndpointRuleSetMiddlewareOptions, + getHttpAuthSchemeEndpointRuleSetPlugin, + httpAuthSchemeMiddlewareOptions, + getHttpAuthSchemePlugin, + httpSigningMiddleware, + httpSigningMiddlewareOptions, + getHttpSigningPlugin, + normalizeProvider, + requestBuilder, + setFeature, + DefaultIdentityProviderConfig, + HttpApiKeyAuthSigner, + HttpBearerAuthSigner, + NoAuthSigner, + createIsIdentityExpiredFunction, + EXPIRATION_MS, + isIdentityExpired, + doesIdentityRequireRefresh, + memoizeIdentityProvider +}); + diff --git a/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js b/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js b/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js b/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/index.js b/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/resolveAuthOptions.js 
b/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/resolveAuthOptions.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/resolveAuthOptions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/core/dist-cjs/middleware-http-signing/getHttpSigningMiddleware.js b/node_modules/@smithy/core/dist-cjs/middleware-http-signing/getHttpSigningMiddleware.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/middleware-http-signing/getHttpSigningMiddleware.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/core/dist-cjs/middleware-http-signing/httpSigningMiddleware.js b/node_modules/@smithy/core/dist-cjs/middleware-http-signing/httpSigningMiddleware.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/middleware-http-signing/httpSigningMiddleware.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/core/dist-cjs/middleware-http-signing/index.js b/node_modules/@smithy/core/dist-cjs/middleware-http-signing/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/middleware-http-signing/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/core/dist-cjs/normalizeProvider.js b/node_modules/@smithy/core/dist-cjs/normalizeProvider.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/normalizeProvider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/core/dist-cjs/pagination/createPaginator.js b/node_modules/@smithy/core/dist-cjs/pagination/createPaginator.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/pagination/createPaginator.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/core/dist-cjs/protocols/requestBuilder.js b/node_modules/@smithy/core/dist-cjs/protocols/requestBuilder.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/protocols/requestBuilder.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/core/dist-cjs/setFeature.js b/node_modules/@smithy/core/dist-cjs/setFeature.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/setFeature.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/core/dist-cjs/submodules/cbor/index.js b/node_modules/@smithy/core/dist-cjs/submodules/cbor/index.js new file mode 100644 index 00000000..4ca1789a --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/submodules/cbor/index.js @@ -0,0 +1,730 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if 
(from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/cbor/index.ts +var cbor_exports = {}; +__export(cbor_exports, { + buildHttpRpcRequest: () => buildHttpRpcRequest, + cbor: () => cbor, + checkCborResponse: () => checkCborResponse, + dateToTag: () => dateToTag, + loadSmithyRpcV2CborErrorCode: () => loadSmithyRpcV2CborErrorCode, + parseCborBody: () => parseCborBody, + parseCborErrorBody: () => parseCborErrorBody, + tag: () => tag, + tagSymbol: () => tagSymbol +}); +module.exports = __toCommonJS(cbor_exports); + +// src/submodules/cbor/cbor-decode.ts +var import_util_utf8 = require("@smithy/util-utf8"); + +// src/submodules/cbor/cbor-types.ts +var majorUint64 = 0; +var majorNegativeInt64 = 1; +var majorUnstructuredByteString = 2; +var majorUtf8String = 3; +var majorList = 4; +var majorMap = 5; +var majorTag = 6; +var majorSpecial = 7; +var specialFalse = 20; +var specialTrue = 21; +var specialNull = 22; +var specialUndefined = 23; +var extendedOneByte = 24; +var extendedFloat16 = 25; +var extendedFloat32 = 26; +var extendedFloat64 = 27; +var minorIndefinite = 31; +function alloc(size) { + return typeof Buffer !== "undefined" ? Buffer.alloc(size) : new Uint8Array(size); +} +var tagSymbol = Symbol("@smithy/core/cbor::tagSymbol"); +function tag(data2) { + data2[tagSymbol] = true; + return data2; +} + +// src/submodules/cbor/cbor-decode.ts +var USE_TEXT_DECODER = typeof TextDecoder !== "undefined"; +var USE_BUFFER = typeof Buffer !== "undefined"; +var payload = alloc(0); +var dataView = new DataView(payload.buffer, payload.byteOffset, payload.byteLength); +var textDecoder = USE_TEXT_DECODER ? 
new TextDecoder() : null; +var _offset = 0; +function setPayload(bytes) { + payload = bytes; + dataView = new DataView(payload.buffer, payload.byteOffset, payload.byteLength); +} +function decode(at, to) { + if (at >= to) { + throw new Error("unexpected end of (decode) payload."); + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + switch (major) { + case majorUint64: + case majorNegativeInt64: + case majorTag: + let unsignedInt; + let offset; + if (minor < 24) { + unsignedInt = minor; + offset = 1; + } else { + switch (minor) { + case extendedOneByte: + case extendedFloat16: + case extendedFloat32: + case extendedFloat64: + const countLength = minorValueToArgumentLength[minor]; + const countOffset = countLength + 1; + offset = countOffset; + if (to - at < countOffset) { + throw new Error(`countLength ${countLength} greater than remaining buf len.`); + } + const countIndex = at + 1; + if (countLength === 1) { + unsignedInt = payload[countIndex]; + } else if (countLength === 2) { + unsignedInt = dataView.getUint16(countIndex); + } else if (countLength === 4) { + unsignedInt = dataView.getUint32(countIndex); + } else { + unsignedInt = dataView.getBigUint64(countIndex); + } + break; + default: + throw new Error(`unexpected minor value ${minor}.`); + } + } + if (major === majorUint64) { + _offset = offset; + return castBigInt(unsignedInt); + } else if (major === majorNegativeInt64) { + let negativeInt; + if (typeof unsignedInt === "bigint") { + negativeInt = BigInt(-1) - unsignedInt; + } else { + negativeInt = -1 - unsignedInt; + } + _offset = offset; + return castBigInt(negativeInt); + } else { + const value = decode(at + offset, to); + const valueOffset = _offset; + _offset = offset + valueOffset; + return tag({ tag: castBigInt(unsignedInt), value }); + } + case majorUtf8String: + case majorMap: + case majorList: + case majorUnstructuredByteString: + if (minor === minorIndefinite) { + switch (major) { + case majorUtf8String: + return decodeUtf8StringIndefinite(at, to); + case majorMap: + return decodeMapIndefinite(at, to); + case majorList: + return decodeListIndefinite(at, to); + case majorUnstructuredByteString: + return decodeUnstructuredByteStringIndefinite(at, to); + } + } else { + switch (major) { + case majorUtf8String: + return decodeUtf8String(at, to); + case majorMap: + return decodeMap(at, to); + case majorList: + return decodeList(at, to); + case majorUnstructuredByteString: + return decodeUnstructuredByteString(at, to); + } + } + default: + return decodeSpecial(at, to); + } +} +function bytesToUtf8(bytes, at, to) { + if (USE_BUFFER && bytes.constructor?.name === "Buffer") { + return bytes.toString("utf-8", at, to); + } + if (textDecoder) { + return textDecoder.decode(bytes.subarray(at, to)); + } + return (0, import_util_utf8.toUtf8)(bytes.subarray(at, to)); +} +function demote(bigInteger) { + const num = Number(bigInteger); + if (num < Number.MIN_SAFE_INTEGER || Number.MAX_SAFE_INTEGER < num) { + console.warn(new Error(`@smithy/core/cbor - truncating BigInt(${bigInteger}) to ${num} with loss of precision.`)); + } + return num; +} +var minorValueToArgumentLength = { + [extendedOneByte]: 1, + [extendedFloat16]: 2, + [extendedFloat32]: 4, + [extendedFloat64]: 8 +}; +function bytesToFloat16(a, b) { + const sign = a >> 7; + const exponent = (a & 124) >> 2; + const fraction = (a & 3) << 8 | b; + const scalar = sign === 0 ? 
1 : -1; + let exponentComponent; + let summation; + if (exponent === 0) { + if (fraction === 0) { + return 0; + } else { + exponentComponent = Math.pow(2, 1 - 15); + summation = 0; + } + } else if (exponent === 31) { + if (fraction === 0) { + return scalar * Infinity; + } else { + return NaN; + } + } else { + exponentComponent = Math.pow(2, exponent - 15); + summation = 1; + } + summation += fraction / 1024; + return scalar * (exponentComponent * summation); +} +function decodeCount(at, to) { + const minor = payload[at] & 31; + if (minor < 24) { + _offset = 1; + return minor; + } + if (minor === extendedOneByte || minor === extendedFloat16 || minor === extendedFloat32 || minor === extendedFloat64) { + const countLength = minorValueToArgumentLength[minor]; + _offset = countLength + 1; + if (to - at < _offset) { + throw new Error(`countLength ${countLength} greater than remaining buf len.`); + } + const countIndex = at + 1; + if (countLength === 1) { + return payload[countIndex]; + } else if (countLength === 2) { + return dataView.getUint16(countIndex); + } else if (countLength === 4) { + return dataView.getUint32(countIndex); + } + return demote(dataView.getBigUint64(countIndex)); + } + throw new Error(`unexpected minor value ${minor}.`); +} +function decodeUtf8String(at, to) { + const length = decodeCount(at, to); + const offset = _offset; + at += offset; + if (to - at < length) { + throw new Error(`string len ${length} greater than remaining buf len.`); + } + const value = bytesToUtf8(payload, at, at + length); + _offset = offset + length; + return value; +} +function decodeUtf8StringIndefinite(at, to) { + at += 1; + const vector = []; + for (const base = at; at < to; ) { + if (payload[at] === 255) { + const data2 = alloc(vector.length); + data2.set(vector, 0); + _offset = at - base + 2; + return bytesToUtf8(data2, 0, data2.length); + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + if (major !== majorUtf8String) { + throw new Error(`unexpected major type ${major} in indefinite string.`); + } + if (minor === minorIndefinite) { + throw new Error("nested indefinite string."); + } + const bytes = decodeUnstructuredByteString(at, to); + const length = _offset; + at += length; + for (let i = 0; i < bytes.length; ++i) { + vector.push(bytes[i]); + } + } + throw new Error("expected break marker."); +} +function decodeUnstructuredByteString(at, to) { + const length = decodeCount(at, to); + const offset = _offset; + at += offset; + if (to - at < length) { + throw new Error(`unstructured byte string len ${length} greater than remaining buf len.`); + } + const value = payload.subarray(at, at + length); + _offset = offset + length; + return value; +} +function decodeUnstructuredByteStringIndefinite(at, to) { + at += 1; + const vector = []; + for (const base = at; at < to; ) { + if (payload[at] === 255) { + const data2 = alloc(vector.length); + data2.set(vector, 0); + _offset = at - base + 2; + return data2; + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + if (major !== majorUnstructuredByteString) { + throw new Error(`unexpected major type ${major} in indefinite string.`); + } + if (minor === minorIndefinite) { + throw new Error("nested indefinite string."); + } + const bytes = decodeUnstructuredByteString(at, to); + const length = _offset; + at += length; + for (let i = 0; i < bytes.length; ++i) { + vector.push(bytes[i]); + } + } + throw new Error("expected break marker."); +} +function decodeList(at, to) { + const listDataLength = 
decodeCount(at, to); + const offset = _offset; + at += offset; + const base = at; + const list = Array(listDataLength); + for (let i = 0; i < listDataLength; ++i) { + const item = decode(at, to); + const itemOffset = _offset; + list[i] = item; + at += itemOffset; + } + _offset = offset + (at - base); + return list; +} +function decodeListIndefinite(at, to) { + at += 1; + const list = []; + for (const base = at; at < to; ) { + if (payload[at] === 255) { + _offset = at - base + 2; + return list; + } + const item = decode(at, to); + const n = _offset; + at += n; + list.push(item); + } + throw new Error("expected break marker."); +} +function decodeMap(at, to) { + const mapDataLength = decodeCount(at, to); + const offset = _offset; + at += offset; + const base = at; + const map = {}; + for (let i = 0; i < mapDataLength; ++i) { + if (at >= to) { + throw new Error("unexpected end of map payload."); + } + const major = (payload[at] & 224) >> 5; + if (major !== majorUtf8String) { + throw new Error(`unexpected major type ${major} for map key at index ${at}.`); + } + const key = decode(at, to); + at += _offset; + const value = decode(at, to); + at += _offset; + map[key] = value; + } + _offset = offset + (at - base); + return map; +} +function decodeMapIndefinite(at, to) { + at += 1; + const base = at; + const map = {}; + for (; at < to; ) { + if (at >= to) { + throw new Error("unexpected end of map payload."); + } + if (payload[at] === 255) { + _offset = at - base + 2; + return map; + } + const major = (payload[at] & 224) >> 5; + if (major !== majorUtf8String) { + throw new Error(`unexpected major type ${major} for map key.`); + } + const key = decode(at, to); + at += _offset; + const value = decode(at, to); + at += _offset; + map[key] = value; + } + throw new Error("expected break marker."); +} +function decodeSpecial(at, to) { + const minor = payload[at] & 31; + switch (minor) { + case specialTrue: + case specialFalse: + _offset = 1; + return minor === specialTrue; + case specialNull: + _offset = 1; + return null; + case specialUndefined: + _offset = 1; + return null; + case extendedFloat16: + if (to - at < 3) { + throw new Error("incomplete float16 at end of buf."); + } + _offset = 3; + return bytesToFloat16(payload[at + 1], payload[at + 2]); + case extendedFloat32: + if (to - at < 5) { + throw new Error("incomplete float32 at end of buf."); + } + _offset = 5; + return dataView.getFloat32(at + 1); + case extendedFloat64: + if (to - at < 9) { + throw new Error("incomplete float64 at end of buf."); + } + _offset = 9; + return dataView.getFloat64(at + 1); + default: + throw new Error(`unexpected minor value ${minor}.`); + } +} +function castBigInt(bigInt) { + if (typeof bigInt === "number") { + return bigInt; + } + const num = Number(bigInt); + if (Number.MIN_SAFE_INTEGER <= num && num <= Number.MAX_SAFE_INTEGER) { + return num; + } + return bigInt; +} + +// src/submodules/cbor/cbor-encode.ts +var import_util_utf82 = require("@smithy/util-utf8"); +var USE_BUFFER2 = typeof Buffer !== "undefined"; +var initialSize = 2048; +var data = alloc(initialSize); +var dataView2 = new DataView(data.buffer, data.byteOffset, data.byteLength); +var cursor = 0; +function ensureSpace(bytes) { + const remaining = data.byteLength - cursor; + if (remaining < bytes) { + if (cursor < 16e6) { + resize(Math.max(data.byteLength * 4, data.byteLength + bytes)); + } else { + resize(data.byteLength + bytes + 16e6); + } + } +} +function toUint8Array() { + const out = alloc(cursor); + out.set(data.subarray(0, cursor), 0); + cursor 
= 0; + return out; +} +function resize(size) { + const old = data; + data = alloc(size); + if (old) { + if (old.copy) { + old.copy(data, 0, 0, old.byteLength); + } else { + data.set(old, 0); + } + } + dataView2 = new DataView(data.buffer, data.byteOffset, data.byteLength); +} +function encodeHeader(major, value) { + if (value < 24) { + data[cursor++] = major << 5 | value; + } else if (value < 1 << 8) { + data[cursor++] = major << 5 | 24; + data[cursor++] = value; + } else if (value < 1 << 16) { + data[cursor++] = major << 5 | extendedFloat16; + dataView2.setUint16(cursor, value); + cursor += 2; + } else if (value < 2 ** 32) { + data[cursor++] = major << 5 | extendedFloat32; + dataView2.setUint32(cursor, value); + cursor += 4; + } else { + data[cursor++] = major << 5 | extendedFloat64; + dataView2.setBigUint64(cursor, typeof value === "bigint" ? value : BigInt(value)); + cursor += 8; + } +} +function encode(_input) { + const encodeStack = [_input]; + while (encodeStack.length) { + const input = encodeStack.pop(); + ensureSpace(typeof input === "string" ? input.length * 4 : 64); + if (typeof input === "string") { + if (USE_BUFFER2) { + encodeHeader(majorUtf8String, Buffer.byteLength(input)); + cursor += data.write(input, cursor); + } else { + const bytes = (0, import_util_utf82.fromUtf8)(input); + encodeHeader(majorUtf8String, bytes.byteLength); + data.set(bytes, cursor); + cursor += bytes.byteLength; + } + continue; + } else if (typeof input === "number") { + if (Number.isInteger(input)) { + const nonNegative = input >= 0; + const major = nonNegative ? majorUint64 : majorNegativeInt64; + const value = nonNegative ? input : -input - 1; + if (value < 24) { + data[cursor++] = major << 5 | value; + } else if (value < 256) { + data[cursor++] = major << 5 | 24; + data[cursor++] = value; + } else if (value < 65536) { + data[cursor++] = major << 5 | extendedFloat16; + data[cursor++] = value >> 8; + data[cursor++] = value; + } else if (value < 4294967296) { + data[cursor++] = major << 5 | extendedFloat32; + dataView2.setUint32(cursor, value); + cursor += 4; + } else { + data[cursor++] = major << 5 | extendedFloat64; + dataView2.setBigUint64(cursor, BigInt(value)); + cursor += 8; + } + continue; + } + data[cursor++] = majorSpecial << 5 | extendedFloat64; + dataView2.setFloat64(cursor, input); + cursor += 8; + continue; + } else if (typeof input === "bigint") { + const nonNegative = input >= 0; + const major = nonNegative ? majorUint64 : majorNegativeInt64; + const value = nonNegative ? input : -input - BigInt(1); + const n = Number(value); + if (n < 24) { + data[cursor++] = major << 5 | n; + } else if (n < 256) { + data[cursor++] = major << 5 | 24; + data[cursor++] = n; + } else if (n < 65536) { + data[cursor++] = major << 5 | extendedFloat16; + data[cursor++] = n >> 8; + data[cursor++] = n & 255; + } else if (n < 4294967296) { + data[cursor++] = major << 5 | extendedFloat32; + dataView2.setUint32(cursor, n); + cursor += 4; + } else { + data[cursor++] = major << 5 | extendedFloat64; + dataView2.setBigUint64(cursor, value); + cursor += 8; + } + continue; + } else if (input === null) { + data[cursor++] = majorSpecial << 5 | specialNull; + continue; + } else if (typeof input === "boolean") { + data[cursor++] = majorSpecial << 5 | (input ? 
specialTrue : specialFalse); + continue; + } else if (typeof input === "undefined") { + throw new Error("@smithy/core/cbor: client may not serialize undefined value."); + } else if (Array.isArray(input)) { + for (let i = input.length - 1; i >= 0; --i) { + encodeStack.push(input[i]); + } + encodeHeader(majorList, input.length); + continue; + } else if (typeof input.byteLength === "number") { + ensureSpace(input.length * 2); + encodeHeader(majorUnstructuredByteString, input.length); + data.set(input, cursor); + cursor += input.byteLength; + continue; + } else if (typeof input === "object") { + if (input[tagSymbol]) { + if ("tag" in input && "value" in input) { + encodeStack.push(input.value); + encodeHeader(majorTag, input.tag); + continue; + } else { + throw new Error( + "tag encountered with missing fields, need 'tag' and 'value', found: " + JSON.stringify(input) + ); + } + } + const keys = Object.keys(input); + for (let i = keys.length - 1; i >= 0; --i) { + const key = keys[i]; + encodeStack.push(input[key]); + encodeStack.push(key); + } + encodeHeader(majorMap, keys.length); + continue; + } + throw new Error(`data type ${input?.constructor?.name ?? typeof input} not compatible for encoding.`); + } +} + +// src/submodules/cbor/cbor.ts +var cbor = { + deserialize(payload2) { + setPayload(payload2); + return decode(0, payload2.length); + }, + serialize(input) { + try { + encode(input); + return toUint8Array(); + } catch (e) { + toUint8Array(); + throw e; + } + }, + /** + * @public + * @param size - byte length to allocate. + * + * This may be used to garbage collect the CBOR + * shared encoding buffer space, + * e.g. resizeEncodingBuffer(0); + * + * This may also be used to pre-allocate more space for + * CBOR encoding, e.g. resizeEncodingBuffer(100_000_000); + */ + resizeEncodingBuffer(size) { + resize(size); + } +}; + +// src/submodules/cbor/parseCborBody.ts +var import_protocols = require("@smithy/core/protocols"); +var import_protocol_http = require("@smithy/protocol-http"); +var import_util_body_length_browser = require("@smithy/util-body-length-browser"); +var parseCborBody = (streamBody, context) => { + return (0, import_protocols.collectBody)(streamBody, context).then(async (bytes) => { + if (bytes.length) { + try { + return cbor.deserialize(bytes); + } catch (e) { + Object.defineProperty(e, "$responseBodyText", { + value: context.utf8Encoder(bytes) + }); + throw e; + } + } + return {}; + }); +}; +var dateToTag = (date) => { + return tag({ + tag: 1, + value: date.getTime() / 1e3 + }); +}; +var parseCborErrorBody = async (errorBody, context) => { + const value = await parseCborBody(errorBody, context); + value.message = value.message ?? 
value.Message; + return value; +}; +var loadSmithyRpcV2CborErrorCode = (output, data2) => { + const sanitizeErrorCode = (rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + if (data2["__type"] !== void 0) { + return sanitizeErrorCode(data2["__type"]); + } + if (data2.code !== void 0) { + return sanitizeErrorCode(data2.code); + } +}; +var checkCborResponse = (response) => { + if (String(response.headers["smithy-protocol"]).toLowerCase() !== "rpc-v2-cbor") { + throw new Error("Malformed RPCv2 CBOR response, status: " + response.statusCode); + } +}; +var buildHttpRpcRequest = async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? basePath.slice(0, -1) + path : basePath + path, + headers + }; + if (resolvedHostname !== void 0) { + contents.hostname = resolvedHostname; + } + if (body !== void 0) { + contents.body = body; + try { + contents.headers["content-length"] = String((0, import_util_body_length_browser.calculateBodyLength)(body)); + } catch (e) { + } + } + return new import_protocol_http.HttpRequest(contents); +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + buildHttpRpcRequest, + cbor, + checkCborResponse, + dateToTag, + loadSmithyRpcV2CborErrorCode, + parseCborBody, + parseCborErrorBody, + tag, + tagSymbol +}); diff --git a/node_modules/@smithy/core/dist-cjs/submodules/protocols/index.js b/node_modules/@smithy/core/dist-cjs/submodules/protocols/index.js new file mode 100644 index 00000000..455a5de4 --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/submodules/protocols/index.js @@ -0,0 +1,164 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/protocols/index.ts +var protocols_exports = {}; +__export(protocols_exports, { + RequestBuilder: () => RequestBuilder, + collectBody: () => collectBody, + extendedEncodeURIComponent: () => extendedEncodeURIComponent, + requestBuilder: () => requestBuilder, + resolvedPath: () => resolvedPath +}); +module.exports = __toCommonJS(protocols_exports); + +// src/submodules/protocols/collect-stream-body.ts +var import_util_stream = require("@smithy/util-stream"); +var collectBody = async (streamBody = new Uint8Array(), context) => { + if (streamBody instanceof Uint8Array) { + return 
import_util_stream.Uint8ArrayBlobAdapter.mutate(streamBody); + } + if (!streamBody) { + return import_util_stream.Uint8ArrayBlobAdapter.mutate(new Uint8Array()); + } + const fromContext = context.streamCollector(streamBody); + return import_util_stream.Uint8ArrayBlobAdapter.mutate(await fromContext); +}; + +// src/submodules/protocols/extended-encode-uri-component.ts +function extendedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} + +// src/submodules/protocols/requestBuilder.ts +var import_protocol_http = require("@smithy/protocol-http"); + +// src/submodules/protocols/resolve-path.ts +var resolvedPath = (resolvedPath2, input, memberName, labelValueProvider, uriLabel, isGreedyLabel) => { + if (input != null && input[memberName] !== void 0) { + const labelValue = labelValueProvider(); + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: " + memberName + "."); + } + resolvedPath2 = resolvedPath2.replace( + uriLabel, + isGreedyLabel ? labelValue.split("/").map((segment) => extendedEncodeURIComponent(segment)).join("/") : extendedEncodeURIComponent(labelValue) + ); + } else { + throw new Error("No value provided for input HTTP label: " + memberName + "."); + } + return resolvedPath2; +}; + +// src/submodules/protocols/requestBuilder.ts +function requestBuilder(input, context) { + return new RequestBuilder(input, context); +} +var RequestBuilder = class { + constructor(input, context) { + this.input = input; + this.context = context; + this.query = {}; + this.method = ""; + this.headers = {}; + this.path = ""; + this.body = null; + this.hostname = ""; + this.resolvePathStack = []; + } + async build() { + const { hostname, protocol = "https", port, path: basePath } = await this.context.endpoint(); + this.path = basePath; + for (const resolvePath of this.resolvePathStack) { + resolvePath(this.path); + } + return new import_protocol_http.HttpRequest({ + protocol, + hostname: this.hostname || hostname, + port, + method: this.method, + path: this.path, + query: this.query, + body: this.body, + headers: this.headers + }); + } + /** + * Brevity setter for "hostname". + */ + hn(hostname) { + this.hostname = hostname; + return this; + } + /** + * Brevity initial builder for "basepath". + */ + bp(uriLabel) { + this.resolvePathStack.push((basePath) => { + this.path = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` + uriLabel; + }); + return this; + } + /** + * Brevity incremental builder for "path". + */ + p(memberName, labelValueProvider, uriLabel, isGreedyLabel) { + this.resolvePathStack.push((path) => { + this.path = resolvedPath(path, this.input, memberName, labelValueProvider, uriLabel, isGreedyLabel); + }); + return this; + } + /** + * Brevity setter for "headers". + */ + h(headers) { + this.headers = headers; + return this; + } + /** + * Brevity setter for "query". + */ + q(query) { + this.query = query; + return this; + } + /** + * Brevity setter for "body". + */ + b(body) { + this.body = body; + return this; + } + /** + * Brevity setter for "method". 
+ */ + m(method) { + this.method = method; + return this; + } +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + RequestBuilder, + collectBody, + extendedEncodeURIComponent, + requestBuilder, + resolvedPath +}); diff --git a/node_modules/@smithy/core/dist-cjs/submodules/serde/index.js b/node_modules/@smithy/core/dist-cjs/submodules/serde/index.js new file mode 100644 index 00000000..047fb9b2 --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/submodules/serde/index.js @@ -0,0 +1,41 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/serde/index.ts +var serde_exports = {}; +__export(serde_exports, { + NumericValue: () => NumericValue, + nv: () => nv +}); +module.exports = __toCommonJS(serde_exports); + +// src/submodules/serde/value/NumericValue.ts +var NumericValue = class { + constructor(string, type) { + this.string = string; + this.type = type; + } +}; +function nv(string) { + return new NumericValue(string, "bigDecimal"); +} +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + NumericValue, + nv +}); diff --git a/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/DefaultIdentityProviderConfig.js b/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/DefaultIdentityProviderConfig.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/DefaultIdentityProviderConfig.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js b/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js new file mode 100644 index 00000000..88174128 --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js b/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js new file mode 100644 index 00000000..88174128 --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/index.js b/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/index.js new file mode 100644 index 00000000..88174128 --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/index.js @@ -0,0 +1 @@ +module.exports = 
require("../../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/noAuth.js b/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/noAuth.js new file mode 100644 index 00000000..88174128 --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/noAuth.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/index.js b/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/memoizeIdentityProvider.js b/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/memoizeIdentityProvider.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/memoizeIdentityProvider.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/core/dist-es/getSmithyContext.js b/node_modules/@smithy/core/dist-es/getSmithyContext.js new file mode 100644 index 00000000..3848a0c2 --- /dev/null +++ b/node_modules/@smithy/core/dist-es/getSmithyContext.js @@ -0,0 +1,2 @@ +import { SMITHY_CONTEXT_KEY } from "@smithy/types"; +export const getSmithyContext = (context) => context[SMITHY_CONTEXT_KEY] || (context[SMITHY_CONTEXT_KEY] = {}); diff --git a/node_modules/@smithy/core/dist-es/index.js b/node_modules/@smithy/core/dist-es/index.js new file mode 100644 index 00000000..1dcdba11 --- /dev/null +++ b/node_modules/@smithy/core/dist-es/index.js @@ -0,0 +1,8 @@ +export * from "./getSmithyContext"; +export * from "./middleware-http-auth-scheme"; +export * from "./middleware-http-signing"; +export * from "./normalizeProvider"; +export { createPaginator } from "./pagination/createPaginator"; +export * from "./protocols/requestBuilder"; +export * from "./setFeature"; +export * from "./util-identity-and-auth"; diff --git a/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js b/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js new file mode 100644 index 00000000..d0aaae6e --- /dev/null +++ b/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js @@ -0,0 +1,17 @@ +import { httpAuthSchemeMiddleware } from "./httpAuthSchemeMiddleware"; +export const httpAuthSchemeEndpointRuleSetMiddlewareOptions = { + step: "serialize", + tags: ["HTTP_AUTH_SCHEME"], + name: "httpAuthSchemeMiddleware", + override: true, + relation: "before", + toMiddleware: "endpointV2Middleware", +}; +export const getHttpAuthSchemeEndpointRuleSetPlugin = (config, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(httpAuthSchemeMiddleware(config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider, + }), httpAuthSchemeEndpointRuleSetMiddlewareOptions); + }, +}); diff --git a/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js b/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js new 
file mode 100644 index 00000000..3fe03c53 --- /dev/null +++ b/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js @@ -0,0 +1,18 @@ +import { serializerMiddlewareOption } from "@smithy/middleware-serde"; +import { httpAuthSchemeMiddleware } from "./httpAuthSchemeMiddleware"; +export const httpAuthSchemeMiddlewareOptions = { + step: "serialize", + tags: ["HTTP_AUTH_SCHEME"], + name: "httpAuthSchemeMiddleware", + override: true, + relation: "before", + toMiddleware: serializerMiddlewareOption.name, +}; +export const getHttpAuthSchemePlugin = (config, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(httpAuthSchemeMiddleware(config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider, + }), httpAuthSchemeMiddlewareOptions); + }, +}); diff --git a/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js b/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js new file mode 100644 index 00000000..9869f650 --- /dev/null +++ b/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js @@ -0,0 +1,43 @@ +import { SMITHY_CONTEXT_KEY, } from "@smithy/types"; +import { getSmithyContext } from "@smithy/util-middleware"; +import { resolveAuthOptions } from "./resolveAuthOptions"; +function convertHttpAuthSchemesToMap(httpAuthSchemes) { + const map = new Map(); + for (const scheme of httpAuthSchemes) { + map.set(scheme.schemeId, scheme); + } + return map; +} +export const httpAuthSchemeMiddleware = (config, mwOptions) => (next, context) => async (args) => { + const options = config.httpAuthSchemeProvider(await mwOptions.httpAuthSchemeParametersProvider(config, context, args.input)); + const authSchemePreference = config.authSchemePreference ? 
await config.authSchemePreference() : []; + const resolvedOptions = resolveAuthOptions(options, authSchemePreference); + const authSchemes = convertHttpAuthSchemesToMap(config.httpAuthSchemes); + const smithyContext = getSmithyContext(context); + const failureReasons = []; + for (const option of resolvedOptions) { + const scheme = authSchemes.get(option.schemeId); + if (!scheme) { + failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` was not enabled for this service.`); + continue; + } + const identityProvider = scheme.identityProvider(await mwOptions.identityProviderConfigProvider(config)); + if (!identityProvider) { + failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` did not have an IdentityProvider configured.`); + continue; + } + const { identityProperties = {}, signingProperties = {} } = option.propertiesExtractor?.(config, context) || {}; + option.identityProperties = Object.assign(option.identityProperties || {}, identityProperties); + option.signingProperties = Object.assign(option.signingProperties || {}, signingProperties); + smithyContext.selectedHttpAuthScheme = { + httpAuthOption: option, + identity: await identityProvider(option.identityProperties), + signer: scheme.signer, + }; + break; + } + if (!smithyContext.selectedHttpAuthScheme) { + throw new Error(failureReasons.join("\n")); + } + return next(args); +}; diff --git a/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/index.js b/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/index.js new file mode 100644 index 00000000..5042e7dc --- /dev/null +++ b/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/index.js @@ -0,0 +1,3 @@ +export * from "./httpAuthSchemeMiddleware"; +export * from "./getHttpAuthSchemeEndpointRuleSetPlugin"; +export * from "./getHttpAuthSchemePlugin"; diff --git a/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/resolveAuthOptions.js b/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/resolveAuthOptions.js new file mode 100644 index 00000000..8260757e --- /dev/null +++ b/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/resolveAuthOptions.js @@ -0,0 +1,20 @@ +export const resolveAuthOptions = (candidateAuthOptions, authSchemePreference) => { + if (!authSchemePreference || authSchemePreference.length === 0) { + return candidateAuthOptions; + } + const preferredAuthOptions = []; + for (const preferredSchemeName of authSchemePreference) { + for (const candidateAuthOption of candidateAuthOptions) { + const candidateAuthSchemeName = candidateAuthOption.schemeId.split("#")[1]; + if (candidateAuthSchemeName === preferredSchemeName) { + preferredAuthOptions.push(candidateAuthOption); + } + } + } + for (const candidateAuthOption of candidateAuthOptions) { + if (!preferredAuthOptions.find(({ schemeId }) => schemeId === candidateAuthOption.schemeId)) { + preferredAuthOptions.push(candidateAuthOption); + } + } + return preferredAuthOptions; +}; diff --git a/node_modules/@smithy/core/dist-es/middleware-http-signing/getHttpSigningMiddleware.js b/node_modules/@smithy/core/dist-es/middleware-http-signing/getHttpSigningMiddleware.js new file mode 100644 index 00000000..e1997121 --- /dev/null +++ b/node_modules/@smithy/core/dist-es/middleware-http-signing/getHttpSigningMiddleware.js @@ -0,0 +1,15 @@ +import { httpSigningMiddleware } from "./httpSigningMiddleware"; +export const httpSigningMiddlewareOptions = { + step: "finalizeRequest", + tags: ["HTTP_SIGNING"], + name: "httpSigningMiddleware", + aliases: ["apiKeyMiddleware", 
"tokenMiddleware", "awsAuthMiddleware"], + override: true, + relation: "after", + toMiddleware: "retryMiddleware", +}; +export const getHttpSigningPlugin = (config) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(httpSigningMiddleware(config), httpSigningMiddlewareOptions); + }, +}); diff --git a/node_modules/@smithy/core/dist-es/middleware-http-signing/httpSigningMiddleware.js b/node_modules/@smithy/core/dist-es/middleware-http-signing/httpSigningMiddleware.js new file mode 100644 index 00000000..dbc1b280 --- /dev/null +++ b/node_modules/@smithy/core/dist-es/middleware-http-signing/httpSigningMiddleware.js @@ -0,0 +1,24 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { SMITHY_CONTEXT_KEY, } from "@smithy/types"; +import { getSmithyContext } from "@smithy/util-middleware"; +const defaultErrorHandler = (signingProperties) => (error) => { + throw error; +}; +const defaultSuccessHandler = (httpResponse, signingProperties) => { }; +export const httpSigningMiddleware = (config) => (next, context) => async (args) => { + if (!HttpRequest.isInstance(args.request)) { + return next(args); + } + const smithyContext = getSmithyContext(context); + const scheme = smithyContext.selectedHttpAuthScheme; + if (!scheme) { + throw new Error(`No HttpAuthScheme was selected: unable to sign request`); + } + const { httpAuthOption: { signingProperties = {} }, identity, signer, } = scheme; + const output = await next({ + ...args, + request: await signer.sign(args.request, identity, signingProperties), + }).catch((signer.errorHandler || defaultErrorHandler)(signingProperties)); + (signer.successHandler || defaultSuccessHandler)(output.response, signingProperties); + return output; +}; diff --git a/node_modules/@smithy/core/dist-es/middleware-http-signing/index.js b/node_modules/@smithy/core/dist-es/middleware-http-signing/index.js new file mode 100644 index 00000000..7bc6cfe1 --- /dev/null +++ b/node_modules/@smithy/core/dist-es/middleware-http-signing/index.js @@ -0,0 +1,2 @@ +export * from "./httpSigningMiddleware"; +export * from "./getHttpSigningMiddleware"; diff --git a/node_modules/@smithy/core/dist-es/normalizeProvider.js b/node_modules/@smithy/core/dist-es/normalizeProvider.js new file mode 100644 index 00000000..a83ea99e --- /dev/null +++ b/node_modules/@smithy/core/dist-es/normalizeProvider.js @@ -0,0 +1,6 @@ +export const normalizeProvider = (input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}; diff --git a/node_modules/@smithy/core/dist-es/pagination/createPaginator.js b/node_modules/@smithy/core/dist-es/pagination/createPaginator.js new file mode 100644 index 00000000..4e8f8892 --- /dev/null +++ b/node_modules/@smithy/core/dist-es/pagination/createPaginator.js @@ -0,0 +1,41 @@ +const makePagedClientRequest = async (CommandCtor, client, input, withCommand = (_) => _, ...args) => { + let command = new CommandCtor(input); + command = withCommand(command) ?? command; + return await client.send(command, ...args); +}; +export function createPaginator(ClientCtor, CommandCtor, inputTokenName, outputTokenName, pageSizeTokenName) { + return async function* paginateOperation(config, input, ...additionalArguments) { + const _input = input; + let token = config.startingToken ?? _input[inputTokenName]; + let hasNext = true; + let page; + while (hasNext) { + _input[inputTokenName] = token; + if (pageSizeTokenName) { + _input[pageSizeTokenName] = _input[pageSizeTokenName] ?? 
config.pageSize; + } + if (config.client instanceof ClientCtor) { + page = await makePagedClientRequest(CommandCtor, config.client, input, config.withCommand, ...additionalArguments); + } + else { + throw new Error(`Invalid client, expected instance of ${ClientCtor.name}`); + } + yield page; + const prevToken = token; + token = get(page, outputTokenName); + hasNext = !!(token && (!config.stopOnSameToken || token !== prevToken)); + } + return undefined; + }; +} +const get = (fromObject, path) => { + let cursor = fromObject; + const pathComponents = path.split("."); + for (const step of pathComponents) { + if (!cursor || typeof cursor !== "object") { + return undefined; + } + cursor = cursor[step]; + } + return cursor; +}; diff --git a/node_modules/@smithy/core/dist-es/protocols/requestBuilder.js b/node_modules/@smithy/core/dist-es/protocols/requestBuilder.js new file mode 100644 index 00000000..5b790a7f --- /dev/null +++ b/node_modules/@smithy/core/dist-es/protocols/requestBuilder.js @@ -0,0 +1 @@ +export { requestBuilder } from "@smithy/core/protocols"; diff --git a/node_modules/@smithy/core/dist-es/setFeature.js b/node_modules/@smithy/core/dist-es/setFeature.js new file mode 100644 index 00000000..a3a03031 --- /dev/null +++ b/node_modules/@smithy/core/dist-es/setFeature.js @@ -0,0 +1,11 @@ +export function setFeature(context, feature, value) { + if (!context.__smithy_context) { + context.__smithy_context = { + features: {}, + }; + } + else if (!context.__smithy_context.features) { + context.__smithy_context.features = {}; + } + context.__smithy_context.features[feature] = value; +} diff --git a/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-decode.js b/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-decode.js new file mode 100644 index 00000000..dca1c63c --- /dev/null +++ b/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-decode.js @@ -0,0 +1,391 @@ +import { toUtf8 } from "@smithy/util-utf8"; +import { alloc, extendedFloat16, extendedFloat32, extendedFloat64, extendedOneByte, majorList, majorMap, majorNegativeInt64, majorTag, majorUint64, majorUnstructuredByteString, majorUtf8String, minorIndefinite, specialFalse, specialNull, specialTrue, specialUndefined, tag, } from "./cbor-types"; +const USE_TEXT_DECODER = typeof TextDecoder !== "undefined"; +const USE_BUFFER = typeof Buffer !== "undefined"; +let payload = alloc(0); +let dataView = new DataView(payload.buffer, payload.byteOffset, payload.byteLength); +const textDecoder = USE_TEXT_DECODER ? 
new TextDecoder() : null; +let _offset = 0; +export function setPayload(bytes) { + payload = bytes; + dataView = new DataView(payload.buffer, payload.byteOffset, payload.byteLength); +} +export function decode(at, to) { + if (at >= to) { + throw new Error("unexpected end of (decode) payload."); + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + switch (major) { + case majorUint64: + case majorNegativeInt64: + case majorTag: + let unsignedInt; + let offset; + if (minor < 24) { + unsignedInt = minor; + offset = 1; + } + else { + switch (minor) { + case extendedOneByte: + case extendedFloat16: + case extendedFloat32: + case extendedFloat64: + const countLength = minorValueToArgumentLength[minor]; + const countOffset = (countLength + 1); + offset = countOffset; + if (to - at < countOffset) { + throw new Error(`countLength ${countLength} greater than remaining buf len.`); + } + const countIndex = at + 1; + if (countLength === 1) { + unsignedInt = payload[countIndex]; + } + else if (countLength === 2) { + unsignedInt = dataView.getUint16(countIndex); + } + else if (countLength === 4) { + unsignedInt = dataView.getUint32(countIndex); + } + else { + unsignedInt = dataView.getBigUint64(countIndex); + } + break; + default: + throw new Error(`unexpected minor value ${minor}.`); + } + } + if (major === majorUint64) { + _offset = offset; + return castBigInt(unsignedInt); + } + else if (major === majorNegativeInt64) { + let negativeInt; + if (typeof unsignedInt === "bigint") { + negativeInt = BigInt(-1) - unsignedInt; + } + else { + negativeInt = -1 - unsignedInt; + } + _offset = offset; + return castBigInt(negativeInt); + } + else { + const value = decode(at + offset, to); + const valueOffset = _offset; + _offset = offset + valueOffset; + return tag({ tag: castBigInt(unsignedInt), value }); + } + case majorUtf8String: + case majorMap: + case majorList: + case majorUnstructuredByteString: + if (minor === minorIndefinite) { + switch (major) { + case majorUtf8String: + return decodeUtf8StringIndefinite(at, to); + case majorMap: + return decodeMapIndefinite(at, to); + case majorList: + return decodeListIndefinite(at, to); + case majorUnstructuredByteString: + return decodeUnstructuredByteStringIndefinite(at, to); + } + } + else { + switch (major) { + case majorUtf8String: + return decodeUtf8String(at, to); + case majorMap: + return decodeMap(at, to); + case majorList: + return decodeList(at, to); + case majorUnstructuredByteString: + return decodeUnstructuredByteString(at, to); + } + } + default: + return decodeSpecial(at, to); + } +} +function bytesToUtf8(bytes, at, to) { + if (USE_BUFFER && bytes.constructor?.name === "Buffer") { + return bytes.toString("utf-8", at, to); + } + if (textDecoder) { + return textDecoder.decode(bytes.subarray(at, to)); + } + return toUtf8(bytes.subarray(at, to)); +} +function demote(bigInteger) { + const num = Number(bigInteger); + if (num < Number.MIN_SAFE_INTEGER || Number.MAX_SAFE_INTEGER < num) { + console.warn(new Error(`@smithy/core/cbor - truncating BigInt(${bigInteger}) to ${num} with loss of precision.`)); + } + return num; +} +const minorValueToArgumentLength = { + [extendedOneByte]: 1, + [extendedFloat16]: 2, + [extendedFloat32]: 4, + [extendedFloat64]: 8, +}; +export function bytesToFloat16(a, b) { + const sign = a >> 7; + const exponent = (a & 124) >> 2; + const fraction = ((a & 3) << 8) | b; + const scalar = sign === 0 ? 
1 : -1; + let exponentComponent; + let summation; + if (exponent === 0b00000) { + if (fraction === 0) { + return 0; + } + else { + exponentComponent = Math.pow(2, 1 - 15); + summation = 0; + } + } + else if (exponent === 0b11111) { + if (fraction === 0) { + return scalar * Infinity; + } + else { + return NaN; + } + } + else { + exponentComponent = Math.pow(2, exponent - 15); + summation = 1; + } + summation += fraction / 1024; + return scalar * (exponentComponent * summation); +} +function decodeCount(at, to) { + const minor = payload[at] & 31; + if (minor < 24) { + _offset = 1; + return minor; + } + if (minor === extendedOneByte || + minor === extendedFloat16 || + minor === extendedFloat32 || + minor === extendedFloat64) { + const countLength = minorValueToArgumentLength[minor]; + _offset = (countLength + 1); + if (to - at < _offset) { + throw new Error(`countLength ${countLength} greater than remaining buf len.`); + } + const countIndex = at + 1; + if (countLength === 1) { + return payload[countIndex]; + } + else if (countLength === 2) { + return dataView.getUint16(countIndex); + } + else if (countLength === 4) { + return dataView.getUint32(countIndex); + } + return demote(dataView.getBigUint64(countIndex)); + } + throw new Error(`unexpected minor value ${minor}.`); +} +function decodeUtf8String(at, to) { + const length = decodeCount(at, to); + const offset = _offset; + at += offset; + if (to - at < length) { + throw new Error(`string len ${length} greater than remaining buf len.`); + } + const value = bytesToUtf8(payload, at, at + length); + _offset = offset + length; + return value; +} +function decodeUtf8StringIndefinite(at, to) { + at += 1; + const vector = []; + for (const base = at; at < to;) { + if (payload[at] === 255) { + const data = alloc(vector.length); + data.set(vector, 0); + _offset = at - base + 2; + return bytesToUtf8(data, 0, data.length); + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + if (major !== majorUtf8String) { + throw new Error(`unexpected major type ${major} in indefinite string.`); + } + if (minor === minorIndefinite) { + throw new Error("nested indefinite string."); + } + const bytes = decodeUnstructuredByteString(at, to); + const length = _offset; + at += length; + for (let i = 0; i < bytes.length; ++i) { + vector.push(bytes[i]); + } + } + throw new Error("expected break marker."); +} +function decodeUnstructuredByteString(at, to) { + const length = decodeCount(at, to); + const offset = _offset; + at += offset; + if (to - at < length) { + throw new Error(`unstructured byte string len ${length} greater than remaining buf len.`); + } + const value = payload.subarray(at, at + length); + _offset = offset + length; + return value; +} +function decodeUnstructuredByteStringIndefinite(at, to) { + at += 1; + const vector = []; + for (const base = at; at < to;) { + if (payload[at] === 255) { + const data = alloc(vector.length); + data.set(vector, 0); + _offset = at - base + 2; + return data; + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + if (major !== majorUnstructuredByteString) { + throw new Error(`unexpected major type ${major} in indefinite string.`); + } + if (minor === minorIndefinite) { + throw new Error("nested indefinite string."); + } + const bytes = decodeUnstructuredByteString(at, to); + const length = _offset; + at += length; + for (let i = 0; i < bytes.length; ++i) { + vector.push(bytes[i]); + } + } + throw new Error("expected break marker."); +} +function decodeList(at, to) { + const 
listDataLength = decodeCount(at, to); + const offset = _offset; + at += offset; + const base = at; + const list = Array(listDataLength); + for (let i = 0; i < listDataLength; ++i) { + const item = decode(at, to); + const itemOffset = _offset; + list[i] = item; + at += itemOffset; + } + _offset = offset + (at - base); + return list; +} +function decodeListIndefinite(at, to) { + at += 1; + const list = []; + for (const base = at; at < to;) { + if (payload[at] === 255) { + _offset = at - base + 2; + return list; + } + const item = decode(at, to); + const n = _offset; + at += n; + list.push(item); + } + throw new Error("expected break marker."); +} +function decodeMap(at, to) { + const mapDataLength = decodeCount(at, to); + const offset = _offset; + at += offset; + const base = at; + const map = {}; + for (let i = 0; i < mapDataLength; ++i) { + if (at >= to) { + throw new Error("unexpected end of map payload."); + } + const major = (payload[at] & 224) >> 5; + if (major !== majorUtf8String) { + throw new Error(`unexpected major type ${major} for map key at index ${at}.`); + } + const key = decode(at, to); + at += _offset; + const value = decode(at, to); + at += _offset; + map[key] = value; + } + _offset = offset + (at - base); + return map; +} +function decodeMapIndefinite(at, to) { + at += 1; + const base = at; + const map = {}; + for (; at < to;) { + if (at >= to) { + throw new Error("unexpected end of map payload."); + } + if (payload[at] === 255) { + _offset = at - base + 2; + return map; + } + const major = (payload[at] & 224) >> 5; + if (major !== majorUtf8String) { + throw new Error(`unexpected major type ${major} for map key.`); + } + const key = decode(at, to); + at += _offset; + const value = decode(at, to); + at += _offset; + map[key] = value; + } + throw new Error("expected break marker."); +} +function decodeSpecial(at, to) { + const minor = payload[at] & 31; + switch (minor) { + case specialTrue: + case specialFalse: + _offset = 1; + return minor === specialTrue; + case specialNull: + _offset = 1; + return null; + case specialUndefined: + _offset = 1; + return null; + case extendedFloat16: + if (to - at < 3) { + throw new Error("incomplete float16 at end of buf."); + } + _offset = 3; + return bytesToFloat16(payload[at + 1], payload[at + 2]); + case extendedFloat32: + if (to - at < 5) { + throw new Error("incomplete float32 at end of buf."); + } + _offset = 5; + return dataView.getFloat32(at + 1); + case extendedFloat64: + if (to - at < 9) { + throw new Error("incomplete float64 at end of buf."); + } + _offset = 9; + return dataView.getFloat64(at + 1); + default: + throw new Error(`unexpected minor value ${minor}.`); + } +} +function castBigInt(bigInt) { + if (typeof bigInt === "number") { + return bigInt; + } + const num = Number(bigInt); + if (Number.MIN_SAFE_INTEGER <= num && num <= Number.MAX_SAFE_INTEGER) { + return num; + } + return bigInt; +} diff --git a/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-encode.js b/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-encode.js new file mode 100644 index 00000000..17af4e29 --- /dev/null +++ b/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-encode.js @@ -0,0 +1,191 @@ +import { fromUtf8 } from "@smithy/util-utf8"; +import { extendedFloat16, extendedFloat32, extendedFloat64, majorList, majorMap, majorNegativeInt64, majorSpecial, majorTag, majorUint64, majorUnstructuredByteString, majorUtf8String, specialFalse, specialNull, specialTrue, tagSymbol, } from "./cbor-types"; +import { alloc } from "./cbor-types"; 
+const USE_BUFFER = typeof Buffer !== "undefined"; +const initialSize = 2048; +let data = alloc(initialSize); +let dataView = new DataView(data.buffer, data.byteOffset, data.byteLength); +let cursor = 0; +function ensureSpace(bytes) { + const remaining = data.byteLength - cursor; + if (remaining < bytes) { + if (cursor < 16000000) { + resize(Math.max(data.byteLength * 4, data.byteLength + bytes)); + } + else { + resize(data.byteLength + bytes + 16000000); + } + } +} +export function toUint8Array() { + const out = alloc(cursor); + out.set(data.subarray(0, cursor), 0); + cursor = 0; + return out; +} +export function resize(size) { + const old = data; + data = alloc(size); + if (old) { + if (old.copy) { + old.copy(data, 0, 0, old.byteLength); + } + else { + data.set(old, 0); + } + } + dataView = new DataView(data.buffer, data.byteOffset, data.byteLength); +} +function encodeHeader(major, value) { + if (value < 24) { + data[cursor++] = (major << 5) | value; + } + else if (value < 1 << 8) { + data[cursor++] = (major << 5) | 24; + data[cursor++] = value; + } + else if (value < 1 << 16) { + data[cursor++] = (major << 5) | extendedFloat16; + dataView.setUint16(cursor, value); + cursor += 2; + } + else if (value < 2 ** 32) { + data[cursor++] = (major << 5) | extendedFloat32; + dataView.setUint32(cursor, value); + cursor += 4; + } + else { + data[cursor++] = (major << 5) | extendedFloat64; + dataView.setBigUint64(cursor, typeof value === "bigint" ? value : BigInt(value)); + cursor += 8; + } +} +export function encode(_input) { + const encodeStack = [_input]; + while (encodeStack.length) { + const input = encodeStack.pop(); + ensureSpace(typeof input === "string" ? input.length * 4 : 64); + if (typeof input === "string") { + if (USE_BUFFER) { + encodeHeader(majorUtf8String, Buffer.byteLength(input)); + cursor += data.write(input, cursor); + } + else { + const bytes = fromUtf8(input); + encodeHeader(majorUtf8String, bytes.byteLength); + data.set(bytes, cursor); + cursor += bytes.byteLength; + } + continue; + } + else if (typeof input === "number") { + if (Number.isInteger(input)) { + const nonNegative = input >= 0; + const major = nonNegative ? majorUint64 : majorNegativeInt64; + const value = nonNegative ? input : -input - 1; + if (value < 24) { + data[cursor++] = (major << 5) | value; + } + else if (value < 256) { + data[cursor++] = (major << 5) | 24; + data[cursor++] = value; + } + else if (value < 65536) { + data[cursor++] = (major << 5) | extendedFloat16; + data[cursor++] = value >> 8; + data[cursor++] = value; + } + else if (value < 4294967296) { + data[cursor++] = (major << 5) | extendedFloat32; + dataView.setUint32(cursor, value); + cursor += 4; + } + else { + data[cursor++] = (major << 5) | extendedFloat64; + dataView.setBigUint64(cursor, BigInt(value)); + cursor += 8; + } + continue; + } + data[cursor++] = (majorSpecial << 5) | extendedFloat64; + dataView.setFloat64(cursor, input); + cursor += 8; + continue; + } + else if (typeof input === "bigint") { + const nonNegative = input >= 0; + const major = nonNegative ? majorUint64 : majorNegativeInt64; + const value = nonNegative ? 
input : -input - BigInt(1); + const n = Number(value); + if (n < 24) { + data[cursor++] = (major << 5) | n; + } + else if (n < 256) { + data[cursor++] = (major << 5) | 24; + data[cursor++] = n; + } + else if (n < 65536) { + data[cursor++] = (major << 5) | extendedFloat16; + data[cursor++] = n >> 8; + data[cursor++] = n & 255; + } + else if (n < 4294967296) { + data[cursor++] = (major << 5) | extendedFloat32; + dataView.setUint32(cursor, n); + cursor += 4; + } + else { + data[cursor++] = (major << 5) | extendedFloat64; + dataView.setBigUint64(cursor, value); + cursor += 8; + } + continue; + } + else if (input === null) { + data[cursor++] = (majorSpecial << 5) | specialNull; + continue; + } + else if (typeof input === "boolean") { + data[cursor++] = (majorSpecial << 5) | (input ? specialTrue : specialFalse); + continue; + } + else if (typeof input === "undefined") { + throw new Error("@smithy/core/cbor: client may not serialize undefined value."); + } + else if (Array.isArray(input)) { + for (let i = input.length - 1; i >= 0; --i) { + encodeStack.push(input[i]); + } + encodeHeader(majorList, input.length); + continue; + } + else if (typeof input.byteLength === "number") { + ensureSpace(input.length * 2); + encodeHeader(majorUnstructuredByteString, input.length); + data.set(input, cursor); + cursor += input.byteLength; + continue; + } + else if (typeof input === "object") { + if (input[tagSymbol]) { + if ("tag" in input && "value" in input) { + encodeStack.push(input.value); + encodeHeader(majorTag, input.tag); + continue; + } + else { + throw new Error("tag encountered with missing fields, need 'tag' and 'value', found: " + JSON.stringify(input)); + } + } + const keys = Object.keys(input); + for (let i = keys.length - 1; i >= 0; --i) { + const key = keys[i]; + encodeStack.push(input[key]); + encodeStack.push(key); + } + encodeHeader(majorMap, keys.length); + continue; + } + throw new Error(`data type ${input?.constructor?.name ?? typeof input} not compatible for encoding.`); + } +} diff --git a/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-types.js b/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-types.js new file mode 100644 index 00000000..a720eb74 --- /dev/null +++ b/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-types.js @@ -0,0 +1,25 @@ +export const majorUint64 = 0; +export const majorNegativeInt64 = 1; +export const majorUnstructuredByteString = 2; +export const majorUtf8String = 3; +export const majorList = 4; +export const majorMap = 5; +export const majorTag = 6; +export const majorSpecial = 7; +export const specialFalse = 20; +export const specialTrue = 21; +export const specialNull = 22; +export const specialUndefined = 23; +export const extendedOneByte = 24; +export const extendedFloat16 = 25; +export const extendedFloat32 = 26; +export const extendedFloat64 = 27; +export const minorIndefinite = 31; +export function alloc(size) { + return typeof Buffer !== "undefined" ? 
Buffer.alloc(size) : new Uint8Array(size); +} +export const tagSymbol = Symbol("@smithy/core/cbor::tagSymbol"); +export function tag(data) { + data[tagSymbol] = true; + return data; +} diff --git a/node_modules/@smithy/core/dist-es/submodules/cbor/cbor.js b/node_modules/@smithy/core/dist-es/submodules/cbor/cbor.js new file mode 100644 index 00000000..8df975fc --- /dev/null +++ b/node_modules/@smithy/core/dist-es/submodules/cbor/cbor.js @@ -0,0 +1,21 @@ +import { decode, setPayload } from "./cbor-decode"; +import { encode, resize, toUint8Array } from "./cbor-encode"; +export const cbor = { + deserialize(payload) { + setPayload(payload); + return decode(0, payload.length); + }, + serialize(input) { + try { + encode(input); + return toUint8Array(); + } + catch (e) { + toUint8Array(); + throw e; + } + }, + resizeEncodingBuffer(size) { + resize(size); + }, +}; diff --git a/node_modules/@smithy/core/dist-es/submodules/cbor/index.js b/node_modules/@smithy/core/dist-es/submodules/cbor/index.js new file mode 100644 index 00000000..0910d274 --- /dev/null +++ b/node_modules/@smithy/core/dist-es/submodules/cbor/index.js @@ -0,0 +1,3 @@ +export { cbor } from "./cbor"; +export * from "./parseCborBody"; +export { tagSymbol, tag } from "./cbor-types"; diff --git a/node_modules/@smithy/core/dist-es/submodules/cbor/parseCborBody.js b/node_modules/@smithy/core/dist-es/submodules/cbor/parseCborBody.js new file mode 100644 index 00000000..09f522ed --- /dev/null +++ b/node_modules/@smithy/core/dist-es/submodules/cbor/parseCborBody.js @@ -0,0 +1,83 @@ +import { collectBody } from "@smithy/core/protocols"; +import { HttpRequest as __HttpRequest } from "@smithy/protocol-http"; +import { calculateBodyLength } from "@smithy/util-body-length-browser"; +import { cbor } from "./cbor"; +import { tag, tagSymbol } from "./cbor-types"; +export const parseCborBody = (streamBody, context) => { + return collectBody(streamBody, context).then(async (bytes) => { + if (bytes.length) { + try { + return cbor.deserialize(bytes); + } + catch (e) { + Object.defineProperty(e, "$responseBodyText", { + value: context.utf8Encoder(bytes), + }); + throw e; + } + } + return {}; + }); +}; +export const dateToTag = (date) => { + return tag({ + tag: 1, + value: date.getTime() / 1000, + }); +}; +export const parseCborErrorBody = async (errorBody, context) => { + const value = await parseCborBody(errorBody, context); + value.message = value.message ?? 
value.Message; + return value; +}; +export const loadSmithyRpcV2CborErrorCode = (output, data) => { + const sanitizeErrorCode = (rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + if (data["__type"] !== undefined) { + return sanitizeErrorCode(data["__type"]); + } + if (data.code !== undefined) { + return sanitizeErrorCode(data.code); + } +}; +export const checkCborResponse = (response) => { + if (String(response.headers["smithy-protocol"]).toLowerCase() !== "rpc-v2-cbor") { + throw new Error("Malformed RPCv2 CBOR response, status: " + response.statusCode); + } +}; +export const buildHttpRpcRequest = async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? basePath.slice(0, -1) + path : basePath + path, + headers, + }; + if (resolvedHostname !== undefined) { + contents.hostname = resolvedHostname; + } + if (body !== undefined) { + contents.body = body; + try { + contents.headers["content-length"] = String(calculateBodyLength(body)); + } + catch (e) { } + } + return new __HttpRequest(contents); +}; diff --git a/node_modules/@smithy/core/dist-es/submodules/protocols/collect-stream-body.js b/node_modules/@smithy/core/dist-es/submodules/protocols/collect-stream-body.js new file mode 100644 index 00000000..b6a5c0ba --- /dev/null +++ b/node_modules/@smithy/core/dist-es/submodules/protocols/collect-stream-body.js @@ -0,0 +1,11 @@ +import { Uint8ArrayBlobAdapter } from "@smithy/util-stream"; +export const collectBody = async (streamBody = new Uint8Array(), context) => { + if (streamBody instanceof Uint8Array) { + return Uint8ArrayBlobAdapter.mutate(streamBody); + } + if (!streamBody) { + return Uint8ArrayBlobAdapter.mutate(new Uint8Array()); + } + const fromContext = context.streamCollector(streamBody); + return Uint8ArrayBlobAdapter.mutate(await fromContext); +}; diff --git a/node_modules/@smithy/core/dist-es/submodules/protocols/extended-encode-uri-component.js b/node_modules/@smithy/core/dist-es/submodules/protocols/extended-encode-uri-component.js new file mode 100644 index 00000000..5baeaf56 --- /dev/null +++ b/node_modules/@smithy/core/dist-es/submodules/protocols/extended-encode-uri-component.js @@ -0,0 +1,5 @@ +export function extendedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} diff --git a/node_modules/@smithy/core/dist-es/submodules/protocols/index.js b/node_modules/@smithy/core/dist-es/submodules/protocols/index.js new file mode 100644 index 00000000..a5de22f1 --- /dev/null +++ b/node_modules/@smithy/core/dist-es/submodules/protocols/index.js @@ -0,0 +1,4 @@ +export * from "./collect-stream-body"; +export * from "./extended-encode-uri-component"; +export * from "./requestBuilder"; +export * from "./resolve-path"; diff --git a/node_modules/@smithy/core/dist-es/submodules/protocols/requestBuilder.js b/node_modules/@smithy/core/dist-es/submodules/protocols/requestBuilder.js new file mode 100644 index 00000000..3391ef2f --- /dev/null 
+++ b/node_modules/@smithy/core/dist-es/submodules/protocols/requestBuilder.js @@ -0,0 +1,67 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { resolvedPath } from "./resolve-path"; +export function requestBuilder(input, context) { + return new RequestBuilder(input, context); +} +export class RequestBuilder { + constructor(input, context) { + this.input = input; + this.context = context; + this.query = {}; + this.method = ""; + this.headers = {}; + this.path = ""; + this.body = null; + this.hostname = ""; + this.resolvePathStack = []; + } + async build() { + const { hostname, protocol = "https", port, path: basePath } = await this.context.endpoint(); + this.path = basePath; + for (const resolvePath of this.resolvePathStack) { + resolvePath(this.path); + } + return new HttpRequest({ + protocol, + hostname: this.hostname || hostname, + port, + method: this.method, + path: this.path, + query: this.query, + body: this.body, + headers: this.headers, + }); + } + hn(hostname) { + this.hostname = hostname; + return this; + } + bp(uriLabel) { + this.resolvePathStack.push((basePath) => { + this.path = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` + uriLabel; + }); + return this; + } + p(memberName, labelValueProvider, uriLabel, isGreedyLabel) { + this.resolvePathStack.push((path) => { + this.path = resolvedPath(path, this.input, memberName, labelValueProvider, uriLabel, isGreedyLabel); + }); + return this; + } + h(headers) { + this.headers = headers; + return this; + } + q(query) { + this.query = query; + return this; + } + b(body) { + this.body = body; + return this; + } + m(method) { + this.method = method; + return this; + } +} diff --git a/node_modules/@smithy/core/dist-es/submodules/protocols/resolve-path.js b/node_modules/@smithy/core/dist-es/submodules/protocols/resolve-path.js new file mode 100644 index 00000000..8483e014 --- /dev/null +++ b/node_modules/@smithy/core/dist-es/submodules/protocols/resolve-path.js @@ -0,0 +1,19 @@ +import { extendedEncodeURIComponent } from "./extended-encode-uri-component"; +export const resolvedPath = (resolvedPath, input, memberName, labelValueProvider, uriLabel, isGreedyLabel) => { + if (input != null && input[memberName] !== undefined) { + const labelValue = labelValueProvider(); + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: " + memberName + "."); + } + resolvedPath = resolvedPath.replace(uriLabel, isGreedyLabel + ? 
labelValue + .split("/") + .map((segment) => extendedEncodeURIComponent(segment)) + .join("/") + : extendedEncodeURIComponent(labelValue)); + } + else { + throw new Error("No value provided for input HTTP label: " + memberName + "."); + } + return resolvedPath; +}; diff --git a/node_modules/@smithy/core/dist-es/submodules/serde/index.js b/node_modules/@smithy/core/dist-es/submodules/serde/index.js new file mode 100644 index 00000000..a70d0dda --- /dev/null +++ b/node_modules/@smithy/core/dist-es/submodules/serde/index.js @@ -0,0 +1 @@ +export * from "./value/NumericValue"; diff --git a/node_modules/@smithy/core/dist-es/submodules/serde/value/NumericValue.js b/node_modules/@smithy/core/dist-es/submodules/serde/value/NumericValue.js new file mode 100644 index 00000000..6af270fa --- /dev/null +++ b/node_modules/@smithy/core/dist-es/submodules/serde/value/NumericValue.js @@ -0,0 +1,9 @@ +export class NumericValue { + constructor(string, type) { + this.string = string; + this.type = type; + } +} +export function nv(string) { + return new NumericValue(string, "bigDecimal"); +} diff --git a/node_modules/@smithy/core/dist-es/util-identity-and-auth/DefaultIdentityProviderConfig.js b/node_modules/@smithy/core/dist-es/util-identity-and-auth/DefaultIdentityProviderConfig.js new file mode 100644 index 00000000..3bc10161 --- /dev/null +++ b/node_modules/@smithy/core/dist-es/util-identity-and-auth/DefaultIdentityProviderConfig.js @@ -0,0 +1,13 @@ +export class DefaultIdentityProviderConfig { + constructor(config) { + this.authSchemes = new Map(); + for (const [key, value] of Object.entries(config)) { + if (value !== undefined) { + this.authSchemes.set(key, value); + } + } + } + getIdentityProvider(schemeId) { + return this.authSchemes.get(schemeId); + } +} diff --git a/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js b/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js new file mode 100644 index 00000000..8b6f598c --- /dev/null +++ b/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js @@ -0,0 +1,34 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { HttpApiKeyAuthLocation } from "@smithy/types"; +export class HttpApiKeyAuthSigner { + async sign(httpRequest, identity, signingProperties) { + if (!signingProperties) { + throw new Error("request could not be signed with `apiKey` since the `name` and `in` signer properties are missing"); + } + if (!signingProperties.name) { + throw new Error("request could not be signed with `apiKey` since the `name` signer property is missing"); + } + if (!signingProperties.in) { + throw new Error("request could not be signed with `apiKey` since the `in` signer property is missing"); + } + if (!identity.apiKey) { + throw new Error("request could not be signed with `apiKey` since the `apiKey` is not defined"); + } + const clonedRequest = HttpRequest.clone(httpRequest); + if (signingProperties.in === HttpApiKeyAuthLocation.QUERY) { + clonedRequest.query[signingProperties.name] = identity.apiKey; + } + else if (signingProperties.in === HttpApiKeyAuthLocation.HEADER) { + clonedRequest.headers[signingProperties.name] = signingProperties.scheme + ? 
`${signingProperties.scheme} ${identity.apiKey}` + : identity.apiKey; + } + else { + throw new Error("request can only be signed with `apiKey` locations `query` or `header`, " + + "but found: `" + + signingProperties.in + + "`"); + } + return clonedRequest; + } +} diff --git a/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js b/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js new file mode 100644 index 00000000..b92a9c35 --- /dev/null +++ b/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js @@ -0,0 +1,11 @@ +import { HttpRequest } from "@smithy/protocol-http"; +export class HttpBearerAuthSigner { + async sign(httpRequest, identity, signingProperties) { + const clonedRequest = HttpRequest.clone(httpRequest); + if (!identity.token) { + throw new Error("request could not be signed with `token` since the `token` is not defined"); + } + clonedRequest.headers["Authorization"] = `Bearer ${identity.token}`; + return clonedRequest; + } +} diff --git a/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/index.js b/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/index.js new file mode 100644 index 00000000..9d240feb --- /dev/null +++ b/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/index.js @@ -0,0 +1,3 @@ +export * from "./httpApiKeyAuth"; +export * from "./httpBearerAuth"; +export * from "./noAuth"; diff --git a/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/noAuth.js b/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/noAuth.js new file mode 100644 index 00000000..356193d1 --- /dev/null +++ b/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/noAuth.js @@ -0,0 +1,5 @@ +export class NoAuthSigner { + async sign(httpRequest, identity, signingProperties) { + return httpRequest; + } +} diff --git a/node_modules/@smithy/core/dist-es/util-identity-and-auth/index.js b/node_modules/@smithy/core/dist-es/util-identity-and-auth/index.js new file mode 100644 index 00000000..87ba64ba --- /dev/null +++ b/node_modules/@smithy/core/dist-es/util-identity-and-auth/index.js @@ -0,0 +1,3 @@ +export * from "./DefaultIdentityProviderConfig"; +export * from "./httpAuthSchemes"; +export * from "./memoizeIdentityProvider"; diff --git a/node_modules/@smithy/core/dist-es/util-identity-and-auth/memoizeIdentityProvider.js b/node_modules/@smithy/core/dist-es/util-identity-and-auth/memoizeIdentityProvider.js new file mode 100644 index 00000000..80505850 --- /dev/null +++ b/node_modules/@smithy/core/dist-es/util-identity-and-auth/memoizeIdentityProvider.js @@ -0,0 +1,53 @@ +export const createIsIdentityExpiredFunction = (expirationMs) => (identity) => doesIdentityRequireRefresh(identity) && identity.expiration.getTime() - Date.now() < expirationMs; +export const EXPIRATION_MS = 300000; +export const isIdentityExpired = createIsIdentityExpiredFunction(EXPIRATION_MS); +export const doesIdentityRequireRefresh = (identity) => identity.expiration !== undefined; +export const memoizeIdentityProvider = (provider, isExpired, requiresRefresh) => { + if (provider === undefined) { + return undefined; + } + const normalizedProvider = typeof provider !== "function" ? 
async () => Promise.resolve(provider) : provider; + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = async (options) => { + if (!pending) { + pending = normalizedProvider(options); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } + finally { + pending = undefined; + } + return resolved; + }; + if (isExpired === undefined) { + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(options); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(options); + } + if (isConstant) { + return resolved; + } + if (!requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(options); + return resolved; + } + return resolved; + }; +};
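The memoized provider above coalesces concurrent resolutions into one in-flight promise and caches the identity until it nears expiry (EXPIRATION_MS, five minutes). A minimal usage sketch; `fetchToken` and its return shape are illustrative, but the three imports are exactly the exports added in this diff:

```ts
import {
  memoizeIdentityProvider,
  isIdentityExpired,
  doesIdentityRequireRefresh,
} from "@smithy/core";

// Illustrative token fetch; the `expiration` field is what makes the
// memoizer treat this identity as refreshable rather than constant.
const fetchToken = async () => ({
  token: "example-token",
  expiration: new Date(Date.now() + 3600 * 1000),
});

const provider = memoizeIdentityProvider(
  fetchToken,
  isIdentityExpired,
  doesIdentityRequireRefresh
);

// The first call awaits fetchToken; later calls reuse the cached identity
// until it falls inside the five-minute expiry window, or until the caller
// passes { forceRefresh: true }.
const identity = await provider!({});
```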
diff --git a/node_modules/@smithy/core/dist-types/getSmithyContext.d.ts b/node_modules/@smithy/core/dist-types/getSmithyContext.d.ts new file mode 100644 index 00000000..523ee47d --- /dev/null +++ b/node_modules/@smithy/core/dist-types/getSmithyContext.d.ts @@ -0,0 +1,5 @@ +import { HandlerExecutionContext } from "@smithy/types"; +/** + * @internal + */ +export declare const getSmithyContext: (context: HandlerExecutionContext) => Record<string | number | symbol, any>;
diff --git a/node_modules/@smithy/core/dist-types/index.d.ts b/node_modules/@smithy/core/dist-types/index.d.ts new file mode 100644 index 00000000..1dcdba11 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/index.d.ts @@ -0,0 +1,8 @@ +export * from "./getSmithyContext"; +export * from "./middleware-http-auth-scheme"; +export * from "./middleware-http-signing"; +export * from "./normalizeProvider"; +export { createPaginator } from "./pagination/createPaginator"; +export * from "./protocols/requestBuilder"; +export * from "./setFeature"; +export * from "./util-identity-and-auth";
diff --git a/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts b/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts new file mode 100644 index 00000000..996b0dea --- /dev/null +++ b/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts @@ -0,0 +1,18 @@ +import { HandlerExecutionContext, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, IdentityProviderConfig, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { PreviouslyResolved } from "./httpAuthSchemeMiddleware"; +/** + * @internal + */ +export declare const httpAuthSchemeEndpointRuleSetMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +interface HttpAuthSchemeEndpointRuleSetPluginOptions<TConfig extends object, TContext extends HandlerExecutionContext, TParameters extends HttpAuthSchemeParameters, TInput extends object> { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider<TConfig, TContext, TParameters, TInput>; + identityProviderConfigProvider: (config: TConfig) => Promise<IdentityProviderConfig>; +} +/** + * @internal + */ +export declare const getHttpAuthSchemeEndpointRuleSetPlugin: <TConfig extends object, TContext extends HandlerExecutionContext, TParameters extends HttpAuthSchemeParameters, TInput extends object>(config: TConfig & PreviouslyResolved<TParameters>, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }: HttpAuthSchemeEndpointRuleSetPluginOptions<TConfig, TContext, TParameters, TInput>) => Pluggable<any, any>; +export {};
diff --git a/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts b/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts new file mode 100644 index 00000000..2e57733e --- /dev/null +++ b/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts @@ -0,0 +1,18 @@ +import { HandlerExecutionContext, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, IdentityProviderConfig, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { PreviouslyResolved } from "./httpAuthSchemeMiddleware"; +/** + * @internal + */ +export declare const httpAuthSchemeMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +interface HttpAuthSchemePluginOptions<TConfig extends object, TContext extends HandlerExecutionContext, TParameters extends HttpAuthSchemeParameters, TInput extends object> { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider<TConfig, TContext, TParameters, TInput>; + identityProviderConfigProvider: (config: TConfig) => Promise<IdentityProviderConfig>; +} +/** + * @internal + */ +export declare const getHttpAuthSchemePlugin: <TConfig extends object, TContext extends HandlerExecutionContext, TParameters extends HttpAuthSchemeParameters, TInput extends object>(config: TConfig & PreviouslyResolved<TParameters>, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }: HttpAuthSchemePluginOptions<TConfig, TContext, TParameters, TInput>) => Pluggable<any, any>; +export {};
diff --git a/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts b/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts new file mode 100644 index 00000000..50f1ea8c --- /dev/null +++ b/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts @@ -0,0 +1,33 @@ +import { HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, IdentityProviderConfig, Provider, SelectedHttpAuthScheme, SerializeMiddleware, SMITHY_CONTEXT_KEY } from "@smithy/types"; +/** + * @internal + */ +export interface PreviouslyResolved<TParameters extends HttpAuthSchemeParameters> { + authSchemePreference?: Provider<string[]>; + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: HttpAuthSchemeProvider<TParameters>; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareOptions<TConfig extends object, TContext extends HandlerExecutionContext, TParameters extends HttpAuthSchemeParameters, TInput extends object> { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider<TConfig, TContext, TParameters, TInput>; + identityProviderConfigProvider: (config: TConfig) => Promise<IdentityProviderConfig>; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareSmithyContext extends Record<string, unknown> { + selectedHttpAuthScheme?: SelectedHttpAuthScheme; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareHandlerExecutionContext extends HandlerExecutionContext { + [SMITHY_CONTEXT_KEY]?: HttpAuthSchemeMiddlewareSmithyContext; +} +/** + * @internal + */ +export declare const httpAuthSchemeMiddleware: <TInput extends object, TConfig extends object, TContext extends HttpAuthSchemeMiddlewareHandlerExecutionContext, TParameters extends HttpAuthSchemeParameters>(config: TConfig & PreviouslyResolved<TParameters>, mwOptions: HttpAuthSchemeMiddlewareOptions<TConfig, TContext, TParameters, TInput>) => SerializeMiddleware<TInput, any>; +export {};
diff --git a/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/index.d.ts b/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/index.d.ts new file mode 100644 index 00000000..5042e7dc --- /dev/null +++ b/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/index.d.ts @@ -0,0 +1,3 @@ +export * from "./httpAuthSchemeMiddleware"; +export * from "./getHttpAuthSchemeEndpointRuleSetPlugin"; +export * from "./getHttpAuthSchemePlugin";
diff --git a/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/resolveAuthOptions.d.ts b/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/resolveAuthOptions.d.ts new file mode 100644 index 00000000..52fc604d --- /dev/null +++ b/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/resolveAuthOptions.d.ts @@ -0,0 +1,10 @@ +import { HttpAuthOption } from "@smithy/types"; +/** + * Resolves list of auth options based on the supported ones, vs the preference list. + * + * @param candidateAuthOptions list of supported auth options selected by the standard + * resolution process (model-based, endpoints 2.0, etc.) + * @param authSchemePreference list of auth schemes preferred by user. + * @returns + */ +export declare const resolveAuthOptions: (candidateAuthOptions: HttpAuthOption[], authSchemePreference: string[]) => HttpAuthOption[];
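resolveAuthOptions is internal (the submodule index above does not re-export it), so the sketch below uses a deep dist-es path purely for illustration; Node's export map for the package may block such imports in practice. The point is the ordering contract: a preference entry matches the part of schemeId after "#", matched candidates are promoted in preference order, and unmatched candidates keep their original relative order at the end:

```ts
import { resolveAuthOptions } from "@smithy/core/dist-es/middleware-http-auth-scheme/resolveAuthOptions";
import type { HttpAuthOption } from "@smithy/types";

const candidates: HttpAuthOption[] = [
  { schemeId: "aws.auth#sigv4" },
  { schemeId: "smithy.api#httpBearerAuth" },
];

// "httpBearerAuth" matches the suffix of the second candidate, so it is
// promoted; sigv4 follows because unmatched options are appended last.
const ordered = resolveAuthOptions(candidates, ["httpBearerAuth"]);
console.log(ordered.map((o) => o.schemeId));
// ["smithy.api#httpBearerAuth", "aws.auth#sigv4"]
```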
diff --git a/node_modules/@smithy/core/dist-types/middleware-http-signing/getHttpSigningMiddleware.d.ts b/node_modules/@smithy/core/dist-types/middleware-http-signing/getHttpSigningMiddleware.d.ts new file mode 100644 index 00000000..56c89a21 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/middleware-http-signing/getHttpSigningMiddleware.d.ts @@ -0,0 +1,9 @@ +import { FinalizeRequestHandlerOptions, Pluggable, RelativeMiddlewareOptions } from "@smithy/types"; +/** + * @internal + */ +export declare const httpSigningMiddlewareOptions: FinalizeRequestHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getHttpSigningPlugin: (config: object) => Pluggable<any, any>;
diff --git a/node_modules/@smithy/core/dist-types/middleware-http-signing/httpSigningMiddleware.d.ts b/node_modules/@smithy/core/dist-types/middleware-http-signing/httpSigningMiddleware.d.ts new file mode 100644 index 00000000..3b436117 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/middleware-http-signing/httpSigningMiddleware.d.ts @@ -0,0 +1,5 @@ +import { FinalizeRequestMiddleware } from "@smithy/types"; +/** + * @internal + */ +export declare const httpSigningMiddleware: (config: object) => FinalizeRequestMiddleware<any, any>;
diff --git a/node_modules/@smithy/core/dist-types/middleware-http-signing/index.d.ts b/node_modules/@smithy/core/dist-types/middleware-http-signing/index.d.ts new file mode 100644 index 00000000..7bc6cfe1 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/middleware-http-signing/index.d.ts @@ -0,0 +1,2 @@ +export * from "./httpSigningMiddleware"; +export * from "./getHttpSigningMiddleware";
diff --git a/node_modules/@smithy/core/dist-types/normalizeProvider.d.ts b/node_modules/@smithy/core/dist-types/normalizeProvider.d.ts new file mode 100644 index 00000000..4fe2d9ad --- /dev/null +++ b/node_modules/@smithy/core/dist-types/normalizeProvider.d.ts @@ -0,0 +1,7 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * @returns a provider function for the input value if it isn't already one. + */ +export declare const normalizeProvider: <T>(input: T | Provider<T>) => Provider<T>;
diff --git a/node_modules/@smithy/core/dist-types/pagination/createPaginator.d.ts b/node_modules/@smithy/core/dist-types/pagination/createPaginator.d.ts new file mode 100644 index 00000000..78fcbe05 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/pagination/createPaginator.d.ts @@ -0,0 +1,7 @@ +import type { PaginationConfiguration, Paginator } from "@smithy/types"; +/** + * @internal + * + * Creates a paginator. + */ +export declare function createPaginator<PaginationConfigType extends PaginationConfiguration, InputType extends object, OutputType extends object>(ClientCtor: any, CommandCtor: any, inputTokenName: string, outputTokenName: string, pageSizeTokenName?: string): (config: PaginationConfigType, input: InputType, ...additionalArguments: any[]) => Paginator<OutputType>;
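createPaginator (declared above, implemented in dist-es earlier in this diff) turns a client/command pair plus token field names into an async generator factory. A toy sketch; WidgetClient and ListWidgetsCommand are invented for illustration, and the loose casts stand in for the full Smithy Client typing that generated SDK code would provide:

```ts
import { createPaginator } from "@smithy/core";

class ListWidgetsCommand {
  constructor(public input: { NextToken?: string; MaxResults?: number }) {}
}
class WidgetClient {
  // Toy backend: the first page returns a token, the second ends the stream.
  async send(command: ListWidgetsCommand) {
    return command.input.NextToken
      ? { Widgets: ["b"] }
      : { NextToken: "page-2", Widgets: ["a"] };
  }
}

// In generated SDKs the config type extends PaginationConfiguration and the
// client is a real Smithy Client; `any` keeps this sketch self-contained.
const paginateListWidgets = createPaginator<any, any, { NextToken?: string; Widgets?: string[] }>(
  WidgetClient, ListWidgetsCommand, "NextToken", "NextToken", "MaxResults");

// One send() per yielded page; iteration stops when the output token path
// ("NextToken") resolves to undefined, or repeats when stopOnSameToken is set.
for await (const page of paginateListWidgets({ client: new WidgetClient() } as any, {})) {
  console.log(page.Widgets); // ["a"], then ["b"]
}
```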
diff --git a/node_modules/@smithy/core/dist-types/protocols/requestBuilder.d.ts b/node_modules/@smithy/core/dist-types/protocols/requestBuilder.d.ts new file mode 100644 index 00000000..8e2f2eff --- /dev/null +++ b/node_modules/@smithy/core/dist-types/protocols/requestBuilder.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { requestBuilder } from "@smithy/core/protocols";
diff --git a/node_modules/@smithy/core/dist-types/setFeature.d.ts b/node_modules/@smithy/core/dist-types/setFeature.d.ts new file mode 100644 index 00000000..279106c1 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/setFeature.d.ts @@ -0,0 +1,12 @@ +import type { HandlerExecutionContext, SmithyFeatures } from "@smithy/types"; +/** + * @internal + * Indicates to the request context that a given feature is active. + * + * @param context - handler execution context. + * @param feature - readable name of feature. + * @param value - encoding value of feature. This is required because the + * specification asks the library not to include a runtime lookup of all + * the feature identifiers. + */ +export declare function setFeature<F extends keyof SmithyFeatures>(context: HandlerExecutionContext, feature: F, value: SmithyFeatures[F]): void;
diff --git a/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-decode.d.ts b/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-decode.d.ts new file mode 100644 index 00000000..baf39613 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-decode.d.ts @@ -0,0 +1,17 @@ +import { CborValueType, Float32, Uint8, Uint32 } from "./cbor-types"; +/** + * @internal + * @param bytes - to be set as the decode source. + * + * Sets the decode bytearray source and its data view. + */ +export declare function setPayload(bytes: Uint8Array): void; +/** + * @internal + * Decodes the data between the two indices. + */ +export declare function decode(at: Uint32, to: Uint32): CborValueType; +/** + * @internal + */ +export declare function bytesToFloat16(a: Uint8, b: Uint8): Float32;
diff --git a/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-encode.d.ts b/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-encode.d.ts new file mode 100644 index 00000000..bfc3328b --- /dev/null +++ b/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-encode.d.ts @@ -0,0 +1,9 @@ +/** + * @internal + */ +export declare function toUint8Array(): Uint8Array; +export declare function resize(size: number): void; +/** + * @param _input - JS data object. + */ +export declare function encode(_input: any): void;
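bytesToFloat16, declared just above and implemented in cbor-decode.js earlier in this diff, unpacks an IEEE 754 half-precision value from two bytes: 1 sign bit, 5 exponent bits (bias 15), 10 fraction bits. A worked check; the deep dist-es import is for illustration only, since the cbor submodule index does not re-export this helper:

```ts
import { bytesToFloat16 } from "@smithy/core/dist-es/submodules/cbor/cbor-decode";

// 0x3C00: sign 0, exponent 0b01111 (15), fraction 0
//   => 2^(15-15) * (1 + 0/1024) = 1
console.log(bytesToFloat16(0x3c, 0x00)); // 1
// 0xBC00 flips only the sign bit => -1
console.log(bytesToFloat16(0xbc, 0x00)); // -1
// 0x7C00: exponent all ones with zero fraction encodes Infinity
console.log(bytesToFloat16(0x7c, 0x00)); // Infinity
```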
diff --git a/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-types.d.ts b/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-types.d.ts new file mode 100644 index 00000000..dd413380 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-types.d.ts @@ -0,0 +1,64 @@ +/// <reference types="node" /> +/// <reference types="node" /> +export type CborItemType = undefined | boolean | number | bigint | [CborUnstructuredByteStringType, Uint64] | string | CborTagType; +export type CborTagType = { + tag: Uint64 | number; + value: CborValueType; + [tagSymbol]: true; +}; +export type CborUnstructuredByteStringType = Uint8Array; +export type CborListType<T = CborValueType> = Array<T>; +export type CborMapType<T = CborValueType> = Record<string, T>; +export type CborCollectionType = CborMapType | CborListType; +export type CborValueType = CborItemType | CborCollectionType | any; +export type CborArgumentLength = 1 | 2 | 4 | 8; +export type CborArgumentLengthOffset = 1 | 2 | 3 | 5 | 9; +export type CborOffset = number; +export type Uint8 = number; +export type Uint32 = number; +export type Uint64 = bigint; +export type Float32 = number; +export type Int64 = bigint; +export type Float16Binary = number; +export type Float32Binary = number; +export type CborMajorType = typeof majorUint64 | typeof majorNegativeInt64 | typeof majorUnstructuredByteString | typeof majorUtf8String | typeof majorList | typeof majorMap | typeof majorTag | typeof majorSpecial; +export declare const majorUint64 = 0; +export declare const majorNegativeInt64 = 1; +export declare const majorUnstructuredByteString = 2; +export declare const majorUtf8String = 3; +export declare const majorList = 4; +export declare const majorMap = 5; +export declare const majorTag = 6; +export declare const majorSpecial = 7; +export declare const specialFalse = 20; +export declare const specialTrue = 21; +export declare const specialNull = 22; +export declare const specialUndefined = 23; +export declare const extendedOneByte = 24; +export declare const extendedFloat16 = 25; +export declare const extendedFloat32 = 26; +export declare const extendedFloat64 = 27; +export declare const minorIndefinite = 31; +export declare function alloc(size: number): Uint8Array | Buffer; +/** + * @public + * + * The presence of this symbol as an object key indicates it should be considered a tag + * for CBOR serialization purposes. + * + * The object must also have the properties "tag" and "value". + */ +export declare const tagSymbol: unique symbol; +/** + * @public + * Applies the tag symbol to the object. + */ +export declare function tag(data: { + tag: number | bigint; + value: any; + [tagSymbol]?: true; +}): { + tag: number | bigint; + value: any; + [tagSymbol]: true; };
diff --git a/node_modules/@smithy/core/dist-types/submodules/cbor/cbor.d.ts b/node_modules/@smithy/core/dist-types/submodules/cbor/cbor.d.ts new file mode 100644 index 00000000..7577213e --- /dev/null +++ b/node_modules/@smithy/core/dist-types/submodules/cbor/cbor.d.ts @@ -0,0 +1,26 @@ +/** + * This implementation is synchronous and only implements the parts of CBOR + * specification used by Smithy RPCv2 CBOR protocol. + * + * This cbor serde implementation is derived from AWS SDK for Go's implementation. + * @see https://github.com/aws/smithy-go/tree/main/encoding/cbor + * + * The cbor-x implementation was also instructional: + * @see https://github.com/kriszyp/cbor-x + */ +export declare const cbor: { + deserialize(payload: Uint8Array): any; + serialize(input: any): Uint8Array; + /** + * @public + * @param size - byte length to allocate. + * + * This may be used to garbage collect the CBOR + * shared encoding buffer space, + * e.g. resizeEncodingBuffer(0); + * + * This may also be used to pre-allocate more space for + * CBOR encoding, e.g. resizeEncodingBuffer(100_000_000); + */ + resizeEncodingBuffer(size: number): void; +};
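A round-trip sketch of the cbor facade just declared, importable via the package's @smithy/core/cbor subpath. tag() (added earlier in this diff) marks an object for major-type-6 encoding, the same mechanism dateToTag uses for timestamps:

```ts
import { cbor, tag } from "@smithy/core/cbor";

const bytes = cbor.serialize({
  name: "example",
  count: 3,
  // Tag 1 is the CBOR epoch-seconds timestamp tag, as used by dateToTag.
  when: tag({ tag: 1, value: 1_700_000_000 }),
});

const decoded = cbor.deserialize(bytes);
console.log(decoded.name, decoded.count, decoded.when.tag, decoded.when.value);
// "example" 3 1 1700000000
```

Note that serialize writes into a shared module-level buffer, which is why resizeEncodingBuffer exists to reclaim or pre-allocate that space.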
diff --git a/node_modules/@smithy/core/dist-types/submodules/cbor/index.d.ts b/node_modules/@smithy/core/dist-types/submodules/cbor/index.d.ts new file mode 100644 index 00000000..0910d274 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/submodules/cbor/index.d.ts @@ -0,0 +1,3 @@ +export { cbor } from "./cbor"; +export * from "./parseCborBody"; +export { tagSymbol, tag } from "./cbor-types";
diff --git a/node_modules/@smithy/core/dist-types/submodules/cbor/parseCborBody.d.ts b/node_modules/@smithy/core/dist-types/submodules/cbor/parseCborBody.d.ts new file mode 100644 index 00000000..8811679c --- /dev/null +++ b/node_modules/@smithy/core/dist-types/submodules/cbor/parseCborBody.d.ts @@ -0,0 +1,31 @@ +import { HttpRequest as __HttpRequest } from "@smithy/protocol-http"; +import { HeaderBag as __HeaderBag, HttpResponse, SerdeContext as __SerdeContext, SerdeContext } from "@smithy/types"; +import { tag, tagSymbol } from "./cbor-types"; +/** + * @internal + */ +export declare const parseCborBody: (streamBody: any, context: SerdeContext) => any; +/** + * @internal + */ +export declare const dateToTag: (date: Date) => { + tag: number | bigint; + value: any; + [tagSymbol]: true; +}; +/** + * @internal + */ +export declare const parseCborErrorBody: (errorBody: any, context: SerdeContext) => Promise<any>; +/** + * @internal + */ +export declare const loadSmithyRpcV2CborErrorCode: (output: HttpResponse, data: any) => string | undefined; +/** + * @internal + */ +export declare const checkCborResponse: (response: HttpResponse) => void; +/** + * @internal + */ +export declare const buildHttpRpcRequest: (context: __SerdeContext, headers: __HeaderBag, path: string, resolvedHostname: string | undefined, body: any) => Promise<__HttpRequest>;
diff --git a/node_modules/@smithy/core/dist-types/submodules/protocols/collect-stream-body.d.ts b/node_modules/@smithy/core/dist-types/submodules/protocols/collect-stream-body.d.ts new file mode 100644 index 00000000..b555804b --- /dev/null +++ b/node_modules/@smithy/core/dist-types/submodules/protocols/collect-stream-body.d.ts @@ -0,0 +1,10 @@ +import { SerdeContext } from "@smithy/types"; +import { Uint8ArrayBlobAdapter } from "@smithy/util-stream"; +/** + * @internal + * + * Collect low-level response body stream to Uint8Array. + */ +export declare const collectBody: (streamBody: any, context: { + streamCollector: SerdeContext["streamCollector"]; +}) => Promise<Uint8ArrayBlobAdapter>;
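loadSmithyRpcV2CborErrorCode, declared above and re-exported through the cbor submodule index, normalizes a service error's __type or code field by trimming metadata after "," or ":" and the namespace before "#". A synthetic illustration; the empty response object is acceptable here only because the current implementation never reads its first argument:

```ts
import { loadSmithyRpcV2CborErrorCode } from "@smithy/core/cbor";
import type { HttpResponse } from "@smithy/types";

const data = { __type: "com.example#ValidationError:http://internal/docs" };
// The ":" split keeps "com.example#ValidationError", then the "#" split
// keeps the bare shape name.
const code = loadSmithyRpcV2CborErrorCode({} as HttpResponse, data);
console.log(code); // "ValidationError"
```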
diff --git a/node_modules/@smithy/core/dist-types/submodules/protocols/extended-encode-uri-component.d.ts b/node_modules/@smithy/core/dist-types/submodules/protocols/extended-encode-uri-component.d.ts new file mode 100644 index 00000000..403e9ae4 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/submodules/protocols/extended-encode-uri-component.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * Function that wraps encodeURIComponent to encode additional characters + * to fully adhere to RFC 3986. + */ +export declare function extendedEncodeURIComponent(str: string): string;
diff --git a/node_modules/@smithy/core/dist-types/submodules/protocols/index.d.ts b/node_modules/@smithy/core/dist-types/submodules/protocols/index.d.ts new file mode 100644 index 00000000..a5de22f1 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/submodules/protocols/index.d.ts @@ -0,0 +1,4 @@ +export * from "./collect-stream-body"; +export * from "./extended-encode-uri-component"; +export * from "./requestBuilder"; +export * from "./resolve-path";
diff --git a/node_modules/@smithy/core/dist-types/submodules/protocols/requestBuilder.d.ts b/node_modules/@smithy/core/dist-types/submodules/protocols/requestBuilder.d.ts new file mode 100644 index 00000000..3013d8a1 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/submodules/protocols/requestBuilder.d.ts @@ -0,0 +1,51 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import type { SerdeContext } from "@smithy/types"; +/** + * @internal + * used in code-generated serde. + */ +export declare function requestBuilder(input: any, context: SerdeContext): RequestBuilder; +/** + * @internal + */ +export declare class RequestBuilder { + private input; + private context; + private query; + private method; + private headers; + private path; + private body; + private hostname; + private resolvePathStack; + constructor(input: any, context: SerdeContext); + build(): Promise<HttpRequest>; + /** + * Brevity setter for "hostname". + */ + hn(hostname: string): this; + /** + * Brevity initial builder for "basepath". + */ + bp(uriLabel: string): this; + /** + * Brevity incremental builder for "path". + */ + p(memberName: string, labelValueProvider: () => string | undefined, uriLabel: string, isGreedyLabel: boolean): this; + /** + * Brevity setter for "headers". + */ + h(headers: Record<string, string>): this; + /** + * Brevity setter for "query". + */ + q(query: Record<string, string>): this; + /** + * Brevity setter for "body". + */ + b(body: any): this; + /** + * Brevity setter for "method".
+ */ + m(method: string): this; +} diff --git a/node_modules/@smithy/core/dist-types/submodules/protocols/resolve-path.d.ts b/node_modules/@smithy/core/dist-types/submodules/protocols/resolve-path.d.ts new file mode 100644 index 00000000..03386d6b --- /dev/null +++ b/node_modules/@smithy/core/dist-types/submodules/protocols/resolve-path.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const resolvedPath: (resolvedPath: string, input: unknown, memberName: string, labelValueProvider: () => string | undefined, uriLabel: string, isGreedyLabel: boolean) => string; diff --git a/node_modules/@smithy/core/dist-types/submodules/serde/index.d.ts b/node_modules/@smithy/core/dist-types/submodules/serde/index.d.ts new file mode 100644 index 00000000..a70d0dda --- /dev/null +++ b/node_modules/@smithy/core/dist-types/submodules/serde/index.d.ts @@ -0,0 +1 @@ +export * from "./value/NumericValue"; diff --git a/node_modules/@smithy/core/dist-types/submodules/serde/value/NumericValue.d.ts b/node_modules/@smithy/core/dist-types/submodules/serde/value/NumericValue.d.ts new file mode 100644 index 00000000..c3736fcc --- /dev/null +++ b/node_modules/@smithy/core/dist-types/submodules/serde/value/NumericValue.d.ts @@ -0,0 +1,31 @@ +/** + * Types which may be represented by {@link NumericValue}. + * + * There is currently only one option, because BigInteger and Long should + * use JS BigInt directly, and all other numeric types can be contained in JS Number. + * + * @public + */ +export type NumericType = "bigDecimal"; +/** + * Serialization container for Smithy simple types that do not have a + * direct JavaScript runtime representation. + * + * This container does not perform numeric mathematical operations. + * It is a container for discerning a value's true type. + * + * It allows storage of numeric types not representable in JS without + * making a decision on what numeric library to use. + * + * @public + */ +export declare class NumericValue { + readonly string: string; + readonly type: NumericType; + constructor(string: string, type: NumericType); +} +/** + * Serde shortcut. 
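+ * For example, nv("10.0000000000000000000000001") keeps all digits as a
+ * "bigDecimal" string, where a JS number would silently round (illustrative).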
+ * @internal + */ +export declare function nv(string: string): NumericValue; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/getSmithyContext.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/getSmithyContext.d.ts new file mode 100644 index 00000000..14cd7c4e --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/getSmithyContext.d.ts @@ -0,0 +1,5 @@ +import { HandlerExecutionContext } from "@smithy/types"; +/** + * @internal + */ +export declare const getSmithyContext: (context: HandlerExecutionContext) => Record; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..347898d8 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +export * from "./getSmithyContext"; +export * from "./middleware-http-auth-scheme"; +export * from "./middleware-http-signing"; +export * from "./normalizeProvider"; +export { createPaginator } from "./pagination/createPaginator"; +export * from "./protocols/requestBuilder"; +export * from "./setFeature"; +export * from "./util-identity-and-auth"; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts new file mode 100644 index 00000000..27e2e26a --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts @@ -0,0 +1,18 @@ +import { HandlerExecutionContext, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, IdentityProviderConfig, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { PreviouslyResolved } from "./httpAuthSchemeMiddleware"; +/** + * @internal + */ +export declare const httpAuthSchemeEndpointRuleSetMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +interface HttpAuthSchemeEndpointRuleSetPluginOptions { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +export declare const getHttpAuthSchemeEndpointRuleSetPlugin: (config: TConfig & PreviouslyResolved, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }: HttpAuthSchemeEndpointRuleSetPluginOptions) => Pluggable; +export {}; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts new file mode 100644 index 00000000..531e6ec1 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts @@ -0,0 +1,18 @@ +import { HandlerExecutionContext, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, IdentityProviderConfig, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { PreviouslyResolved } from "./httpAuthSchemeMiddleware"; +/** + * @internal + */ +export declare const httpAuthSchemeMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +interface HttpAuthSchemePluginOptions { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +export declare const 
getHttpAuthSchemePlugin: (config: TConfig & PreviouslyResolved, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }: HttpAuthSchemePluginOptions) => Pluggable; +export {}; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts new file mode 100644 index 00000000..bbeaf5fc --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts @@ -0,0 +1,33 @@ +import { HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, IdentityProviderConfig, Provider, SelectedHttpAuthScheme, SerializeMiddleware, SMITHY_CONTEXT_KEY } from "@smithy/types"; +/** + * @internal + */ +export interface PreviouslyResolved { + authSchemePreference?: Provider; + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: HttpAuthSchemeProvider; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareOptions { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareSmithyContext extends Record { + selectedHttpAuthScheme?: SelectedHttpAuthScheme; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareHandlerExecutionContext extends HandlerExecutionContext { + [SMITHY_CONTEXT_KEY]?: HttpAuthSchemeMiddlewareSmithyContext; +} +/** + * @internal + */ +export declare const httpAuthSchemeMiddleware: (config: TConfig & PreviouslyResolved, mwOptions: HttpAuthSchemeMiddlewareOptions) => SerializeMiddleware; +export {}; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/index.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/index.d.ts new file mode 100644 index 00000000..2f275c56 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/index.d.ts @@ -0,0 +1,3 @@ +export * from "./httpAuthSchemeMiddleware"; +export * from "./getHttpAuthSchemeEndpointRuleSetPlugin"; +export * from "./getHttpAuthSchemePlugin"; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/resolveAuthOptions.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/resolveAuthOptions.d.ts new file mode 100644 index 00000000..80886833 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/resolveAuthOptions.d.ts @@ -0,0 +1,10 @@ +import { HttpAuthOption } from "@smithy/types"; +/** + * Resolves list of auth options based on the supported ones, vs the preference list. + * + * @param candidateAuthOptions list of supported auth options selected by the standard + * resolution process (model-based, endpoints 2.0, etc.) + * @param authSchemePreference list of auth schemes preferred by user. 
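+ *
+ * For example (illustrative): with candidate scheme IDs ["aws.auth#sigv4",
+ * "smithy.api#noAuth"] and a preference of ["noAuth"], the noAuth option is
+ * presumably ordered first in the returned list.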
+ * @returns + */ +export declare const resolveAuthOptions: (candidateAuthOptions: HttpAuthOption[], authSchemePreference: string[]) => HttpAuthOption[]; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/getHttpSigningMiddleware.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/getHttpSigningMiddleware.d.ts new file mode 100644 index 00000000..a01bb311 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/getHttpSigningMiddleware.d.ts @@ -0,0 +1,9 @@ +import { FinalizeRequestHandlerOptions, Pluggable, RelativeMiddlewareOptions } from "@smithy/types"; +/** + * @internal + */ +export declare const httpSigningMiddlewareOptions: FinalizeRequestHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getHttpSigningPlugin: (config: object) => Pluggable<any, any>; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/httpSigningMiddleware.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/httpSigningMiddleware.d.ts new file mode 100644 index 00000000..7a86b0b7 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/httpSigningMiddleware.d.ts @@ -0,0 +1,5 @@ +import { FinalizeRequestMiddleware } from "@smithy/types"; +/** + * @internal + */ +export declare const httpSigningMiddleware: (config: object) => FinalizeRequestMiddleware<any, any>; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/index.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/index.d.ts new file mode 100644 index 00000000..578f26dd --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/index.d.ts @@ -0,0 +1,2 @@ +export * from "./httpSigningMiddleware"; +export * from "./getHttpSigningMiddleware"; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/normalizeProvider.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/normalizeProvider.d.ts new file mode 100644 index 00000000..594e8fa8 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/normalizeProvider.d.ts @@ -0,0 +1,7 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * @returns a provider function for the input value if it isn't already one. + */ +export declare const normalizeProvider: <T>(input: T | Provider<T>) => Provider<T>; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/pagination/createPaginator.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/pagination/createPaginator.d.ts new file mode 100644 index 00000000..50400d80 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/pagination/createPaginator.d.ts @@ -0,0 +1,7 @@ +import { PaginationConfiguration, Paginator } from "@smithy/types"; +/** + * @internal + * + * Creates a paginator. + */ +export declare function createPaginator<PaginationConfigType extends PaginationConfiguration, InputType extends object, OutputType extends object>(ClientCtor: any, CommandCtor: any, inputTokenName: string, outputTokenName: string, pageSizeTokenName?: string): (config: PaginationConfigType, input: InputType, ...additionalArguments: any[]) => Paginator<OutputType>; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/protocols/requestBuilder.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/protocols/requestBuilder.d.ts new file mode 100644 index 00000000..25459a8d --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/protocols/requestBuilder.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export.
+ */ +export { requestBuilder } from "@smithy/core/protocols"; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/setFeature.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/setFeature.d.ts new file mode 100644 index 00000000..a1995ab8 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/setFeature.d.ts @@ -0,0 +1,12 @@ +import { HandlerExecutionContext, SmithyFeatures } from "@smithy/types"; +/** + * @internal + * Indicates to the request context that a given feature is active. + * + * @param context - handler execution context. + * @param feature - readable name of feature. + * @param value - encoding value of feature. This is required because the + * specification asks the library not to include a runtime lookup of all + * the feature identifiers. + */ +export declare function setFeature<F extends keyof SmithyFeatures>(context: HandlerExecutionContext, feature: F, value: SmithyFeatures[F]): void; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-decode.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-decode.d.ts new file mode 100644 index 00000000..9ddc992c --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-decode.d.ts @@ -0,0 +1,17 @@ +import { CborValueType, Float32, Uint8, Uint32 } from "./cbor-types"; +/** + * @internal + * @param bytes - to be set as the decode source. + * + * Sets the decode bytearray source and its data view. + */ +export declare function setPayload(bytes: Uint8Array): void; +/** + * @internal + * Decodes the data between the two indices. + */ +export declare function decode(at: Uint32, to: Uint32): CborValueType; +/** + * @internal + */ +export declare function bytesToFloat16(a: Uint8, b: Uint8): Float32; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-encode.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-encode.d.ts new file mode 100644 index 00000000..83218b5a --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-encode.d.ts @@ -0,0 +1,9 @@ +/** + * @internal + */ +export declare function toUint8Array(): Uint8Array; +export declare function resize(size: number): void; +/** + * @param _input - JS data object.
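+ *
+ * Presumably paired with toUint8Array() above: encode writes into the
+ * shared encoding buffer, and toUint8Array() reads the result back out.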
+ */ +export declare function encode(_input: any): void; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-types.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-types.d.ts new file mode 100644 index 00000000..e37a6ac0 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-types.d.ts @@ -0,0 +1,66 @@ +/// +export type CborItemType = undefined | boolean | number | bigint | [ + CborUnstructuredByteStringType, + Uint64 +] | string | CborTagType; +export type CborTagType = { + tag: Uint64 | number; + value: CborValueType; + [tagSymbol]: true; +}; +export type CborUnstructuredByteStringType = Uint8Array; +export type CborListType = Array; +export type CborMapType = Record; +export type CborCollectionType = CborMapType | CborListType; +export type CborValueType = CborItemType | CborCollectionType | any; +export type CborArgumentLength = 1 | 2 | 4 | 8; +export type CborArgumentLengthOffset = 1 | 2 | 3 | 5 | 9; +export type CborOffset = number; +export type Uint8 = number; +export type Uint32 = number; +export type Uint64 = bigint; +export type Float32 = number; +export type Int64 = bigint; +export type Float16Binary = number; +export type Float32Binary = number; +export type CborMajorType = typeof majorUint64 | typeof majorNegativeInt64 | typeof majorUnstructuredByteString | typeof majorUtf8String | typeof majorList | typeof majorMap | typeof majorTag | typeof majorSpecial; +export declare const majorUint64 = 0; +export declare const majorNegativeInt64 = 1; +export declare const majorUnstructuredByteString = 2; +export declare const majorUtf8String = 3; +export declare const majorList = 4; +export declare const majorMap = 5; +export declare const majorTag = 6; +export declare const majorSpecial = 7; +export declare const specialFalse = 20; +export declare const specialTrue = 21; +export declare const specialNull = 22; +export declare const specialUndefined = 23; +export declare const extendedOneByte = 24; +export declare const extendedFloat16 = 25; +export declare const extendedFloat32 = 26; +export declare const extendedFloat64 = 27; +export declare const minorIndefinite = 31; +export declare function alloc(size: number): Uint8Array | Buffer; +/** + * @public + * + * The presence of this symbol as an object key indicates it should be considered a tag + * for CBOR serialization purposes. + * + * The object must also have the properties "tag" and "value". + */ +export declare const tagSymbol: unique symbol; +/** + * @public + * Applies the tag symbol to the object. + */ +export declare function tag(data: { + tag: number | bigint; + value: any; + [tagSymbol]?: true; +}): { + tag: number | bigint; + value: any; + [tagSymbol]: true; +}; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor.d.ts new file mode 100644 index 00000000..d317890a --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor.d.ts @@ -0,0 +1,26 @@ +/** + * This implementation is synchronous and only implements the parts of CBOR + * specification used by Smithy RPCv2 CBOR protocol. + * + * This cbor serde implementation is derived from AWS SDK for Go's implementation. 
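+ *
+ * @example round-trip (an assumed usage sketch):
+ *   const bytes = cbor.serialize({ id: 1, tags: ["a", "b"] });
+ *   const value = cbor.deserialize(bytes);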
+ * @see https://github.com/aws/smithy-go/tree/main/encoding/cbor + * + * The cbor-x implementation was also instructional: + * @see https://github.com/kriszyp/cbor-x + */ +export declare const cbor: { + deserialize(payload: Uint8Array): any; + serialize(input: any): Uint8Array; + /** + * @public + * @param size - byte length to allocate. + * + * This may be used to garbage collect the CBOR + * shared encoding buffer space, + * e.g. resizeEncodingBuffer(0); + * + * This may also be used to pre-allocate more space for + * CBOR encoding, e.g. resizeEncodingBuffer(100_000_000); + */ + resizeEncodingBuffer(size: number): void; +}; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/index.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/index.d.ts new file mode 100644 index 00000000..63e2787a --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/index.d.ts @@ -0,0 +1,3 @@ +export { cbor } from "./cbor"; +export * from "./parseCborBody"; +export { tagSymbol, tag } from "./cbor-types"; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/parseCborBody.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/parseCborBody.d.ts new file mode 100644 index 00000000..90676a21 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/parseCborBody.d.ts @@ -0,0 +1,31 @@ +import { HttpRequest as __HttpRequest } from "@smithy/protocol-http"; +import { HeaderBag as __HeaderBag, HttpResponse, SerdeContext as __SerdeContext, SerdeContext } from "@smithy/types"; +import { tag, tagSymbol } from "./cbor-types"; +/** + * @internal + */ +export declare const parseCborBody: (streamBody: any, context: SerdeContext) => any; +/** + * @internal + */ +export declare const dateToTag: (date: Date) => { + tag: number | bigint; + value: any; + [tagSymbol]: true; +}; +/** + * @internal + */ +export declare const parseCborErrorBody: (errorBody: any, context: SerdeContext) => Promise; +/** + * @internal + */ +export declare const loadSmithyRpcV2CborErrorCode: (output: HttpResponse, data: any) => string | undefined; +/** + * @internal + */ +export declare const checkCborResponse: (response: HttpResponse) => void; +/** + * @internal + */ +export declare const buildHttpRpcRequest: (context: __SerdeContext, headers: __HeaderBag, path: string, resolvedHostname: string | undefined, body: any) => Promise<__HttpRequest>; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/collect-stream-body.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/collect-stream-body.d.ts new file mode 100644 index 00000000..9c5f4711 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/collect-stream-body.d.ts @@ -0,0 +1,10 @@ +import { SerdeContext } from "@smithy/types"; +import { Uint8ArrayBlobAdapter } from "@smithy/util-stream"; +/** + * @internal + * + * Collect low-level response body stream to Uint8Array. 
+ */ +export declare const collectBody: (streamBody: any, context: { + streamCollector: SerdeContext["streamCollector"]; +}) => Promise<Uint8ArrayBlobAdapter>; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/extended-encode-uri-component.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/extended-encode-uri-component.d.ts new file mode 100644 index 00000000..98c3802e --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/extended-encode-uri-component.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * Function that wraps encodeURIComponent to encode additional characters + * to fully adhere to RFC 3986. + */ +export declare function extendedEncodeURIComponent(str: string): string; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/index.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/index.d.ts new file mode 100644 index 00000000..4ffc2902 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/index.d.ts @@ -0,0 +1,4 @@ +export * from "./collect-stream-body"; +export * from "./extended-encode-uri-component"; +export * from "./requestBuilder"; +export * from "./resolve-path"; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/requestBuilder.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/requestBuilder.d.ts new file mode 100644 index 00000000..0449354f --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/requestBuilder.d.ts @@ -0,0 +1,51 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { SerdeContext } from "@smithy/types"; +/** + * @internal + * used in code-generated serde. + */ +export declare function requestBuilder(input: any, context: SerdeContext): RequestBuilder; +/** + * @internal + */ +export declare class RequestBuilder { + private input; + private context; + private query; + private method; + private headers; + private path; + private body; + private hostname; + private resolvePathStack; + constructor(input: any, context: SerdeContext); + build(): Promise<HttpRequest>; + /** + * Brevity setter for "hostname". + */ + hn(hostname: string): this; + /** + * Brevity initial builder for "basepath". + */ + bp(uriLabel: string): this; + /** + * Brevity incremental builder for "path". + */ + p(memberName: string, labelValueProvider: () => string | undefined, uriLabel: string, isGreedyLabel: boolean): this; + /** + * Brevity setter for "headers". + */ + h(headers: Record<string, string>): this; + /** + * Brevity setter for "query". + */ + q(query: Record<string, string>): this; + /** + * Brevity setter for "body". + */ + b(body: any): this; + /** + * Brevity setter for "method".
+ */ + m(method: string): this; +} diff --git a/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/resolve-path.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/resolve-path.d.ts new file mode 100644 index 00000000..4c4c4430 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/resolve-path.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const resolvedPath: (resolvedPath: string, input: unknown, memberName: string, labelValueProvider: () => string | undefined, uriLabel: string, isGreedyLabel: boolean) => string; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/index.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/index.d.ts new file mode 100644 index 00000000..3e78075e --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/index.d.ts @@ -0,0 +1 @@ +export * from "./value/NumericValue"; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/value/NumericValue.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/value/NumericValue.d.ts new file mode 100644 index 00000000..00dd3b75 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/value/NumericValue.d.ts @@ -0,0 +1,31 @@ +/** + * Types which may be represented by {@link NumericValue}. + * + * There is currently only one option, because BigInteger and Long should + * use JS BigInt directly, and all other numeric types can be contained in JS Number. + * + * @public + */ +export type NumericType = "bigDecimal"; +/** + * Serialization container for Smithy simple types that do not have a + * direct JavaScript runtime representation. + * + * This container does not perform numeric mathematical operations. + * It is a container for discerning a value's true type. + * + * It allows storage of numeric types not representable in JS without + * making a decision on what numeric library to use. + * + * @public + */ +export declare class NumericValue { + readonly string: string; + readonly type: NumericType; + constructor(string: string, type: NumericType); +} +/** + * Serde shortcut. + * @internal + */ +export declare function nv(string: string): NumericValue; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts new file mode 100644 index 00000000..7e806593 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts @@ -0,0 +1,15 @@ +import { HttpAuthSchemeId, Identity, IdentityProvider, IdentityProviderConfig } from "@smithy/types"; +/** + * Default implementation of IdentityProviderConfig + * @internal + */ +export declare class DefaultIdentityProviderConfig implements IdentityProviderConfig { + private authSchemes; + /** + * Creates an IdentityProviderConfig with a record of scheme IDs to identity providers. 
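+ * For example (an illustrative sketch; the scheme IDs depend on the model):
+ *   new DefaultIdentityProviderConfig({
+ *     "smithy.api#httpBearerAuth": async () => ({ token: "example-token" }),
+ *   });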
+ * + * @param config scheme IDs and identity providers to configure + */ + constructor(config: Record<HttpAuthSchemeId, IdentityProvider<Identity> | undefined>); + getIdentityProvider(schemeId: HttpAuthSchemeId): IdentityProvider<Identity> | undefined; +} diff --git a/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts new file mode 100644 index 00000000..3981a1be --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { ApiKeyIdentity, HttpRequest as IHttpRequest, HttpSigner } from "@smithy/types"; +/** + * @internal + */ +export declare class HttpApiKeyAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: ApiKeyIdentity, signingProperties: Record<string, unknown>): Promise<IHttpRequest>; +} diff --git a/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts new file mode 100644 index 00000000..9c83b1cf --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { HttpRequest as IHttpRequest, HttpSigner, TokenIdentity } from "@smithy/types"; +/** + * @internal + */ +export declare class HttpBearerAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: TokenIdentity, signingProperties: Record<string, unknown>): Promise<IHttpRequest>; +} diff --git a/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/index.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/index.d.ts new file mode 100644 index 00000000..aa5caa8e --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/index.d.ts @@ -0,0 +1,3 @@ +export * from "./httpApiKeyAuth"; +export * from "./httpBearerAuth"; +export * from "./noAuth"; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts new file mode 100644 index 00000000..0d7b612e --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest, HttpSigner, Identity } from "@smithy/types"; +/** + * Signer for the synthetic @smithy.api#noAuth auth scheme.
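+ * Presumably a pass-through: sign() is expected to resolve with the request
+ * unmodified, since no credentials apply to this scheme.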
+ * @internal + */ +export declare class NoAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: Identity, signingProperties: Record): Promise; +} diff --git a/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/index.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/index.d.ts new file mode 100644 index 00000000..626ade99 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/index.d.ts @@ -0,0 +1,3 @@ +export * from "./DefaultIdentityProviderConfig"; +export * from "./httpAuthSchemes"; +export * from "./memoizeIdentityProvider"; diff --git a/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/memoizeIdentityProvider.d.ts b/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/memoizeIdentityProvider.d.ts new file mode 100644 index 00000000..270aa711 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/memoizeIdentityProvider.d.ts @@ -0,0 +1,30 @@ +import { Identity, IdentityProvider } from "@smithy/types"; +/** + * @internal + */ +export declare const createIsIdentityExpiredFunction: (expirationMs: number) => (identity: Identity) => boolean; +/** + * @internal + * This may need to be configurable in the future, but for now it is defaulted to 5min. + */ +export declare const EXPIRATION_MS = 300000; +/** + * @internal + */ +export declare const isIdentityExpired: (identity: Identity) => boolean; +/** + * @internal + */ +export declare const doesIdentityRequireRefresh: (identity: Identity) => boolean; +/** + * @internal + */ +export interface MemoizedIdentityProvider { + (options?: Record & { + forceRefresh?: boolean; + }): Promise; +} +/** + * @internal + */ +export declare const memoizeIdentityProvider: (provider: IdentityT | IdentityProvider | undefined, isExpired: (resolved: Identity) => boolean, requiresRefresh: (resolved: Identity) => boolean) => MemoizedIdentityProvider | undefined; diff --git a/node_modules/@smithy/core/dist-types/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts b/node_modules/@smithy/core/dist-types/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts new file mode 100644 index 00000000..0b392042 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts @@ -0,0 +1,15 @@ +import { HttpAuthSchemeId, Identity, IdentityProvider, IdentityProviderConfig } from "@smithy/types"; +/** + * Default implementation of IdentityProviderConfig + * @internal + */ +export declare class DefaultIdentityProviderConfig implements IdentityProviderConfig { + private authSchemes; + /** + * Creates an IdentityProviderConfig with a record of scheme IDs to identity providers. 
+ * + * @param config scheme IDs and identity providers to configure + */ + constructor(config: Record<HttpAuthSchemeId, IdentityProvider<Identity> | undefined>); + getIdentityProvider(schemeId: HttpAuthSchemeId): IdentityProvider<Identity> | undefined; +} diff --git a/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts b/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts new file mode 100644 index 00000000..63de4bc4 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { ApiKeyIdentity, HttpRequest as IHttpRequest, HttpSigner } from "@smithy/types"; +/** + * @internal + */ +export declare class HttpApiKeyAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: ApiKeyIdentity, signingProperties: Record<string, unknown>): Promise<IHttpRequest>; +} diff --git a/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts b/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts new file mode 100644 index 00000000..0e31e7df --- /dev/null +++ b/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { HttpRequest as IHttpRequest, HttpSigner, TokenIdentity } from "@smithy/types"; +/** + * @internal + */ +export declare class HttpBearerAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: TokenIdentity, signingProperties: Record<string, unknown>): Promise<IHttpRequest>; +} diff --git a/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/index.d.ts b/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/index.d.ts new file mode 100644 index 00000000..9d240feb --- /dev/null +++ b/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/index.d.ts @@ -0,0 +1,3 @@ +export * from "./httpApiKeyAuth"; +export * from "./httpBearerAuth"; +export * from "./noAuth"; diff --git a/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts b/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts new file mode 100644 index 00000000..fc8d6b1c --- /dev/null +++ b/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest, HttpSigner, Identity } from "@smithy/types"; +/** + * Signer for the synthetic @smithy.api#noAuth auth scheme.
+ * @internal + */ +export declare class NoAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: Identity, signingProperties: Record): Promise; +} diff --git a/node_modules/@smithy/core/dist-types/util-identity-and-auth/index.d.ts b/node_modules/@smithy/core/dist-types/util-identity-and-auth/index.d.ts new file mode 100644 index 00000000..87ba64ba --- /dev/null +++ b/node_modules/@smithy/core/dist-types/util-identity-and-auth/index.d.ts @@ -0,0 +1,3 @@ +export * from "./DefaultIdentityProviderConfig"; +export * from "./httpAuthSchemes"; +export * from "./memoizeIdentityProvider"; diff --git a/node_modules/@smithy/core/dist-types/util-identity-and-auth/memoizeIdentityProvider.d.ts b/node_modules/@smithy/core/dist-types/util-identity-and-auth/memoizeIdentityProvider.d.ts new file mode 100644 index 00000000..67b3be83 --- /dev/null +++ b/node_modules/@smithy/core/dist-types/util-identity-and-auth/memoizeIdentityProvider.d.ts @@ -0,0 +1,30 @@ +import { Identity, IdentityProvider } from "@smithy/types"; +/** + * @internal + */ +export declare const createIsIdentityExpiredFunction: (expirationMs: number) => (identity: Identity) => boolean; +/** + * @internal + * This may need to be configurable in the future, but for now it is defaulted to 5min. + */ +export declare const EXPIRATION_MS = 300000; +/** + * @internal + */ +export declare const isIdentityExpired: (identity: Identity) => boolean; +/** + * @internal + */ +export declare const doesIdentityRequireRefresh: (identity: Identity) => boolean; +/** + * @internal + */ +export interface MemoizedIdentityProvider { + (options?: Record & { + forceRefresh?: boolean; + }): Promise; +} +/** + * @internal + */ +export declare const memoizeIdentityProvider: (provider: IdentityT | IdentityProvider | undefined, isExpired: (resolved: Identity) => boolean, requiresRefresh: (resolved: Identity) => boolean) => MemoizedIdentityProvider | undefined; diff --git a/node_modules/@smithy/core/package.json b/node_modules/@smithy/core/package.json new file mode 100644 index 00000000..638476ef --- /dev/null +++ b/node_modules/@smithy/core/package.json @@ -0,0 +1,112 @@ +{ + "name": "@smithy/core", + "version": "3.3.0", + "scripts": { + "build": "yarn lint && concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline core", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "npx eslint -c ../../.eslintrc.js \"src/**/*.ts\" --fix && node ./scripts/lint", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:cbor:perf": "node ./scripts/cbor-perf.mjs", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "exports": { + ".": { + "module": "./dist-es/index.js", + "node": "./dist-cjs/index.js", + "import": "./dist-es/index.js", + "require": "./dist-cjs/index.js", + "types": "./dist-types/index.d.ts" + }, + "./package.json": { + "module": "./package.json", + "node": "./package.json", + "import": "./package.json", 
+ "require": "./package.json" + }, + "./cbor": { + "module": "./dist-es/submodules/cbor/index.js", + "node": "./dist-cjs/submodules/cbor/index.js", + "import": "./dist-es/submodules/cbor/index.js", + "require": "./dist-cjs/submodules/cbor/index.js", + "types": "./dist-types/submodules/cbor/index.d.ts" + }, + "./protocols": { + "module": "./dist-es/submodules/protocols/index.js", + "node": "./dist-cjs/submodules/protocols/index.js", + "import": "./dist-es/submodules/protocols/index.js", + "require": "./dist-cjs/submodules/protocols/index.js", + "types": "./dist-types/submodules/protocols/index.d.ts" + }, + "./serde": { + "module": "./dist-es/submodules/serde/index.js", + "node": "./dist-cjs/submodules/serde/index.js", + "import": "./dist-es/submodules/serde/index.js", + "require": "./dist-cjs/submodules/serde/index.js", + "types": "./dist-types/submodules/serde/index.d.ts" + } + }, + "author": { + "name": "AWS Smithy Team", + "email": "", + "url": "https://smithy.io" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/middleware-serde": "^4.0.3", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "./cbor.d.ts", + "./cbor.js", + "./protocols.d.ts", + "./protocols.js", + "./serde.d.ts", + "./serde.js", + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/core", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/core" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "json-bigint": "^1.0.0", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/core/protocols.d.ts b/node_modules/@smithy/core/protocols.d.ts new file mode 100644 index 00000000..e0afd4e8 --- /dev/null +++ b/node_modules/@smithy/core/protocols.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@smithy/core/protocols" { + export * from "@smithy/core/dist-types/submodules/protocols/index.d"; +} diff --git a/node_modules/@smithy/core/protocols.js b/node_modules/@smithy/core/protocols.js new file mode 100644 index 00000000..43e0c426 --- /dev/null +++ b/node_modules/@smithy/core/protocols.js @@ -0,0 +1,6 @@ + +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/protocols/index.js"); diff --git a/node_modules/@smithy/core/serde.d.ts b/node_modules/@smithy/core/serde.d.ts new file mode 100644 index 00000000..9906bb08 --- /dev/null +++ b/node_modules/@smithy/core/serde.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +declare module "@smithy/core/serde" { + export * from "@smithy/core/dist-types/submodules/serde/index.d"; +} diff --git a/node_modules/@smithy/core/serde.js b/node_modules/@smithy/core/serde.js new file mode 100644 index 00000000..b2d727f8 --- /dev/null +++ b/node_modules/@smithy/core/serde.js @@ -0,0 +1,6 @@ + +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/serde/index.js"); diff --git a/node_modules/@smithy/credential-provider-imds/LICENSE b/node_modules/@smithy/credential-provider-imds/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@smithy/credential-provider-imds/README.md b/node_modules/@smithy/credential-provider-imds/README.md new file mode 100644 index 00000000..9a8f8a53 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/README.md @@ -0,0 +1,11 @@ +# @smithy/credential-provider-imds + +[![NPM version](https://img.shields.io/npm/v/@smithy/credential-provider-imds/latest.svg)](https://www.npmjs.com/package/@smithy/credential-provider-imds) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/credential-provider-imds.svg)](https://www.npmjs.com/package/@smithy/credential-provider-imds) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@smithy/credential-providers](https://www.npmjs.com/package/@smithy/credential-providers) +instead. 
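+
+A hedged sketch of that indirect usage (assuming `fromInstanceMetadata` is
+re-exported by `@smithy/credential-providers`; the `timeout` and `maxRetries`
+options are defined by this package):
+
+```ts
+import { fromInstanceMetadata } from "@smithy/credential-providers";
+
+// Resolve credentials from EC2 IMDS with a 1s timeout and no retries.
+const provider = fromInstanceMetadata({ timeout: 1000, maxRetries: 0 });
+const credentials = await provider();
+```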
diff --git a/node_modules/@smithy/credential-provider-imds/dist-cjs/config/Endpoint.js b/node_modules/@smithy/credential-provider-imds/dist-cjs/config/Endpoint.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-cjs/config/Endpoint.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointConfigOptions.js b/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointConfigOptions.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointConfigOptions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointMode.js b/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointMode.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointMode.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointModeConfigOptions.js b/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointModeConfigOptions.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointModeConfigOptions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/credential-provider-imds/dist-cjs/error/InstanceMetadataV1FallbackError.js b/node_modules/@smithy/credential-provider-imds/dist-cjs/error/InstanceMetadataV1FallbackError.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-cjs/error/InstanceMetadataV1FallbackError.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/credential-provider-imds/dist-cjs/fromContainerMetadata.js b/node_modules/@smithy/credential-provider-imds/dist-cjs/fromContainerMetadata.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-cjs/fromContainerMetadata.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/credential-provider-imds/dist-cjs/fromInstanceMetadata.js b/node_modules/@smithy/credential-provider-imds/dist-cjs/fromInstanceMetadata.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-cjs/fromInstanceMetadata.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/credential-provider-imds/dist-cjs/index.js b/node_modules/@smithy/credential-provider-imds/dist-cjs/index.js new file mode 100644 index 00000000..21b34236 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-cjs/index.js @@ -0,0 +1,445 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + 
__defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + DEFAULT_MAX_RETRIES: () => DEFAULT_MAX_RETRIES, + DEFAULT_TIMEOUT: () => DEFAULT_TIMEOUT, + ENV_CMDS_AUTH_TOKEN: () => ENV_CMDS_AUTH_TOKEN, + ENV_CMDS_FULL_URI: () => ENV_CMDS_FULL_URI, + ENV_CMDS_RELATIVE_URI: () => ENV_CMDS_RELATIVE_URI, + Endpoint: () => Endpoint, + fromContainerMetadata: () => fromContainerMetadata, + fromInstanceMetadata: () => fromInstanceMetadata, + getInstanceMetadataEndpoint: () => getInstanceMetadataEndpoint, + httpRequest: () => httpRequest, + providerConfigFromInit: () => providerConfigFromInit +}); +module.exports = __toCommonJS(src_exports); + +// src/fromContainerMetadata.ts + +var import_url = require("url"); + +// src/remoteProvider/httpRequest.ts +var import_property_provider = require("@smithy/property-provider"); +var import_buffer = require("buffer"); +var import_http = require("http"); +function httpRequest(options) { + return new Promise((resolve, reject) => { + const req = (0, import_http.request)({ + method: "GET", + ...options, + // Node.js http module doesn't accept hostname with square brackets + // Refs: https://github.com/nodejs/node/issues/39738 + hostname: options.hostname?.replace(/^\[(.+)\]$/, "$1") + }); + req.on("error", (err) => { + reject(Object.assign(new import_property_provider.ProviderError("Unable to connect to instance metadata service"), err)); + req.destroy(); + }); + req.on("timeout", () => { + reject(new import_property_provider.ProviderError("TimeoutError from instance metadata service")); + req.destroy(); + }); + req.on("response", (res) => { + const { statusCode = 400 } = res; + if (statusCode < 200 || 300 <= statusCode) { + reject( + Object.assign(new import_property_provider.ProviderError("Error response received from instance metadata service"), { statusCode }) + ); + req.destroy(); + } + const chunks = []; + res.on("data", (chunk) => { + chunks.push(chunk); + }); + res.on("end", () => { + resolve(import_buffer.Buffer.concat(chunks)); + req.destroy(); + }); + }); + req.end(); + }); +} +__name(httpRequest, "httpRequest"); + +// src/remoteProvider/ImdsCredentials.ts +var isImdsCredentials = /* @__PURE__ */ __name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.AccessKeyId === "string" && typeof arg.SecretAccessKey === "string" && typeof arg.Token === "string" && typeof arg.Expiration === "string", "isImdsCredentials"); +var fromImdsCredentials = /* @__PURE__ */ __name((creds) => ({ + accessKeyId: creds.AccessKeyId, + secretAccessKey: creds.SecretAccessKey, + sessionToken: creds.Token, + expiration: new Date(creds.Expiration), + ...creds.AccountId && { accountId: creds.AccountId } +}), "fromImdsCredentials"); + +// src/remoteProvider/RemoteProviderInit.ts +var DEFAULT_TIMEOUT = 1e3; +var DEFAULT_MAX_RETRIES = 0; +var providerConfigFromInit = /* @__PURE__ */ __name(({ + maxRetries = DEFAULT_MAX_RETRIES, + timeout = DEFAULT_TIMEOUT +}) => ({ maxRetries, timeout }), "providerConfigFromInit"); + +// src/remoteProvider/retry.ts +var retry 
= /* @__PURE__ */ __name((toRetry, maxRetries) => { + let promise = toRetry(); + for (let i = 0; i < maxRetries; i++) { + promise = promise.catch(toRetry); + } + return promise; +}, "retry"); + +// src/fromContainerMetadata.ts +var ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +var ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +var ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +var fromContainerMetadata = /* @__PURE__ */ __name((init = {}) => { + const { timeout, maxRetries } = providerConfigFromInit(init); + return () => retry(async () => { + const requestOptions = await getCmdsUri({ logger: init.logger }); + const credsResponse = JSON.parse(await requestFromEcsImds(timeout, requestOptions)); + if (!isImdsCredentials(credsResponse)) { + throw new import_property_provider.CredentialsProviderError("Invalid response received from instance metadata service.", { + logger: init.logger + }); + } + return fromImdsCredentials(credsResponse); + }, maxRetries); +}, "fromContainerMetadata"); +var requestFromEcsImds = /* @__PURE__ */ __name(async (timeout, options) => { + if (process.env[ENV_CMDS_AUTH_TOKEN]) { + options.headers = { + ...options.headers, + Authorization: process.env[ENV_CMDS_AUTH_TOKEN] + }; + } + const buffer = await httpRequest({ + ...options, + timeout + }); + return buffer.toString(); +}, "requestFromEcsImds"); +var CMDS_IP = "169.254.170.2"; +var GREENGRASS_HOSTS = { + localhost: true, + "127.0.0.1": true +}; +var GREENGRASS_PROTOCOLS = { + "http:": true, + "https:": true +}; +var getCmdsUri = /* @__PURE__ */ __name(async ({ logger }) => { + if (process.env[ENV_CMDS_RELATIVE_URI]) { + return { + hostname: CMDS_IP, + path: process.env[ENV_CMDS_RELATIVE_URI] + }; + } + if (process.env[ENV_CMDS_FULL_URI]) { + const parsed = (0, import_url.parse)(process.env[ENV_CMDS_FULL_URI]); + if (!parsed.hostname || !(parsed.hostname in GREENGRASS_HOSTS)) { + throw new import_property_provider.CredentialsProviderError(`${parsed.hostname} is not a valid container metadata service hostname`, { + tryNextLink: false, + logger + }); + } + if (!parsed.protocol || !(parsed.protocol in GREENGRASS_PROTOCOLS)) { + throw new import_property_provider.CredentialsProviderError(`${parsed.protocol} is not a valid container metadata service protocol`, { + tryNextLink: false, + logger + }); + } + return { + ...parsed, + port: parsed.port ? 
parseInt(parsed.port, 10) : void 0 + }; + } + throw new import_property_provider.CredentialsProviderError( + `The container metadata credential provider cannot be used unless the ${ENV_CMDS_RELATIVE_URI} or ${ENV_CMDS_FULL_URI} environment variable is set`, + { + tryNextLink: false, + logger + } + ); +}, "getCmdsUri"); + +// src/fromInstanceMetadata.ts + + + +// src/error/InstanceMetadataV1FallbackError.ts + +var InstanceMetadataV1FallbackError = class _InstanceMetadataV1FallbackError extends import_property_provider.CredentialsProviderError { + constructor(message, tryNextLink = true) { + super(message, tryNextLink); + this.tryNextLink = tryNextLink; + this.name = "InstanceMetadataV1FallbackError"; + Object.setPrototypeOf(this, _InstanceMetadataV1FallbackError.prototype); + } + static { + __name(this, "InstanceMetadataV1FallbackError"); + } +}; + +// src/utils/getInstanceMetadataEndpoint.ts +var import_node_config_provider = require("@smithy/node-config-provider"); +var import_url_parser = require("@smithy/url-parser"); + +// src/config/Endpoint.ts +var Endpoint = /* @__PURE__ */ ((Endpoint2) => { + Endpoint2["IPv4"] = "http://169.254.169.254"; + Endpoint2["IPv6"] = "http://[fd00:ec2::254]"; + return Endpoint2; +})(Endpoint || {}); + +// src/config/EndpointConfigOptions.ts +var ENV_ENDPOINT_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT"; +var CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; +var ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_ENDPOINT_NAME], + configFileSelector: (profile) => profile[CONFIG_ENDPOINT_NAME], + default: void 0 +}; + +// src/config/EndpointMode.ts +var EndpointMode = /* @__PURE__ */ ((EndpointMode2) => { + EndpointMode2["IPv4"] = "IPv4"; + EndpointMode2["IPv6"] = "IPv6"; + return EndpointMode2; +})(EndpointMode || {}); + +// src/config/EndpointModeConfigOptions.ts +var ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; +var CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; +var ENDPOINT_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_ENDPOINT_MODE_NAME], + configFileSelector: (profile) => profile[CONFIG_ENDPOINT_MODE_NAME], + default: "IPv4" /* IPv4 */ +}; + +// src/utils/getInstanceMetadataEndpoint.ts +var getInstanceMetadataEndpoint = /* @__PURE__ */ __name(async () => (0, import_url_parser.parseUrl)(await getFromEndpointConfig() || await getFromEndpointModeConfig()), "getInstanceMetadataEndpoint"); +var getFromEndpointConfig = /* @__PURE__ */ __name(async () => (0, import_node_config_provider.loadConfig)(ENDPOINT_CONFIG_OPTIONS)(), "getFromEndpointConfig"); +var getFromEndpointModeConfig = /* @__PURE__ */ __name(async () => { + const endpointMode = await (0, import_node_config_provider.loadConfig)(ENDPOINT_MODE_CONFIG_OPTIONS)(); + switch (endpointMode) { + case "IPv4" /* IPv4 */: + return "http://169.254.169.254" /* IPv4 */; + case "IPv6" /* IPv6 */: + return "http://[fd00:ec2::254]" /* IPv6 */; + default: + throw new Error(`Unsupported endpoint mode: ${endpointMode}. 
Select from ${Object.values(EndpointMode)}`); + } +}, "getFromEndpointModeConfig"); + +// src/utils/getExtendedInstanceMetadataCredentials.ts +var STATIC_STABILITY_REFRESH_INTERVAL_SECONDS = 5 * 60; +var STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS = 5 * 60; +var STATIC_STABILITY_DOC_URL = "https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html"; +var getExtendedInstanceMetadataCredentials = /* @__PURE__ */ __name((credentials, logger) => { + const refreshInterval = STATIC_STABILITY_REFRESH_INTERVAL_SECONDS + Math.floor(Math.random() * STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS); + const newExpiration = new Date(Date.now() + refreshInterval * 1e3); + logger.warn( + `Attempting credential expiration extension due to a credential service availability issue. A refresh of these credentials will be attempted after ${new Date(newExpiration)}. +For more information, please visit: ` + STATIC_STABILITY_DOC_URL + ); + const originalExpiration = credentials.originalExpiration ?? credentials.expiration; + return { + ...credentials, + ...originalExpiration ? { originalExpiration } : {}, + expiration: newExpiration + }; +}, "getExtendedInstanceMetadataCredentials"); + +// src/utils/staticStabilityProvider.ts +var staticStabilityProvider = /* @__PURE__ */ __name((provider, options = {}) => { + const logger = options?.logger || console; + let pastCredentials; + return async () => { + let credentials; + try { + credentials = await provider(); + if (credentials.expiration && credentials.expiration.getTime() < Date.now()) { + credentials = getExtendedInstanceMetadataCredentials(credentials, logger); + } + } catch (e) { + if (pastCredentials) { + logger.warn("Credential renew failed: ", e); + credentials = getExtendedInstanceMetadataCredentials(pastCredentials, logger); + } else { + throw e; + } + } + pastCredentials = credentials; + return credentials; + }; +}, "staticStabilityProvider"); + +// src/fromInstanceMetadata.ts +var IMDS_PATH = "/latest/meta-data/iam/security-credentials/"; +var IMDS_TOKEN_PATH = "/latest/api/token"; +var AWS_EC2_METADATA_V1_DISABLED = "AWS_EC2_METADATA_V1_DISABLED"; +var PROFILE_AWS_EC2_METADATA_V1_DISABLED = "ec2_metadata_v1_disabled"; +var X_AWS_EC2_METADATA_TOKEN = "x-aws-ec2-metadata-token"; +var fromInstanceMetadata = /* @__PURE__ */ __name((init = {}) => staticStabilityProvider(getInstanceMetadataProvider(init), { logger: init.logger }), "fromInstanceMetadata"); +var getInstanceMetadataProvider = /* @__PURE__ */ __name((init = {}) => { + let disableFetchToken = false; + const { logger, profile } = init; + const { timeout, maxRetries } = providerConfigFromInit(init); + const getCredentials = /* @__PURE__ */ __name(async (maxRetries2, options) => { + const isImdsV1Fallback = disableFetchToken || options.headers?.[X_AWS_EC2_METADATA_TOKEN] == null; + if (isImdsV1Fallback) { + let fallbackBlockedFromProfile = false; + let fallbackBlockedFromProcessEnv = false; + const configValue = await (0, import_node_config_provider.loadConfig)( + { + environmentVariableSelector: (env) => { + const envValue = env[AWS_EC2_METADATA_V1_DISABLED]; + fallbackBlockedFromProcessEnv = !!envValue && envValue !== "false"; + if (envValue === void 0) { + throw new import_property_provider.CredentialsProviderError( + `${AWS_EC2_METADATA_V1_DISABLED} not set in env, checking config file next.`, + { logger: init.logger } + ); + } + return fallbackBlockedFromProcessEnv; + }, + configFileSelector: (profile2) => { + const profileValue = 
profile2[PROFILE_AWS_EC2_METADATA_V1_DISABLED]; + fallbackBlockedFromProfile = !!profileValue && profileValue !== "false"; + return fallbackBlockedFromProfile; + }, + default: false + }, + { + profile + } + )(); + if (init.ec2MetadataV1Disabled || configValue) { + const causes = []; + if (init.ec2MetadataV1Disabled) + causes.push("credential provider initialization (runtime option ec2MetadataV1Disabled)"); + if (fallbackBlockedFromProfile) + causes.push(`config file profile (${PROFILE_AWS_EC2_METADATA_V1_DISABLED})`); + if (fallbackBlockedFromProcessEnv) + causes.push(`process environment variable (${AWS_EC2_METADATA_V1_DISABLED})`); + throw new InstanceMetadataV1FallbackError( + `AWS EC2 Metadata v1 fallback has been blocked by AWS SDK configuration in the following: [${causes.join( + ", " + )}].` + ); + } + } + const imdsProfile = (await retry(async () => { + let profile2; + try { + profile2 = await getProfile(options); + } catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return profile2; + }, maxRetries2)).trim(); + return retry(async () => { + let creds; + try { + creds = await getCredentialsFromProfile(imdsProfile, options, init); + } catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return creds; + }, maxRetries2); + }, "getCredentials"); + return async () => { + const endpoint = await getInstanceMetadataEndpoint(); + if (disableFetchToken) { + logger?.debug("AWS SDK Instance Metadata", "using v1 fallback (no token fetch)"); + return getCredentials(maxRetries, { ...endpoint, timeout }); + } else { + let token; + try { + token = (await getMetadataToken({ ...endpoint, timeout })).toString(); + } catch (error) { + if (error?.statusCode === 400) { + throw Object.assign(error, { + message: "EC2 Metadata token request returned error" + }); + } else if (error.message === "TimeoutError" || [403, 404, 405].includes(error.statusCode)) { + disableFetchToken = true; + } + logger?.debug("AWS SDK Instance Metadata", "using v1 fallback (initial)"); + return getCredentials(maxRetries, { ...endpoint, timeout }); + } + return getCredentials(maxRetries, { + ...endpoint, + headers: { + [X_AWS_EC2_METADATA_TOKEN]: token + }, + timeout + }); + } + }; +}, "getInstanceMetadataProvider"); +var getMetadataToken = /* @__PURE__ */ __name(async (options) => httpRequest({ + ...options, + path: IMDS_TOKEN_PATH, + method: "PUT", + headers: { + "x-aws-ec2-metadata-token-ttl-seconds": "21600" + } +}), "getMetadataToken"); +var getProfile = /* @__PURE__ */ __name(async (options) => (await httpRequest({ ...options, path: IMDS_PATH })).toString(), "getProfile"); +var getCredentialsFromProfile = /* @__PURE__ */ __name(async (profile, options, init) => { + const credentialsResponse = JSON.parse( + (await httpRequest({ + ...options, + path: IMDS_PATH + profile + })).toString() + ); + if (!isImdsCredentials(credentialsResponse)) { + throw new import_property_provider.CredentialsProviderError("Invalid response received from instance metadata service.", { + logger: init.logger + }); + } + return fromImdsCredentials(credentialsResponse); +}, "getCredentialsFromProfile"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + httpRequest, + getInstanceMetadataEndpoint, + Endpoint, + ENV_CMDS_FULL_URI, + ENV_CMDS_RELATIVE_URI, + ENV_CMDS_AUTH_TOKEN, + fromContainerMetadata, + fromInstanceMetadata, + DEFAULT_TIMEOUT, + DEFAULT_MAX_RETRIES, + providerConfigFromInit +}); + diff --git 
a/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/ImdsCredentials.js b/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/ImdsCredentials.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/ImdsCredentials.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/RemoteProviderInit.js b/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/RemoteProviderInit.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/RemoteProviderInit.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/httpRequest.js b/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/httpRequest.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/httpRequest.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/index.js b/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/retry.js b/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/retry.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/retry.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/credential-provider-imds/dist-cjs/types.js b/node_modules/@smithy/credential-provider-imds/dist-cjs/types.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getExtendedInstanceMetadataCredentials.js b/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getExtendedInstanceMetadataCredentials.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getExtendedInstanceMetadataCredentials.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getInstanceMetadataEndpoint.js b/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getInstanceMetadataEndpoint.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getInstanceMetadataEndpoint.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/staticStabilityProvider.js b/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/staticStabilityProvider.js new 
file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/staticStabilityProvider.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/credential-provider-imds/dist-es/config/Endpoint.js b/node_modules/@smithy/credential-provider-imds/dist-es/config/Endpoint.js new file mode 100644 index 00000000..b088eb0d --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-es/config/Endpoint.js @@ -0,0 +1,5 @@ +export var Endpoint; +(function (Endpoint) { + Endpoint["IPv4"] = "http://169.254.169.254"; + Endpoint["IPv6"] = "http://[fd00:ec2::254]"; +})(Endpoint || (Endpoint = {})); diff --git a/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointConfigOptions.js b/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointConfigOptions.js new file mode 100644 index 00000000..f043de93 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointConfigOptions.js @@ -0,0 +1,7 @@ +export const ENV_ENDPOINT_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT"; +export const CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; +export const ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_ENDPOINT_NAME], + configFileSelector: (profile) => profile[CONFIG_ENDPOINT_NAME], + default: undefined, +}; diff --git a/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointMode.js b/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointMode.js new file mode 100644 index 00000000..bace8198 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointMode.js @@ -0,0 +1,5 @@ +export var EndpointMode; +(function (EndpointMode) { + EndpointMode["IPv4"] = "IPv4"; + EndpointMode["IPv6"] = "IPv6"; +})(EndpointMode || (EndpointMode = {})); diff --git a/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointModeConfigOptions.js b/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointModeConfigOptions.js new file mode 100644 index 00000000..15b19d04 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointModeConfigOptions.js @@ -0,0 +1,8 @@ +import { EndpointMode } from "./EndpointMode"; +export const ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; +export const CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; +export const ENDPOINT_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_ENDPOINT_MODE_NAME], + configFileSelector: (profile) => profile[CONFIG_ENDPOINT_MODE_NAME], + default: EndpointMode.IPv4, +}; diff --git a/node_modules/@smithy/credential-provider-imds/dist-es/error/InstanceMetadataV1FallbackError.js b/node_modules/@smithy/credential-provider-imds/dist-es/error/InstanceMetadataV1FallbackError.js new file mode 100644 index 00000000..29aaf509 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-es/error/InstanceMetadataV1FallbackError.js @@ -0,0 +1,9 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +export class InstanceMetadataV1FallbackError extends CredentialsProviderError { + constructor(message, tryNextLink = true) { + super(message, tryNextLink); + this.tryNextLink = tryNextLink; + this.name = "InstanceMetadataV1FallbackError"; + Object.setPrototypeOf(this, InstanceMetadataV1FallbackError.prototype); + } +} diff --git 
a/node_modules/@smithy/credential-provider-imds/dist-es/fromContainerMetadata.js b/node_modules/@smithy/credential-provider-imds/dist-es/fromContainerMetadata.js new file mode 100644 index 00000000..4340e3e5 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-es/fromContainerMetadata.js @@ -0,0 +1,77 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { parse } from "url"; +import { httpRequest } from "./remoteProvider/httpRequest"; +import { fromImdsCredentials, isImdsCredentials } from "./remoteProvider/ImdsCredentials"; +import { providerConfigFromInit } from "./remoteProvider/RemoteProviderInit"; +import { retry } from "./remoteProvider/retry"; +export const ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +export const ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +export const ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +export const fromContainerMetadata = (init = {}) => { + const { timeout, maxRetries } = providerConfigFromInit(init); + return () => retry(async () => { + const requestOptions = await getCmdsUri({ logger: init.logger }); + const credsResponse = JSON.parse(await requestFromEcsImds(timeout, requestOptions)); + if (!isImdsCredentials(credsResponse)) { + throw new CredentialsProviderError("Invalid response received from instance metadata service.", { + logger: init.logger, + }); + } + return fromImdsCredentials(credsResponse); + }, maxRetries); +}; +const requestFromEcsImds = async (timeout, options) => { + if (process.env[ENV_CMDS_AUTH_TOKEN]) { + options.headers = { + ...options.headers, + Authorization: process.env[ENV_CMDS_AUTH_TOKEN], + }; + } + const buffer = await httpRequest({ + ...options, + timeout, + }); + return buffer.toString(); +}; +const CMDS_IP = "169.254.170.2"; +const GREENGRASS_HOSTS = { + localhost: true, + "127.0.0.1": true, +}; +const GREENGRASS_PROTOCOLS = { + "http:": true, + "https:": true, +}; +const getCmdsUri = async ({ logger }) => { + if (process.env[ENV_CMDS_RELATIVE_URI]) { + return { + hostname: CMDS_IP, + path: process.env[ENV_CMDS_RELATIVE_URI], + }; + } + if (process.env[ENV_CMDS_FULL_URI]) { + const parsed = parse(process.env[ENV_CMDS_FULL_URI]); + if (!parsed.hostname || !(parsed.hostname in GREENGRASS_HOSTS)) { + throw new CredentialsProviderError(`${parsed.hostname} is not a valid container metadata service hostname`, { + tryNextLink: false, + logger, + }); + } + if (!parsed.protocol || !(parsed.protocol in GREENGRASS_PROTOCOLS)) { + throw new CredentialsProviderError(`${parsed.protocol} is not a valid container metadata service protocol`, { + tryNextLink: false, + logger, + }); + } + return { + ...parsed, + port: parsed.port ? 
parseInt(parsed.port, 10) : undefined, + }; + } + throw new CredentialsProviderError("The container metadata credential provider cannot be used unless" + + ` the ${ENV_CMDS_RELATIVE_URI} or ${ENV_CMDS_FULL_URI} environment` + + " variable is set", { + tryNextLink: false, + logger, + }); +}; diff --git a/node_modules/@smithy/credential-provider-imds/dist-es/fromInstanceMetadata.js b/node_modules/@smithy/credential-provider-imds/dist-es/fromInstanceMetadata.js new file mode 100644 index 00000000..24ecbfd5 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-es/fromInstanceMetadata.js @@ -0,0 +1,134 @@ +import { loadConfig } from "@smithy/node-config-provider"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import { InstanceMetadataV1FallbackError } from "./error/InstanceMetadataV1FallbackError"; +import { httpRequest } from "./remoteProvider/httpRequest"; +import { fromImdsCredentials, isImdsCredentials } from "./remoteProvider/ImdsCredentials"; +import { providerConfigFromInit } from "./remoteProvider/RemoteProviderInit"; +import { retry } from "./remoteProvider/retry"; +import { getInstanceMetadataEndpoint } from "./utils/getInstanceMetadataEndpoint"; +import { staticStabilityProvider } from "./utils/staticStabilityProvider"; +const IMDS_PATH = "/latest/meta-data/iam/security-credentials/"; +const IMDS_TOKEN_PATH = "/latest/api/token"; +const AWS_EC2_METADATA_V1_DISABLED = "AWS_EC2_METADATA_V1_DISABLED"; +const PROFILE_AWS_EC2_METADATA_V1_DISABLED = "ec2_metadata_v1_disabled"; +const X_AWS_EC2_METADATA_TOKEN = "x-aws-ec2-metadata-token"; +export const fromInstanceMetadata = (init = {}) => staticStabilityProvider(getInstanceMetadataProvider(init), { logger: init.logger }); +const getInstanceMetadataProvider = (init = {}) => { + let disableFetchToken = false; + const { logger, profile } = init; + const { timeout, maxRetries } = providerConfigFromInit(init); + const getCredentials = async (maxRetries, options) => { + const isImdsV1Fallback = disableFetchToken || options.headers?.[X_AWS_EC2_METADATA_TOKEN] == null; + if (isImdsV1Fallback) { + let fallbackBlockedFromProfile = false; + let fallbackBlockedFromProcessEnv = false; + const configValue = await loadConfig({ + environmentVariableSelector: (env) => { + const envValue = env[AWS_EC2_METADATA_V1_DISABLED]; + fallbackBlockedFromProcessEnv = !!envValue && envValue !== "false"; + if (envValue === undefined) { + throw new CredentialsProviderError(`${AWS_EC2_METADATA_V1_DISABLED} not set in env, checking config file next.`, { logger: init.logger }); + } + return fallbackBlockedFromProcessEnv; + }, + configFileSelector: (profile) => { + const profileValue = profile[PROFILE_AWS_EC2_METADATA_V1_DISABLED]; + fallbackBlockedFromProfile = !!profileValue && profileValue !== "false"; + return fallbackBlockedFromProfile; + }, + default: false, + }, { + profile, + })(); + if (init.ec2MetadataV1Disabled || configValue) { + const causes = []; + if (init.ec2MetadataV1Disabled) + causes.push("credential provider initialization (runtime option ec2MetadataV1Disabled)"); + if (fallbackBlockedFromProfile) + causes.push(`config file profile (${PROFILE_AWS_EC2_METADATA_V1_DISABLED})`); + if (fallbackBlockedFromProcessEnv) + causes.push(`process environment variable (${AWS_EC2_METADATA_V1_DISABLED})`); + throw new InstanceMetadataV1FallbackError(`AWS EC2 Metadata v1 fallback has been blocked by AWS SDK configuration in the following: [${causes.join(", ")}].`); + } + } + const imdsProfile = (await retry(async () => { + let 
profile; + try { + profile = await getProfile(options); + } + catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return profile; + }, maxRetries)).trim(); + return retry(async () => { + let creds; + try { + creds = await getCredentialsFromProfile(imdsProfile, options, init); + } + catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return creds; + }, maxRetries); + }; + return async () => { + const endpoint = await getInstanceMetadataEndpoint(); + if (disableFetchToken) { + logger?.debug("AWS SDK Instance Metadata", "using v1 fallback (no token fetch)"); + return getCredentials(maxRetries, { ...endpoint, timeout }); + } + else { + let token; + try { + token = (await getMetadataToken({ ...endpoint, timeout })).toString(); + } + catch (error) { + if (error?.statusCode === 400) { + throw Object.assign(error, { + message: "EC2 Metadata token request returned error", + }); + } + else if (error.message === "TimeoutError" || [403, 404, 405].includes(error.statusCode)) { + disableFetchToken = true; + } + logger?.debug("AWS SDK Instance Metadata", "using v1 fallback (initial)"); + return getCredentials(maxRetries, { ...endpoint, timeout }); + } + return getCredentials(maxRetries, { + ...endpoint, + headers: { + [X_AWS_EC2_METADATA_TOKEN]: token, + }, + timeout, + }); + } + }; +}; +const getMetadataToken = async (options) => httpRequest({ + ...options, + path: IMDS_TOKEN_PATH, + method: "PUT", + headers: { + "x-aws-ec2-metadata-token-ttl-seconds": "21600", + }, +}); +const getProfile = async (options) => (await httpRequest({ ...options, path: IMDS_PATH })).toString(); +const getCredentialsFromProfile = async (profile, options, init) => { + const credentialsResponse = JSON.parse((await httpRequest({ + ...options, + path: IMDS_PATH + profile, + })).toString()); + if (!isImdsCredentials(credentialsResponse)) { + throw new CredentialsProviderError("Invalid response received from instance metadata service.", { + logger: init.logger, + }); + } + return fromImdsCredentials(credentialsResponse); +}; diff --git a/node_modules/@smithy/credential-provider-imds/dist-es/index.js b/node_modules/@smithy/credential-provider-imds/dist-es/index.js new file mode 100644 index 00000000..53627609 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-es/index.js @@ -0,0 +1,7 @@ +export * from "./fromContainerMetadata"; +export * from "./fromInstanceMetadata"; +export * from "./remoteProvider/RemoteProviderInit"; +export * from "./types"; +export { httpRequest } from "./remoteProvider/httpRequest"; +export { getInstanceMetadataEndpoint } from "./utils/getInstanceMetadataEndpoint"; +export { Endpoint } from "./config/Endpoint"; diff --git a/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/ImdsCredentials.js b/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/ImdsCredentials.js new file mode 100644 index 00000000..c559c4f8 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/ImdsCredentials.js @@ -0,0 +1,13 @@ +export const isImdsCredentials = (arg) => Boolean(arg) && + typeof arg === "object" && + typeof arg.AccessKeyId === "string" && + typeof arg.SecretAccessKey === "string" && + typeof arg.Token === "string" && + typeof arg.Expiration === "string"; +export const fromImdsCredentials = (creds) => ({ + accessKeyId: creds.AccessKeyId, + secretAccessKey: creds.SecretAccessKey, + sessionToken: creds.Token, + expiration: new 
Date(creds.Expiration), + ...(creds.AccountId && { accountId: creds.AccountId }), +}); diff --git a/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/RemoteProviderInit.js b/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/RemoteProviderInit.js new file mode 100644 index 00000000..39ace380 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/RemoteProviderInit.js @@ -0,0 +1,3 @@ +export const DEFAULT_TIMEOUT = 1000; +export const DEFAULT_MAX_RETRIES = 0; +export const providerConfigFromInit = ({ maxRetries = DEFAULT_MAX_RETRIES, timeout = DEFAULT_TIMEOUT, }) => ({ maxRetries, timeout }); diff --git a/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/httpRequest.js b/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/httpRequest.js new file mode 100644 index 00000000..91742d0d --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/httpRequest.js @@ -0,0 +1,36 @@ +import { ProviderError } from "@smithy/property-provider"; +import { Buffer } from "buffer"; +import { request } from "http"; +export function httpRequest(options) { + return new Promise((resolve, reject) => { + const req = request({ + method: "GET", + ...options, + hostname: options.hostname?.replace(/^\[(.+)\]$/, "$1"), + }); + req.on("error", (err) => { + reject(Object.assign(new ProviderError("Unable to connect to instance metadata service"), err)); + req.destroy(); + }); + req.on("timeout", () => { + reject(new ProviderError("TimeoutError from instance metadata service")); + req.destroy(); + }); + req.on("response", (res) => { + const { statusCode = 400 } = res; + if (statusCode < 200 || 300 <= statusCode) { + reject(Object.assign(new ProviderError("Error response received from instance metadata service"), { statusCode })); + req.destroy(); + } + const chunks = []; + res.on("data", (chunk) => { + chunks.push(chunk); + }); + res.on("end", () => { + resolve(Buffer.concat(chunks)); + req.destroy(); + }); + }); + req.end(); + }); +} diff --git a/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/index.js b/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/index.js new file mode 100644 index 00000000..d4ad6010 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/index.js @@ -0,0 +1,2 @@ +export * from "./ImdsCredentials"; +export * from "./RemoteProviderInit"; diff --git a/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/retry.js b/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/retry.js new file mode 100644 index 00000000..22b79bb2 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/retry.js @@ -0,0 +1,7 @@ +export const retry = (toRetry, maxRetries) => { + let promise = toRetry(); + for (let i = 0; i < maxRetries; i++) { + promise = promise.catch(toRetry); + } + return promise; +}; diff --git a/node_modules/@smithy/credential-provider-imds/dist-es/types.js b/node_modules/@smithy/credential-provider-imds/dist-es/types.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/credential-provider-imds/dist-es/utils/getExtendedInstanceMetadataCredentials.js b/node_modules/@smithy/credential-provider-imds/dist-es/utils/getExtendedInstanceMetadataCredentials.js new file mode 100644 index 00000000..5614692f --- 
/dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-es/utils/getExtendedInstanceMetadataCredentials.js @@ -0,0 +1,17 @@ +const STATIC_STABILITY_REFRESH_INTERVAL_SECONDS = 5 * 60; +const STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS = 5 * 60; +const STATIC_STABILITY_DOC_URL = "https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html"; +export const getExtendedInstanceMetadataCredentials = (credentials, logger) => { + const refreshInterval = STATIC_STABILITY_REFRESH_INTERVAL_SECONDS + + Math.floor(Math.random() * STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS); + const newExpiration = new Date(Date.now() + refreshInterval * 1000); + logger.warn("Attempting credential expiration extension due to a credential service availability issue. A refresh of these " + + `credentials will be attempted after ${new Date(newExpiration)}.\nFor more information, please visit: ` + + STATIC_STABILITY_DOC_URL); + const originalExpiration = credentials.originalExpiration ?? credentials.expiration; + return { + ...credentials, + ...(originalExpiration ? { originalExpiration } : {}), + expiration: newExpiration, + }; +}; diff --git a/node_modules/@smithy/credential-provider-imds/dist-es/utils/getInstanceMetadataEndpoint.js b/node_modules/@smithy/credential-provider-imds/dist-es/utils/getInstanceMetadataEndpoint.js new file mode 100644 index 00000000..4c611ad2 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-es/utils/getInstanceMetadataEndpoint.js @@ -0,0 +1,19 @@ +import { loadConfig } from "@smithy/node-config-provider"; +import { parseUrl } from "@smithy/url-parser"; +import { Endpoint as InstanceMetadataEndpoint } from "../config/Endpoint"; +import { ENDPOINT_CONFIG_OPTIONS } from "../config/EndpointConfigOptions"; +import { EndpointMode } from "../config/EndpointMode"; +import { ENDPOINT_MODE_CONFIG_OPTIONS, } from "../config/EndpointModeConfigOptions"; +export const getInstanceMetadataEndpoint = async () => parseUrl((await getFromEndpointConfig()) || (await getFromEndpointModeConfig())); +const getFromEndpointConfig = async () => loadConfig(ENDPOINT_CONFIG_OPTIONS)(); +const getFromEndpointModeConfig = async () => { + const endpointMode = await loadConfig(ENDPOINT_MODE_CONFIG_OPTIONS)(); + switch (endpointMode) { + case EndpointMode.IPv4: + return InstanceMetadataEndpoint.IPv4; + case EndpointMode.IPv6: + return InstanceMetadataEndpoint.IPv6; + default: + throw new Error(`Unsupported endpoint mode: ${endpointMode}.` + ` Select from ${Object.values(EndpointMode)}`); + } +}; diff --git a/node_modules/@smithy/credential-provider-imds/dist-es/utils/staticStabilityProvider.js b/node_modules/@smithy/credential-provider-imds/dist-es/utils/staticStabilityProvider.js new file mode 100644 index 00000000..9a1e7421 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-es/utils/staticStabilityProvider.js @@ -0,0 +1,25 @@ +import { getExtendedInstanceMetadataCredentials } from "./getExtendedInstanceMetadataCredentials"; +export const staticStabilityProvider = (provider, options = {}) => { + const logger = options?.logger || console; + let pastCredentials; + return async () => { + let credentials; + try { + credentials = await provider(); + if (credentials.expiration && credentials.expiration.getTime() < Date.now()) { + credentials = getExtendedInstanceMetadataCredentials(credentials, logger); + } + } + catch (e) { + if (pastCredentials) { + logger.warn("Credential renew failed: ", e); + credentials = 
getExtendedInstanceMetadataCredentials(pastCredentials, logger); + } + else { + throw e; + } + } + pastCredentials = credentials; + return credentials; + }; +}; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/config/Endpoint.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/config/Endpoint.d.ts new file mode 100644 index 00000000..000e3136 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/config/Endpoint.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum Endpoint { + IPv4 = "http://169.254.169.254", + IPv6 = "http://[fd00:ec2::254]" +} diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointConfigOptions.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointConfigOptions.d.ts new file mode 100644 index 00000000..c03e22cb --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointConfigOptions.d.ts @@ -0,0 +1,13 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_ENDPOINT_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; +/** + * @internal + */ +export declare const ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors<string | undefined>; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointMode.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointMode.d.ts new file mode 100644 index 00000000..db706191 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointMode.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum EndpointMode { + IPv4 = "IPv4", + IPv6 = "IPv6" +} diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointModeConfigOptions.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointModeConfigOptions.d.ts new file mode 100644 index 00000000..c7431999 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointModeConfigOptions.d.ts @@ -0,0 +1,13 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; +/** + * @internal + */ +export declare const CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; +/** + * @internal + */ +export declare const ENDPOINT_MODE_CONFIG_OPTIONS: LoadedConfigSelectors<string | undefined>;
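The two selector tables above are what `getInstanceMetadataEndpoint` (see the `dist-es/utils/getInstanceMetadataEndpoint.js` hunk earlier in this diff) passes to `loadConfig`: an explicitly configured endpoint wins, otherwise the endpoint mode, defaulting to IPv4, selects one of the two well-known IMDS addresses. A minimal TypeScript sketch of that resolution order; `resolveImdsEndpoint` and `IMDS_ENDPOINTS` are illustrative names, and the shared-config-file keys (`ec2_metadata_service_endpoint`, `ec2_metadata_service_endpoint_mode`) that `@smithy/node-config-provider` would also consult are deliberately omitted:

```ts
// Illustrative sketch only: mirrors the resolution order of
// getInstanceMetadataEndpoint using plain process.env lookups, without the
// config-file half that @smithy/node-config-provider also reads.
const IMDS_ENDPOINTS: Record<string, string> = {
  IPv4: "http://169.254.169.254",
  IPv6: "http://[fd00:ec2::254]",
};

function resolveImdsEndpoint(env: NodeJS.ProcessEnv = process.env): string {
  // 1. An explicit endpoint always wins.
  const explicit = env["AWS_EC2_METADATA_SERVICE_ENDPOINT"];
  if (explicit) return explicit;
  // 2. Otherwise the endpoint mode (default IPv4) picks a well-known address.
  const mode = env["AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"] ?? "IPv4";
  const endpoint = IMDS_ENDPOINTS[mode];
  if (!endpoint) {
    throw new Error(`Unsupported endpoint mode: ${mode}. Select from IPv4, IPv6`);
  }
  return endpoint;
}
```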
diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/error/InstanceMetadataV1FallbackError.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/error/InstanceMetadataV1FallbackError.d.ts new file mode 100644 index 00000000..8338ccb7 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/error/InstanceMetadataV1FallbackError.d.ts @@ -0,0 +1,12 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +/** + * @public + * + * A specific sub-case of CredentialsProviderError, when the IMDSv1 fallback + * has been attempted but shut off by SDK configuration. + */ +export declare class InstanceMetadataV1FallbackError extends CredentialsProviderError { + readonly tryNextLink: boolean; + name: string; + constructor(message: string, tryNextLink?: boolean); +} diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/fromContainerMetadata.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/fromContainerMetadata.d.ts new file mode 100644 index 00000000..f6f28f07 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/fromContainerMetadata.d.ts @@ -0,0 +1,21 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import { RemoteProviderInit } from "./remoteProvider/RemoteProviderInit"; +/** + * @internal + */ +export declare const ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +/** + * @internal + */ +export declare const ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +/** + * @internal + */ +export declare const ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +/** + * @internal + * + * Creates a credential provider that will source credentials from the ECS + * Container Metadata Service + */ +export declare const fromContainerMetadata: (init?: RemoteProviderInit) => AwsCredentialIdentityProvider; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/fromInstanceMetadata.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/fromInstanceMetadata.d.ts new file mode 100644 index 00000000..24db95ae --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/fromInstanceMetadata.d.ts @@ -0,0 +1,10 @@ +import { Provider } from "@smithy/types"; +import { RemoteProviderInit } from "./remoteProvider/RemoteProviderInit"; +import { InstanceMetadataCredentials } from "./types"; +/** + * @internal + * + * Creates a credential provider that will source credentials from the EC2 + * Instance Metadata Service + */ +export declare const fromInstanceMetadata: (init?: RemoteProviderInit) => Provider<InstanceMetadataCredentials>;
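These `.d.ts` files declare the vendored package's two public entry points. A minimal consumption sketch, assuming `@aws-sdk/client-s3` is available; it only documents what the package does, not how this PR's storage routes obtain their credentials:

```ts
import { S3Client } from "@aws-sdk/client-s3";
import { fromContainerMetadata, fromInstanceMetadata } from "@smithy/credential-provider-imds";

// On an EC2 instance: resolve credentials from IMDS, allowing one extra
// attempt and a 2-second connection timeout per request.
const ec2Client = new S3Client({
  credentials: fromInstanceMetadata({ maxRetries: 1, timeout: 2000 }),
});

// On ECS/Fargate: resolve credentials from the container endpoint advertised
// via AWS_CONTAINER_CREDENTIALS_RELATIVE_URI (or the validated FULL_URI form).
const ecsClient = new S3Client({
  credentials: fromContainerMetadata(),
});
```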
diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/index.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/index.d.ts new file mode 100644 index 00000000..5a87b2f1 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/index.d.ts @@ -0,0 +1,28 @@ +/** + * @internal + */ +export * from "./fromContainerMetadata"; +/** + * @internal + */ +export * from "./fromInstanceMetadata"; +/** + * @internal + */ +export * from "./remoteProvider/RemoteProviderInit"; +/** + * @internal + */ +export * from "./types"; +/** + * @internal + */ +export { httpRequest } from "./remoteProvider/httpRequest"; +/** + * @internal + */ +export { getInstanceMetadataEndpoint } from "./utils/getInstanceMetadataEndpoint"; +/** + * @internal + */ +export { Endpoint } from "./config/Endpoint"; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/ImdsCredentials.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/ImdsCredentials.d.ts new file mode 100644 index 00000000..c2c7d51a --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/ImdsCredentials.d.ts @@ -0,0 +1,19 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +/** + * @internal + */ +export interface ImdsCredentials { + AccessKeyId: string; + SecretAccessKey: string; + Token: string; + Expiration: string; + AccountId?: string; +} +/** + * @internal + */ +export declare const isImdsCredentials: (arg: any) => arg is ImdsCredentials; +/** + * @internal + */ +export declare const fromImdsCredentials: (creds: ImdsCredentials) => AwsCredentialIdentity; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/RemoteProviderInit.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/RemoteProviderInit.d.ts new file mode 100644 index 00000000..df9eff70 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/RemoteProviderInit.d.ts @@ -0,0 +1,40 @@ +import { Logger } from "@smithy/types"; +/** + * @internal + */ +export declare const DEFAULT_TIMEOUT = 1000; +/** + * @internal + */ +export declare const DEFAULT_MAX_RETRIES = 0; +/** + * @public + */ +export interface RemoteProviderConfig { + /** + * The connection timeout (in milliseconds) + */ + timeout: number; + /** + * The maximum number of times the HTTP connection should be retried + */ + maxRetries: number; +} +/** + * @public + */ +export interface RemoteProviderInit extends Partial<RemoteProviderConfig> { + logger?: Logger; + /** + * Only used in the IMDS credential provider. + */ + ec2MetadataV1Disabled?: boolean; + /** + * AWS_PROFILE. + */ + profile?: string; +} +/** + * @internal + */ +export declare const providerConfigFromInit: ({ maxRetries, timeout, }: RemoteProviderInit) => RemoteProviderConfig; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/httpRequest.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/httpRequest.d.ts new file mode 100644 index 00000000..87c7d0de --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/httpRequest.d.ts @@ -0,0 +1,9 @@ +/// <reference types="node" /> +/// <reference types="node" /> +/// <reference types="node" /> +import { Buffer } from "buffer"; +import { RequestOptions } from "http"; +/** + * @internal + */ +export declare function httpRequest(options: RequestOptions): Promise<Buffer>; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/index.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/index.d.ts new file mode 100644 index 00000000..ed18a703 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./ImdsCredentials"; +/** + * @internal + */ +export * from "./RemoteProviderInit"; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/retry.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/retry.d.ts new file mode 100644 index 00000000..4e8abc04 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/retry.d.ts @@ -0,0 +1,10 @@ +/** + * @internal + */ +export interface RetryableProvider<T> { + (): Promise<T>; +} +/** + * @internal + */ +export declare const retry: <T>(toRetry: RetryableProvider<T>, maxRetries: number) => Promise<T>;
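The `retry` contract declared here matches the implementation in the `dist-es/remoteProvider/retry.js` hunk above: `maxRetries` is the number of extra attempts chained onto the first via `.catch`, with no backoff, so `DEFAULT_MAX_RETRIES = 0` means exactly one attempt. A standalone copy with a usage example:

```ts
// Same chaining as dist-es/remoteProvider/retry.js: N catch handlers give
// up to N + 1 sequential attempts, with no delay between them.
const retry = <T>(toRetry: () => Promise<T>, maxRetries: number): Promise<T> => {
  let promise = toRetry();
  for (let i = 0; i < maxRetries; i++) {
    promise = promise.catch(toRetry);
  }
  return promise;
};

// Fails twice, succeeds on the third attempt (maxRetries = 2).
let attempts = 0;
retry(async () => {
  attempts += 1;
  if (attempts < 3) throw new Error(`attempt ${attempts} failed`);
  return "ok";
}, 2).then((result) => console.log(result, attempts)); // -> ok 3
```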
diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/Endpoint.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/Endpoint.d.ts new file mode 100644 index 00000000..b700953f --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/Endpoint.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum Endpoint { + IPv4 = "http://169.254.169.254", + IPv6 = "http://[fd00:ec2::254]" +} diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointConfigOptions.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointConfigOptions.d.ts new file mode 100644 index 00000000..dbcb2433 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointConfigOptions.d.ts @@ -0,0 +1,13 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_ENDPOINT_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; +/** + * @internal + */ +export declare const ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors<string | undefined>; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointMode.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointMode.d.ts new file mode 100644 index 00000000..7dee86ef --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointMode.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum EndpointMode { + IPv4 = "IPv4", + IPv6 = "IPv6" +} diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointModeConfigOptions.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointModeConfigOptions.d.ts new file mode 100644 index 00000000..1d5e4584 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointModeConfigOptions.d.ts @@ -0,0 +1,13 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; +/** + * @internal + */ +export declare const CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; +/** + * @internal + */ +export declare const ENDPOINT_MODE_CONFIG_OPTIONS: LoadedConfigSelectors<string | undefined>; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/error/InstanceMetadataV1FallbackError.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/error/InstanceMetadataV1FallbackError.d.ts new file mode 100644 index 00000000..93ac2202 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/error/InstanceMetadataV1FallbackError.d.ts @@ -0,0 +1,12 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +/** + * @public + * + * A specific sub-case of CredentialsProviderError, when the IMDSv1 fallback + * has been attempted but shut off by SDK configuration.
+ */ +export declare class InstanceMetadataV1FallbackError extends CredentialsProviderError { + readonly tryNextLink: boolean; + name: string; + constructor(message: string, tryNextLink?: boolean); +} diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromContainerMetadata.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromContainerMetadata.d.ts new file mode 100644 index 00000000..deb48fd4 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromContainerMetadata.d.ts @@ -0,0 +1,21 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import { RemoteProviderInit } from "./remoteProvider/RemoteProviderInit"; +/** + * @internal + */ +export declare const ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +/** + * @internal + */ +export declare const ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +/** + * @internal + */ +export declare const ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +/** + * @internal + * + * Creates a credential provider that will source credentials from the ECS + * Container Metadata Service + */ +export declare const fromContainerMetadata: (init?: RemoteProviderInit) => AwsCredentialIdentityProvider; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromInstanceMetadata.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromInstanceMetadata.d.ts new file mode 100644 index 00000000..8a533f20 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromInstanceMetadata.d.ts @@ -0,0 +1,10 @@ +import { Provider } from "@smithy/types"; +import { RemoteProviderInit } from "./remoteProvider/RemoteProviderInit"; +import { InstanceMetadataCredentials } from "./types"; +/** + * @internal + * + * Creates a credential provider that will source credentials from the EC2 + * Instance Metadata Service + */ +export declare const fromInstanceMetadata: (init?: RemoteProviderInit) => Provider<InstanceMetadataCredentials>; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..c0bc7e44 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/index.d.ts @@ -0,0 +1,28 @@ +/** + * @internal + */ +export * from "./fromContainerMetadata"; +/** + * @internal + */ +export * from "./fromInstanceMetadata"; +/** + * @internal + */ +export * from "./remoteProvider/RemoteProviderInit"; +/** + * @internal + */ +export * from "./types"; +/** + * @internal + */ +export { httpRequest } from "./remoteProvider/httpRequest"; +/** + * @internal + */ +export { getInstanceMetadataEndpoint } from "./utils/getInstanceMetadataEndpoint"; +/** + * @internal + */ +export { Endpoint } from "./config/Endpoint";
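Behind these declarations, `getMetadataToken` and `getProfile` in the `dist-cjs/index.js` hunk above implement the IMDSv2 handshake: a `PUT /latest/api/token` carrying a six-hour TTL header, after which metadata reads send the token as `x-aws-ec2-metadata-token`. A timeout or a 403/404/405 on the token request flips the provider into IMDSv1 fallback unless that is blocked by the `ec2MetadataV1Disabled` init option, the `AWS_EC2_METADATA_V1_DISABLED` environment variable, or the `ec2_metadata_v1_disabled` profile key. A rough `fetch`-level equivalent; `fetchImdsProfile` is an illustrative name and the package itself uses `node:http`, not `fetch`:

```ts
// Illustrative only: the curl-level shape of the IMDSv2 token handshake.
const IMDS = "http://169.254.169.254";

async function fetchImdsProfile(): Promise<string> {
  // Step 1: obtain a session token valid for six hours (21600 seconds).
  const tokenRes = await fetch(`${IMDS}/latest/api/token`, {
    method: "PUT",
    headers: { "x-aws-ec2-metadata-token-ttl-seconds": "21600" },
  });
  const token = await tokenRes.text();
  // Step 2: list the instance's IAM role name, presenting the token.
  // Hosts that enforce IMDSv2 answer 401 to tokenless reads, which the
  // provider treats as a signal to fetch a fresh token.
  const profileRes = await fetch(`${IMDS}/latest/meta-data/iam/security-credentials/`, {
    headers: { "x-aws-ec2-metadata-token": token },
  });
  return (await profileRes.text()).trim();
}
```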
diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/ImdsCredentials.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/ImdsCredentials.d.ts new file mode 100644 index 00000000..c621e0a0 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/ImdsCredentials.d.ts @@ -0,0 +1,19 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +/** + * @internal + */ +export interface ImdsCredentials { + AccessKeyId: string; + SecretAccessKey: string; + Token: string; + Expiration: string; + AccountId?: string; +} +/** + * @internal + */ +export declare const isImdsCredentials: (arg: any) => arg is ImdsCredentials; +/** + * @internal + */ +export declare const fromImdsCredentials: (creds: ImdsCredentials) => AwsCredentialIdentity; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/RemoteProviderInit.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/RemoteProviderInit.d.ts new file mode 100644 index 00000000..4fe25f1e --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/RemoteProviderInit.d.ts @@ -0,0 +1,40 @@ +import { Logger } from "@smithy/types"; +/** + * @internal + */ +export declare const DEFAULT_TIMEOUT = 1000; +/** + * @internal + */ +export declare const DEFAULT_MAX_RETRIES = 0; +/** + * @public + */ +export interface RemoteProviderConfig { + /** + * The connection timeout (in milliseconds) + */ + timeout: number; + /** + * The maximum number of times the HTTP connection should be retried + */ + maxRetries: number; +} +/** + * @public + */ +export interface RemoteProviderInit extends Partial<RemoteProviderConfig> { + logger?: Logger; + /** + * Only used in the IMDS credential provider. + */ + ec2MetadataV1Disabled?: boolean; + /** + * AWS_PROFILE. + */ + profile?: string; +} +/** + * @internal + */ +export declare const providerConfigFromInit: ({ maxRetries, timeout, }: RemoteProviderInit) => RemoteProviderConfig; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/httpRequest.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/httpRequest.d.ts new file mode 100644 index 00000000..b514fef6 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/httpRequest.d.ts @@ -0,0 +1,7 @@ +/// <reference types="node" /> +import { Buffer } from "buffer"; +import { RequestOptions } from "http"; +/** + * @internal + */ +export declare function httpRequest(options: RequestOptions): Promise<Buffer>; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/index.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/index.d.ts new file mode 100644 index 00000000..a9d60945 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./ImdsCredentials"; +/** + * @internal + */ +export * from "./RemoteProviderInit"; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/retry.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/retry.d.ts new file mode 100644 index 00000000..d72d6048 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/retry.d.ts @@ -0,0 +1,10 @@ +/** + * @internal + */ +export interface RetryableProvider<T> { + (): Promise<T>; +} +/** + * @internal + */ +export declare const retry: <T>(toRetry: RetryableProvider<T>, maxRetries: number) => Promise<T>; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/types.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/types.d.ts new file mode 100644 index 00000000..2e9592b4 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/types.d.ts @@ -0,0 +1,7 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +/** + * @internal + */ +export interface InstanceMetadataCredentials extends AwsCredentialIdentity { + readonly originalExpiration?: Date; +}
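The static-stability utilities declared next correspond to `getExtendedInstanceMetadataCredentials` and `staticStabilityProvider` in the hunks above: when IMDS cannot be reached and the cached credentials have already expired, the provider keeps serving them and pushes the expiration out by five minutes plus up to five minutes of random jitter, preserving the first real expiry in `originalExpiration`. A standalone sketch of the extension math; `extendExpiration` and `CachedCredentials` are illustrative names:

```ts
interface CachedCredentials {
  expiration?: Date;
  originalExpiration?: Date;
}

// Mirrors the jitter window used by getExtendedInstanceMetadataCredentials:
// 5 minutes base + [0, 5) minutes of jitter, so extensions land 5-10 minutes out.
function extendExpiration(credentials: CachedCredentials): CachedCredentials {
  const baseSeconds = 5 * 60;
  const jitterSeconds = Math.floor(Math.random() * (5 * 60));
  const newExpiration = new Date(Date.now() + (baseSeconds + jitterSeconds) * 1000);
  return {
    ...credentials,
    // Keep the first genuine expiration so callers can tell how stale they are.
    originalExpiration: credentials.originalExpiration ?? credentials.expiration,
    expiration: newExpiration,
  };
}
```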
a/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getExtendedInstanceMetadataCredentials.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getExtendedInstanceMetadataCredentials.d.ts new file mode 100644 index 00000000..67edd2c6 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getExtendedInstanceMetadataCredentials.d.ts @@ -0,0 +1,6 @@ +import { Logger } from "@smithy/types"; +import { InstanceMetadataCredentials } from "../types"; +/** + * @internal + */ +export declare const getExtendedInstanceMetadataCredentials: (credentials: InstanceMetadataCredentials, logger: Logger) => InstanceMetadataCredentials; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getInstanceMetadataEndpoint.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getInstanceMetadataEndpoint.d.ts new file mode 100644 index 00000000..1ad772de --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getInstanceMetadataEndpoint.d.ts @@ -0,0 +1,21 @@ +import { Endpoint } from "@smithy/types"; +/** + * Returns the host to use for instance metadata service call. + * + * The host is read from endpoint which can be set either in + * {@link ENV_ENDPOINT_NAME} environment variable or {@link CONFIG_ENDPOINT_NAME} + * configuration property. + * + * If endpoint is not set, then endpoint mode is read either from + * {@link ENV_ENDPOINT_MODE_NAME} environment variable or {@link CONFIG_ENDPOINT_MODE_NAME} + * configuration property. If endpoint mode is not set, then default endpoint mode + * {@link EndpointMode.IPv4} is used. + * + * If endpoint mode is set to {@link EndpointMode.IPv4}, then the host is {@link Endpoint.IPv4}. + * If endpoint mode is set to {@link EndpointMode.IPv6}, then the host is {@link Endpoint.IPv6}. + * + * @returns Host to use for instance metadata service call. + * + * @internal + */ +export declare const getInstanceMetadataEndpoint: () => Promise<Endpoint>; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/staticStabilityProvider.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/staticStabilityProvider.d.ts new file mode 100644 index 00000000..337091ef --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/staticStabilityProvider.d.ts @@ -0,0 +1,16 @@ +import { Logger, Provider } from "@smithy/types"; +import { InstanceMetadataCredentials } from "../types"; +/** + * @internal + * + * IMDS credential supports static stability feature. When used, the expiration + * of recently issued credentials is extended. The server side allows using + * the recently expired credentials. This mitigates impact when clients using + * refreshable credentials are unable to retrieve updates.
+ * + * @param provider Credential provider + * @returns A credential provider that supports static stability + */ +export declare const staticStabilityProvider: (provider: Provider<InstanceMetadataCredentials>, options?: { + logger?: Logger | undefined; +}) => Provider<InstanceMetadataCredentials>; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/types.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/types.d.ts new file mode 100644 index 00000000..e74ec995 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/types.d.ts @@ -0,0 +1,7 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +/** + * @internal + */ +export interface InstanceMetadataCredentials extends AwsCredentialIdentity { + readonly originalExpiration?: Date; +} diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/utils/getExtendedInstanceMetadataCredentials.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/utils/getExtendedInstanceMetadataCredentials.d.ts new file mode 100644 index 00000000..f0ed41bc --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/utils/getExtendedInstanceMetadataCredentials.d.ts @@ -0,0 +1,6 @@ +import { Logger } from "@smithy/types"; +import { InstanceMetadataCredentials } from "../types"; +/** + * @internal + */ +export declare const getExtendedInstanceMetadataCredentials: (credentials: InstanceMetadataCredentials, logger: Logger) => InstanceMetadataCredentials; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/utils/getInstanceMetadataEndpoint.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/utils/getInstanceMetadataEndpoint.d.ts new file mode 100644 index 00000000..db6b6dae --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/utils/getInstanceMetadataEndpoint.d.ts @@ -0,0 +1,21 @@ +import { Endpoint } from "@smithy/types"; +/** + * Returns the host to use for instance metadata service call. + * + * The host is read from endpoint which can be set either in + * {@link ENV_ENDPOINT_NAME} environment variable or {@link CONFIG_ENDPOINT_NAME} + * configuration property. + * + * If endpoint is not set, then endpoint mode is read either from + * {@link ENV_ENDPOINT_MODE_NAME} environment variable or {@link CONFIG_ENDPOINT_MODE_NAME} + * configuration property. If endpoint mode is not set, then default endpoint mode + * {@link EndpointMode.IPv4} is used. + * + * If endpoint mode is set to {@link EndpointMode.IPv4}, then the host is {@link Endpoint.IPv4}. + * If endpoint mode is set to {@link EndpointMode.IPv6}, then the host is {@link Endpoint.IPv6}. + * + * @returns Host to use for instance metadata service call. + * + * @internal + */ +export declare const getInstanceMetadataEndpoint: () => Promise<Endpoint>; diff --git a/node_modules/@smithy/credential-provider-imds/dist-types/utils/staticStabilityProvider.d.ts b/node_modules/@smithy/credential-provider-imds/dist-types/utils/staticStabilityProvider.d.ts new file mode 100644 index 00000000..6bfcb694 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/dist-types/utils/staticStabilityProvider.d.ts @@ -0,0 +1,16 @@ +import { Logger, Provider } from "@smithy/types"; +import { InstanceMetadataCredentials } from "../types"; +/** + * @internal + * + * IMDS credential supports static stability feature. When used, the expiration + * of recently issued credentials is extended. The server side allows using + * the recently expired credentials. This mitigates impact when clients using + * refreshable credentials are unable to retrieve updates.
+ * + * @param provider Credential provider + * @returns A credential provider that supports static stability + */ +export declare const staticStabilityProvider: (provider: Provider<InstanceMetadataCredentials>, options?: { + logger?: Logger | undefined; +}) => Provider<InstanceMetadataCredentials>; diff --git a/node_modules/@smithy/credential-provider-imds/package.json b/node_modules/@smithy/credential-provider-imds/package.json new file mode 100644 index 00000000..8fd08242 --- /dev/null +++ b/node_modules/@smithy/credential-provider-imds/package.json @@ -0,0 +1,69 @@ +{ + "name": "@smithy/credential-provider-imds", + "version": "4.0.2", + "description": "AWS credential provider that sources credentials from the EC2 instance metadata service and ECS container metadata service", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline credential-provider-imds", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/credential-provider-imds", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/credential-provider-imds" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-codec/LICENSE b/node_modules/@smithy/eventstream-codec/LICENSE new file mode 100644 index 00000000..dd65ae06 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License.
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
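Reviewer note (not part of the diff): the @smithy/credential-provider-imds package vendored above is presumably pulled in transitively by the AWS SDK S3 client backing the new MinIO storage endpoints; app code never imports it directly. For orientation, a minimal usage sketch of its two entry points, with option values set to the package defaults (DEFAULT_TIMEOUT = 1000, DEFAULT_MAX_RETRIES = 0 in RemoteProviderInit.d.ts); treat it as illustrative, not as how the SDK wires these providers internally:

import { fromContainerMetadata, fromInstanceMetadata } from "@smithy/credential-provider-imds";

// Prefer the ECS container endpoint when its env vars are present
// (ENV_CMDS_FULL_URI / ENV_CMDS_RELATIVE_URI in fromContainerMetadata.d.ts),
// otherwise fall back to the EC2 instance metadata service.
const onEcs =
  process.env.AWS_CONTAINER_CREDENTIALS_FULL_URI ||
  process.env.AWS_CONTAINER_CREDENTIALS_RELATIVE_URI;
const provider = onEcs
  ? fromContainerMetadata({ timeout: 1000, maxRetries: 0 })
  : fromInstanceMetadata({ timeout: 1000, maxRetries: 0 });

provider().then((creds) => {
  // creds satisfies AwsCredentialIdentity: accessKeyId, secretAccessKey, sessionToken?
  console.log("resolved key id: " + creds.accessKeyId);
});

Per the staticStabilityProvider doc above, the instance provider can also keep serving recently expired credentials when IMDS is briefly unreachable.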
diff --git a/node_modules/@smithy/eventstream-codec/README.md b/node_modules/@smithy/eventstream-codec/README.md new file mode 100644 index 00000000..f846ca1b --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/README.md @@ -0,0 +1,4 @@ +# @smithy/eventstream-codec + +[![NPM version](https://img.shields.io/npm/v/@smithy/eventstream-codec/latest.svg)](https://www.npmjs.com/package/@smithy/eventstream-codec) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/eventstream-codec.svg)](https://www.npmjs.com/package/@smithy/eventstream-codec) diff --git a/node_modules/@smithy/eventstream-codec/dist-cjs/EventStreamCodec.js b/node_modules/@smithy/eventstream-codec/dist-cjs/EventStreamCodec.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-cjs/EventStreamCodec.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-codec/dist-cjs/HeaderMarshaller.js b/node_modules/@smithy/eventstream-codec/dist-cjs/HeaderMarshaller.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-cjs/HeaderMarshaller.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-codec/dist-cjs/Int64.js b/node_modules/@smithy/eventstream-codec/dist-cjs/Int64.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-cjs/Int64.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-codec/dist-cjs/Message.js b/node_modules/@smithy/eventstream-codec/dist-cjs/Message.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-cjs/Message.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-codec/dist-cjs/MessageDecoderStream.js b/node_modules/@smithy/eventstream-codec/dist-cjs/MessageDecoderStream.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-cjs/MessageDecoderStream.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-codec/dist-cjs/MessageEncoderStream.js b/node_modules/@smithy/eventstream-codec/dist-cjs/MessageEncoderStream.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-cjs/MessageEncoderStream.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-codec/dist-cjs/SmithyMessageDecoderStream.js b/node_modules/@smithy/eventstream-codec/dist-cjs/SmithyMessageDecoderStream.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-cjs/SmithyMessageDecoderStream.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-codec/dist-cjs/SmithyMessageEncoderStream.js b/node_modules/@smithy/eventstream-codec/dist-cjs/SmithyMessageEncoderStream.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-cjs/SmithyMessageEncoderStream.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/node_modules/@smithy/eventstream-codec/dist-cjs/TestVectors.fixture.js b/node_modules/@smithy/eventstream-codec/dist-cjs/TestVectors.fixture.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-cjs/TestVectors.fixture.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-codec/dist-cjs/index.js b/node_modules/@smithy/eventstream-codec/dist-cjs/index.js new file mode 100644 index 00000000..5f1dd737 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-cjs/index.js @@ -0,0 +1,475 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + EventStreamCodec: () => EventStreamCodec, + HeaderMarshaller: () => HeaderMarshaller, + Int64: () => Int64, + MessageDecoderStream: () => MessageDecoderStream, + MessageEncoderStream: () => MessageEncoderStream, + SmithyMessageDecoderStream: () => SmithyMessageDecoderStream, + SmithyMessageEncoderStream: () => SmithyMessageEncoderStream +}); +module.exports = __toCommonJS(src_exports); + +// src/EventStreamCodec.ts +var import_crc322 = require("@aws-crypto/crc32"); + +// src/HeaderMarshaller.ts + + +// src/Int64.ts +var import_util_hex_encoding = require("@smithy/util-hex-encoding"); +var Int64 = class _Int64 { + constructor(bytes) { + this.bytes = bytes; + if (bytes.byteLength !== 8) { + throw new Error("Int64 buffers must be exactly 8 bytes"); + } + } + static { + __name(this, "Int64"); + } + static fromNumber(number) { + if (number > 9223372036854776e3 || number < -9223372036854776e3) { + throw new Error(`${number} is too large (or, if negative, too small) to represent as an Int64`); + } + const bytes = new Uint8Array(8); + for (let i = 7, remaining = Math.abs(Math.round(number)); i > -1 && remaining > 0; i--, remaining /= 256) { + bytes[i] = remaining; + } + if (number < 0) { + negate(bytes); + } + return new _Int64(bytes); + } + /** + * Called implicitly by infix arithmetic operators. + */ + valueOf() { + const bytes = this.bytes.slice(0); + const negative = bytes[0] & 128; + if (negative) { + negate(bytes); + } + return parseInt((0, import_util_hex_encoding.toHex)(bytes), 16) * (negative ? 
-1 : 1); + } + toString() { + return String(this.valueOf()); + } +}; +function negate(bytes) { + for (let i = 0; i < 8; i++) { + bytes[i] ^= 255; + } + for (let i = 7; i > -1; i--) { + bytes[i]++; + if (bytes[i] !== 0) + break; + } +} +__name(negate, "negate"); + +// src/HeaderMarshaller.ts +var HeaderMarshaller = class { + constructor(toUtf8, fromUtf8) { + this.toUtf8 = toUtf8; + this.fromUtf8 = fromUtf8; + } + static { + __name(this, "HeaderMarshaller"); + } + format(headers) { + const chunks = []; + for (const headerName of Object.keys(headers)) { + const bytes = this.fromUtf8(headerName); + chunks.push(Uint8Array.from([bytes.byteLength]), bytes, this.formatHeaderValue(headers[headerName])); + } + const out = new Uint8Array(chunks.reduce((carry, bytes) => carry + bytes.byteLength, 0)); + let position = 0; + for (const chunk of chunks) { + out.set(chunk, position); + position += chunk.byteLength; + } + return out; + } + formatHeaderValue(header) { + switch (header.type) { + case "boolean": + return Uint8Array.from([header.value ? 0 /* boolTrue */ : 1 /* boolFalse */]); + case "byte": + return Uint8Array.from([2 /* byte */, header.value]); + case "short": + const shortView = new DataView(new ArrayBuffer(3)); + shortView.setUint8(0, 3 /* short */); + shortView.setInt16(1, header.value, false); + return new Uint8Array(shortView.buffer); + case "integer": + const intView = new DataView(new ArrayBuffer(5)); + intView.setUint8(0, 4 /* integer */); + intView.setInt32(1, header.value, false); + return new Uint8Array(intView.buffer); + case "long": + const longBytes = new Uint8Array(9); + longBytes[0] = 5 /* long */; + longBytes.set(header.value.bytes, 1); + return longBytes; + case "binary": + const binView = new DataView(new ArrayBuffer(3 + header.value.byteLength)); + binView.setUint8(0, 6 /* byteArray */); + binView.setUint16(1, header.value.byteLength, false); + const binBytes = new Uint8Array(binView.buffer); + binBytes.set(header.value, 3); + return binBytes; + case "string": + const utf8Bytes = this.fromUtf8(header.value); + const strView = new DataView(new ArrayBuffer(3 + utf8Bytes.byteLength)); + strView.setUint8(0, 7 /* string */); + strView.setUint16(1, utf8Bytes.byteLength, false); + const strBytes = new Uint8Array(strView.buffer); + strBytes.set(utf8Bytes, 3); + return strBytes; + case "timestamp": + const tsBytes = new Uint8Array(9); + tsBytes[0] = 8 /* timestamp */; + tsBytes.set(Int64.fromNumber(header.value.valueOf()).bytes, 1); + return tsBytes; + case "uuid": + if (!UUID_PATTERN.test(header.value)) { + throw new Error(`Invalid UUID received: ${header.value}`); + } + const uuidBytes = new Uint8Array(17); + uuidBytes[0] = 9 /* uuid */; + uuidBytes.set((0, import_util_hex_encoding.fromHex)(header.value.replace(/\-/g, "")), 1); + return uuidBytes; + } + } + parse(headers) { + const out = {}; + let position = 0; + while (position < headers.byteLength) { + const nameLength = headers.getUint8(position++); + const name = this.toUtf8(new Uint8Array(headers.buffer, headers.byteOffset + position, nameLength)); + position += nameLength; + switch (headers.getUint8(position++)) { + case 0 /* boolTrue */: + out[name] = { + type: BOOLEAN_TAG, + value: true + }; + break; + case 1 /* boolFalse */: + out[name] = { + type: BOOLEAN_TAG, + value: false + }; + break; + case 2 /* byte */: + out[name] = { + type: BYTE_TAG, + value: headers.getInt8(position++) + }; + break; + case 3 /* short */: + out[name] = { + type: SHORT_TAG, + value: headers.getInt16(position, false) + }; + position += 2; + break; 
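+ // The remaining tags follow the same big-endian layout: fixed-width values for integer (4 bytes), long and timestamp (8 bytes) and uuid (16 bytes), while binary and string values carry a uint16 length prefix.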
+ case 4 /* integer */: + out[name] = { + type: INT_TAG, + value: headers.getInt32(position, false) + }; + position += 4; + break; + case 5 /* long */: + out[name] = { + type: LONG_TAG, + value: new Int64(new Uint8Array(headers.buffer, headers.byteOffset + position, 8)) + }; + position += 8; + break; + case 6 /* byteArray */: + const binaryLength = headers.getUint16(position, false); + position += 2; + out[name] = { + type: BINARY_TAG, + value: new Uint8Array(headers.buffer, headers.byteOffset + position, binaryLength) + }; + position += binaryLength; + break; + case 7 /* string */: + const stringLength = headers.getUint16(position, false); + position += 2; + out[name] = { + type: STRING_TAG, + value: this.toUtf8(new Uint8Array(headers.buffer, headers.byteOffset + position, stringLength)) + }; + position += stringLength; + break; + case 8 /* timestamp */: + out[name] = { + type: TIMESTAMP_TAG, + value: new Date(new Int64(new Uint8Array(headers.buffer, headers.byteOffset + position, 8)).valueOf()) + }; + position += 8; + break; + case 9 /* uuid */: + const uuidBytes = new Uint8Array(headers.buffer, headers.byteOffset + position, 16); + position += 16; + out[name] = { + type: UUID_TAG, + value: `${(0, import_util_hex_encoding.toHex)(uuidBytes.subarray(0, 4))}-${(0, import_util_hex_encoding.toHex)(uuidBytes.subarray(4, 6))}-${(0, import_util_hex_encoding.toHex)( + uuidBytes.subarray(6, 8) + )}-${(0, import_util_hex_encoding.toHex)(uuidBytes.subarray(8, 10))}-${(0, import_util_hex_encoding.toHex)(uuidBytes.subarray(10))}` + }; + break; + default: + throw new Error(`Unrecognized header type tag`); + } + } + return out; + } +}; +var BOOLEAN_TAG = "boolean"; +var BYTE_TAG = "byte"; +var SHORT_TAG = "short"; +var INT_TAG = "integer"; +var LONG_TAG = "long"; +var BINARY_TAG = "binary"; +var STRING_TAG = "string"; +var TIMESTAMP_TAG = "timestamp"; +var UUID_TAG = "uuid"; +var UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/; + +// src/splitMessage.ts +var import_crc32 = require("@aws-crypto/crc32"); +var PRELUDE_MEMBER_LENGTH = 4; +var PRELUDE_LENGTH = PRELUDE_MEMBER_LENGTH * 2; +var CHECKSUM_LENGTH = 4; +var MINIMUM_MESSAGE_LENGTH = PRELUDE_LENGTH + CHECKSUM_LENGTH * 2; +function splitMessage({ byteLength, byteOffset, buffer }) { + if (byteLength < MINIMUM_MESSAGE_LENGTH) { + throw new Error("Provided message too short to accommodate event stream message overhead"); + } + const view = new DataView(buffer, byteOffset, byteLength); + const messageLength = view.getUint32(0, false); + if (byteLength !== messageLength) { + throw new Error("Reported message length does not match received message length"); + } + const headerLength = view.getUint32(PRELUDE_MEMBER_LENGTH, false); + const expectedPreludeChecksum = view.getUint32(PRELUDE_LENGTH, false); + const expectedMessageChecksum = view.getUint32(byteLength - CHECKSUM_LENGTH, false); + const checksummer = new import_crc32.Crc32().update(new Uint8Array(buffer, byteOffset, PRELUDE_LENGTH)); + if (expectedPreludeChecksum !== checksummer.digest()) { + throw new Error( + `The prelude checksum specified in the message (${expectedPreludeChecksum}) does not match the calculated CRC32 checksum (${checksummer.digest()})` + ); + } + checksummer.update( + new Uint8Array(buffer, byteOffset + PRELUDE_LENGTH, byteLength - (PRELUDE_LENGTH + CHECKSUM_LENGTH)) + ); + if (expectedMessageChecksum !== checksummer.digest()) { + throw new Error( + `The message checksum (${checksummer.digest()}) did not match the expected value of 
${expectedMessageChecksum}` + ); + } + return { + headers: new DataView(buffer, byteOffset + PRELUDE_LENGTH + CHECKSUM_LENGTH, headerLength), + body: new Uint8Array( + buffer, + byteOffset + PRELUDE_LENGTH + CHECKSUM_LENGTH + headerLength, + messageLength - headerLength - (PRELUDE_LENGTH + CHECKSUM_LENGTH + CHECKSUM_LENGTH) + ) + }; +} +__name(splitMessage, "splitMessage"); + +// src/EventStreamCodec.ts +var EventStreamCodec = class { + static { + __name(this, "EventStreamCodec"); + } + constructor(toUtf8, fromUtf8) { + this.headerMarshaller = new HeaderMarshaller(toUtf8, fromUtf8); + this.messageBuffer = []; + this.isEndOfStream = false; + } + feed(message) { + this.messageBuffer.push(this.decode(message)); + } + endOfStream() { + this.isEndOfStream = true; + } + getMessage() { + const message = this.messageBuffer.pop(); + const isEndOfStream = this.isEndOfStream; + return { + getMessage() { + return message; + }, + isEndOfStream() { + return isEndOfStream; + } + }; + } + getAvailableMessages() { + const messages = this.messageBuffer; + this.messageBuffer = []; + const isEndOfStream = this.isEndOfStream; + return { + getMessages() { + return messages; + }, + isEndOfStream() { + return isEndOfStream; + } + }; + } + /** + * Convert a structured JavaScript object with tagged headers into a binary + * event stream message. + */ + encode({ headers: rawHeaders, body }) { + const headers = this.headerMarshaller.format(rawHeaders); + const length = headers.byteLength + body.byteLength + 16; + const out = new Uint8Array(length); + const view = new DataView(out.buffer, out.byteOffset, out.byteLength); + const checksum = new import_crc322.Crc32(); + view.setUint32(0, length, false); + view.setUint32(4, headers.byteLength, false); + view.setUint32(8, checksum.update(out.subarray(0, 8)).digest(), false); + out.set(headers, 12); + out.set(body, headers.byteLength + 12); + view.setUint32(length - 4, checksum.update(out.subarray(8, length - 4)).digest(), false); + return out; + } + /** + * Convert a binary event stream message into a JavaScript object with an + * opaque, binary body and tagged, parsed headers. + */ + decode(message) { + const { headers, body } = splitMessage(message); + return { headers: this.headerMarshaller.parse(headers), body }; + } + /** + * Convert a structured JavaScript object with tagged headers into a binary + * event stream message header. 
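+ * Note: the result is only the marshalled header block; the 16 bytes of + * length, header-length and CRC32 framing that encode() adds are not included.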
+ */ + formatHeaders(rawHeaders) { + return this.headerMarshaller.format(rawHeaders); + } +}; + +// src/MessageDecoderStream.ts +var MessageDecoderStream = class { + constructor(options) { + this.options = options; + } + static { + __name(this, "MessageDecoderStream"); + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const bytes of this.options.inputStream) { + const decoded = this.options.decoder.decode(bytes); + yield decoded; + } + } +}; + +// src/MessageEncoderStream.ts +var MessageEncoderStream = class { + constructor(options) { + this.options = options; + } + static { + __name(this, "MessageEncoderStream"); + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const msg of this.options.messageStream) { + const encoded = this.options.encoder.encode(msg); + yield encoded; + } + if (this.options.includeEndFrame) { + yield new Uint8Array(0); + } + } +}; + +// src/SmithyMessageDecoderStream.ts +var SmithyMessageDecoderStream = class { + constructor(options) { + this.options = options; + } + static { + __name(this, "SmithyMessageDecoderStream"); + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const message of this.options.messageStream) { + const deserialized = await this.options.deserializer(message); + if (deserialized === void 0) + continue; + yield deserialized; + } + } +}; + +// src/SmithyMessageEncoderStream.ts +var SmithyMessageEncoderStream = class { + constructor(options) { + this.options = options; + } + static { + __name(this, "SmithyMessageEncoderStream"); + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const chunk of this.options.inputStream) { + const payloadBuf = this.options.serializer(chunk); + yield payloadBuf; + } + } +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + EventStreamCodec, + HeaderMarshaller, + Int64, + MessageDecoderStream, + MessageEncoderStream, + SmithyMessageDecoderStream, + SmithyMessageEncoderStream +}); + diff --git a/node_modules/@smithy/eventstream-codec/dist-cjs/splitMessage.js b/node_modules/@smithy/eventstream-codec/dist-cjs/splitMessage.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-cjs/splitMessage.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-codec/dist-cjs/vectorTypes.fixture.js b/node_modules/@smithy/eventstream-codec/dist-cjs/vectorTypes.fixture.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-cjs/vectorTypes.fixture.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-codec/dist-es/EventStreamCodec.js b/node_modules/@smithy/eventstream-codec/dist-es/EventStreamCodec.js new file mode 100644 index 00000000..dacbe48e --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-es/EventStreamCodec.js @@ -0,0 +1,62 @@ +import { Crc32 } from "@aws-crypto/crc32"; +import { HeaderMarshaller } from "./HeaderMarshaller"; +import { splitMessage } from "./splitMessage"; +export class EventStreamCodec { + constructor(toUtf8, fromUtf8) { + this.headerMarshaller = new HeaderMarshaller(toUtf8, fromUtf8); + this.messageBuffer = []; + this.isEndOfStream = false; + } + feed(message) { + 
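+ // Each call must pass exactly one length-prefixed frame: decode() verifies both CRC32 checksums via splitMessage(), and the parsed message is buffered until drained with getMessage() or getAvailableMessages().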
this.messageBuffer.push(this.decode(message)); + } + endOfStream() { + this.isEndOfStream = true; + } + getMessage() { + const message = this.messageBuffer.pop(); + const isEndOfStream = this.isEndOfStream; + return { + getMessage() { + return message; + }, + isEndOfStream() { + return isEndOfStream; + }, + }; + } + getAvailableMessages() { + const messages = this.messageBuffer; + this.messageBuffer = []; + const isEndOfStream = this.isEndOfStream; + return { + getMessages() { + return messages; + }, + isEndOfStream() { + return isEndOfStream; + }, + }; + } + encode({ headers: rawHeaders, body }) { + const headers = this.headerMarshaller.format(rawHeaders); + const length = headers.byteLength + body.byteLength + 16; + const out = new Uint8Array(length); + const view = new DataView(out.buffer, out.byteOffset, out.byteLength); + const checksum = new Crc32(); + view.setUint32(0, length, false); + view.setUint32(4, headers.byteLength, false); + view.setUint32(8, checksum.update(out.subarray(0, 8)).digest(), false); + out.set(headers, 12); + out.set(body, headers.byteLength + 12); + view.setUint32(length - 4, checksum.update(out.subarray(8, length - 4)).digest(), false); + return out; + } + decode(message) { + const { headers, body } = splitMessage(message); + return { headers: this.headerMarshaller.parse(headers), body }; + } + formatHeaders(rawHeaders) { + return this.headerMarshaller.format(rawHeaders); + } +} diff --git a/node_modules/@smithy/eventstream-codec/dist-es/HeaderMarshaller.js b/node_modules/@smithy/eventstream-codec/dist-es/HeaderMarshaller.js new file mode 100644 index 00000000..27995b4f --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-es/HeaderMarshaller.js @@ -0,0 +1,182 @@ +import { fromHex, toHex } from "@smithy/util-hex-encoding"; +import { Int64 } from "./Int64"; +export class HeaderMarshaller { + constructor(toUtf8, fromUtf8) { + this.toUtf8 = toUtf8; + this.fromUtf8 = fromUtf8; + } + format(headers) { + const chunks = []; + for (const headerName of Object.keys(headers)) { + const bytes = this.fromUtf8(headerName); + chunks.push(Uint8Array.from([bytes.byteLength]), bytes, this.formatHeaderValue(headers[headerName])); + } + const out = new Uint8Array(chunks.reduce((carry, bytes) => carry + bytes.byteLength, 0)); + let position = 0; + for (const chunk of chunks) { + out.set(chunk, position); + position += chunk.byteLength; + } + return out; + } + formatHeaderValue(header) { + switch (header.type) { + case "boolean": + return Uint8Array.from([header.value ? 
0 : 1]); + case "byte": + return Uint8Array.from([2, header.value]); + case "short": + const shortView = new DataView(new ArrayBuffer(3)); + shortView.setUint8(0, 3); + shortView.setInt16(1, header.value, false); + return new Uint8Array(shortView.buffer); + case "integer": + const intView = new DataView(new ArrayBuffer(5)); + intView.setUint8(0, 4); + intView.setInt32(1, header.value, false); + return new Uint8Array(intView.buffer); + case "long": + const longBytes = new Uint8Array(9); + longBytes[0] = 5; + longBytes.set(header.value.bytes, 1); + return longBytes; + case "binary": + const binView = new DataView(new ArrayBuffer(3 + header.value.byteLength)); + binView.setUint8(0, 6); + binView.setUint16(1, header.value.byteLength, false); + const binBytes = new Uint8Array(binView.buffer); + binBytes.set(header.value, 3); + return binBytes; + case "string": + const utf8Bytes = this.fromUtf8(header.value); + const strView = new DataView(new ArrayBuffer(3 + utf8Bytes.byteLength)); + strView.setUint8(0, 7); + strView.setUint16(1, utf8Bytes.byteLength, false); + const strBytes = new Uint8Array(strView.buffer); + strBytes.set(utf8Bytes, 3); + return strBytes; + case "timestamp": + const tsBytes = new Uint8Array(9); + tsBytes[0] = 8; + tsBytes.set(Int64.fromNumber(header.value.valueOf()).bytes, 1); + return tsBytes; + case "uuid": + if (!UUID_PATTERN.test(header.value)) { + throw new Error(`Invalid UUID received: ${header.value}`); + } + const uuidBytes = new Uint8Array(17); + uuidBytes[0] = 9; + uuidBytes.set(fromHex(header.value.replace(/\-/g, "")), 1); + return uuidBytes; + } + } + parse(headers) { + const out = {}; + let position = 0; + while (position < headers.byteLength) { + const nameLength = headers.getUint8(position++); + const name = this.toUtf8(new Uint8Array(headers.buffer, headers.byteOffset + position, nameLength)); + position += nameLength; + switch (headers.getUint8(position++)) { + case 0: + out[name] = { + type: BOOLEAN_TAG, + value: true, + }; + break; + case 1: + out[name] = { + type: BOOLEAN_TAG, + value: false, + }; + break; + case 2: + out[name] = { + type: BYTE_TAG, + value: headers.getInt8(position++), + }; + break; + case 3: + out[name] = { + type: SHORT_TAG, + value: headers.getInt16(position, false), + }; + position += 2; + break; + case 4: + out[name] = { + type: INT_TAG, + value: headers.getInt32(position, false), + }; + position += 4; + break; + case 5: + out[name] = { + type: LONG_TAG, + value: new Int64(new Uint8Array(headers.buffer, headers.byteOffset + position, 8)), + }; + position += 8; + break; + case 6: + const binaryLength = headers.getUint16(position, false); + position += 2; + out[name] = { + type: BINARY_TAG, + value: new Uint8Array(headers.buffer, headers.byteOffset + position, binaryLength), + }; + position += binaryLength; + break; + case 7: + const stringLength = headers.getUint16(position, false); + position += 2; + out[name] = { + type: STRING_TAG, + value: this.toUtf8(new Uint8Array(headers.buffer, headers.byteOffset + position, stringLength)), + }; + position += stringLength; + break; + case 8: + out[name] = { + type: TIMESTAMP_TAG, + value: new Date(new Int64(new Uint8Array(headers.buffer, headers.byteOffset + position, 8)).valueOf()), + }; + position += 8; + break; + case 9: + const uuidBytes = new Uint8Array(headers.buffer, headers.byteOffset + position, 16); + position += 16; + out[name] = { + type: UUID_TAG, + value: `${toHex(uuidBytes.subarray(0, 4))}-${toHex(uuidBytes.subarray(4, 6))}-${toHex(uuidBytes.subarray(6, 
8))}-${toHex(uuidBytes.subarray(8, 10))}-${toHex(uuidBytes.subarray(10))}`, + }; + break; + default: + throw new Error(`Unrecognized header type tag`); + } + } + return out; + } +} +var HEADER_VALUE_TYPE; +(function (HEADER_VALUE_TYPE) { + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["boolTrue"] = 0] = "boolTrue"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["boolFalse"] = 1] = "boolFalse"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["byte"] = 2] = "byte"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["short"] = 3] = "short"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["integer"] = 4] = "integer"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["long"] = 5] = "long"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["byteArray"] = 6] = "byteArray"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["string"] = 7] = "string"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["timestamp"] = 8] = "timestamp"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["uuid"] = 9] = "uuid"; +})(HEADER_VALUE_TYPE || (HEADER_VALUE_TYPE = {})); +const BOOLEAN_TAG = "boolean"; +const BYTE_TAG = "byte"; +const SHORT_TAG = "short"; +const INT_TAG = "integer"; +const LONG_TAG = "long"; +const BINARY_TAG = "binary"; +const STRING_TAG = "string"; +const TIMESTAMP_TAG = "timestamp"; +const UUID_TAG = "uuid"; +const UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/; diff --git a/node_modules/@smithy/eventstream-codec/dist-es/Int64.js b/node_modules/@smithy/eventstream-codec/dist-es/Int64.js new file mode 100644 index 00000000..f3f77850 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-es/Int64.js @@ -0,0 +1,43 @@ +import { toHex } from "@smithy/util-hex-encoding"; +export class Int64 { + constructor(bytes) { + this.bytes = bytes; + if (bytes.byteLength !== 8) { + throw new Error("Int64 buffers must be exactly 8 bytes"); + } + } + static fromNumber(number) { + if (number > 9223372036854776000 || number < -9223372036854776000) { + throw new Error(`${number} is too large (or, if negative, too small) to represent as an Int64`); + } + const bytes = new Uint8Array(8); + for (let i = 7, remaining = Math.abs(Math.round(number)); i > -1 && remaining > 0; i--, remaining /= 256) { + bytes[i] = remaining; + } + if (number < 0) { + negate(bytes); + } + return new Int64(bytes); + } + valueOf() { + const bytes = this.bytes.slice(0); + const negative = bytes[0] & 0b10000000; + if (negative) { + negate(bytes); + } + return parseInt(toHex(bytes), 16) * (negative ? 
-1 : 1); + } + toString() { + return String(this.valueOf()); + } +} +function negate(bytes) { + for (let i = 0; i < 8; i++) { + bytes[i] ^= 0xff; + } + for (let i = 7; i > -1; i--) { + bytes[i]++; + if (bytes[i] !== 0) + break; + } +} diff --git a/node_modules/@smithy/eventstream-codec/dist-es/Message.js b/node_modules/@smithy/eventstream-codec/dist-es/Message.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-es/Message.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/eventstream-codec/dist-es/MessageDecoderStream.js b/node_modules/@smithy/eventstream-codec/dist-es/MessageDecoderStream.js new file mode 100644 index 00000000..f14ade54 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-es/MessageDecoderStream.js @@ -0,0 +1,14 @@ +export class MessageDecoderStream { + constructor(options) { + this.options = options; + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const bytes of this.options.inputStream) { + const decoded = this.options.decoder.decode(bytes); + yield decoded; + } + } +} diff --git a/node_modules/@smithy/eventstream-codec/dist-es/MessageEncoderStream.js b/node_modules/@smithy/eventstream-codec/dist-es/MessageEncoderStream.js new file mode 100644 index 00000000..7d5aa8cd --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-es/MessageEncoderStream.js @@ -0,0 +1,17 @@ +export class MessageEncoderStream { + constructor(options) { + this.options = options; + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const msg of this.options.messageStream) { + const encoded = this.options.encoder.encode(msg); + yield encoded; + } + if (this.options.includeEndFrame) { + yield new Uint8Array(0); + } + } +} diff --git a/node_modules/@smithy/eventstream-codec/dist-es/SmithyMessageDecoderStream.js b/node_modules/@smithy/eventstream-codec/dist-es/SmithyMessageDecoderStream.js new file mode 100644 index 00000000..73b15995 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-es/SmithyMessageDecoderStream.js @@ -0,0 +1,16 @@ +export class SmithyMessageDecoderStream { + constructor(options) { + this.options = options; + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const message of this.options.messageStream) { + const deserialized = await this.options.deserializer(message); + if (deserialized === undefined) + continue; + yield deserialized; + } + } +} diff --git a/node_modules/@smithy/eventstream-codec/dist-es/SmithyMessageEncoderStream.js b/node_modules/@smithy/eventstream-codec/dist-es/SmithyMessageEncoderStream.js new file mode 100644 index 00000000..a124026c --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-es/SmithyMessageEncoderStream.js @@ -0,0 +1,14 @@ +export class SmithyMessageEncoderStream { + constructor(options) { + this.options = options; + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const chunk of this.options.inputStream) { + const payloadBuf = this.options.serializer(chunk); + yield payloadBuf; + } + } +} diff --git a/node_modules/@smithy/eventstream-codec/dist-es/TestVectors.fixture.js b/node_modules/@smithy/eventstream-codec/dist-es/TestVectors.fixture.js new file mode 100644 index 00000000..3fc4962d --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-es/TestVectors.fixture.js @@ -0,0 +1,146 @@ +import { Int64 } 
from "./Int64"; +export const vectors = { + all_headers: { + expectation: "success", + encoded: Uint8Array.from([ + 0, 0, 0, 204, 0, 0, 0, 175, 15, 174, 100, 202, 10, 101, 118, 101, 110, 116, 45, 116, 121, 112, 101, 4, 0, 0, 160, + 12, 12, 99, 111, 110, 116, 101, 110, 116, 45, 116, 121, 112, 101, 7, 0, 16, 97, 112, 112, 108, 105, 99, 97, 116, + 105, 111, 110, 47, 106, 115, 111, 110, 10, 98, 111, 111, 108, 32, 102, 97, 108, 115, 101, 1, 9, 98, 111, 111, 108, + 32, 116, 114, 117, 101, 0, 4, 98, 121, 116, 101, 2, 207, 8, 98, 121, 116, 101, 32, 98, 117, 102, 6, 0, 20, 73, 39, + 109, 32, 97, 32, 108, 105, 116, 116, 108, 101, 32, 116, 101, 97, 112, 111, 116, 33, 9, 116, 105, 109, 101, 115, + 116, 97, 109, 112, 8, 0, 0, 0, 0, 0, 132, 95, 237, 5, 105, 110, 116, 49, 54, 3, 0, 42, 5, 105, 110, 116, 54, 52, + 5, 0, 0, 0, 0, 2, 135, 87, 178, 4, 117, 117, 105, 100, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, + 123, 39, 102, 111, 111, 39, 58, 39, 98, 97, 114, 39, 125, 171, 165, 241, 12, + ]), + decoded: { + headers: { + "event-type": { + type: "integer", + value: 40972, + }, + "content-type": { + type: "string", + value: "application/json", + }, + "bool false": { + type: "boolean", + value: false, + }, + "bool true": { + type: "boolean", + value: true, + }, + byte: { + type: "byte", + value: -49, + }, + "byte buf": { + type: "binary", + value: Uint8Array.from([ + 73, 39, 109, 32, 97, 32, 108, 105, 116, 116, 108, 101, 32, 116, 101, 97, 112, 111, 116, 33, + ]), + }, + timestamp: { + type: "timestamp", + value: new Date(8675309), + }, + int16: { + type: "short", + value: 42, + }, + int64: { + type: "long", + value: Int64.fromNumber(42424242), + }, + uuid: { + type: "uuid", + value: "01020304-0506-0708-090a-0b0c0d0e0f10", + }, + }, + body: Uint8Array.from([123, 39, 102, 111, 111, 39, 58, 39, 98, 97, 114, 39, 125]), + }, + }, + empty_message: { + expectation: "success", + encoded: Uint8Array.from([0, 0, 0, 16, 0, 0, 0, 0, 5, 194, 72, 235, 125, 152, 200, 255]), + decoded: { + headers: {}, + body: Uint8Array.from([]), + }, + }, + int32_header: { + expectation: "success", + encoded: Uint8Array.from([ + 0, 0, 0, 45, 0, 0, 0, 16, 65, 196, 36, 184, 10, 101, 118, 101, 110, 116, 45, 116, 121, 112, 101, 4, 0, 0, 160, 12, + 123, 39, 102, 111, 111, 39, 58, 39, 98, 97, 114, 39, 125, 54, 244, 128, 160, + ]), + decoded: { + headers: { + "event-type": { + type: "integer", + value: 40972, + }, + }, + body: Uint8Array.from([123, 39, 102, 111, 111, 39, 58, 39, 98, 97, 114, 39, 125]), + }, + }, + payload_no_headers: { + expectation: "success", + encoded: Uint8Array.from([ + 0, 0, 0, 29, 0, 0, 0, 0, 253, 82, 140, 90, 123, 39, 102, 111, 111, 39, 58, 39, 98, 97, 114, 39, 125, 195, 101, 57, + 54, + ]), + decoded: { + headers: {}, + body: Uint8Array.from([123, 39, 102, 111, 111, 39, 58, 39, 98, 97, 114, 39, 125]), + }, + }, + payload_one_str_header: { + expectation: "success", + encoded: Uint8Array.from([ + 0, 0, 0, 61, 0, 0, 0, 32, 7, 253, 131, 150, 12, 99, 111, 110, 116, 101, 110, 116, 45, 116, 121, 112, 101, 7, 0, + 16, 97, 112, 112, 108, 105, 99, 97, 116, 105, 111, 110, 47, 106, 115, 111, 110, 123, 39, 102, 111, 111, 39, 58, + 39, 98, 97, 114, 39, 125, 141, 156, 8, 177, + ]), + decoded: { + headers: { + "content-type": { + type: "string", + value: "application/json", + }, + }, + body: Uint8Array.from([123, 39, 102, 111, 111, 39, 58, 39, 98, 97, 114, 39, 125]), + }, + }, + corrupted_headers: { + expectation: "failure", + encoded: Uint8Array.from([ + 0, 0, 0, 61, 0, 0, 0, 32, 7, 253, 131, 150, 12, 99, 111, 110, 116, 
101, 110, 116, 45, 116, 121, 112, 101, 7, 0, + 16, 97, 112, 112, 108, 105, 99, 97, 116, 105, 111, 110, 47, 106, 115, 111, 110, 123, 97, 102, 111, 111, 39, 58, + 39, 98, 97, 114, 39, 125, 141, 156, 8, 177, + ]), + }, + corrupted_header_len: { + expectation: "failure", + encoded: Uint8Array.from([ + 0, 0, 0, 61, 0, 0, 0, 33, 7, 253, 131, 150, 12, 99, 111, 110, 116, 101, 110, 116, 45, 116, 121, 112, 101, 7, 0, + 16, 97, 112, 112, 108, 105, 99, 97, 116, 105, 111, 110, 47, 106, 115, 111, 110, 123, 39, 102, 111, 111, 39, 58, + 39, 98, 97, 114, 39, 125, 141, 156, 8, 177, + ]), + }, + corrupted_length: { + expectation: "failure", + encoded: Uint8Array.from([ + 0, 0, 0, 62, 0, 0, 0, 32, 7, 253, 131, 150, 12, 99, 111, 110, 116, 101, 110, 116, 45, 116, 121, 112, 101, 7, 0, + 16, 97, 112, 112, 108, 105, 99, 97, 116, 105, 111, 110, 47, 106, 115, 111, 110, 123, 39, 102, 111, 111, 39, 58, + 39, 98, 97, 114, 39, 125, 141, 156, 8, 177, + ]), + }, + corrupted_payload: { + expectation: "failure", + encoded: Uint8Array.from([ + 0, 0, 0, 29, 0, 0, 0, 0, 253, 82, 140, 90, 91, 39, 102, 111, 111, 39, 58, 39, 98, 97, 114, 39, 125, 195, 101, 57, + 54, + ]), + }, +}; diff --git a/node_modules/@smithy/eventstream-codec/dist-es/index.js b/node_modules/@smithy/eventstream-codec/dist-es/index.js new file mode 100644 index 00000000..458feabc --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-es/index.js @@ -0,0 +1,8 @@ +export * from "./EventStreamCodec"; +export * from "./HeaderMarshaller"; +export * from "./Int64"; +export * from "./Message"; +export * from "./MessageDecoderStream"; +export * from "./MessageEncoderStream"; +export * from "./SmithyMessageDecoderStream"; +export * from "./SmithyMessageEncoderStream"; diff --git a/node_modules/@smithy/eventstream-codec/dist-es/splitMessage.js b/node_modules/@smithy/eventstream-codec/dist-es/splitMessage.js new file mode 100644 index 00000000..725346bb --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-es/splitMessage.js @@ -0,0 +1,30 @@ +import { Crc32 } from "@aws-crypto/crc32"; +const PRELUDE_MEMBER_LENGTH = 4; +const PRELUDE_LENGTH = PRELUDE_MEMBER_LENGTH * 2; +const CHECKSUM_LENGTH = 4; +const MINIMUM_MESSAGE_LENGTH = PRELUDE_LENGTH + CHECKSUM_LENGTH * 2; +export function splitMessage({ byteLength, byteOffset, buffer }) { + if (byteLength < MINIMUM_MESSAGE_LENGTH) { + throw new Error("Provided message too short to accommodate event stream message overhead"); + } + const view = new DataView(buffer, byteOffset, byteLength); + const messageLength = view.getUint32(0, false); + if (byteLength !== messageLength) { + throw new Error("Reported message length does not match received message length"); + } + const headerLength = view.getUint32(PRELUDE_MEMBER_LENGTH, false); + const expectedPreludeChecksum = view.getUint32(PRELUDE_LENGTH, false); + const expectedMessageChecksum = view.getUint32(byteLength - CHECKSUM_LENGTH, false); + const checksummer = new Crc32().update(new Uint8Array(buffer, byteOffset, PRELUDE_LENGTH)); + if (expectedPreludeChecksum !== checksummer.digest()) { + throw new Error(`The prelude checksum specified in the message (${expectedPreludeChecksum}) does not match the calculated CRC32 checksum (${checksummer.digest()})`); + } + checksummer.update(new Uint8Array(buffer, byteOffset + PRELUDE_LENGTH, byteLength - (PRELUDE_LENGTH + CHECKSUM_LENGTH))); + if (expectedMessageChecksum !== checksummer.digest()) { + throw new Error(`The message checksum (${checksummer.digest()}) did not match the expected value of 
${expectedMessageChecksum}`); + } + return { + headers: new DataView(buffer, byteOffset + PRELUDE_LENGTH + CHECKSUM_LENGTH, headerLength), + body: new Uint8Array(buffer, byteOffset + PRELUDE_LENGTH + CHECKSUM_LENGTH + headerLength, messageLength - headerLength - (PRELUDE_LENGTH + CHECKSUM_LENGTH + CHECKSUM_LENGTH)), + }; +} diff --git a/node_modules/@smithy/eventstream-codec/dist-es/vectorTypes.fixture.js b/node_modules/@smithy/eventstream-codec/dist-es/vectorTypes.fixture.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-es/vectorTypes.fixture.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/eventstream-codec/dist-types/EventStreamCodec.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/EventStreamCodec.d.ts new file mode 100644 index 00000000..647ac134 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/EventStreamCodec.d.ts @@ -0,0 +1,31 @@ +import { AvailableMessage, AvailableMessages, Message, MessageDecoder, MessageEncoder, MessageHeaders } from "@smithy/types"; +import { Decoder, Encoder } from "@smithy/types"; +/** + * A Codec that can convert binary-packed event stream messages into + * JavaScript objects and back again into their binary format. + */ +export declare class EventStreamCodec implements MessageEncoder, MessageDecoder { + private readonly headerMarshaller; + private messageBuffer; + private isEndOfStream; + constructor(toUtf8: Encoder, fromUtf8: Decoder); + feed(message: ArrayBufferView): void; + endOfStream(): void; + getMessage(): AvailableMessage; + getAvailableMessages(): AvailableMessages; + /** + * Convert a structured JavaScript object with tagged headers into a binary + * event stream message. + */ + encode({ headers: rawHeaders, body }: Message): Uint8Array; + /** + * Convert a binary event stream message into a JavaScript object with an + * opaque, binary body and tagged, parsed headers. + */ + decode(message: ArrayBufferView): Message; + /** + * Convert a structured JavaScript object with tagged headers into a binary + * event stream message header. + */ + formatHeaders(rawHeaders: MessageHeaders): Uint8Array; +} diff --git a/node_modules/@smithy/eventstream-codec/dist-types/HeaderMarshaller.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/HeaderMarshaller.d.ts new file mode 100644 index 00000000..481e0d86 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/HeaderMarshaller.d.ts @@ -0,0 +1,12 @@ +import { Decoder, Encoder, MessageHeaders } from "@smithy/types"; +/** + * @internal + */ +export declare class HeaderMarshaller { + private readonly toUtf8; + private readonly fromUtf8; + constructor(toUtf8: Encoder, fromUtf8: Decoder); + format(headers: MessageHeaders): Uint8Array; + private formatHeaderValue; + parse(headers: DataView): MessageHeaders; +} diff --git a/node_modules/@smithy/eventstream-codec/dist-types/Int64.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/Int64.d.ts new file mode 100644 index 00000000..16c6a80c --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/Int64.d.ts @@ -0,0 +1,20 @@ +import { Int64 as IInt64 } from "@smithy/types"; +export interface Int64 extends IInt64 { +} +/** + * A lossless representation of a signed, 64-bit integer. Instances of this + * class may be used in arithmetic expressions as if they were numeric + * primitives, but the binary representation will be preserved unchanged as the + * `bytes` property of the object. 
The bytes should be encoded as big-endian, + * two's complement integers. + */ +export declare class Int64 { + readonly bytes: Uint8Array; + constructor(bytes: Uint8Array); + static fromNumber(number: number): Int64; + /** + * Called implicitly by infix arithmetic operators. + */ + valueOf(): number; + toString(): string; +} diff --git a/node_modules/@smithy/eventstream-codec/dist-types/Message.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/Message.d.ts new file mode 100644 index 00000000..4cceffc9 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/Message.d.ts @@ -0,0 +1,26 @@ +import { Int64 } from "./Int64"; +/** + * An event stream message. The headers and body properties will always be + * defined, with empty headers represented as an object with no keys and an + * empty body represented as a zero-length Uint8Array. + */ +export interface Message { + headers: MessageHeaders; + body: Uint8Array; +} +export type MessageHeaders = Record<string, MessageHeaderValue>; +type HeaderValue<K extends string, V> = { + type: K; + value: V; +}; +export type BooleanHeaderValue = HeaderValue<"boolean", boolean>; +export type ByteHeaderValue = HeaderValue<"byte", number>; +export type ShortHeaderValue = HeaderValue<"short", number>; +export type IntegerHeaderValue = HeaderValue<"integer", number>; +export type LongHeaderValue = HeaderValue<"long", Int64>; +export type BinaryHeaderValue = HeaderValue<"binary", Uint8Array>; +export type StringHeaderValue = HeaderValue<"string", string>; +export type TimestampHeaderValue = HeaderValue<"timestamp", Date>; +export type UuidHeaderValue = HeaderValue<"uuid", string>; +export type MessageHeaderValue = BooleanHeaderValue | ByteHeaderValue | ShortHeaderValue | IntegerHeaderValue | LongHeaderValue | BinaryHeaderValue | StringHeaderValue | TimestampHeaderValue | UuidHeaderValue; +export {}; diff --git a/node_modules/@smithy/eventstream-codec/dist-types/MessageDecoderStream.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/MessageDecoderStream.d.ts new file mode 100644 index 00000000..4a157875 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/MessageDecoderStream.d.ts @@ -0,0 +1,17 @@ +import { Message, MessageDecoder } from "@smithy/types"; +/** + * @internal + */ +export interface MessageDecoderStreamOptions { + inputStream: AsyncIterable<ArrayBufferView>; + decoder: MessageDecoder; +} +/** + * @internal + */ +export declare class MessageDecoderStream implements AsyncIterable<Message> { + private readonly options; + constructor(options: MessageDecoderStreamOptions); + [Symbol.asyncIterator](): AsyncIterator<Message>; + private asyncIterator; +} diff --git a/node_modules/@smithy/eventstream-codec/dist-types/MessageEncoderStream.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/MessageEncoderStream.d.ts new file mode 100644 index 00000000..cdacd3c6 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/MessageEncoderStream.d.ts @@ -0,0 +1,18 @@ +import { Message, MessageEncoder } from "@smithy/types"; +/** + * @internal + */ +export interface MessageEncoderStreamOptions { + messageStream: AsyncIterable<Message>; + encoder: MessageEncoder; + includeEndFrame?: Boolean; +} +/** + * @internal + */ +export declare class MessageEncoderStream implements AsyncIterable<Uint8Array> { + private readonly options; + constructor(options: MessageEncoderStreamOptions); + [Symbol.asyncIterator](): AsyncIterator<Uint8Array>; + private asyncIterator; +} diff --git a/node_modules/@smithy/eventstream-codec/dist-types/SmithyMessageDecoderStream.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/SmithyMessageDecoderStream.d.ts new file mode 100644 index 00000000..2786506f --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/SmithyMessageDecoderStream.d.ts @@ -0,0 +1,17 @@ +import { Message } from "@smithy/types"; +/** + * @internal + */ +export interface SmithyMessageDecoderStreamOptions<T> { + readonly messageStream: AsyncIterable<Message>; + readonly deserializer: (input: Message) => Promise<T>; +} +/** + * @internal + */ +export declare class SmithyMessageDecoderStream<T> implements AsyncIterable<T> { + private readonly options; + constructor(options: SmithyMessageDecoderStreamOptions<T>); + [Symbol.asyncIterator](): AsyncIterator<T>; + private asyncIterator; +} diff --git a/node_modules/@smithy/eventstream-codec/dist-types/SmithyMessageEncoderStream.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/SmithyMessageEncoderStream.d.ts new file mode 100644 index 00000000..6240595f --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/SmithyMessageEncoderStream.d.ts @@ -0,0 +1,17 @@ +import { Message } from "@smithy/types"; +/** + * @internal + */ +export interface SmithyMessageEncoderStreamOptions<T> { + inputStream: AsyncIterable<T>; + serializer: (event: T) => Message; +} +/** + * @internal + */ +export declare class SmithyMessageEncoderStream<T> implements AsyncIterable<Message> { + private readonly options; + constructor(options: SmithyMessageEncoderStreamOptions<T>); + [Symbol.asyncIterator](): AsyncIterator<Message>; + private asyncIterator; +} diff --git a/node_modules/@smithy/eventstream-codec/dist-types/TestVectors.fixture.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/TestVectors.fixture.d.ts new file mode 100644 index 00000000..e1b04e61 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/TestVectors.fixture.d.ts @@ -0,0 +1,2 @@ +import { TestVectors } from "./vectorTypes.fixture"; +export declare const vectors: TestVectors; diff --git a/node_modules/@smithy/eventstream-codec/dist-types/index.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/index.d.ts new file mode 100644 index 00000000..458feabc --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/index.d.ts @@ -0,0 +1,8 @@ +export * from "./EventStreamCodec"; +export * from "./HeaderMarshaller"; +export * from "./Int64"; +export * from "./Message"; +export * from "./MessageDecoderStream"; +export * from "./MessageEncoderStream"; +export * from "./SmithyMessageDecoderStream"; +export * from "./SmithyMessageEncoderStream"; diff --git a/node_modules/@smithy/eventstream-codec/dist-types/splitMessage.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/splitMessage.d.ts new file mode 100644 index 00000000..9aa7585a --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/splitMessage.d.ts @@ -0,0 +1,11 @@ +/** + * @internal + */ +export interface MessageParts { + headers: DataView; + body: Uint8Array; +} +/** + * @internal + */ +export declare function splitMessage({ byteLength, byteOffset, buffer }: ArrayBufferView): MessageParts; diff --git a/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/EventStreamCodec.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/EventStreamCodec.d.ts new file mode 100644 index 00000000..dd4bd9f8 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/EventStreamCodec.d.ts @@ -0,0 +1,31 @@ +import { AvailableMessage, AvailableMessages, Message, MessageDecoder, MessageEncoder, MessageHeaders } from "@smithy/types"; +import { Decoder, Encoder } from
"@smithy/types"; +/** + * A Codec that can convert binary-packed event stream messages into + * JavaScript objects and back again into their binary format. + */ +export declare class EventStreamCodec implements MessageEncoder, MessageDecoder { + private readonly headerMarshaller; + private messageBuffer; + private isEndOfStream; + constructor(toUtf8: Encoder, fromUtf8: Decoder); + feed(message: ArrayBufferView): void; + endOfStream(): void; + getMessage(): AvailableMessage; + getAvailableMessages(): AvailableMessages; + /** + * Convert a structured JavaScript object with tagged headers into a binary + * event stream message. + */ + encode({ headers: rawHeaders, body }: Message): Uint8Array; + /** + * Convert a binary event stream message into a JavaScript object with an + * opaque, binary body and tagged, parsed headers. + */ + decode(message: ArrayBufferView): Message; + /** + * Convert a structured JavaScript object with tagged headers into a binary + * event stream message header. + */ + formatHeaders(rawHeaders: MessageHeaders): Uint8Array; +} diff --git a/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/HeaderMarshaller.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/HeaderMarshaller.d.ts new file mode 100644 index 00000000..5ecf2d1b --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/HeaderMarshaller.d.ts @@ -0,0 +1,12 @@ +import { Decoder, Encoder, MessageHeaders } from "@smithy/types"; +/** + * @internal + */ +export declare class HeaderMarshaller { + private readonly toUtf8; + private readonly fromUtf8; + constructor(toUtf8: Encoder, fromUtf8: Decoder); + format(headers: MessageHeaders): Uint8Array; + private formatHeaderValue; + parse(headers: DataView): MessageHeaders; +} diff --git a/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/Int64.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/Int64.d.ts new file mode 100644 index 00000000..aebf7e46 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/Int64.d.ts @@ -0,0 +1,20 @@ +import { Int64 as IInt64 } from "@smithy/types"; +export interface Int64 extends IInt64 { +} +/** + * A lossless representation of a signed, 64-bit integer. Instances of this + * class may be used in arithmetic expressions as if they were numeric + * primitives, but the binary representation will be preserved unchanged as the + * `bytes` property of the object. The bytes should be encoded as big-endian, + * two's complement integers. + */ +export declare class Int64 { + readonly bytes: Uint8Array; + constructor(bytes: Uint8Array); + static fromNumber(number: number): Int64; + /** + * Called implicitly by infix arithmetic operators. + */ + valueOf(): number; + toString(): string; +} diff --git a/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/Message.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/Message.d.ts new file mode 100644 index 00000000..ef57685a --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/Message.d.ts @@ -0,0 +1,26 @@ +import { Int64 } from "./Int64"; +/** + * An event stream message. The headers and body properties will always be + * defined, with empty headers represented as an object with no keys and an + * empty body represented as a zero-length Uint8Array. 
+ */ +export interface Message { + headers: MessageHeaders; + body: Uint8Array; +} +export type MessageHeaders = Record<string, MessageHeaderValue>; +type HeaderValue<K extends string, V> = { + type: K; + value: V; +}; +export type BooleanHeaderValue = HeaderValue<"boolean", boolean>; +export type ByteHeaderValue = HeaderValue<"byte", number>; +export type ShortHeaderValue = HeaderValue<"short", number>; +export type IntegerHeaderValue = HeaderValue<"integer", number>; +export type LongHeaderValue = HeaderValue<"long", Int64>; +export type BinaryHeaderValue = HeaderValue<"binary", Uint8Array>; +export type StringHeaderValue = HeaderValue<"string", string>; +export type TimestampHeaderValue = HeaderValue<"timestamp", Date>; +export type UuidHeaderValue = HeaderValue<"uuid", string>; +export type MessageHeaderValue = BooleanHeaderValue | ByteHeaderValue | ShortHeaderValue | IntegerHeaderValue | LongHeaderValue | BinaryHeaderValue | StringHeaderValue | TimestampHeaderValue | UuidHeaderValue; +export {}; diff --git a/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/MessageDecoderStream.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/MessageDecoderStream.d.ts new file mode 100644 index 00000000..df23a0ea --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/MessageDecoderStream.d.ts @@ -0,0 +1,17 @@ +import { Message, MessageDecoder } from "@smithy/types"; +/** + * @internal + */ +export interface MessageDecoderStreamOptions { + inputStream: AsyncIterable<ArrayBufferView>; + decoder: MessageDecoder; +} +/** + * @internal + */ +export declare class MessageDecoderStream implements AsyncIterable<Message> { + private readonly options; + constructor(options: MessageDecoderStreamOptions); + [Symbol.asyncIterator](): AsyncIterator<Message>; + private asyncIterator; +} diff --git a/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/MessageEncoderStream.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/MessageEncoderStream.d.ts new file mode 100644 index 00000000..a88e5470 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/MessageEncoderStream.d.ts @@ -0,0 +1,18 @@ +import { Message, MessageEncoder } from "@smithy/types"; +/** + * @internal + */ +export interface MessageEncoderStreamOptions { + messageStream: AsyncIterable<Message>; + encoder: MessageEncoder; + includeEndFrame?: Boolean; +} +/** + * @internal + */ +export declare class MessageEncoderStream implements AsyncIterable<Uint8Array> { + private readonly options; + constructor(options: MessageEncoderStreamOptions); + [Symbol.asyncIterator](): AsyncIterator<Uint8Array>; + private asyncIterator; +} diff --git a/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/SmithyMessageDecoderStream.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/SmithyMessageDecoderStream.d.ts new file mode 100644 index 00000000..e9c13bae --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/SmithyMessageDecoderStream.d.ts @@ -0,0 +1,17 @@ +import { Message } from "@smithy/types"; +/** + * @internal + */ +export interface SmithyMessageDecoderStreamOptions<T> { + readonly messageStream: AsyncIterable<Message>; + readonly deserializer: (input: Message) => Promise<T>; +} +/** + * @internal + */ +export declare class SmithyMessageDecoderStream<T> implements AsyncIterable<T> { + private readonly options; + constructor(options: SmithyMessageDecoderStreamOptions<T>); + [Symbol.asyncIterator](): AsyncIterator<T>; + private asyncIterator; +} diff --git a/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/SmithyMessageEncoderStream.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/SmithyMessageEncoderStream.d.ts new file mode 100644 index 00000000..9d67f428 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/SmithyMessageEncoderStream.d.ts @@ -0,0 +1,17 @@ +import { Message } from "@smithy/types"; +/** + * @internal + */ +export interface SmithyMessageEncoderStreamOptions<T> { + inputStream: AsyncIterable<T>; + serializer: (event: T) => Message; +} +/** + * @internal + */ +export declare class SmithyMessageEncoderStream<T> implements AsyncIterable<Message> { + private readonly options; + constructor(options: SmithyMessageEncoderStreamOptions<T>); + [Symbol.asyncIterator](): AsyncIterator<Message>; + private asyncIterator; +} diff --git a/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/TestVectors.fixture.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/TestVectors.fixture.d.ts new file mode 100644 index 00000000..9ed09f28 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/TestVectors.fixture.d.ts @@ -0,0 +1,2 @@ +import { TestVectors } from "./vectorTypes.fixture"; +export declare const vectors: TestVectors; diff --git a/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..01e67304 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +export * from "./EventStreamCodec"; +export * from "./HeaderMarshaller"; +export * from "./Int64"; +export * from "./Message"; +export * from "./MessageDecoderStream"; +export * from "./MessageEncoderStream"; +export * from "./SmithyMessageDecoderStream"; +export * from "./SmithyMessageEncoderStream"; diff --git a/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/splitMessage.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/splitMessage.d.ts new file mode 100644 index 00000000..48776ece --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/splitMessage.d.ts @@ -0,0 +1,11 @@ +/** + * @internal + */ +export interface MessageParts { + headers: DataView; + body: Uint8Array; +} +/** + * @internal + */ +export declare function splitMessage({ byteLength, byteOffset, buffer }: ArrayBufferView): MessageParts; diff --git a/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/vectorTypes.fixture.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/vectorTypes.fixture.d.ts new file mode 100644 index 00000000..55691949 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/ts3.4/vectorTypes.fixture.d.ts @@ -0,0 +1,12 @@ +import { Message } from "./Message"; +export interface NegativeTestVector { + expectation: "failure"; + encoded: Uint8Array; +} +export interface PositiveTestVector { + expectation: "success"; + encoded: Uint8Array; + decoded: Message; +} +export type TestVector = NegativeTestVector | PositiveTestVector; +export type TestVectors = Record<string, TestVector>; diff --git a/node_modules/@smithy/eventstream-codec/dist-types/vectorTypes.fixture.d.ts b/node_modules/@smithy/eventstream-codec/dist-types/vectorTypes.fixture.d.ts new file mode 100644 index 00000000..ba9ca72e --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/dist-types/vectorTypes.fixture.d.ts @@ -0,0 +1,12 @@ +import { Message } from "./Message"; +export interface NegativeTestVector { + expectation: "failure"; + encoded: Uint8Array; +} +export interface PositiveTestVector { + expectation: "success"; + encoded: Uint8Array; + decoded: Message; +} +export type TestVector = NegativeTestVector | PositiveTestVector; +export type TestVectors = Record<string, TestVector>; diff --git a/node_modules/@smithy/eventstream-codec/package.json b/node_modules/@smithy/eventstream-codec/package.json new file mode 100644 index 00000000..c0cc4394 --- /dev/null +++ b/node_modules/@smithy/eventstream-codec/package.json @@ -0,0 +1,64 @@ +{ + "name": "@smithy/eventstream-codec", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline eventstream-codec", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/crc32": "5.2.0", + "@smithy/types": "^4.2.0", + "@smithy/util-hex-encoding": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@smithy/util-utf8": "^4.0.0", + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/eventstream-codec", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/eventstream-codec" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-serde-browser/LICENSE b/node_modules/@smithy/eventstream-serde-browser/LICENSE new file mode 100644 index 00000000..e907b586 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-browser/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity.
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
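The test vectors and splitMessage implementation earlier in this diff pin down the event stream wire format: an 8-byte prelude (total message length, then header length, both big-endian uint32), a CRC32 of the prelude, the packed headers, the body, and a trailing CRC32 over everything before it. A minimal round-trip sketch against the int32_header vector, assuming the UTF-8 helpers exported by @smithy/util-utf8 (listed as a devDependency in the codec's package.json); this is an illustrative sketch, not part of the patched codebase:

    import { EventStreamCodec } from "@smithy/eventstream-codec";
    import { fromUtf8, toUtf8 } from "@smithy/util-utf8";

    // The codec takes UTF-8 helpers so it can marshal and parse
    // string-typed header values.
    const codec = new EventStreamCodec(toUtf8, fromUtf8);

    // Encode the equivalent of the int32_header vector: one "integer"
    // header plus the 13-byte body {'foo':'bar'}.
    const encoded = codec.encode({
      headers: { "event-type": { type: "integer", value: 40972 } },
      body: Uint8Array.from([123, 39, 102, 111, 111, 39, 58, 39, 98, 97, 114, 39, 125]),
    });

    // decode() checks both CRC32 checksums and returns tagged headers plus
    // the opaque binary body; the corrupted_* vectors make it throw instead.
    const message = codec.decode(encoded);
    console.log(message.headers["event-type"].value); // 40972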
diff --git a/node_modules/@smithy/eventstream-serde-browser/README.md b/node_modules/@smithy/eventstream-serde-browser/README.md new file mode 100644 index 00000000..86830b1a --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-browser/README.md @@ -0,0 +1,10 @@ +# @smithy/eventstream-serde-browser + +[![NPM version](https://img.shields.io/npm/v/@smithy/eventstream-serde-browser/latest.svg)](https://www.npmjs.com/package/@smithy/eventstream-serde-browser) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/eventstream-serde-browser.svg)](https://www.npmjs.com/package/@smithy/eventstream-serde-browser) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@smithy/eventstream-serde-browser/dist-cjs/EventStreamMarshaller.js b/node_modules/@smithy/eventstream-serde-browser/dist-cjs/EventStreamMarshaller.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-browser/dist-cjs/EventStreamMarshaller.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-serde-browser/dist-cjs/index.js b/node_modules/@smithy/eventstream-serde-browser/dist-cjs/index.js new file mode 100644 index 00000000..0a0a99fe --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-browser/dist-cjs/index.js @@ -0,0 +1,104 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + EventStreamMarshaller: () => EventStreamMarshaller, + eventStreamSerdeProvider: () => eventStreamSerdeProvider, + iterableToReadableStream: () => iterableToReadableStream, + readableStreamtoIterable: () => readableStreamtoIterable +}); +module.exports = __toCommonJS(src_exports); + +// src/EventStreamMarshaller.ts +var import_eventstream_serde_universal = require("@smithy/eventstream-serde-universal"); + +// src/utils.ts +var readableStreamtoIterable = /* @__PURE__ */ __name((readableStream) => ({ + [Symbol.asyncIterator]: async function* () { + const reader = readableStream.getReader(); + try { + while (true) { + const { done, value } = await reader.read(); + if (done) + return; + yield value; + } + } finally { + reader.releaseLock(); + } + } +}), "readableStreamtoIterable"); +var iterableToReadableStream = /* @__PURE__ */ __name((asyncIterable) => { + const iterator = asyncIterable[Symbol.asyncIterator](); + return new ReadableStream({ + async pull(controller) { + const { done, value } = await iterator.next(); + if (done) { + return controller.close(); + } + controller.enqueue(value); + } + }); +}, "iterableToReadableStream"); + +// src/EventStreamMarshaller.ts +var EventStreamMarshaller = 
class { + static { + __name(this, "EventStreamMarshaller"); + } + constructor({ utf8Encoder, utf8Decoder }) { + this.universalMarshaller = new import_eventstream_serde_universal.EventStreamMarshaller({ + utf8Decoder, + utf8Encoder + }); + } + deserialize(body, deserializer) { + const bodyIterable = isReadableStream(body) ? readableStreamtoIterable(body) : body; + return this.universalMarshaller.deserialize(bodyIterable, deserializer); + } + /** + * Generate a stream that serialize events into stream of binary chunks; + * + * Caveat is that streaming request payload doesn't work on browser with native + * xhr or fetch handler currently because they don't support upload streaming. + * reference: + * * https://bugs.chromium.org/p/chromium/issues/detail?id=688906 + * * https://bugzilla.mozilla.org/show_bug.cgi?id=1387483 + * + */ + serialize(input, serializer) { + const serialziedIterable = this.universalMarshaller.serialize(input, serializer); + return typeof ReadableStream === "function" ? iterableToReadableStream(serialziedIterable) : serialziedIterable; + } +}; +var isReadableStream = /* @__PURE__ */ __name((body) => typeof ReadableStream === "function" && body instanceof ReadableStream, "isReadableStream"); + +// src/provider.ts +var eventStreamSerdeProvider = /* @__PURE__ */ __name((options) => new EventStreamMarshaller(options), "eventStreamSerdeProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + EventStreamMarshaller, + eventStreamSerdeProvider, + readableStreamtoIterable, + iterableToReadableStream +}); + diff --git a/node_modules/@smithy/eventstream-serde-browser/dist-cjs/provider.js b/node_modules/@smithy/eventstream-serde-browser/dist-cjs/provider.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-browser/dist-cjs/provider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-serde-browser/dist-cjs/utils.js b/node_modules/@smithy/eventstream-serde-browser/dist-cjs/utils.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-browser/dist-cjs/utils.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-serde-browser/dist-es/EventStreamMarshaller.js b/node_modules/@smithy/eventstream-serde-browser/dist-es/EventStreamMarshaller.js new file mode 100644 index 00000000..2bf65d88 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-browser/dist-es/EventStreamMarshaller.js @@ -0,0 +1,19 @@ +import { EventStreamMarshaller as UniversalEventStreamMarshaller } from "@smithy/eventstream-serde-universal"; +import { iterableToReadableStream, readableStreamtoIterable } from "./utils"; +export class EventStreamMarshaller { + constructor({ utf8Encoder, utf8Decoder }) { + this.universalMarshaller = new UniversalEventStreamMarshaller({ + utf8Decoder, + utf8Encoder, + }); + } + deserialize(body, deserializer) { + const bodyIterable = isReadableStream(body) ? readableStreamtoIterable(body) : body; + return this.universalMarshaller.deserialize(bodyIterable, deserializer); + } + serialize(input, serializer) { + const serialziedIterable = this.universalMarshaller.serialize(input, serializer); + return typeof ReadableStream === "function" ? 
iterableToReadableStream(serialziedIterable) : serialziedIterable; + } +} +const isReadableStream = (body) => typeof ReadableStream === "function" && body instanceof ReadableStream; diff --git a/node_modules/@smithy/eventstream-serde-browser/dist-es/index.js b/node_modules/@smithy/eventstream-serde-browser/dist-es/index.js new file mode 100644 index 00000000..f05a6fb0 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-browser/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./EventStreamMarshaller"; +export * from "./provider"; +export * from "./utils"; diff --git a/node_modules/@smithy/eventstream-serde-browser/dist-es/provider.js b/node_modules/@smithy/eventstream-serde-browser/dist-es/provider.js new file mode 100644 index 00000000..b71c3f0e --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-browser/dist-es/provider.js @@ -0,0 +1,2 @@ +import { EventStreamMarshaller } from "./EventStreamMarshaller"; +export const eventStreamSerdeProvider = (options) => new EventStreamMarshaller(options); diff --git a/node_modules/@smithy/eventstream-serde-browser/dist-es/utils.js b/node_modules/@smithy/eventstream-serde-browser/dist-es/utils.js new file mode 100644 index 00000000..8f0376f7 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-browser/dist-es/utils.js @@ -0,0 +1,28 @@ +export const readableStreamtoIterable = (readableStream) => ({ + [Symbol.asyncIterator]: async function* () { + const reader = readableStream.getReader(); + try { + while (true) { + const { done, value } = await reader.read(); + if (done) + return; + yield value; + } + } + finally { + reader.releaseLock(); + } + }, +}); +export const iterableToReadableStream = (asyncIterable) => { + const iterator = asyncIterable[Symbol.asyncIterator](); + return new ReadableStream({ + async pull(controller) { + const { done, value } = await iterator.next(); + if (done) { + return controller.close(); + } + controller.enqueue(value); + }, + }); +}; diff --git a/node_modules/@smithy/eventstream-serde-browser/dist-types/EventStreamMarshaller.d.ts b/node_modules/@smithy/eventstream-serde-browser/dist-types/EventStreamMarshaller.d.ts new file mode 100644 index 00000000..c5448876 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-browser/dist-types/EventStreamMarshaller.d.ts @@ -0,0 +1,47 @@ +import { Decoder, Encoder, EventStreamMarshaller as IEventStreamMarshaller, Message } from "@smithy/types"; +/** + * @internal + */ +export interface EventStreamMarshaller extends IEventStreamMarshaller { +} +/** + * @internal + */ +export interface EventStreamMarshallerOptions { + utf8Encoder: Encoder; + utf8Decoder: Decoder; +} +/** + * @internal + * + * Utility class used to serialize and deserialize event streams in + * browsers and ReactNative. + * + * In browsers where ReadableStream API is available: + * * deserialize from ReadableStream to an async iterable of output structure + * * serialize from async iterable of input structure to ReadableStream + * In ReactNative where only async iterable API is available: + * * deserialize from async iterable of binaries to async iterable of output structure + * * serialize from async iterable of input structure to async iterable of binaries + * + * We use ReadableStream API in browsers because of the consistency with other + * streaming operations, where ReadableStream API is used to denote streaming data. + * Whereas in ReactNative, ReadableStream API is not available, we use async iterable + * for streaming data although it has lower throughput. 
+ */ +export declare class EventStreamMarshaller { + private readonly universalMarshaller; + constructor({ utf8Encoder, utf8Decoder }: EventStreamMarshallerOptions); + deserialize<T>(body: ReadableStream<Uint8Array> | AsyncIterable<Uint8Array>, deserializer: (input: Record<string, Message>) => Promise<T>): AsyncIterable<T>; + /** + * Generate a stream that serialize events into stream of binary chunks; + * + * Caveat is that streaming request payload doesn't work on browser with native + * xhr or fetch handler currently because they don't support upload streaming. + * reference: + * * https://bugs.chromium.org/p/chromium/issues/detail?id=688906 + * * https://bugzilla.mozilla.org/show_bug.cgi?id=1387483 + * + */ + serialize<T>(input: AsyncIterable<T>, serializer: (event: T) => Message): ReadableStream<Uint8Array> | AsyncIterable<Uint8Array>; +} diff --git a/node_modules/@smithy/eventstream-serde-browser/dist-types/index.d.ts b/node_modules/@smithy/eventstream-serde-browser/dist-types/index.d.ts new file mode 100644 index 00000000..2fb476ea --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-browser/dist-types/index.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export * from "./EventStreamMarshaller"; +/** + * @internal + */ +export * from "./provider"; +/** + * @internal + */ +export * from "./utils"; diff --git a/node_modules/@smithy/eventstream-serde-browser/dist-types/provider.d.ts b/node_modules/@smithy/eventstream-serde-browser/dist-types/provider.d.ts new file mode 100644 index 00000000..d5dca09e --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-browser/dist-types/provider.d.ts @@ -0,0 +1,3 @@ +import { EventStreamSerdeProvider } from "@smithy/types"; +/** browser event stream serde utils provider */ +export declare const eventStreamSerdeProvider: EventStreamSerdeProvider; diff --git a/node_modules/@smithy/eventstream-serde-browser/dist-types/ts3.4/EventStreamMarshaller.d.ts b/node_modules/@smithy/eventstream-serde-browser/dist-types/ts3.4/EventStreamMarshaller.d.ts new file mode 100644 index 00000000..ec0481d3 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-browser/dist-types/ts3.4/EventStreamMarshaller.d.ts @@ -0,0 +1,47 @@ +import { Decoder, Encoder, EventStreamMarshaller as IEventStreamMarshaller, Message } from "@smithy/types"; +/** + * @internal + */ +export interface EventStreamMarshaller extends IEventStreamMarshaller { +} +/** + * @internal + */ +export interface EventStreamMarshallerOptions { + utf8Encoder: Encoder; + utf8Decoder: Decoder; +} +/** + * @internal + * + * Utility class used to serialize and deserialize event streams in + * browsers and ReactNative. + * + * In browsers where ReadableStream API is available: + * * deserialize from ReadableStream to an async iterable of output structure + * * serialize from async iterable of input structure to ReadableStream + * In ReactNative where only async iterable API is available: + * * deserialize from async iterable of binaries to async iterable of output structure + * * serialize from async iterable of input structure to async iterable of binaries + * + * We use ReadableStream API in browsers because of the consistency with other + * streaming operations, where ReadableStream API is used to denote streaming data. + * Whereas in ReactNative, ReadableStream API is not available, we use async iterable + * for streaming data although it has lower throughput. + */ +export declare class EventStreamMarshaller { + private readonly universalMarshaller; + constructor({ utf8Encoder, utf8Decoder }: EventStreamMarshallerOptions); + deserialize<T>(body: ReadableStream<Uint8Array> | AsyncIterable<Uint8Array>, deserializer: (input: Record<string, Message>) => Promise<T>): AsyncIterable<T>; + /** + * Generate a stream that serialize events into stream of binary chunks; + * + * Caveat is that streaming request payload doesn't work on browser with native + * xhr or fetch handler currently because they don't support upload streaming. + * reference: + * * https://bugs.chromium.org/p/chromium/issues/detail?id=688906 + * * https://bugzilla.mozilla.org/show_bug.cgi?id=1387483 + * + */ + serialize<T>(input: AsyncIterable<T>, serializer: (event: T) => Message): ReadableStream<Uint8Array> | AsyncIterable<Uint8Array>; +} diff --git a/node_modules/@smithy/eventstream-serde-browser/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/eventstream-serde-browser/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..8931756e --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-browser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export * from "./EventStreamMarshaller"; +/** + * @internal + */ +export * from "./provider"; +/** + * @internal + */ +export * from "./utils"; diff --git a/node_modules/@smithy/eventstream-serde-browser/dist-types/ts3.4/provider.d.ts b/node_modules/@smithy/eventstream-serde-browser/dist-types/ts3.4/provider.d.ts new file mode 100644 index 00000000..c051e0de --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-browser/dist-types/ts3.4/provider.d.ts @@ -0,0 +1,3 @@ +import { EventStreamSerdeProvider } from "@smithy/types"; +/** browser event stream serde utils provider */ +export declare const eventStreamSerdeProvider: EventStreamSerdeProvider; diff --git a/node_modules/@smithy/eventstream-serde-browser/dist-types/ts3.4/utils.d.ts b/node_modules/@smithy/eventstream-serde-browser/dist-types/ts3.4/utils.d.ts new file mode 100644 index 00000000..3007e1b5 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-browser/dist-types/ts3.4/utils.d.ts @@ -0,0 +1,13 @@ +/** + * @internal + * + * A util function converting ReadableStream into an async iterable. + * Reference: https://jakearchibald.com/2017/async-iterators-and-generators/#making-streams-iterate + */ +export declare const readableStreamtoIterable: <T>(readableStream: ReadableStream<T>) => AsyncIterable<T>; +/** + * @internal + * + * A util function converting async iterable to a ReadableStream. + */ +export declare const iterableToReadableStream: <T>(asyncIterable: AsyncIterable<T>) => ReadableStream<T>; diff --git a/node_modules/@smithy/eventstream-serde-browser/dist-types/utils.d.ts b/node_modules/@smithy/eventstream-serde-browser/dist-types/utils.d.ts new file mode 100644 index 00000000..2718fca3 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-browser/dist-types/utils.d.ts @@ -0,0 +1,13 @@ +/** + * @internal + * + * A util function converting ReadableStream into an async iterable. + * Reference: https://jakearchibald.com/2017/async-iterators-and-generators/#making-streams-iterate + */ +export declare const readableStreamtoIterable: <T>(readableStream: ReadableStream<T>) => AsyncIterable<T>; +/** + * @internal + * + * A util function converting async iterable to a ReadableStream. + */ +export declare const iterableToReadableStream: <T>(asyncIterable: AsyncIterable<T>) => ReadableStream<T>; diff --git a/node_modules/@smithy/eventstream-serde-browser/package.json b/node_modules/@smithy/eventstream-serde-browser/package.json new file mode 100644 index 00000000..3858fa80 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-browser/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/eventstream-serde-browser", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline eventstream-serde-browser", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "exit 0" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/eventstream-serde-universal": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/eventstream-serde-browser", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/eventstream-serde-browser" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-serde-config-resolver/LICENSE b/node_modules/@smithy/eventstream-serde-config-resolver/LICENSE new file mode 100644 index 00000000..e907b586 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-config-resolver/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity.
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
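For orientation on the `eventstream-serde-browser` package added above: its `iterableToReadableStream` declaration adapts an `AsyncIterable<T>` into a WHATWG `ReadableStream<T>` so browser callers can consume event streams through the streams API. The sketch below is illustrative only; it is not the package's vendored implementation, and the helper name is hypothetical:

```ts
// Hypothetical adapter matching the declared shape of iterableToReadableStream
// (a sketch, not the vendored @smithy implementation).
const toReadableStreamSketch = <T>(asyncIterable: AsyncIterable<T>): ReadableStream<T> => {
  const iterator = asyncIterable[Symbol.asyncIterator]();
  return new ReadableStream<T>({
    async pull(controller) {
      // One value per pull, so consumer backpressure reaches the iterator.
      const { done, value } = await iterator.next();
      if (done) {
        controller.close();
      } else {
        controller.enqueue(value);
      }
    },
    async cancel(reason) {
      // Propagate early cancellation to the producer, when supported.
      await iterator.return?.(reason);
    },
  });
};
```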
diff --git a/node_modules/@smithy/eventstream-serde-config-resolver/README.md b/node_modules/@smithy/eventstream-serde-config-resolver/README.md new file mode 100644 index 00000000..6539fe1b --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-config-resolver/README.md @@ -0,0 +1,10 @@ +# @smithy/eventstream-serde-config-resolver + +[![NPM version](https://img.shields.io/npm/v/@smithy/eventstream-serde-config-resolver/latest.svg)](https://www.npmjs.com/package/@smithy/eventstream-serde-config-resolver) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/eventstream-serde-config-resolver.svg)](https://www.npmjs.com/package/@smithy/eventstream-serde-config-resolver) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@smithy/eventstream-serde-config-resolver/dist-cjs/EventStreamSerdeConfig.js b/node_modules/@smithy/eventstream-serde-config-resolver/dist-cjs/EventStreamSerdeConfig.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-config-resolver/dist-cjs/EventStreamSerdeConfig.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-serde-config-resolver/dist-cjs/index.js b/node_modules/@smithy/eventstream-serde-config-resolver/dist-cjs/index.js new file mode 100644 index 00000000..acf64f6f --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-config-resolver/dist-cjs/index.js @@ -0,0 +1,36 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + resolveEventStreamSerdeConfig: () => resolveEventStreamSerdeConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/EventStreamSerdeConfig.ts +var resolveEventStreamSerdeConfig = /* @__PURE__ */ __name((input) => Object.assign(input, { + eventStreamMarshaller: input.eventStreamSerdeProvider(input) +}), "resolveEventStreamSerdeConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + resolveEventStreamSerdeConfig +}); + diff --git a/node_modules/@smithy/eventstream-serde-config-resolver/dist-es/EventStreamSerdeConfig.js b/node_modules/@smithy/eventstream-serde-config-resolver/dist-es/EventStreamSerdeConfig.js new file mode 100644 index 00000000..8acd419c --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-config-resolver/dist-es/EventStreamSerdeConfig.js @@ -0,0 +1,3 @@ +export const resolveEventStreamSerdeConfig = (input) => Object.assign(input, { + eventStreamMarshaller: input.eventStreamSerdeProvider(input), +}); diff --git a/node_modules/@smithy/eventstream-serde-config-resolver/dist-es/index.js 
b/node_modules/@smithy/eventstream-serde-config-resolver/dist-es/index.js new file mode 100644 index 00000000..515a9c61 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-config-resolver/dist-es/index.js @@ -0,0 +1 @@ +export * from "./EventStreamSerdeConfig"; diff --git a/node_modules/@smithy/eventstream-serde-config-resolver/dist-types/EventStreamSerdeConfig.d.ts b/node_modules/@smithy/eventstream-serde-config-resolver/dist-types/EventStreamSerdeConfig.d.ts new file mode 100644 index 00000000..61db6c21 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-config-resolver/dist-types/EventStreamSerdeConfig.d.ts @@ -0,0 +1,27 @@ +import { EventStreamMarshaller, EventStreamSerdeProvider } from "@smithy/types"; +/** + * @public + */ +export interface EventStreamSerdeInputConfig { +} +/** + * @internal + */ +export interface EventStreamSerdeResolvedConfig { + eventStreamMarshaller: EventStreamMarshaller; +} +/** + * @internal + */ +interface PreviouslyResolved { + /** + * Provide the event stream marshaller for the given runtime + * @internal + */ + eventStreamSerdeProvider: EventStreamSerdeProvider; +} +/** + * @internal + */ +export declare const resolveEventStreamSerdeConfig: <T>(input: T & PreviouslyResolved & EventStreamSerdeInputConfig) => T & EventStreamSerdeResolvedConfig; +export {}; diff --git a/node_modules/@smithy/eventstream-serde-config-resolver/dist-types/index.d.ts b/node_modules/@smithy/eventstream-serde-config-resolver/dist-types/index.d.ts new file mode 100644 index 00000000..49ec397c --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-config-resolver/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./EventStreamSerdeConfig"; diff --git a/node_modules/@smithy/eventstream-serde-config-resolver/dist-types/ts3.4/EventStreamSerdeConfig.d.ts b/node_modules/@smithy/eventstream-serde-config-resolver/dist-types/ts3.4/EventStreamSerdeConfig.d.ts new file mode 100644 index 00000000..fb9057d6 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-config-resolver/dist-types/ts3.4/EventStreamSerdeConfig.d.ts @@ -0,0 +1,27 @@ +import { EventStreamMarshaller, EventStreamSerdeProvider } from "@smithy/types"; +/** + * @public + */ +export interface EventStreamSerdeInputConfig { +} +/** + * @internal + */ +export interface EventStreamSerdeResolvedConfig { + eventStreamMarshaller: EventStreamMarshaller; +} +/** + * @internal + */ +interface PreviouslyResolved { + /** + * Provide the event stream marshaller for the given runtime + * @internal + */ + eventStreamSerdeProvider: EventStreamSerdeProvider; +} +/** + * @internal + */ +export declare const resolveEventStreamSerdeConfig: <T>(input: T & PreviouslyResolved & EventStreamSerdeInputConfig) => T & EventStreamSerdeResolvedConfig; +export {}; diff --git a/node_modules/@smithy/eventstream-serde-config-resolver/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/eventstream-serde-config-resolver/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..6ec9b4e2 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-config-resolver/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./EventStreamSerdeConfig"; diff --git a/node_modules/@smithy/eventstream-serde-config-resolver/package.json b/node_modules/@smithy/eventstream-serde-config-resolver/package.json new file mode 100644 index 00000000..a5e9d1ab --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-config-resolver/package.json @@ -0,0 +1,61 @@ +{ + "name":
"@smithy/eventstream-serde-config-resolver", + "version": "4.1.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline eventstream-serde-config-resolver", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "extract:docs": "api-extractor run --local", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/eventstream-serde-config-resolver", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/eventstream-serde-config-resolver" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-serde-node/LICENSE b/node_modules/@smithy/eventstream-serde-node/LICENSE new file mode 100644 index 00000000..e907b586 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-node/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@smithy/eventstream-serde-node/README.md b/node_modules/@smithy/eventstream-serde-node/README.md new file mode 100644 index 00000000..f1f8db95 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-node/README.md @@ -0,0 +1,10 @@ +# @smithy/eventstream-serde-node + +[![NPM version](https://img.shields.io/npm/v/@smithy/eventstream-serde-node/latest.svg)](https://www.npmjs.com/package/@smithy/eventstream-serde-node) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/eventstream-serde-node.svg)](https://www.npmjs.com/package/@smithy/eventstream-serde-node) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
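The two packages above compose through Smithy's resolver pattern: `resolveEventStreamSerdeConfig` (in the config-resolver hunk) calls `input.eventStreamSerdeProvider(input)` and stores the result as `eventStreamMarshaller` on the resolved client config, while `@smithy/eventstream-serde-node` supplies the Node-specific provider built on `stream.Readable`. A minimal sketch of that wiring, assuming `@smithy/util-utf8` (not part of this diff) for the UTF-8 codec pair:

```ts
import { resolveEventStreamSerdeConfig } from "@smithy/eventstream-serde-config-resolver";
import { eventStreamSerdeProvider } from "@smithy/eventstream-serde-node";
// Assumption: @smithy/util-utf8 supplies the codecs; any Encoder/Decoder pair
// matching @smithy/types would work here.
import { fromUtf8, toUtf8 } from "@smithy/util-utf8";

const runtimeConfig = {
  eventStreamSerdeProvider, // Node flavor: accepts and returns stream.Readable
  utf8Encoder: toUtf8,      // Uint8Array -> string
  utf8Decoder: fromUtf8,    // string -> Uint8Array
};

// The resolver materializes the provider into a marshaller, so the resolved
// config gains serialize()/deserialize() for the event stream wire format.
const resolved = resolveEventStreamSerdeConfig(runtimeConfig);
// resolved.eventStreamMarshaller implements EventStreamMarshaller.
```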
diff --git a/node_modules/@smithy/eventstream-serde-node/dist-cjs/EventStreamMarshaller.js b/node_modules/@smithy/eventstream-serde-node/dist-cjs/EventStreamMarshaller.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-node/dist-cjs/EventStreamMarshaller.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-serde-node/dist-cjs/index.js b/node_modules/@smithy/eventstream-serde-node/dist-cjs/index.js new file mode 100644 index 00000000..b51bcd6d --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-node/dist-cjs/index.js @@ -0,0 +1,89 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + EventStreamMarshaller: () => EventStreamMarshaller, + eventStreamSerdeProvider: () => eventStreamSerdeProvider +}); +module.exports = __toCommonJS(src_exports); + +// src/EventStreamMarshaller.ts +var import_eventstream_serde_universal = require("@smithy/eventstream-serde-universal"); +var import_stream = require("stream"); + +// src/utils.ts +async function* readabletoIterable(readStream) { + let streamEnded = false; + let generationEnded = false; + const records = new Array(); + readStream.on("error", (err) => { + if (!streamEnded) { + streamEnded = true; + } + if (err) { + throw err; + } + }); + readStream.on("data", (data) => { + records.push(data); + }); + readStream.on("end", () => { + streamEnded = true; + }); + while (!generationEnded) { + const value = await new Promise((resolve) => setTimeout(() => resolve(records.shift()), 0)); + if (value) { + yield value; + } + generationEnded = streamEnded && records.length === 0; + } +} +__name(readabletoIterable, "readabletoIterable"); + +// src/EventStreamMarshaller.ts +var EventStreamMarshaller = class { + static { + __name(this, "EventStreamMarshaller"); + } + constructor({ utf8Encoder, utf8Decoder }) { + this.universalMarshaller = new import_eventstream_serde_universal.EventStreamMarshaller({ + utf8Decoder, + utf8Encoder + }); + } + deserialize(body, deserializer) { + const bodyIterable = typeof body[Symbol.asyncIterator] === "function" ? 
body : readabletoIterable(body); + return this.universalMarshaller.deserialize(bodyIterable, deserializer); + } + serialize(input, serializer) { + return import_stream.Readable.from(this.universalMarshaller.serialize(input, serializer)); + } +}; + +// src/provider.ts +var eventStreamSerdeProvider = /* @__PURE__ */ __name((options) => new EventStreamMarshaller(options), "eventStreamSerdeProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + EventStreamMarshaller, + eventStreamSerdeProvider +}); + diff --git a/node_modules/@smithy/eventstream-serde-node/dist-cjs/provider.js b/node_modules/@smithy/eventstream-serde-node/dist-cjs/provider.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-node/dist-cjs/provider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-serde-node/dist-cjs/utils.js b/node_modules/@smithy/eventstream-serde-node/dist-cjs/utils.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-node/dist-cjs/utils.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-serde-node/dist-es/EventStreamMarshaller.js b/node_modules/@smithy/eventstream-serde-node/dist-es/EventStreamMarshaller.js new file mode 100644 index 00000000..ad12ff23 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-node/dist-es/EventStreamMarshaller.js @@ -0,0 +1,18 @@ +import { EventStreamMarshaller as UniversalEventStreamMarshaller } from "@smithy/eventstream-serde-universal"; +import { Readable } from "stream"; +import { readabletoIterable } from "./utils"; +export class EventStreamMarshaller { + constructor({ utf8Encoder, utf8Decoder }) { + this.universalMarshaller = new UniversalEventStreamMarshaller({ + utf8Decoder, + utf8Encoder, + }); + } + deserialize(body, deserializer) { + const bodyIterable = typeof body[Symbol.asyncIterator] === "function" ? 
body : readabletoIterable(body); + return this.universalMarshaller.deserialize(bodyIterable, deserializer); + } + serialize(input, serializer) { + return Readable.from(this.universalMarshaller.serialize(input, serializer)); + } +} diff --git a/node_modules/@smithy/eventstream-serde-node/dist-es/index.js b/node_modules/@smithy/eventstream-serde-node/dist-es/index.js new file mode 100644 index 00000000..294fec53 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-node/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./EventStreamMarshaller"; +export * from "./provider"; diff --git a/node_modules/@smithy/eventstream-serde-node/dist-es/provider.js b/node_modules/@smithy/eventstream-serde-node/dist-es/provider.js new file mode 100644 index 00000000..b71c3f0e --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-node/dist-es/provider.js @@ -0,0 +1,2 @@ +import { EventStreamMarshaller } from "./EventStreamMarshaller"; +export const eventStreamSerdeProvider = (options) => new EventStreamMarshaller(options); diff --git a/node_modules/@smithy/eventstream-serde-node/dist-es/utils.js b/node_modules/@smithy/eventstream-serde-node/dist-es/utils.js new file mode 100644 index 00000000..a7baf9f6 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-node/dist-es/utils.js @@ -0,0 +1,26 @@ +export async function* readabletoIterable(readStream) { + let streamEnded = false; + let generationEnded = false; + const records = new Array(); + readStream.on("error", (err) => { + if (!streamEnded) { + streamEnded = true; + } + if (err) { + throw err; + } + }); + readStream.on("data", (data) => { + records.push(data); + }); + readStream.on("end", () => { + streamEnded = true; + }); + while (!generationEnded) { + const value = await new Promise((resolve) => setTimeout(() => resolve(records.shift()), 0)); + if (value) { + yield value; + } + generationEnded = streamEnded && records.length === 0; + } +} diff --git a/node_modules/@smithy/eventstream-serde-node/dist-types/EventStreamMarshaller.d.ts b/node_modules/@smithy/eventstream-serde-node/dist-types/EventStreamMarshaller.d.ts new file mode 100644 index 00000000..e3b9929e --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-node/dist-types/EventStreamMarshaller.d.ts @@ -0,0 +1,24 @@ +/// <reference types="node" /> +import { Decoder, Encoder, EventStreamMarshaller as IEventStreamMarshaller, Message } from "@smithy/types"; +import { Readable } from "stream"; +/** + * @internal + */ +export interface EventStreamMarshaller extends IEventStreamMarshaller { +} +/** + * @internal + */ +export interface EventStreamMarshallerOptions { + utf8Encoder: Encoder; + utf8Decoder: Decoder; +} +/** + * @internal + */ +export declare class EventStreamMarshaller { + private readonly universalMarshaller; + constructor({ utf8Encoder, utf8Decoder }: EventStreamMarshallerOptions); + deserialize<T>(body: Readable, deserializer: (input: Record<string, Message>) => Promise<T>): AsyncIterable<T>; + serialize<T>(input: AsyncIterable<T>, serializer: (event: T) => Message): Readable; +} diff --git a/node_modules/@smithy/eventstream-serde-node/dist-types/index.d.ts b/node_modules/@smithy/eventstream-serde-node/dist-types/index.d.ts new file mode 100644 index 00000000..9f8e9f7e --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-node/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./EventStreamMarshaller"; +/** + * @internal + */ +export * from "./provider"; diff --git a/node_modules/@smithy/eventstream-serde-node/dist-types/provider.d.ts
b/node_modules/@smithy/eventstream-serde-node/dist-types/provider.d.ts new file mode 100644 index 00000000..538197d1 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-node/dist-types/provider.d.ts @@ -0,0 +1,3 @@ +import { EventStreamSerdeProvider } from "@smithy/types"; +/** NodeJS event stream utils provider */ +export declare const eventStreamSerdeProvider: EventStreamSerdeProvider; diff --git a/node_modules/@smithy/eventstream-serde-node/dist-types/ts3.4/EventStreamMarshaller.d.ts b/node_modules/@smithy/eventstream-serde-node/dist-types/ts3.4/EventStreamMarshaller.d.ts new file mode 100644 index 00000000..a0781aa1 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-node/dist-types/ts3.4/EventStreamMarshaller.d.ts @@ -0,0 +1,24 @@ +/// <reference types="node" /> +import { Decoder, Encoder, EventStreamMarshaller as IEventStreamMarshaller, Message } from "@smithy/types"; +import { Readable } from "stream"; +/** + * @internal + */ +export interface EventStreamMarshaller extends IEventStreamMarshaller { +} +/** + * @internal + */ +export interface EventStreamMarshallerOptions { + utf8Encoder: Encoder; + utf8Decoder: Decoder; +} +/** + * @internal + */ +export declare class EventStreamMarshaller { + private readonly universalMarshaller; + constructor({ utf8Encoder, utf8Decoder }: EventStreamMarshallerOptions); + deserialize<T>(body: Readable, deserializer: (input: Record<string, Message>) => Promise<T>): AsyncIterable<T>; + serialize<T>(input: AsyncIterable<T>, serializer: (event: T) => Message): Readable; +} diff --git a/node_modules/@smithy/eventstream-serde-node/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/eventstream-serde-node/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..a82a7873 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-node/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./EventStreamMarshaller"; +/** + * @internal + */ +export * from "./provider"; diff --git a/node_modules/@smithy/eventstream-serde-node/dist-types/ts3.4/provider.d.ts b/node_modules/@smithy/eventstream-serde-node/dist-types/ts3.4/provider.d.ts new file mode 100644 index 00000000..ef02e9a5 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-node/dist-types/ts3.4/provider.d.ts @@ -0,0 +1,3 @@ +import { EventStreamSerdeProvider } from "@smithy/types"; +/** NodeJS event stream utils provider */ +export declare const eventStreamSerdeProvider: EventStreamSerdeProvider; diff --git a/node_modules/@smithy/eventstream-serde-node/dist-types/ts3.4/utils.d.ts b/node_modules/@smithy/eventstream-serde-node/dist-types/ts3.4/utils.d.ts new file mode 100644 index 00000000..36481c13 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-node/dist-types/ts3.4/utils.d.ts @@ -0,0 +1,13 @@ +/// <reference types="node" /> +import { Readable } from "stream"; +/** + * Convert object stream piped in into an async iterable. This + * adaptor should be deprecated when Node stream iterator is stable.
+ * Caveat: this adaptor won't have backpressure to inwards stream + * + * Reference: https://nodejs.org/docs/latest-v11.x/api/stream.html#stream_readable_symbol_asynciterator + */ +/** + * @internal + */ +export declare function readabletoIterable<T>(readStream: Readable): AsyncIterable<T>; diff --git a/node_modules/@smithy/eventstream-serde-node/dist-types/utils.d.ts b/node_modules/@smithy/eventstream-serde-node/dist-types/utils.d.ts new file mode 100644 index 00000000..271592d3 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-node/dist-types/utils.d.ts @@ -0,0 +1,13 @@ +/// <reference types="node" /> +import { Readable } from "stream"; +/** + * Convert object stream piped in into an async iterable. This + * adaptor should be deprecated when Node stream iterator is stable. + * Caveat: this adaptor won't have backpressure to inwards stream + * + * Reference: https://nodejs.org/docs/latest-v11.x/api/stream.html#stream_readable_symbol_asynciterator + */ +/** + * @internal + */ +export declare function readabletoIterable<T>(readStream: Readable): AsyncIterable<T>; diff --git a/node_modules/@smithy/eventstream-serde-node/package.json b/node_modules/@smithy/eventstream-serde-node/package.json new file mode 100644 index 00000000..0a340f61 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-node/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/eventstream-serde-node", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline eventstream-serde-node", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "exit 0" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/eventstream-serde-universal": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/eventstream-serde-node", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/eventstream-serde-node" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-serde-universal/LICENSE b/node_modules/@smithy/eventstream-serde-universal/LICENSE new file mode 100644 index 00000000..e907b586 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/LICENSE @@ -0,0 +1,201 @@ + Apache
License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@smithy/eventstream-serde-universal/README.md b/node_modules/@smithy/eventstream-serde-universal/README.md new file mode 100644 index 00000000..3c7fee12 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/README.md @@ -0,0 +1,10 @@ +# @smithy/eventstream-serde-universal + +[![NPM version](https://img.shields.io/npm/v/@smithy/eventstream-serde-universal/latest.svg)](https://www.npmjs.com/package/@smithy/eventstream-serde-universal) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/eventstream-serde-universal.svg)](https://www.npmjs.com/package/@smithy/eventstream-serde-universal) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@smithy/eventstream-serde-universal/dist-cjs/EventStreamMarshaller.js b/node_modules/@smithy/eventstream-serde-universal/dist-cjs/EventStreamMarshaller.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/dist-cjs/EventStreamMarshaller.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-serde-universal/dist-cjs/getChunkedStream.js b/node_modules/@smithy/eventstream-serde-universal/dist-cjs/getChunkedStream.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/dist-cjs/getChunkedStream.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-serde-universal/dist-cjs/getUnmarshalledStream.js b/node_modules/@smithy/eventstream-serde-universal/dist-cjs/getUnmarshalledStream.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/dist-cjs/getUnmarshalledStream.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-serde-universal/dist-cjs/index.js b/node_modules/@smithy/eventstream-serde-universal/dist-cjs/index.js new file mode 100644 index 00000000..c3c69289 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/dist-cjs/index.js @@ -0,0 +1,183 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var 
src_exports = {}; +__export(src_exports, { + EventStreamMarshaller: () => EventStreamMarshaller, + eventStreamSerdeProvider: () => eventStreamSerdeProvider +}); +module.exports = __toCommonJS(src_exports); + +// src/EventStreamMarshaller.ts +var import_eventstream_codec = require("@smithy/eventstream-codec"); + +// src/getChunkedStream.ts +function getChunkedStream(source) { + let currentMessageTotalLength = 0; + let currentMessagePendingLength = 0; + let currentMessage = null; + let messageLengthBuffer = null; + const allocateMessage = /* @__PURE__ */ __name((size) => { + if (typeof size !== "number") { + throw new Error("Attempted to allocate an event message where size was not a number: " + size); + } + currentMessageTotalLength = size; + currentMessagePendingLength = 4; + currentMessage = new Uint8Array(size); + const currentMessageView = new DataView(currentMessage.buffer); + currentMessageView.setUint32(0, size, false); + }, "allocateMessage"); + const iterator = /* @__PURE__ */ __name(async function* () { + const sourceIterator = source[Symbol.asyncIterator](); + while (true) { + const { value, done } = await sourceIterator.next(); + if (done) { + if (!currentMessageTotalLength) { + return; + } else if (currentMessageTotalLength === currentMessagePendingLength) { + yield currentMessage; + } else { + throw new Error("Truncated event message received."); + } + return; + } + const chunkLength = value.length; + let currentOffset = 0; + while (currentOffset < chunkLength) { + if (!currentMessage) { + const bytesRemaining = chunkLength - currentOffset; + if (!messageLengthBuffer) { + messageLengthBuffer = new Uint8Array(4); + } + const numBytesForTotal = Math.min( + 4 - currentMessagePendingLength, + // remaining bytes to fill the messageLengthBuffer + bytesRemaining + // bytes left in chunk + ); + messageLengthBuffer.set( + // @ts-ignore error TS2532: Object is possibly 'undefined' for value + value.slice(currentOffset, currentOffset + numBytesForTotal), + currentMessagePendingLength + ); + currentMessagePendingLength += numBytesForTotal; + currentOffset += numBytesForTotal; + if (currentMessagePendingLength < 4) { + break; + } + allocateMessage(new DataView(messageLengthBuffer.buffer).getUint32(0, false)); + messageLengthBuffer = null; + } + const numBytesToWrite = Math.min( + currentMessageTotalLength - currentMessagePendingLength, + // number of bytes left to complete message + chunkLength - currentOffset + // number of bytes left in the original chunk + ); + currentMessage.set( + // @ts-ignore error TS2532: Object is possibly 'undefined' for value + value.slice(currentOffset, currentOffset + numBytesToWrite), + currentMessagePendingLength + ); + currentMessagePendingLength += numBytesToWrite; + currentOffset += numBytesToWrite; + if (currentMessageTotalLength && currentMessageTotalLength === currentMessagePendingLength) { + yield currentMessage; + currentMessage = null; + currentMessageTotalLength = 0; + currentMessagePendingLength = 0; + } + } + } + }, "iterator"); + return { + [Symbol.asyncIterator]: iterator + }; +} +__name(getChunkedStream, "getChunkedStream"); + +// src/getUnmarshalledStream.ts +function getMessageUnmarshaller(deserializer, toUtf8) { + return async function(message) { + const { value: messageType } = message.headers[":message-type"]; + if (messageType === "error") { + const unmodeledError = new Error(message.headers[":error-message"].value || "UnknownError"); + unmodeledError.name = message.headers[":error-code"].value; + throw unmodeledError; + } else if 
(messageType === "exception") { + const code = message.headers[":exception-type"].value; + const exception = { [code]: message }; + const deserializedException = await deserializer(exception); + if (deserializedException.$unknown) { + const error = new Error(toUtf8(message.body)); + error.name = code; + throw error; + } + throw deserializedException[code]; + } else if (messageType === "event") { + const event = { + [message.headers[":event-type"].value]: message + }; + const deserialized = await deserializer(event); + if (deserialized.$unknown) + return; + return deserialized; + } else { + throw Error(`Unrecognizable event type: ${message.headers[":event-type"].value}`); + } + }; +} +__name(getMessageUnmarshaller, "getMessageUnmarshaller"); + +// src/EventStreamMarshaller.ts +var EventStreamMarshaller = class { + static { + __name(this, "EventStreamMarshaller"); + } + constructor({ utf8Encoder, utf8Decoder }) { + this.eventStreamCodec = new import_eventstream_codec.EventStreamCodec(utf8Encoder, utf8Decoder); + this.utfEncoder = utf8Encoder; + } + deserialize(body, deserializer) { + const inputStream = getChunkedStream(body); + return new import_eventstream_codec.SmithyMessageDecoderStream({ + messageStream: new import_eventstream_codec.MessageDecoderStream({ inputStream, decoder: this.eventStreamCodec }), + // @ts-expect-error Type 'T' is not assignable to type 'Record<string, Message>' + deserializer: getMessageUnmarshaller(deserializer, this.utfEncoder) + }); + } + serialize(inputStream, serializer) { + return new import_eventstream_codec.MessageEncoderStream({ + messageStream: new import_eventstream_codec.SmithyMessageEncoderStream({ inputStream, serializer }), + encoder: this.eventStreamCodec, + includeEndFrame: true + }); + } +}; + +// src/provider.ts +var eventStreamSerdeProvider = /* @__PURE__ */ __name((options) => new EventStreamMarshaller(options), "eventStreamSerdeProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + EventStreamMarshaller, + eventStreamSerdeProvider +}); + diff --git a/node_modules/@smithy/eventstream-serde-universal/dist-cjs/provider.js b/node_modules/@smithy/eventstream-serde-universal/dist-cjs/provider.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/dist-cjs/provider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/eventstream-serde-universal/dist-es/EventStreamMarshaller.js b/node_modules/@smithy/eventstream-serde-universal/dist-es/EventStreamMarshaller.js new file mode 100644 index 00000000..ca1d175f --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/dist-es/EventStreamMarshaller.js @@ -0,0 +1,23 @@ +import { EventStreamCodec, MessageDecoderStream, MessageEncoderStream, SmithyMessageDecoderStream, SmithyMessageEncoderStream, } from "@smithy/eventstream-codec"; +import { getChunkedStream } from "./getChunkedStream"; +import { getMessageUnmarshaller } from "./getUnmarshalledStream"; +export class EventStreamMarshaller { + constructor({ utf8Encoder, utf8Decoder }) { + this.eventStreamCodec = new EventStreamCodec(utf8Encoder, utf8Decoder); + this.utfEncoder = utf8Encoder; + } + deserialize(body, deserializer) { + const inputStream = getChunkedStream(body); + return new SmithyMessageDecoderStream({ + messageStream: new MessageDecoderStream({ inputStream, decoder: this.eventStreamCodec }), + deserializer: getMessageUnmarshaller(deserializer, this.utfEncoder), + }); + } + 
serialize(inputStream, serializer) { + return new MessageEncoderStream({ + messageStream: new SmithyMessageEncoderStream({ inputStream, serializer }), + encoder: this.eventStreamCodec, + includeEndFrame: true, + }); + } +} diff --git a/node_modules/@smithy/eventstream-serde-universal/dist-es/getChunkedStream.js b/node_modules/@smithy/eventstream-serde-universal/dist-es/getChunkedStream.js new file mode 100644 index 00000000..b738b459 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/dist-es/getChunkedStream.js @@ -0,0 +1,66 @@ +export function getChunkedStream(source) { + let currentMessageTotalLength = 0; + let currentMessagePendingLength = 0; + let currentMessage = null; + let messageLengthBuffer = null; + const allocateMessage = (size) => { + if (typeof size !== "number") { + throw new Error("Attempted to allocate an event message where size was not a number: " + size); + } + currentMessageTotalLength = size; + currentMessagePendingLength = 4; + currentMessage = new Uint8Array(size); + const currentMessageView = new DataView(currentMessage.buffer); + currentMessageView.setUint32(0, size, false); + }; + const iterator = async function* () { + const sourceIterator = source[Symbol.asyncIterator](); + while (true) { + const { value, done } = await sourceIterator.next(); + if (done) { + if (!currentMessageTotalLength) { + return; + } + else if (currentMessageTotalLength === currentMessagePendingLength) { + yield currentMessage; + } + else { + throw new Error("Truncated event message received."); + } + return; + } + const chunkLength = value.length; + let currentOffset = 0; + while (currentOffset < chunkLength) { + if (!currentMessage) { + const bytesRemaining = chunkLength - currentOffset; + if (!messageLengthBuffer) { + messageLengthBuffer = new Uint8Array(4); + } + const numBytesForTotal = Math.min(4 - currentMessagePendingLength, bytesRemaining); + messageLengthBuffer.set(value.slice(currentOffset, currentOffset + numBytesForTotal), currentMessagePendingLength); + currentMessagePendingLength += numBytesForTotal; + currentOffset += numBytesForTotal; + if (currentMessagePendingLength < 4) { + break; + } + allocateMessage(new DataView(messageLengthBuffer.buffer).getUint32(0, false)); + messageLengthBuffer = null; + } + const numBytesToWrite = Math.min(currentMessageTotalLength - currentMessagePendingLength, chunkLength - currentOffset); + currentMessage.set(value.slice(currentOffset, currentOffset + numBytesToWrite), currentMessagePendingLength); + currentMessagePendingLength += numBytesToWrite; + currentOffset += numBytesToWrite; + if (currentMessageTotalLength && currentMessageTotalLength === currentMessagePendingLength) { + yield currentMessage; + currentMessage = null; + currentMessageTotalLength = 0; + currentMessagePendingLength = 0; + } + } + } + }; + return { + [Symbol.asyncIterator]: iterator, + }; +} diff --git a/node_modules/@smithy/eventstream-serde-universal/dist-es/getUnmarshalledStream.js b/node_modules/@smithy/eventstream-serde-universal/dist-es/getUnmarshalledStream.js new file mode 100644 index 00000000..119399cd --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/dist-es/getUnmarshalledStream.js @@ -0,0 +1,47 @@ +export function getUnmarshalledStream(source, options) { + const messageUnmarshaller = getMessageUnmarshaller(options.deserializer, options.toUtf8); + return { + [Symbol.asyncIterator]: async function* () { + for await (const chunk of source) { + const message = options.eventStreamCodec.decode(chunk); + const type = await 
messageUnmarshaller(message); + if (type === undefined) + continue; + yield type; + } + }, + }; +} +export function getMessageUnmarshaller(deserializer, toUtf8) { + return async function (message) { + const { value: messageType } = message.headers[":message-type"]; + if (messageType === "error") { + const unmodeledError = new Error(message.headers[":error-message"].value || "UnknownError"); + unmodeledError.name = message.headers[":error-code"].value; + throw unmodeledError; + } + else if (messageType === "exception") { + const code = message.headers[":exception-type"].value; + const exception = { [code]: message }; + const deserializedException = await deserializer(exception); + if (deserializedException.$unknown) { + const error = new Error(toUtf8(message.body)); + error.name = code; + throw error; + } + throw deserializedException[code]; + } + else if (messageType === "event") { + const event = { + [message.headers[":event-type"].value]: message, + }; + const deserialized = await deserializer(event); + if (deserialized.$unknown) + return; + return deserialized; + } + else { + throw Error(`Unrecognizable event type: ${message.headers[":event-type"].value}`); + } + }; +} diff --git a/node_modules/@smithy/eventstream-serde-universal/dist-es/index.js b/node_modules/@smithy/eventstream-serde-universal/dist-es/index.js new file mode 100644 index 00000000..294fec53 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./EventStreamMarshaller"; +export * from "./provider"; diff --git a/node_modules/@smithy/eventstream-serde-universal/dist-es/provider.js b/node_modules/@smithy/eventstream-serde-universal/dist-es/provider.js new file mode 100644 index 00000000..b71c3f0e --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/dist-es/provider.js @@ -0,0 +1,2 @@ +import { EventStreamMarshaller } from "./EventStreamMarshaller"; +export const eventStreamSerdeProvider = (options) => new EventStreamMarshaller(options); diff --git a/node_modules/@smithy/eventstream-serde-universal/dist-types/EventStreamMarshaller.d.ts b/node_modules/@smithy/eventstream-serde-universal/dist-types/EventStreamMarshaller.d.ts new file mode 100644 index 00000000..49941161 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/dist-types/EventStreamMarshaller.d.ts @@ -0,0 +1,23 @@ +import { Decoder, Encoder, EventStreamMarshaller as IEventStreamMarshaller, Message } from "@smithy/types"; +/** + * @internal + */ +export interface EventStreamMarshaller extends IEventStreamMarshaller { +} +/** + * @internal + */ +export interface EventStreamMarshallerOptions { + utf8Encoder: Encoder; + utf8Decoder: Decoder; +} +/** + * @internal + */ +export declare class EventStreamMarshaller { + private readonly eventStreamCodec; + private readonly utfEncoder; + constructor({ utf8Encoder, utf8Decoder }: EventStreamMarshallerOptions); + deserialize<T>(body: AsyncIterable<Uint8Array>, deserializer: (input: Record<string, Message>) => Promise<T>): AsyncIterable<T>; + serialize<T>(inputStream: AsyncIterable<T>, serializer: (event: T) => Message): AsyncIterable<Uint8Array>; +} diff --git a/node_modules/@smithy/eventstream-serde-universal/dist-types/getChunkedStream.d.ts b/node_modules/@smithy/eventstream-serde-universal/dist-types/getChunkedStream.d.ts new file mode 100644 index 00000000..d2a58c0b --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/dist-types/getChunkedStream.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare function getChunkedStream(source: AsyncIterable<Uint8Array>):
AsyncIterable<Uint8Array>; diff --git a/node_modules/@smithy/eventstream-serde-universal/dist-types/getUnmarshalledStream.d.ts b/node_modules/@smithy/eventstream-serde-universal/dist-types/getUnmarshalledStream.d.ts new file mode 100644 index 00000000..8cb0a664 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/dist-types/getUnmarshalledStream.d.ts @@ -0,0 +1,18 @@ +import { EventStreamCodec } from "@smithy/eventstream-codec"; +import { Encoder, Message } from "@smithy/types"; +/** + * @internal + */ +export type UnmarshalledStreamOptions<T> = { + eventStreamCodec: EventStreamCodec; + deserializer: (input: Record<string, Message>) => Promise<T>; + toUtf8: Encoder; +}; +/** + * @internal + */ +export declare function getUnmarshalledStream<T extends Record<string, any>>(source: AsyncIterable<Uint8Array>, options: UnmarshalledStreamOptions<T>): AsyncIterable<T>; +/** + * @internal + */ +export declare function getMessageUnmarshaller<T extends Record<string, any>>(deserializer: (input: Record<string, Message>) => Promise<T>, toUtf8: Encoder): (input: Message) => Promise<T | undefined>; diff --git a/node_modules/@smithy/eventstream-serde-universal/dist-types/index.d.ts b/node_modules/@smithy/eventstream-serde-universal/dist-types/index.d.ts new file mode 100644 index 00000000..9f8e9f7e --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./EventStreamMarshaller"; +/** + * @internal + */ +export * from "./provider"; diff --git a/node_modules/@smithy/eventstream-serde-universal/dist-types/provider.d.ts b/node_modules/@smithy/eventstream-serde-universal/dist-types/provider.d.ts new file mode 100644 index 00000000..538197d1 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/dist-types/provider.d.ts @@ -0,0 +1,3 @@ +import { EventStreamSerdeProvider } from "@smithy/types"; +/** NodeJS event stream utils provider */ +export declare const eventStreamSerdeProvider: EventStreamSerdeProvider; diff --git a/node_modules/@smithy/eventstream-serde-universal/dist-types/ts3.4/EventStreamMarshaller.d.ts b/node_modules/@smithy/eventstream-serde-universal/dist-types/ts3.4/EventStreamMarshaller.d.ts new file mode 100644 index 00000000..6fa2c5af --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/dist-types/ts3.4/EventStreamMarshaller.d.ts @@ -0,0 +1,23 @@ +import { Decoder, Encoder, EventStreamMarshaller as IEventStreamMarshaller, Message } from "@smithy/types"; +/** + * @internal + */ +export interface EventStreamMarshaller extends IEventStreamMarshaller { +} +/** + * @internal + */ +export interface EventStreamMarshallerOptions { + utf8Encoder: Encoder; + utf8Decoder: Decoder; +} +/** + * @internal + */ +export declare class EventStreamMarshaller { + private readonly eventStreamCodec; + private readonly utfEncoder; + constructor({ utf8Encoder, utf8Decoder }: EventStreamMarshallerOptions); + deserialize<T>(body: AsyncIterable<Uint8Array>, deserializer: (input: Record<string, Message>) => Promise<T>): AsyncIterable<T>; + serialize<T>(inputStream: AsyncIterable<T>, serializer: (event: T) => Message): AsyncIterable<Uint8Array>; +} diff --git a/node_modules/@smithy/eventstream-serde-universal/dist-types/ts3.4/getChunkedStream.d.ts b/node_modules/@smithy/eventstream-serde-universal/dist-types/ts3.4/getChunkedStream.d.ts new file mode 100644 index 00000000..9a7e9823 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/dist-types/ts3.4/getChunkedStream.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare function getChunkedStream(source: AsyncIterable<Uint8Array>): AsyncIterable<Uint8Array>; diff --git
a/node_modules/@smithy/eventstream-serde-universal/dist-types/ts3.4/getUnmarshalledStream.d.ts b/node_modules/@smithy/eventstream-serde-universal/dist-types/ts3.4/getUnmarshalledStream.d.ts new file mode 100644 index 00000000..aeeafd88 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/dist-types/ts3.4/getUnmarshalledStream.d.ts @@ -0,0 +1,18 @@ +import { EventStreamCodec } from "@smithy/eventstream-codec"; +import { Encoder, Message } from "@smithy/types"; +/** + * @internal + */ +export type UnmarshalledStreamOptions<T> = { + eventStreamCodec: EventStreamCodec; + deserializer: (input: Record<string, Message>) => Promise<T>; + toUtf8: Encoder; +}; +/** + * @internal + */ +export declare function getUnmarshalledStream<T extends Record<string, any>>(source: AsyncIterable<Uint8Array>, options: UnmarshalledStreamOptions<T>): AsyncIterable<T>; +/** + * @internal + */ +export declare function getMessageUnmarshaller<T extends Record<string, any>>(deserializer: (input: Record<string, Message>) => Promise<T>, toUtf8: Encoder): (input: Message) => Promise<T | undefined>; diff --git a/node_modules/@smithy/eventstream-serde-universal/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/eventstream-serde-universal/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..a82a7873 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./EventStreamMarshaller"; +/** + * @internal + */ +export * from "./provider"; diff --git a/node_modules/@smithy/eventstream-serde-universal/dist-types/ts3.4/provider.d.ts b/node_modules/@smithy/eventstream-serde-universal/dist-types/ts3.4/provider.d.ts new file mode 100644 index 00000000..ef02e9a5 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/dist-types/ts3.4/provider.d.ts @@ -0,0 +1,3 @@ +import { EventStreamSerdeProvider } from "@smithy/types"; +/** NodeJS event stream utils provider */ +export declare const eventStreamSerdeProvider: EventStreamSerdeProvider; diff --git a/node_modules/@smithy/eventstream-serde-universal/package.json b/node_modules/@smithy/eventstream-serde-universal/package.json new file mode 100644 index 00000000..a7e77163 --- /dev/null +++ b/node_modules/@smithy/eventstream-serde-universal/package.json @@ -0,0 +1,63 @@ +{ + "name": "@smithy/eventstream-serde-universal", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline eventstream-serde-universal", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/eventstream-codec": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@smithy/util-utf8": "^4.0.0",
"@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/eventstream-serde-universal", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/eventstream-serde-universal" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/fetch-http-handler/LICENSE b/node_modules/@smithy/fetch-http-handler/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@smithy/fetch-http-handler/README.md b/node_modules/@smithy/fetch-http-handler/README.md new file mode 100644 index 00000000..e52e8f13 --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/README.md @@ -0,0 +1,11 @@ +# @smithy/fetch-http-handler + +[![NPM version](https://img.shields.io/npm/v/@smithy/fetch-http-handler/latest.svg)](https://www.npmjs.com/package/@smithy/fetch-http-handler) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/fetch-http-handler.svg)](https://www.npmjs.com/package/@smithy/fetch-http-handler) + +This is the default `requestHandler` used for browser applications. +Since Node.js introduced the experimental Web Streams API in v16.5.0 and stabilized it in v21.0.0, +you can consider using `fetch-http-handler` in Node.js, although it's not recommended. + +For the Node.js default `requestHandler` implementation, see instead +[`@smithy/node-http-handler`](https://www.npmjs.com/package/@smithy/node-http-handler).
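To make the vendored handler above concrete, here is a minimal usage sketch (not part of the diff; the host, path, and header values are placeholder assumptions): it sends a GET through FetchHttpHandler and collects the response body into a single Uint8Array with streamCollector, matching the implementations added in the files that follow.

import { FetchHttpHandler, streamCollector } from "@smithy/fetch-http-handler";
import { HttpRequest } from "@smithy/protocol-http";

// Handler with a 5 s request timeout; keepAlive is only applied when the
// runtime's Request constructor supports the `keepalive` flag.
const handler = new FetchHttpHandler({ requestTimeout: 5000, keepAlive: false });

async function fetchBytes(): Promise<Uint8Array> {
  const request = new HttpRequest({
    protocol: "https:",
    hostname: "example.com", // placeholder host
    method: "GET",
    path: "/data.bin", // placeholder path
    headers: { accept: "application/octet-stream" },
  });
  const { response } = await handler.handle(request, {});
  // response.body is a ReadableStream where fetch supports streaming,
  // otherwise a Blob; streamCollector normalizes either to a Uint8Array.
  return streamCollector(response.body);
}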
diff --git a/node_modules/@smithy/fetch-http-handler/dist-cjs/create-request.js b/node_modules/@smithy/fetch-http-handler/dist-cjs/create-request.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/dist-cjs/create-request.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/fetch-http-handler/dist-cjs/fetch-http-handler.js b/node_modules/@smithy/fetch-http-handler/dist-cjs/fetch-http-handler.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/dist-cjs/fetch-http-handler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/fetch-http-handler/dist-cjs/index.js b/node_modules/@smithy/fetch-http-handler/dist-cjs/index.js new file mode 100644 index 00000000..9c9c44b9 --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/dist-cjs/index.js @@ -0,0 +1,264 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + FetchHttpHandler: () => FetchHttpHandler, + keepAliveSupport: () => keepAliveSupport, + streamCollector: () => streamCollector +}); +module.exports = __toCommonJS(src_exports); + +// src/fetch-http-handler.ts +var import_protocol_http = require("@smithy/protocol-http"); +var import_querystring_builder = require("@smithy/querystring-builder"); + +// src/create-request.ts +function createRequest(url, requestOptions) { + return new Request(url, requestOptions); +} +__name(createRequest, "createRequest"); + +// src/request-timeout.ts +function requestTimeout(timeoutInMs = 0) { + return new Promise((resolve, reject) => { + if (timeoutInMs) { + setTimeout(() => { + const timeoutError = new Error(`Request did not complete within ${timeoutInMs} ms`); + timeoutError.name = "TimeoutError"; + reject(timeoutError); + }, timeoutInMs); + } + }); +} +__name(requestTimeout, "requestTimeout"); + +// src/fetch-http-handler.ts +var keepAliveSupport = { + supported: void 0 +}; +var FetchHttpHandler = class _FetchHttpHandler { + static { + __name(this, "FetchHttpHandler"); + } + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new _FetchHttpHandler(instanceOrOptions); + } + constructor(options) { + if (typeof options === "function") { + this.configProvider = options().then((opts) => opts || {}); + } else { + this.config = options ?? 
{}; + this.configProvider = Promise.resolve(this.config); + } + if (keepAliveSupport.supported === void 0) { + keepAliveSupport.supported = Boolean( + typeof Request !== "undefined" && "keepalive" in createRequest("https://[::1]") + ); + } + } + destroy() { + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + const requestTimeoutInMs = this.config.requestTimeout; + const keepAlive = this.config.keepAlive === true; + const credentials = this.config.credentials; + if (abortSignal?.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + return Promise.reject(abortError); + } + let path = request.path; + const queryString = (0, import_querystring_builder.buildQueryString)(request.query || {}); + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}@`; + } + const { port, method } = request; + const url = `${request.protocol}//${auth}${request.hostname}${port ? `:${port}` : ""}${path}`; + const body = method === "GET" || method === "HEAD" ? void 0 : request.body; + const requestOptions = { + body, + headers: new Headers(request.headers), + method, + credentials + }; + if (this.config?.cache) { + requestOptions.cache = this.config.cache; + } + if (body) { + requestOptions.duplex = "half"; + } + if (typeof AbortController !== "undefined") { + requestOptions.signal = abortSignal; + } + if (keepAliveSupport.supported) { + requestOptions.keepalive = keepAlive; + } + if (typeof this.config.requestInit === "function") { + Object.assign(requestOptions, this.config.requestInit(request)); + } + let removeSignalEventListener = /* @__PURE__ */ __name(() => { + }, "removeSignalEventListener"); + const fetchRequest = createRequest(url, requestOptions); + const raceOfPromises = [ + fetch(fetchRequest).then((response) => { + const fetchHeaders = response.headers; + const transformedHeaders = {}; + for (const pair of fetchHeaders.entries()) { + transformedHeaders[pair[0]] = pair[1]; + } + const hasReadableStream = response.body != void 0; + if (!hasReadableStream) { + return response.blob().then((body2) => ({ + response: new import_protocol_http.HttpResponse({ + headers: transformedHeaders, + reason: response.statusText, + statusCode: response.status, + body: body2 + }) + })); + } + return { + response: new import_protocol_http.HttpResponse({ + headers: transformedHeaders, + reason: response.statusText, + statusCode: response.status, + body: response.body + }) + }; + }), + requestTimeout(requestTimeoutInMs) + ]; + if (abortSignal) { + raceOfPromises.push( + new Promise((resolve, reject) => { + const onAbort = /* @__PURE__ */ __name(() => { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }, "onAbort"); + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + removeSignalEventListener = /* @__PURE__ */ __name(() => signal.removeEventListener("abort", onAbort), "removeSignalEventListener"); + } else { + abortSignal.onabort = onAbort; + } + }) + ); + } + return Promise.race(raceOfPromises).finally(removeSignalEventListener); + } + updateHttpClientConfig(key, value) { + this.config = void 0; + 
this.configProvider = this.configProvider.then((config) => { + config[key] = value; + return config; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } +}; + +// src/stream-collector.ts +var import_util_base64 = require("@smithy/util-base64"); +var streamCollector = /* @__PURE__ */ __name(async (stream) => { + if (typeof Blob === "function" && stream instanceof Blob || stream.constructor?.name === "Blob") { + if (Blob.prototype.arrayBuffer !== void 0) { + return new Uint8Array(await stream.arrayBuffer()); + } + return collectBlob(stream); + } + return collectStream(stream); +}, "streamCollector"); +async function collectBlob(blob) { + const base64 = await readToBase64(blob); + const arrayBuffer = (0, import_util_base64.fromBase64)(base64); + return new Uint8Array(arrayBuffer); +} +__name(collectBlob, "collectBlob"); +async function collectStream(stream) { + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + let length = 0; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + length += value.length; + } + isDone = done; + } + const collected = new Uint8Array(length); + let offset = 0; + for (const chunk of chunks) { + collected.set(chunk, offset); + offset += chunk.length; + } + return collected; +} +__name(collectStream, "collectStream"); +function readToBase64(blob) { + return new Promise((resolve, reject) => { + const reader = new FileReader(); + reader.onloadend = () => { + if (reader.readyState !== 2) { + return reject(new Error("Reader aborted too early")); + } + const result = reader.result ?? ""; + const commaIndex = result.indexOf(","); + const dataOffset = commaIndex > -1 ? commaIndex + 1 : result.length; + resolve(result.substring(dataOffset)); + }; + reader.onabort = () => reject(new Error("Read aborted")); + reader.onerror = () => reject(reader.error); + reader.readAsDataURL(blob); + }); +} +__name(readToBase64, "readToBase64"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + keepAliveSupport, + FetchHttpHandler, + streamCollector +}); + diff --git a/node_modules/@smithy/fetch-http-handler/dist-cjs/request-timeout.js b/node_modules/@smithy/fetch-http-handler/dist-cjs/request-timeout.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/dist-cjs/request-timeout.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/fetch-http-handler/dist-cjs/stream-collector.js b/node_modules/@smithy/fetch-http-handler/dist-cjs/stream-collector.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/dist-cjs/stream-collector.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/fetch-http-handler/dist-es/create-request.js b/node_modules/@smithy/fetch-http-handler/dist-es/create-request.js new file mode 100644 index 00000000..b6f1816e --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/dist-es/create-request.js @@ -0,0 +1,3 @@ +export function createRequest(url, requestOptions) { + return new Request(url, requestOptions); +} diff --git a/node_modules/@smithy/fetch-http-handler/dist-es/fetch-http-handler.js b/node_modules/@smithy/fetch-http-handler/dist-es/fetch-http-handler.js new file mode 100644 index 00000000..dd56e370 --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/dist-es/fetch-http-handler.js @@ 
-0,0 +1,139 @@ +import { HttpResponse } from "@smithy/protocol-http"; +import { buildQueryString } from "@smithy/querystring-builder"; +import { createRequest } from "./create-request"; +import { requestTimeout } from "./request-timeout"; +export const keepAliveSupport = { + supported: undefined, +}; +export class FetchHttpHandler { + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new FetchHttpHandler(instanceOrOptions); + } + constructor(options) { + if (typeof options === "function") { + this.configProvider = options().then((opts) => opts || {}); + } + else { + this.config = options ?? {}; + this.configProvider = Promise.resolve(this.config); + } + if (keepAliveSupport.supported === undefined) { + keepAliveSupport.supported = Boolean(typeof Request !== "undefined" && "keepalive" in createRequest("https://[::1]")); + } + } + destroy() { + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + const requestTimeoutInMs = this.config.requestTimeout; + const keepAlive = this.config.keepAlive === true; + const credentials = this.config.credentials; + if (abortSignal?.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + return Promise.reject(abortError); + } + let path = request.path; + const queryString = buildQueryString(request.query || {}); + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}@`; + } + const { port, method } = request; + const url = `${request.protocol}//${auth}${request.hostname}${port ? `:${port}` : ""}${path}`; + const body = method === "GET" || method === "HEAD" ? 
undefined : request.body; + const requestOptions = { + body, + headers: new Headers(request.headers), + method: method, + credentials, + }; + if (this.config?.cache) { + requestOptions.cache = this.config.cache; + } + if (body) { + requestOptions.duplex = "half"; + } + if (typeof AbortController !== "undefined") { + requestOptions.signal = abortSignal; + } + if (keepAliveSupport.supported) { + requestOptions.keepalive = keepAlive; + } + if (typeof this.config.requestInit === "function") { + Object.assign(requestOptions, this.config.requestInit(request)); + } + let removeSignalEventListener = () => { }; + const fetchRequest = createRequest(url, requestOptions); + const raceOfPromises = [ + fetch(fetchRequest).then((response) => { + const fetchHeaders = response.headers; + const transformedHeaders = {}; + for (const pair of fetchHeaders.entries()) { + transformedHeaders[pair[0]] = pair[1]; + } + const hasReadableStream = response.body != undefined; + if (!hasReadableStream) { + return response.blob().then((body) => ({ + response: new HttpResponse({ + headers: transformedHeaders, + reason: response.statusText, + statusCode: response.status, + body, + }), + })); + } + return { + response: new HttpResponse({ + headers: transformedHeaders, + reason: response.statusText, + statusCode: response.status, + body: response.body, + }), + }; + }), + requestTimeout(requestTimeoutInMs), + ]; + if (abortSignal) { + raceOfPromises.push(new Promise((resolve, reject) => { + const onAbort = () => { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }; + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + removeSignalEventListener = () => signal.removeEventListener("abort", onAbort); + } + else { + abortSignal.onabort = onAbort; + } + })); + } + return Promise.race(raceOfPromises).finally(removeSignalEventListener); + } + updateHttpClientConfig(key, value) { + this.config = undefined; + this.configProvider = this.configProvider.then((config) => { + config[key] = value; + return config; + }); + } + httpHandlerConfigs() { + return this.config ?? 
{}; + } +} diff --git a/node_modules/@smithy/fetch-http-handler/dist-es/index.js b/node_modules/@smithy/fetch-http-handler/dist-es/index.js new file mode 100644 index 00000000..a0c61f1b --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./fetch-http-handler"; +export * from "./stream-collector"; diff --git a/node_modules/@smithy/fetch-http-handler/dist-es/request-timeout.js b/node_modules/@smithy/fetch-http-handler/dist-es/request-timeout.js new file mode 100644 index 00000000..66b09b26 --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/dist-es/request-timeout.js @@ -0,0 +1,11 @@ +export function requestTimeout(timeoutInMs = 0) { + return new Promise((resolve, reject) => { + if (timeoutInMs) { + setTimeout(() => { + const timeoutError = new Error(`Request did not complete within ${timeoutInMs} ms`); + timeoutError.name = "TimeoutError"; + reject(timeoutError); + }, timeoutInMs); + } + }); +} diff --git a/node_modules/@smithy/fetch-http-handler/dist-es/stream-collector.js b/node_modules/@smithy/fetch-http-handler/dist-es/stream-collector.js new file mode 100644 index 00000000..a400d9b1 --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/dist-es/stream-collector.js @@ -0,0 +1,53 @@ +import { fromBase64 } from "@smithy/util-base64"; +export const streamCollector = async (stream) => { + if ((typeof Blob === "function" && stream instanceof Blob) || stream.constructor?.name === "Blob") { + if (Blob.prototype.arrayBuffer !== undefined) { + return new Uint8Array(await stream.arrayBuffer()); + } + return collectBlob(stream); + } + return collectStream(stream); +}; +async function collectBlob(blob) { + const base64 = await readToBase64(blob); + const arrayBuffer = fromBase64(base64); + return new Uint8Array(arrayBuffer); +} +async function collectStream(stream) { + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + let length = 0; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + length += value.length; + } + isDone = done; + } + const collected = new Uint8Array(length); + let offset = 0; + for (const chunk of chunks) { + collected.set(chunk, offset); + offset += chunk.length; + } + return collected; +} +function readToBase64(blob) { + return new Promise((resolve, reject) => { + const reader = new FileReader(); + reader.onloadend = () => { + if (reader.readyState !== 2) { + return reject(new Error("Reader aborted too early")); + } + const result = (reader.result ?? ""); + const commaIndex = result.indexOf(","); + const dataOffset = commaIndex > -1 ? commaIndex + 1 : result.length; + resolve(result.substring(dataOffset)); + }; + reader.onabort = () => reject(new Error("Read aborted")); + reader.onerror = () => reject(reader.error); + reader.readAsDataURL(blob); + }); +} diff --git a/node_modules/@smithy/fetch-http-handler/dist-types/create-request.d.ts b/node_modules/@smithy/fetch-http-handler/dist-types/create-request.d.ts new file mode 100644 index 00000000..d668b065 --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/dist-types/create-request.d.ts @@ -0,0 +1,6 @@ +import { AdditionalRequestParameters } from "./fetch-http-handler"; +/** + * @internal + * For mocking/interception. 
+ */ +export declare function createRequest(url: string, requestOptions?: RequestInit & AdditionalRequestParameters): Request; diff --git a/node_modules/@smithy/fetch-http-handler/dist-types/fetch-http-handler.d.ts b/node_modules/@smithy/fetch-http-handler/dist-types/fetch-http-handler.d.ts new file mode 100644 index 00000000..446301cb --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/dist-types/fetch-http-handler.d.ts @@ -0,0 +1,41 @@ +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import type { FetchHttpHandlerOptions } from "@smithy/types"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +/** + * @public + */ +export { FetchHttpHandlerOptions }; +/** + * @internal + * Detection of keepalive support. Can be overridden for testing. + */ +export declare const keepAliveSupport: { + supported: boolean | undefined; +}; +/** + * @internal + */ +export type AdditionalRequestParameters = { + duplex?: "half"; +}; +/** + * @public + * + * HttpHandler implementation using browsers' `fetch` global function. + */ +export declare class FetchHttpHandler implements HttpHandler<FetchHttpHandlerOptions> { + private config?; + private configProvider; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler<any> | FetchHttpHandlerOptions | Provider<FetchHttpHandlerOptions | void>): FetchHttpHandler | HttpHandler<any>; + constructor(options?: FetchHttpHandlerOptions | Provider<FetchHttpHandlerOptions | void>); + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof FetchHttpHandlerOptions, value: FetchHttpHandlerOptions[typeof key]): void; + httpHandlerConfigs(): FetchHttpHandlerOptions; +} diff --git a/node_modules/@smithy/fetch-http-handler/dist-types/index.d.ts b/node_modules/@smithy/fetch-http-handler/dist-types/index.d.ts new file mode 100644 index 00000000..a0c61f1b --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./fetch-http-handler"; +export * from "./stream-collector"; diff --git a/node_modules/@smithy/fetch-http-handler/dist-types/request-timeout.d.ts b/node_modules/@smithy/fetch-http-handler/dist-types/request-timeout.d.ts new file mode 100644 index 00000000..28d784b2 --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/dist-types/request-timeout.d.ts @@ -0,0 +1 @@ +export declare function requestTimeout(timeoutInMs?: number): Promise<never>; diff --git a/node_modules/@smithy/fetch-http-handler/dist-types/stream-collector.d.ts b/node_modules/@smithy/fetch-http-handler/dist-types/stream-collector.d.ts new file mode 100644 index 00000000..b2ca8122 --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/dist-types/stream-collector.d.ts @@ -0,0 +1,2 @@ +import { StreamCollector } from "@smithy/types"; +export declare const streamCollector: StreamCollector; diff --git a/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/create-request.d.ts b/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/create-request.d.ts new file mode 100644 index 00000000..5f0b074d --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/create-request.d.ts @@ -0,0 +1,6 @@ +import { AdditionalRequestParameters } from "./fetch-http-handler"; +/** + * @internal + * For mocking/interception.
+ */ +export declare function createRequest(url: string, requestOptions?: RequestInit & AdditionalRequestParameters): Request; diff --git a/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/fetch-http-handler.d.ts b/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/fetch-http-handler.d.ts new file mode 100644 index 00000000..19a29435 --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/fetch-http-handler.d.ts @@ -0,0 +1,41 @@ +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { FetchHttpHandlerOptions } from "@smithy/types"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +/** + * @public + */ +export { FetchHttpHandlerOptions }; +/** + * @internal + * Detection of keepalive support. Can be overridden for testing. + */ +export declare const keepAliveSupport: { + supported: boolean | undefined; +}; +/** + * @internal + */ +export type AdditionalRequestParameters = { + duplex?: "half"; +}; +/** + * @public + * + * HttpHandler implementation using browsers' `fetch` global function. + */ +export declare class FetchHttpHandler implements HttpHandler<FetchHttpHandlerOptions> { + private config?; + private configProvider; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler<any> | FetchHttpHandlerOptions | Provider<FetchHttpHandlerOptions | void>): FetchHttpHandler | HttpHandler<any>; + constructor(options?: FetchHttpHandlerOptions | Provider<FetchHttpHandlerOptions | void>); + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof FetchHttpHandlerOptions, value: FetchHttpHandlerOptions[typeof key]): void; + httpHandlerConfigs(): FetchHttpHandlerOptions; +} diff --git a/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..d30edab2 --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./fetch-http-handler"; +export * from "./stream-collector"; diff --git a/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/request-timeout.d.ts b/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/request-timeout.d.ts new file mode 100644 index 00000000..ca24128d --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/request-timeout.d.ts @@ -0,0 +1 @@ +export declare function requestTimeout(timeoutInMs?: number): Promise<never>; diff --git a/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/stream-collector.d.ts b/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/stream-collector.d.ts new file mode 100644 index 00000000..82590970 --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/stream-collector.d.ts @@ -0,0 +1,2 @@ +import { StreamCollector } from "@smithy/types"; +export declare const streamCollector: StreamCollector; diff --git a/node_modules/@smithy/fetch-http-handler/package.json b/node_modules/@smithy/fetch-http-handler/package.json new file mode 100644 index 00000000..8ebcaa13 --- /dev/null +++ b/node_modules/@smithy/fetch-http-handler/package.json @@ -0,0 +1,68 @@ +{ + "name": "@smithy/fetch-http-handler", + "version": "5.0.2", + "description": "Provides a way to make requests", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node
../../scripts/inline fetch-http-handler", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run && yarn test:browser", + "test:watch": "yarn g:vitest watch", + "test:browser": "yarn g:vitest run -c vitest.config.browser.ts", + "test:browser:watch": "yarn g:vitest watch -c vitest.config.browser.ts" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@smithy/abort-controller": "^4.0.2", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/fetch-http-handler", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/fetch-http-handler" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/hash-blob-browser/LICENSE b/node_modules/@smithy/hash-blob-browser/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/hash-blob-browser/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@smithy/hash-blob-browser/README.md b/node_modules/@smithy/hash-blob-browser/README.md new file mode 100644 index 00000000..287e8e2d --- /dev/null +++ b/node_modules/@smithy/hash-blob-browser/README.md @@ -0,0 +1,10 @@ +# @smithy/sha256-blob-browser + +[![NPM version](https://img.shields.io/npm/v/@smithy/hash-blob-browser/latest.svg)](https://www.npmjs.com/package/@smithy/hash-blob-browser) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/hash-blob-browser.svg)](https://www.npmjs.com/package/@smithy/hash-blob-browser) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
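Reviewer note: the vendored @smithy/hash-blob-browser package below exposes a single blobHasher helper, which SDK internals use to hash a browser Blob in chunks rather than buffering it whole. A minimal usage sketch follows, assuming the Sha256 constructor from @aws-crypto/sha256-js (not a dependency of this package, used here purely for illustration); any constructor matching the Smithy HashConstructor shape with update()/digest() would work.

import { blobHasher } from "@smithy/hash-blob-browser";
import { Sha256 } from "@aws-crypto/sha256-js"; // assumption: illustrative hash constructor

async function checksumBlob(blob: Blob): Promise<Uint8Array> {
  // blobReader (from @smithy/chunked-blob-reader) feeds the Blob to the
  // hash in fixed-size chunks, so large files are never fully in memory.
  return blobHasher(Sha256, blob);
}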
diff --git a/node_modules/@smithy/hash-blob-browser/dist-cjs/index.js b/node_modules/@smithy/hash-blob-browser/dist-cjs/index.js
new file mode 100644
index 00000000..c4ab3d6b
--- /dev/null
+++ b/node_modules/@smithy/hash-blob-browser/dist-cjs/index.js
@@ -0,0 +1,39 @@
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// src/index.ts
+var src_exports = {};
+__export(src_exports, {
+  blobHasher: () => blobHasher
+});
+module.exports = __toCommonJS(src_exports);
+var import_chunked_blob_reader = require("@smithy/chunked-blob-reader");
+var blobHasher = /* @__PURE__ */ __name(async function blobHasher2(hashCtor, blob) {
+  const hash = new hashCtor();
+  await (0, import_chunked_blob_reader.blobReader)(blob, (chunk) => {
+    hash.update(chunk);
+  });
+  return hash.digest();
+}, "blobHasher");
+// Annotate the CommonJS export names for ESM import in node:
+
+0 && (module.exports = {
+  blobHasher
+});
+
diff --git a/node_modules/@smithy/hash-blob-browser/dist-es/index.js b/node_modules/@smithy/hash-blob-browser/dist-es/index.js
new file mode 100644
index 00000000..49fa6f76
--- /dev/null
+++ b/node_modules/@smithy/hash-blob-browser/dist-es/index.js
@@ -0,0 +1,8 @@
+import { blobReader } from "@smithy/chunked-blob-reader";
+export const blobHasher = async function blobHasher(hashCtor, blob) {
+    const hash = new hashCtor();
+    await blobReader(blob, (chunk) => {
+        hash.update(chunk);
+    });
+    return hash.digest();
+};
diff --git a/node_modules/@smithy/hash-blob-browser/dist-types/index.d.ts b/node_modules/@smithy/hash-blob-browser/dist-types/index.d.ts
new file mode 100644
index 00000000..abb935f0
--- /dev/null
+++ b/node_modules/@smithy/hash-blob-browser/dist-types/index.d.ts
@@ -0,0 +1,5 @@
+import { StreamHasher } from "@smithy/types";
+/**
+ * @internal
+ */
+export declare const blobHasher: StreamHasher<Blob>;
diff --git a/node_modules/@smithy/hash-blob-browser/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/hash-blob-browser/dist-types/ts3.4/index.d.ts
new file mode 100644
index 00000000..1c7e63af
--- /dev/null
+++ b/node_modules/@smithy/hash-blob-browser/dist-types/ts3.4/index.d.ts
@@ -0,0 +1,5 @@
+import { StreamHasher } from "@smithy/types";
+/**
+ * @internal
+ */
+export declare const blobHasher: StreamHasher<Blob>;
diff --git a/node_modules/@smithy/hash-blob-browser/package.json b/node_modules/@smithy/hash-blob-browser/package.json
new file mode 100644
index 00000000..f85507ca
--- /dev/null
+++ b/node_modules/@smithy/hash-blob-browser/package.json
@@ -0,0 +1,69 @@
+{
+  "name": "@smithy/hash-blob-browser",
+  "version": "4.0.2",
+  "scripts": {
+    "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'",
+    "build:cjs": "node ../../scripts/inline
hash-blob-browser", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/chunked-blob-reader": "^5.0.0", + "@smithy/chunked-blob-reader-native": "^4.0.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@smithy/util-hex-encoding": "^4.0.0", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "react-native": { + "@smithy/chunked-blob-reader": "@smithy/chunked-blob-reader-native" + }, + "browser": { + "@smithy/chunked-blob-reader": "@smithy/chunked-blob-reader" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/hash-blob-browser", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/hash-blob-browser" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/hash-node/LICENSE b/node_modules/@smithy/hash-node/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/hash-node/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@smithy/hash-node/README.md b/node_modules/@smithy/hash-node/README.md new file mode 100644 index 00000000..a160019e --- /dev/null +++ b/node_modules/@smithy/hash-node/README.md @@ -0,0 +1,10 @@ +# @smithy/md5-node + +[![NPM version](https://img.shields.io/npm/v/@smithy/hash-node/latest.svg)](https://www.npmjs.com/package/@smithy/hash-node) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/hash-node.svg)](https://www.npmjs.com/package/@smithy/hash-node) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
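Reviewer note: the vendored @smithy/hash-node package below wraps Node's crypto.createHash/createHmac behind the Smithy Checksum interface; constructing it without a secret yields a plain digest, while passing a secret switches the same class to HMAC mode (as the reset() implementation below shows). A minimal sketch of direct use; the "sha256" algorithm name and the inputs are illustrative only.

import { Hash } from "@smithy/hash-node";

async function demo(): Promise<void> {
  // Plain SHA-256 digest of a UTF-8 string.
  const hash = new Hash("sha256");
  hash.update("hello world", "utf8");
  const digest = await hash.digest(); // resolves to a Node Buffer (typed as Uint8Array)

  // Passing a secret makes the same class compute an HMAC instead.
  const hmac = new Hash("sha256", "secret-key");
  hmac.update("payload");
  const mac = await hmac.digest();
}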
diff --git a/node_modules/@smithy/hash-node/dist-cjs/index.js b/node_modules/@smithy/hash-node/dist-cjs/index.js new file mode 100644 index 00000000..fc7f7dec --- /dev/null +++ b/node_modules/@smithy/hash-node/dist-cjs/index.js @@ -0,0 +1,67 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Hash: () => Hash +}); +module.exports = __toCommonJS(src_exports); +var import_util_buffer_from = require("@smithy/util-buffer-from"); +var import_util_utf8 = require("@smithy/util-utf8"); +var import_buffer = require("buffer"); +var import_crypto = require("crypto"); +var Hash = class { + static { + __name(this, "Hash"); + } + constructor(algorithmIdentifier, secret) { + this.algorithmIdentifier = algorithmIdentifier; + this.secret = secret; + this.reset(); + } + update(toHash, encoding) { + this.hash.update((0, import_util_utf8.toUint8Array)(castSourceData(toHash, encoding))); + } + digest() { + return Promise.resolve(this.hash.digest()); + } + reset() { + this.hash = this.secret ? (0, import_crypto.createHmac)(this.algorithmIdentifier, castSourceData(this.secret)) : (0, import_crypto.createHash)(this.algorithmIdentifier); + } +}; +function castSourceData(toCast, encoding) { + if (import_buffer.Buffer.isBuffer(toCast)) { + return toCast; + } + if (typeof toCast === "string") { + return (0, import_util_buffer_from.fromString)(toCast, encoding); + } + if (ArrayBuffer.isView(toCast)) { + return (0, import_util_buffer_from.fromArrayBuffer)(toCast.buffer, toCast.byteOffset, toCast.byteLength); + } + return (0, import_util_buffer_from.fromArrayBuffer)(toCast); +} +__name(castSourceData, "castSourceData"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + Hash +}); + diff --git a/node_modules/@smithy/hash-node/dist-es/index.js b/node_modules/@smithy/hash-node/dist-es/index.js new file mode 100644 index 00000000..718d9c6a --- /dev/null +++ b/node_modules/@smithy/hash-node/dist-es/index.js @@ -0,0 +1,34 @@ +import { fromArrayBuffer, fromString } from "@smithy/util-buffer-from"; +import { toUint8Array } from "@smithy/util-utf8"; +import { Buffer } from "buffer"; +import { createHash, createHmac } from "crypto"; +export class Hash { + constructor(algorithmIdentifier, secret) { + this.algorithmIdentifier = algorithmIdentifier; + this.secret = secret; + this.reset(); + } + update(toHash, encoding) { + this.hash.update(toUint8Array(castSourceData(toHash, encoding))); + } + digest() { + return Promise.resolve(this.hash.digest()); + } + reset() { + this.hash = this.secret + ? 
createHmac(this.algorithmIdentifier, castSourceData(this.secret))
+            : createHash(this.algorithmIdentifier);
+    }
+}
+function castSourceData(toCast, encoding) {
+    if (Buffer.isBuffer(toCast)) {
+        return toCast;
+    }
+    if (typeof toCast === "string") {
+        return fromString(toCast, encoding);
+    }
+    if (ArrayBuffer.isView(toCast)) {
+        return fromArrayBuffer(toCast.buffer, toCast.byteOffset, toCast.byteLength);
+    }
+    return fromArrayBuffer(toCast);
+}
diff --git a/node_modules/@smithy/hash-node/dist-types/index.d.ts b/node_modules/@smithy/hash-node/dist-types/index.d.ts
new file mode 100644
index 00000000..20ed5ed9
--- /dev/null
+++ b/node_modules/@smithy/hash-node/dist-types/index.d.ts
@@ -0,0 +1,13 @@
+import { Checksum, SourceData } from "@smithy/types";
+/**
+ * @internal
+ */
+export declare class Hash implements Checksum {
+    private readonly algorithmIdentifier;
+    private readonly secret?;
+    private hash;
+    constructor(algorithmIdentifier: string, secret?: SourceData);
+    update(toHash: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void;
+    digest(): Promise<Uint8Array>;
+    reset(): void;
+}
diff --git a/node_modules/@smithy/hash-node/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/hash-node/dist-types/ts3.4/index.d.ts
new file mode 100644
index 00000000..313ab7e7
--- /dev/null
+++ b/node_modules/@smithy/hash-node/dist-types/ts3.4/index.d.ts
@@ -0,0 +1,13 @@
+import { Checksum, SourceData } from "@smithy/types";
+/**
+ * @internal
+ */
+export declare class Hash implements Checksum {
+    private readonly algorithmIdentifier;
+    private readonly secret?;
+    private hash;
+    constructor(algorithmIdentifier: string, secret?: SourceData);
+    update(toHash: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void;
+    digest(): Promise<Uint8Array>;
+    reset(): void;
+}
diff --git a/node_modules/@smithy/hash-node/package.json b/node_modules/@smithy/hash-node/package.json
new file mode 100644
index 00000000..527b45a5
--- /dev/null
+++ b/node_modules/@smithy/hash-node/package.json
@@ -0,0 +1,64 @@
+{
+  "name": "@smithy/hash-node",
+  "version": "4.0.2",
+  "scripts": {
+    "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'",
+    "build:cjs": "node ../../scripts/inline hash-node",
+    "build:es": "yarn g:tsc -p tsconfig.es.json",
+    "build:types": "yarn g:tsc -p tsconfig.types.json",
+    "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4",
+    "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz",
+    "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0",
+    "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"",
+    "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"",
+    "test": "yarn g:vitest run",
+    "test:watch": "yarn g:vitest watch"
+  },
+  "main": "./dist-cjs/index.js",
+  "module": "./dist-es/index.js",
+  "types": "./dist-types/index.d.ts",
+  "author": {
+    "name": "AWS SDK for JavaScript Team",
+    "url": "https://aws.amazon.com/javascript/"
+  },
+  "license": "Apache-2.0",
+  "devDependencies": {
+    "@types/node": "^18.11.9",
+    "concurrently": "7.0.0",
+    "downlevel-dts": "0.10.1",
+    "hash-test-vectors": "^1.3.2",
+    "rimraf": "3.0.2",
+    "typedoc": "0.23.23"
+  },
+  "dependencies": {
+    "@smithy/types": "^4.2.0",
+    "@smithy/util-buffer-from": "^4.0.0",
+    "@smithy/util-utf8": "^4.0.0",
+    "tslib": "^2.6.2"
+  },
+  "engines": {
+    "node": ">=18.0.0"
+  },
+  "typesVersions": {
+    "<4.0": {
"dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/hash-node", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/hash-node" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/hash-stream-node/LICENSE b/node_modules/@smithy/hash-stream-node/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/hash-stream-node/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@smithy/hash-stream-node/README.md b/node_modules/@smithy/hash-stream-node/README.md new file mode 100644 index 00000000..a1d8065d --- /dev/null +++ b/node_modules/@smithy/hash-stream-node/README.md @@ -0,0 +1,12 @@ +# @smithy/hash-stream-node + +[![NPM version](https://img.shields.io/npm/v/@smithy/hash-stream-node/latest.svg)](https://www.npmjs.com/package/@smithy/hash-stream-node) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/hash-stream-node.svg)](https://www.npmjs.com/package/@smithy/hash-stream-node) + +A utility for calculating the hash of Node.JS readable streams. + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
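Reviewer note: the vendored @smithy/hash-stream-node package below provides two hashers. fileStreamHasher re-opens a file ReadStream by its path, so the original stream stays consumable by the actual request; readableStreamHasher pipes a still-paused stream through a hashing Writable and throws if the stream is already flowing. A minimal sketch of direct use, assuming the Sha256 constructor from @aws-crypto/sha256-js (which this package lists as a devDependency in the package.json further down); the file path is illustrative.

import { createReadStream } from "fs";
import { Sha256 } from "@aws-crypto/sha256-js"; // assumption: illustrative hash constructor
import { fileStreamHasher, readableStreamHasher } from "@smithy/hash-stream-node";

async function demo(): Promise<{ fileDigest: Uint8Array; streamDigest: Uint8Array }> {
  // Re-reads the file from disk via stream.path; rejects for non-file streams.
  const fileDigest = await fileStreamHasher(Sha256, createReadStream("./payload.bin"));

  // Consumes the (paused) stream by piping it into a hashing Writable.
  const streamDigest = await readableStreamHasher(Sha256, createReadStream("./payload.bin"));

  return { fileDigest, streamDigest };
}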
diff --git a/node_modules/@smithy/hash-stream-node/dist-cjs/HashCalculator.js b/node_modules/@smithy/hash-stream-node/dist-cjs/HashCalculator.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/hash-stream-node/dist-cjs/HashCalculator.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/hash-stream-node/dist-cjs/fileStreamHasher.js b/node_modules/@smithy/hash-stream-node/dist-cjs/fileStreamHasher.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/hash-stream-node/dist-cjs/fileStreamHasher.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/hash-stream-node/dist-cjs/index.js b/node_modules/@smithy/hash-stream-node/dist-cjs/index.js new file mode 100644 index 00000000..6a514f90 --- /dev/null +++ b/node_modules/@smithy/hash-stream-node/dist-cjs/index.js @@ -0,0 +1,101 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fileStreamHasher: () => fileStreamHasher, + readableStreamHasher: () => readableStreamHasher +}); +module.exports = __toCommonJS(src_exports); + +// src/fileStreamHasher.ts +var import_fs = require("fs"); + +// src/HashCalculator.ts +var import_util_utf8 = require("@smithy/util-utf8"); +var import_stream = require("stream"); +var HashCalculator = class extends import_stream.Writable { + constructor(hash, options) { + super(options); + this.hash = hash; + } + static { + __name(this, "HashCalculator"); + } + _write(chunk, encoding, callback) { + try { + this.hash.update((0, import_util_utf8.toUint8Array)(chunk)); + } catch (err) { + return callback(err); + } + callback(); + } +}; + +// src/fileStreamHasher.ts +var fileStreamHasher = /* @__PURE__ */ __name((hashCtor, fileStream) => new Promise((resolve, reject) => { + if (!isReadStream(fileStream)) { + reject(new Error("Unable to calculate hash for non-file streams.")); + return; + } + const fileStreamTee = (0, import_fs.createReadStream)(fileStream.path, { + start: fileStream.start, + end: fileStream.end + }); + const hash = new hashCtor(); + const hashCalculator = new HashCalculator(hash); + fileStreamTee.pipe(hashCalculator); + fileStreamTee.on("error", (err) => { + hashCalculator.end(); + reject(err); + }); + hashCalculator.on("error", reject); + hashCalculator.on("finish", function() { + hash.digest().then(resolve).catch(reject); + }); +}), "fileStreamHasher"); +var isReadStream = /* @__PURE__ */ __name((stream) => typeof stream.path === "string", "isReadStream"); + +// src/readableStreamHasher.ts +var readableStreamHasher = /* @__PURE__ */ 
__name((hashCtor, readableStream) => { + if (readableStream.readableFlowing !== null) { + throw new Error("Unable to calculate hash for flowing readable stream"); + } + const hash = new hashCtor(); + const hashCalculator = new HashCalculator(hash); + readableStream.pipe(hashCalculator); + return new Promise((resolve, reject) => { + readableStream.on("error", (err) => { + hashCalculator.end(); + reject(err); + }); + hashCalculator.on("error", reject); + hashCalculator.on("finish", () => { + hash.digest().then(resolve).catch(reject); + }); + }); +}, "readableStreamHasher"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fileStreamHasher, + readableStreamHasher +}); + diff --git a/node_modules/@smithy/hash-stream-node/dist-cjs/readableStreamHasher.js b/node_modules/@smithy/hash-stream-node/dist-cjs/readableStreamHasher.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/hash-stream-node/dist-cjs/readableStreamHasher.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/hash-stream-node/dist-es/HashCalculator.js b/node_modules/@smithy/hash-stream-node/dist-es/HashCalculator.js new file mode 100644 index 00000000..eb7d8c2d --- /dev/null +++ b/node_modules/@smithy/hash-stream-node/dist-es/HashCalculator.js @@ -0,0 +1,17 @@ +import { toUint8Array } from "@smithy/util-utf8"; +import { Writable } from "stream"; +export class HashCalculator extends Writable { + constructor(hash, options) { + super(options); + this.hash = hash; + } + _write(chunk, encoding, callback) { + try { + this.hash.update(toUint8Array(chunk)); + } + catch (err) { + return callback(err); + } + callback(); + } +} diff --git a/node_modules/@smithy/hash-stream-node/dist-es/fileStreamHasher.js b/node_modules/@smithy/hash-stream-node/dist-es/fileStreamHasher.js new file mode 100644 index 00000000..a8310033 --- /dev/null +++ b/node_modules/@smithy/hash-stream-node/dist-es/fileStreamHasher.js @@ -0,0 +1,24 @@ +import { createReadStream } from "fs"; +import { HashCalculator } from "./HashCalculator"; +export const fileStreamHasher = (hashCtor, fileStream) => new Promise((resolve, reject) => { + if (!isReadStream(fileStream)) { + reject(new Error("Unable to calculate hash for non-file streams.")); + return; + } + const fileStreamTee = createReadStream(fileStream.path, { + start: fileStream.start, + end: fileStream.end, + }); + const hash = new hashCtor(); + const hashCalculator = new HashCalculator(hash); + fileStreamTee.pipe(hashCalculator); + fileStreamTee.on("error", (err) => { + hashCalculator.end(); + reject(err); + }); + hashCalculator.on("error", reject); + hashCalculator.on("finish", function () { + hash.digest().then(resolve).catch(reject); + }); +}); +const isReadStream = (stream) => typeof stream.path === "string"; diff --git a/node_modules/@smithy/hash-stream-node/dist-es/index.js b/node_modules/@smithy/hash-stream-node/dist-es/index.js new file mode 100644 index 00000000..46097762 --- /dev/null +++ b/node_modules/@smithy/hash-stream-node/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./fileStreamHasher"; +export * from "./readableStreamHasher"; diff --git a/node_modules/@smithy/hash-stream-node/dist-es/readableStreamHasher.js b/node_modules/@smithy/hash-stream-node/dist-es/readableStreamHasher.js new file mode 100644 index 00000000..141129c8 --- /dev/null +++ b/node_modules/@smithy/hash-stream-node/dist-es/readableStreamHasher.js @@ -0,0 +1,19 @@ +import { HashCalculator } 
from "./HashCalculator";
+export const readableStreamHasher = (hashCtor, readableStream) => {
+    if (readableStream.readableFlowing !== null) {
+        throw new Error("Unable to calculate hash for flowing readable stream");
+    }
+    const hash = new hashCtor();
+    const hashCalculator = new HashCalculator(hash);
+    readableStream.pipe(hashCalculator);
+    return new Promise((resolve, reject) => {
+        readableStream.on("error", (err) => {
+            hashCalculator.end();
+            reject(err);
+        });
+        hashCalculator.on("error", reject);
+        hashCalculator.on("finish", () => {
+            hash.digest().then(resolve).catch(reject);
+        });
+    });
+};
diff --git a/node_modules/@smithy/hash-stream-node/dist-types/HashCalculator.d.ts b/node_modules/@smithy/hash-stream-node/dist-types/HashCalculator.d.ts
new file mode 100644
index 00000000..ed012ede
--- /dev/null
+++ b/node_modules/@smithy/hash-stream-node/dist-types/HashCalculator.d.ts
@@ -0,0 +1,13 @@
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+import { Checksum, Hash } from "@smithy/types";
+import { Writable, WritableOptions } from "stream";
+/**
+ * @internal
+ */
+export declare class HashCalculator extends Writable {
+    readonly hash: Checksum | Hash;
+    constructor(hash: Checksum | Hash, options?: WritableOptions);
+    _write(chunk: Buffer, encoding: string, callback: (err?: Error) => void): void;
+}
diff --git a/node_modules/@smithy/hash-stream-node/dist-types/fileStreamHasher.d.ts b/node_modules/@smithy/hash-stream-node/dist-types/fileStreamHasher.d.ts
new file mode 100644
index 00000000..91dac8e4
--- /dev/null
+++ b/node_modules/@smithy/hash-stream-node/dist-types/fileStreamHasher.d.ts
@@ -0,0 +1,7 @@
+/// <reference types="node" />
+import { StreamHasher } from "@smithy/types";
+import { Readable } from "stream";
+/**
+ * @internal
+ */
+export declare const fileStreamHasher: StreamHasher<Readable>;
diff --git a/node_modules/@smithy/hash-stream-node/dist-types/index.d.ts b/node_modules/@smithy/hash-stream-node/dist-types/index.d.ts
new file mode 100644
index 00000000..391fecd5
--- /dev/null
+++ b/node_modules/@smithy/hash-stream-node/dist-types/index.d.ts
@@ -0,0 +1,8 @@
+/**
+ * @internal
+ */
+export * from "./fileStreamHasher";
+/**
+ * @internal
+ */
+export * from "./readableStreamHasher";
diff --git a/node_modules/@smithy/hash-stream-node/dist-types/readableStreamHasher.d.ts b/node_modules/@smithy/hash-stream-node/dist-types/readableStreamHasher.d.ts
new file mode 100644
index 00000000..b1f48a6f
--- /dev/null
+++ b/node_modules/@smithy/hash-stream-node/dist-types/readableStreamHasher.d.ts
@@ -0,0 +1,7 @@
+/// <reference types="node" />
+import { StreamHasher } from "@smithy/types";
+import { Readable } from "stream";
+/**
+ * @internal
+ */
+export declare const readableStreamHasher: StreamHasher<Readable>;
diff --git a/node_modules/@smithy/hash-stream-node/dist-types/ts3.4/HashCalculator.d.ts b/node_modules/@smithy/hash-stream-node/dist-types/ts3.4/HashCalculator.d.ts
new file mode 100644
index 00000000..8ceb76c4
--- /dev/null
+++ b/node_modules/@smithy/hash-stream-node/dist-types/ts3.4/HashCalculator.d.ts
@@ -0,0 +1,11 @@
+/// <reference types="node" />
+import { Checksum, Hash } from "@smithy/types";
+import { Writable, WritableOptions } from "stream";
+/**
+ * @internal
+ */
+export declare class HashCalculator extends Writable {
+    readonly hash: Checksum | Hash;
+    constructor(hash: Checksum | Hash, options?: WritableOptions);
+    _write(chunk: Buffer, encoding: string, callback: (err?: Error) => void): void;
+}
diff --git a/node_modules/@smithy/hash-stream-node/dist-types/ts3.4/fileStreamHasher.d.ts b/node_modules/@smithy/hash-stream-node/dist-types/ts3.4/fileStreamHasher.d.ts
new file mode 100644
index 00000000..ae0c6ef8
--- /dev/null
+++ b/node_modules/@smithy/hash-stream-node/dist-types/ts3.4/fileStreamHasher.d.ts
@@ -0,0 +1,7 @@
+/// <reference types="node" />
+import { StreamHasher } from "@smithy/types";
+import { Readable } from "stream";
+/**
+ * @internal
+ */
+export declare const fileStreamHasher: StreamHasher<Readable>;
diff --git a/node_modules/@smithy/hash-stream-node/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/hash-stream-node/dist-types/ts3.4/index.d.ts
new file mode 100644
index 00000000..9af08450
--- /dev/null
+++ b/node_modules/@smithy/hash-stream-node/dist-types/ts3.4/index.d.ts
@@ -0,0 +1,8 @@
+/**
+ * @internal
+ */
+export * from "./fileStreamHasher";
+/**
+ * @internal
+ */
+export * from "./readableStreamHasher";
diff --git a/node_modules/@smithy/hash-stream-node/dist-types/ts3.4/readableStreamHasher.d.ts b/node_modules/@smithy/hash-stream-node/dist-types/ts3.4/readableStreamHasher.d.ts
new file mode 100644
index 00000000..fe42dd1c
--- /dev/null
+++ b/node_modules/@smithy/hash-stream-node/dist-types/ts3.4/readableStreamHasher.d.ts
@@ -0,0 +1,7 @@
+/// <reference types="node" />
+import { StreamHasher } from "@smithy/types";
+import { Readable } from "stream";
+/**
+ * @internal
+ */
+export declare const readableStreamHasher: StreamHasher<Readable>;
diff --git a/node_modules/@smithy/hash-stream-node/package.json b/node_modules/@smithy/hash-stream-node/package.json
new file mode 100644
index 00000000..29e12e81
--- /dev/null
+++ b/node_modules/@smithy/hash-stream-node/package.json
@@ -0,0 +1,64 @@
+{
+  "name": "@smithy/hash-stream-node",
+  "version": "4.0.2",
+  "scripts": {
+    "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'",
+    "build:cjs": "node ../../scripts/inline hash-stream-node",
+    "build:es": "yarn g:tsc -p tsconfig.es.json",
+    "build:types": "yarn g:tsc -p tsconfig.types.json",
+    "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4",
+    "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz",
+    "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0",
+    "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"",
+    "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"",
+    "test": "yarn g:vitest run",
+    "test:watch": "yarn g:vitest watch"
+  },
+  "main": "./dist-cjs/index.js",
+  "module": "./dist-es/index.js",
+  "types": "./dist-types/index.d.ts",
+  "author": {
+    "name": "AWS SDK for JavaScript Team",
+    "url": "https://aws.amazon.com/javascript/"
+  },
+  "license": "Apache-2.0",
+  "dependencies": {
+    "@smithy/types": "^4.2.0",
+    "@smithy/util-utf8": "^4.0.0",
+    "tslib": "^2.6.2"
+  },
+  "devDependencies": {
+    "@aws-crypto/sha256-js": "5.2.0",
+    "@smithy/util-hex-encoding": "^4.0.0",
+    "@types/node": "^18.11.9",
+    "concurrently": "7.0.0",
+    "downlevel-dts": "0.10.1",
+    "rimraf": "3.0.2",
+    "typedoc": "0.23.23"
+  },
+  "engines": {
+    "node": ">=18.0.0"
+  },
+  "typesVersions": {
+    "<4.0": {
+      "dist-types/*": [
+        "dist-types/ts3.4/*"
+      ]
+    }
+  },
+  "files": [
+    "dist-*/**"
+  ],
+  "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/hash-stream-node",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/smithy-lang/smithy-typescript.git",
+    "directory": "packages/hash-stream-node"
+  },
+  "typedoc": {
+    "entryPoint": "src/index.ts"
+  },
+  "publishConfig": {
+    "directory": ".release/package"
+  }
+}
\ No newline at end of file
diff --git
a/node_modules/@smithy/invalid-dependency/LICENSE b/node_modules/@smithy/invalid-dependency/LICENSE new file mode 100644 index 00000000..e907b586 --- /dev/null +++ b/node_modules/@smithy/invalid-dependency/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@smithy/invalid-dependency/README.md b/node_modules/@smithy/invalid-dependency/README.md new file mode 100644 index 00000000..9110465a --- /dev/null +++ b/node_modules/@smithy/invalid-dependency/README.md @@ -0,0 +1,10 @@ +# @smithy/invalid-dependency + +[![NPM version](https://img.shields.io/npm/v/@smithy/invalid-dependency/latest.svg)](https://www.npmjs.com/package/@smithy/invalid-dependency) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/invalid-dependency.svg)](https://www.npmjs.com/package/@smithy/invalid-dependency) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@smithy/invalid-dependency/dist-cjs/index.js b/node_modules/@smithy/invalid-dependency/dist-cjs/index.js new file mode 100644 index 00000000..8eeb1d4c --- /dev/null +++ b/node_modules/@smithy/invalid-dependency/dist-cjs/index.js @@ -0,0 +1,41 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + invalidFunction: () => invalidFunction, + invalidProvider: () => invalidProvider +}); +module.exports = __toCommonJS(src_exports); + +// src/invalidFunction.ts +var invalidFunction = /* @__PURE__ */ __name((message) => () => { + throw new Error(message); +}, "invalidFunction"); + +// src/invalidProvider.ts +var invalidProvider = /* @__PURE__ */ __name((message) => () => Promise.reject(message), "invalidProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + invalidFunction, + invalidProvider +}); + diff --git a/node_modules/@smithy/invalid-dependency/dist-cjs/invalidFunction.js b/node_modules/@smithy/invalid-dependency/dist-cjs/invalidFunction.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/invalid-dependency/dist-cjs/invalidFunction.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/invalid-dependency/dist-cjs/invalidProvider.js b/node_modules/@smithy/invalid-dependency/dist-cjs/invalidProvider.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/invalid-dependency/dist-cjs/invalidProvider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file 
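Note on the hunks above: @smithy/invalid-dependency only manufactures failing stand-ins, used where a runtime lacks an optional dependency so that the failure is loud and descriptive rather than an undefined call. A minimal TypeScript usage sketch, not part of the patch; the message strings are invented for illustration:

    import { invalidFunction, invalidProvider } from "@smithy/invalid-dependency";

    // invalidFunction(message) returns a function that throws an Error
    // carrying the message as soon as it is called.
    const sha256 = invalidFunction("sha256 is not available in this environment");

    // invalidProvider(message) returns a Provider whose promise rejects
    // with the raw message string (note: not an Error instance).
    const region = invalidProvider("region was not configured");
    region().catch((reason) => console.error(reason)); // "region was not configured"

Calling sha256() fails synchronously while region() fails asynchronously, mirroring the sync and async call sites the two placeholders stand in for.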
diff --git a/node_modules/@smithy/invalid-dependency/dist-es/index.js b/node_modules/@smithy/invalid-dependency/dist-es/index.js new file mode 100644 index 00000000..fa0f1a60 --- /dev/null +++ b/node_modules/@smithy/invalid-dependency/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./invalidFunction"; +export * from "./invalidProvider"; diff --git a/node_modules/@smithy/invalid-dependency/dist-es/invalidFunction.js b/node_modules/@smithy/invalid-dependency/dist-es/invalidFunction.js new file mode 100644 index 00000000..676f9cb0 --- /dev/null +++ b/node_modules/@smithy/invalid-dependency/dist-es/invalidFunction.js @@ -0,0 +1,3 @@ +export const invalidFunction = (message) => () => { + throw new Error(message); +}; diff --git a/node_modules/@smithy/invalid-dependency/dist-es/invalidProvider.js b/node_modules/@smithy/invalid-dependency/dist-es/invalidProvider.js new file mode 100644 index 00000000..5305a0bc --- /dev/null +++ b/node_modules/@smithy/invalid-dependency/dist-es/invalidProvider.js @@ -0,0 +1 @@ +export const invalidProvider = (message) => () => Promise.reject(message); diff --git a/node_modules/@smithy/invalid-dependency/dist-types/index.d.ts b/node_modules/@smithy/invalid-dependency/dist-types/index.d.ts new file mode 100644 index 00000000..1c99a568 --- /dev/null +++ b/node_modules/@smithy/invalid-dependency/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./invalidFunction"; +/** + * @internal + */ +export * from "./invalidProvider"; diff --git a/node_modules/@smithy/invalid-dependency/dist-types/invalidFunction.d.ts b/node_modules/@smithy/invalid-dependency/dist-types/invalidFunction.d.ts new file mode 100644 index 00000000..2118b326 --- /dev/null +++ b/node_modules/@smithy/invalid-dependency/dist-types/invalidFunction.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const invalidFunction: (message: string) => () => never; diff --git a/node_modules/@smithy/invalid-dependency/dist-types/invalidProvider.d.ts b/node_modules/@smithy/invalid-dependency/dist-types/invalidProvider.d.ts new file mode 100644 index 00000000..3e9c28cc --- /dev/null +++ b/node_modules/@smithy/invalid-dependency/dist-types/invalidProvider.d.ts @@ -0,0 +1,5 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const invalidProvider: (message: string) => Provider; diff --git a/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..6818f1c0 --- /dev/null +++ b/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./invalidFunction"; +/** + * @internal + */ +export * from "./invalidProvider"; diff --git a/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidFunction.d.ts b/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidFunction.d.ts new file mode 100644 index 00000000..b0e8f32b --- /dev/null +++ b/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidFunction.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const invalidFunction: (message: string) => () => never; diff --git a/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidProvider.d.ts b/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidProvider.d.ts new file mode 100644 index 00000000..765ee5aa --- /dev/null +++ b/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidProvider.d.ts @@ -0,0 +1,5 @@ +import { 
Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const invalidProvider: (message: string) => Provider; diff --git a/node_modules/@smithy/invalid-dependency/package.json b/node_modules/@smithy/invalid-dependency/package.json new file mode 100644 index 00000000..4782ea42 --- /dev/null +++ b/node_modules/@smithy/invalid-dependency/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/invalid-dependency", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline invalid-dependency", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/invalid-dependency", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/invalid-dependency" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/is-array-buffer/LICENSE b/node_modules/@smithy/is-array-buffer/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/is-array-buffer/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/node_modules/@smithy/is-array-buffer/README.md b/node_modules/@smithy/is-array-buffer/README.md new file mode 100644 index 00000000..31853f24 --- /dev/null +++ b/node_modules/@smithy/is-array-buffer/README.md @@ -0,0 +1,10 @@ +# @smithy/is-array-buffer + +[![NPM version](https://img.shields.io/npm/v/@smithy/is-array-buffer/latest.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/is-array-buffer.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@smithy/is-array-buffer/dist-cjs/index.js b/node_modules/@smithy/is-array-buffer/dist-cjs/index.js new file mode 100644 index 00000000..5d792e71 --- /dev/null +++ b/node_modules/@smithy/is-array-buffer/dist-cjs/index.js @@ -0,0 +1,32 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isArrayBuffer: () => isArrayBuffer +}); +module.exports = __toCommonJS(src_exports); +var isArrayBuffer = /* @__PURE__ */ __name((arg) => typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer || Object.prototype.toString.call(arg) === "[object ArrayBuffer]", "isArrayBuffer"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + isArrayBuffer +}); + diff --git a/node_modules/@smithy/is-array-buffer/dist-es/index.js b/node_modules/@smithy/is-array-buffer/dist-es/index.js new file mode 100644 index 00000000..8096cca3 --- /dev/null +++ b/node_modules/@smithy/is-array-buffer/dist-es/index.js @@ -0,0 +1,2 @@ +export const isArrayBuffer = (arg) => (typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer) || + Object.prototype.toString.call(arg) === "[object ArrayBuffer]"; diff --git a/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts b/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts new file mode 100644 index 00000000..64f452e7 --- /dev/null +++ b/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..ca8fd6bd --- /dev/null +++ b/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/node_modules/@smithy/is-array-buffer/package.json 
b/node_modules/@smithy/is-array-buffer/package.json new file mode 100644 index 00000000..93a468c9 --- /dev/null +++ b/node_modules/@smithy/is-array-buffer/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/is-array-buffer", + "version": "4.0.0", + "description": "Provides a function for detecting if an argument is an ArrayBuffer", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline is-array-buffer", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/is-array-buffer", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/is-array-buffer" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/md5-js/LICENSE b/node_modules/@smithy/md5-js/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/md5-js/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@smithy/md5-js/README.md b/node_modules/@smithy/md5-js/README.md new file mode 100644 index 00000000..ffc786b1 --- /dev/null +++ b/node_modules/@smithy/md5-js/README.md @@ -0,0 +1,10 @@ +# @smithy/md5-js + +[![NPM version](https://img.shields.io/npm/v/@smithy/md5-js/latest.svg)](https://www.npmjs.com/package/@smithy/md5-js) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/md5-js.svg)](https://www.npmjs.com/package/@smithy/md5-js) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
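The Md5 class added in the hunks below follows the incremental checksum pattern: update() absorbs string or binary chunks into 64-byte blocks, and digest() applies the MD5 padding and resolves to the 16-byte hash. A usage sketch in TypeScript, not part of the patch; the hex formatting is a local helper, since the package itself returns raw bytes:

    import { Md5 } from "@smithy/md5-js";

    async function md5Hex(text: string): Promise<string> {
      const hash = new Md5();
      hash.update(text); // strings are UTF-8 encoded via @smithy/util-utf8
      const digest = await hash.digest(); // Uint8Array of 16 bytes
      // format the raw bytes as lowercase hex for display
      return Array.from(digest, (b) => b.toString(16).padStart(2, "0")).join("");
    }

    md5Hex("hello world").then(console.log); // expected: 5eb63bbbe01eeed093cb22bb8f5acdc3

Per the implementation that follows, update() after digest() throws ("Attempted to update an already finished hash."), so a new Md5 instance is needed per message unless reset() is called.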
diff --git a/node_modules/@smithy/md5-js/dist-cjs/constants.js b/node_modules/@smithy/md5-js/dist-cjs/constants.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/md5-js/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/md5-js/dist-cjs/index.js b/node_modules/@smithy/md5-js/dist-cjs/index.js new file mode 100644 index 00000000..4612926a --- /dev/null +++ b/node_modules/@smithy/md5-js/dist-cjs/index.js @@ -0,0 +1,209 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Md5: () => Md5 +}); +module.exports = __toCommonJS(src_exports); +var import_util_utf8 = require("@smithy/util-utf8"); + +// src/constants.ts +var BLOCK_SIZE = 64; +var DIGEST_LENGTH = 16; +var INIT = [1732584193, 4023233417, 2562383102, 271733878]; + +// src/index.ts +var Md5 = class { + static { + __name(this, "Md5"); + } + constructor() { + this.reset(); + } + update(sourceData) { + if (isEmptyData(sourceData)) { + return; + } else if (this.finished) { + throw new Error("Attempted to update an already finished hash."); + } + const data = convertToBuffer(sourceData); + let position = 0; + let { byteLength } = data; + this.bytesHashed += byteLength; + while (byteLength > 0) { + this.buffer.setUint8(this.bufferLength++, data[position++]); + byteLength--; + if (this.bufferLength === BLOCK_SIZE) { + this.hashBuffer(); + this.bufferLength = 0; + } + } + } + async digest() { + if (!this.finished) { + const { buffer, bufferLength: undecoratedLength, bytesHashed } = this; + const bitsHashed = bytesHashed * 8; + buffer.setUint8(this.bufferLength++, 128); + if (undecoratedLength % BLOCK_SIZE >= BLOCK_SIZE - 8) { + for (let i = this.bufferLength; i < BLOCK_SIZE; i++) { + buffer.setUint8(i, 0); + } + this.hashBuffer(); + this.bufferLength = 0; + } + for (let i = this.bufferLength; i < BLOCK_SIZE - 8; i++) { + buffer.setUint8(i, 0); + } + buffer.setUint32(BLOCK_SIZE - 8, bitsHashed >>> 0, true); + buffer.setUint32(BLOCK_SIZE - 4, Math.floor(bitsHashed / 4294967296), true); + this.hashBuffer(); + this.finished = true; + } + const out = new DataView(new ArrayBuffer(DIGEST_LENGTH)); + for (let i = 0; i < 4; i++) { + out.setUint32(i * 4, this.state[i], true); + } + return new Uint8Array(out.buffer, out.byteOffset, out.byteLength); + } + hashBuffer() { + const { buffer, state } = this; + let a = state[0], b = state[1], c = state[2], d = state[3]; + a = ff(a, b, c, d, buffer.getUint32(0, true), 7, 3614090360); + d = ff(d, a, b, c, buffer.getUint32(4, true), 12, 3905402710); + c = ff(c, d, a, b, buffer.getUint32(8, true), 17, 606105819); + b = 
ff(b, c, d, a, buffer.getUint32(12, true), 22, 3250441966); + a = ff(a, b, c, d, buffer.getUint32(16, true), 7, 4118548399); + d = ff(d, a, b, c, buffer.getUint32(20, true), 12, 1200080426); + c = ff(c, d, a, b, buffer.getUint32(24, true), 17, 2821735955); + b = ff(b, c, d, a, buffer.getUint32(28, true), 22, 4249261313); + a = ff(a, b, c, d, buffer.getUint32(32, true), 7, 1770035416); + d = ff(d, a, b, c, buffer.getUint32(36, true), 12, 2336552879); + c = ff(c, d, a, b, buffer.getUint32(40, true), 17, 4294925233); + b = ff(b, c, d, a, buffer.getUint32(44, true), 22, 2304563134); + a = ff(a, b, c, d, buffer.getUint32(48, true), 7, 1804603682); + d = ff(d, a, b, c, buffer.getUint32(52, true), 12, 4254626195); + c = ff(c, d, a, b, buffer.getUint32(56, true), 17, 2792965006); + b = ff(b, c, d, a, buffer.getUint32(60, true), 22, 1236535329); + a = gg(a, b, c, d, buffer.getUint32(4, true), 5, 4129170786); + d = gg(d, a, b, c, buffer.getUint32(24, true), 9, 3225465664); + c = gg(c, d, a, b, buffer.getUint32(44, true), 14, 643717713); + b = gg(b, c, d, a, buffer.getUint32(0, true), 20, 3921069994); + a = gg(a, b, c, d, buffer.getUint32(20, true), 5, 3593408605); + d = gg(d, a, b, c, buffer.getUint32(40, true), 9, 38016083); + c = gg(c, d, a, b, buffer.getUint32(60, true), 14, 3634488961); + b = gg(b, c, d, a, buffer.getUint32(16, true), 20, 3889429448); + a = gg(a, b, c, d, buffer.getUint32(36, true), 5, 568446438); + d = gg(d, a, b, c, buffer.getUint32(56, true), 9, 3275163606); + c = gg(c, d, a, b, buffer.getUint32(12, true), 14, 4107603335); + b = gg(b, c, d, a, buffer.getUint32(32, true), 20, 1163531501); + a = gg(a, b, c, d, buffer.getUint32(52, true), 5, 2850285829); + d = gg(d, a, b, c, buffer.getUint32(8, true), 9, 4243563512); + c = gg(c, d, a, b, buffer.getUint32(28, true), 14, 1735328473); + b = gg(b, c, d, a, buffer.getUint32(48, true), 20, 2368359562); + a = hh(a, b, c, d, buffer.getUint32(20, true), 4, 4294588738); + d = hh(d, a, b, c, buffer.getUint32(32, true), 11, 2272392833); + c = hh(c, d, a, b, buffer.getUint32(44, true), 16, 1839030562); + b = hh(b, c, d, a, buffer.getUint32(56, true), 23, 4259657740); + a = hh(a, b, c, d, buffer.getUint32(4, true), 4, 2763975236); + d = hh(d, a, b, c, buffer.getUint32(16, true), 11, 1272893353); + c = hh(c, d, a, b, buffer.getUint32(28, true), 16, 4139469664); + b = hh(b, c, d, a, buffer.getUint32(40, true), 23, 3200236656); + a = hh(a, b, c, d, buffer.getUint32(52, true), 4, 681279174); + d = hh(d, a, b, c, buffer.getUint32(0, true), 11, 3936430074); + c = hh(c, d, a, b, buffer.getUint32(12, true), 16, 3572445317); + b = hh(b, c, d, a, buffer.getUint32(24, true), 23, 76029189); + a = hh(a, b, c, d, buffer.getUint32(36, true), 4, 3654602809); + d = hh(d, a, b, c, buffer.getUint32(48, true), 11, 3873151461); + c = hh(c, d, a, b, buffer.getUint32(60, true), 16, 530742520); + b = hh(b, c, d, a, buffer.getUint32(8, true), 23, 3299628645); + a = ii(a, b, c, d, buffer.getUint32(0, true), 6, 4096336452); + d = ii(d, a, b, c, buffer.getUint32(28, true), 10, 1126891415); + c = ii(c, d, a, b, buffer.getUint32(56, true), 15, 2878612391); + b = ii(b, c, d, a, buffer.getUint32(20, true), 21, 4237533241); + a = ii(a, b, c, d, buffer.getUint32(48, true), 6, 1700485571); + d = ii(d, a, b, c, buffer.getUint32(12, true), 10, 2399980690); + c = ii(c, d, a, b, buffer.getUint32(40, true), 15, 4293915773); + b = ii(b, c, d, a, buffer.getUint32(4, true), 21, 2240044497); + a = ii(a, b, c, d, buffer.getUint32(32, true), 6, 1873313359); + d = ii(d, a, b, c, 
buffer.getUint32(60, true), 10, 4264355552); + c = ii(c, d, a, b, buffer.getUint32(24, true), 15, 2734768916); + b = ii(b, c, d, a, buffer.getUint32(52, true), 21, 1309151649); + a = ii(a, b, c, d, buffer.getUint32(16, true), 6, 4149444226); + d = ii(d, a, b, c, buffer.getUint32(44, true), 10, 3174756917); + c = ii(c, d, a, b, buffer.getUint32(8, true), 15, 718787259); + b = ii(b, c, d, a, buffer.getUint32(36, true), 21, 3951481745); + state[0] = a + state[0] & 4294967295; + state[1] = b + state[1] & 4294967295; + state[2] = c + state[2] & 4294967295; + state[3] = d + state[3] & 4294967295; + } + reset() { + this.state = Uint32Array.from(INIT); + this.buffer = new DataView(new ArrayBuffer(BLOCK_SIZE)); + this.bufferLength = 0; + this.bytesHashed = 0; + this.finished = false; + } +}; +function cmn(q, a, b, x, s, t) { + a = (a + q & 4294967295) + (x + t & 4294967295) & 4294967295; + return (a << s | a >>> 32 - s) + b & 4294967295; +} +__name(cmn, "cmn"); +function ff(a, b, c, d, x, s, t) { + return cmn(b & c | ~b & d, a, b, x, s, t); +} +__name(ff, "ff"); +function gg(a, b, c, d, x, s, t) { + return cmn(b & d | c & ~d, a, b, x, s, t); +} +__name(gg, "gg"); +function hh(a, b, c, d, x, s, t) { + return cmn(b ^ c ^ d, a, b, x, s, t); +} +__name(hh, "hh"); +function ii(a, b, c, d, x, s, t) { + return cmn(c ^ (b | ~d), a, b, x, s, t); +} +__name(ii, "ii"); +function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +__name(isEmptyData, "isEmptyData"); +function convertToBuffer(data) { + if (typeof data === "string") { + return (0, import_util_utf8.fromUtf8)(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +} +__name(convertToBuffer, "convertToBuffer"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + Md5 +}); + diff --git a/node_modules/@smithy/md5-js/dist-es/constants.js b/node_modules/@smithy/md5-js/dist-es/constants.js new file mode 100644 index 00000000..263b8ed0 --- /dev/null +++ b/node_modules/@smithy/md5-js/dist-es/constants.js @@ -0,0 +1,3 @@ +export const BLOCK_SIZE = 64; +export const DIGEST_LENGTH = 16; +export const INIT = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476]; diff --git a/node_modules/@smithy/md5-js/dist-es/index.js b/node_modules/@smithy/md5-js/dist-es/index.js new file mode 100644 index 00000000..084316c2 --- /dev/null +++ b/node_modules/@smithy/md5-js/dist-es/index.js @@ -0,0 +1,163 @@ +import { fromUtf8 } from "@smithy/util-utf8"; +import { BLOCK_SIZE, DIGEST_LENGTH, INIT } from "./constants"; +export class Md5 { + constructor() { + this.reset(); + } + update(sourceData) { + if (isEmptyData(sourceData)) { + return; + } + else if (this.finished) { + throw new Error("Attempted to update an already finished hash."); + } + const data = convertToBuffer(sourceData); + let position = 0; + let { byteLength } = data; + this.bytesHashed += byteLength; + while (byteLength > 0) { + this.buffer.setUint8(this.bufferLength++, data[position++]); + byteLength--; + if (this.bufferLength === BLOCK_SIZE) { + this.hashBuffer(); + this.bufferLength = 0; + } + } + } + async digest() { + if (!this.finished) { + const { buffer, bufferLength: undecoratedLength, bytesHashed } = this; + const bitsHashed = bytesHashed * 8; + buffer.setUint8(this.bufferLength++, 0b10000000); + if (undecoratedLength % BLOCK_SIZE >= BLOCK_SIZE - 8) { + for (let i = 
this.bufferLength; i < BLOCK_SIZE; i++) { + buffer.setUint8(i, 0); + } + this.hashBuffer(); + this.bufferLength = 0; + } + for (let i = this.bufferLength; i < BLOCK_SIZE - 8; i++) { + buffer.setUint8(i, 0); + } + buffer.setUint32(BLOCK_SIZE - 8, bitsHashed >>> 0, true); + buffer.setUint32(BLOCK_SIZE - 4, Math.floor(bitsHashed / 0x100000000), true); + this.hashBuffer(); + this.finished = true; + } + const out = new DataView(new ArrayBuffer(DIGEST_LENGTH)); + for (let i = 0; i < 4; i++) { + out.setUint32(i * 4, this.state[i], true); + } + return new Uint8Array(out.buffer, out.byteOffset, out.byteLength); + } + hashBuffer() { + const { buffer, state } = this; + let a = state[0], b = state[1], c = state[2], d = state[3]; + a = ff(a, b, c, d, buffer.getUint32(0, true), 7, 0xd76aa478); + d = ff(d, a, b, c, buffer.getUint32(4, true), 12, 0xe8c7b756); + c = ff(c, d, a, b, buffer.getUint32(8, true), 17, 0x242070db); + b = ff(b, c, d, a, buffer.getUint32(12, true), 22, 0xc1bdceee); + a = ff(a, b, c, d, buffer.getUint32(16, true), 7, 0xf57c0faf); + d = ff(d, a, b, c, buffer.getUint32(20, true), 12, 0x4787c62a); + c = ff(c, d, a, b, buffer.getUint32(24, true), 17, 0xa8304613); + b = ff(b, c, d, a, buffer.getUint32(28, true), 22, 0xfd469501); + a = ff(a, b, c, d, buffer.getUint32(32, true), 7, 0x698098d8); + d = ff(d, a, b, c, buffer.getUint32(36, true), 12, 0x8b44f7af); + c = ff(c, d, a, b, buffer.getUint32(40, true), 17, 0xffff5bb1); + b = ff(b, c, d, a, buffer.getUint32(44, true), 22, 0x895cd7be); + a = ff(a, b, c, d, buffer.getUint32(48, true), 7, 0x6b901122); + d = ff(d, a, b, c, buffer.getUint32(52, true), 12, 0xfd987193); + c = ff(c, d, a, b, buffer.getUint32(56, true), 17, 0xa679438e); + b = ff(b, c, d, a, buffer.getUint32(60, true), 22, 0x49b40821); + a = gg(a, b, c, d, buffer.getUint32(4, true), 5, 0xf61e2562); + d = gg(d, a, b, c, buffer.getUint32(24, true), 9, 0xc040b340); + c = gg(c, d, a, b, buffer.getUint32(44, true), 14, 0x265e5a51); + b = gg(b, c, d, a, buffer.getUint32(0, true), 20, 0xe9b6c7aa); + a = gg(a, b, c, d, buffer.getUint32(20, true), 5, 0xd62f105d); + d = gg(d, a, b, c, buffer.getUint32(40, true), 9, 0x02441453); + c = gg(c, d, a, b, buffer.getUint32(60, true), 14, 0xd8a1e681); + b = gg(b, c, d, a, buffer.getUint32(16, true), 20, 0xe7d3fbc8); + a = gg(a, b, c, d, buffer.getUint32(36, true), 5, 0x21e1cde6); + d = gg(d, a, b, c, buffer.getUint32(56, true), 9, 0xc33707d6); + c = gg(c, d, a, b, buffer.getUint32(12, true), 14, 0xf4d50d87); + b = gg(b, c, d, a, buffer.getUint32(32, true), 20, 0x455a14ed); + a = gg(a, b, c, d, buffer.getUint32(52, true), 5, 0xa9e3e905); + d = gg(d, a, b, c, buffer.getUint32(8, true), 9, 0xfcefa3f8); + c = gg(c, d, a, b, buffer.getUint32(28, true), 14, 0x676f02d9); + b = gg(b, c, d, a, buffer.getUint32(48, true), 20, 0x8d2a4c8a); + a = hh(a, b, c, d, buffer.getUint32(20, true), 4, 0xfffa3942); + d = hh(d, a, b, c, buffer.getUint32(32, true), 11, 0x8771f681); + c = hh(c, d, a, b, buffer.getUint32(44, true), 16, 0x6d9d6122); + b = hh(b, c, d, a, buffer.getUint32(56, true), 23, 0xfde5380c); + a = hh(a, b, c, d, buffer.getUint32(4, true), 4, 0xa4beea44); + d = hh(d, a, b, c, buffer.getUint32(16, true), 11, 0x4bdecfa9); + c = hh(c, d, a, b, buffer.getUint32(28, true), 16, 0xf6bb4b60); + b = hh(b, c, d, a, buffer.getUint32(40, true), 23, 0xbebfbc70); + a = hh(a, b, c, d, buffer.getUint32(52, true), 4, 0x289b7ec6); + d = hh(d, a, b, c, buffer.getUint32(0, true), 11, 0xeaa127fa); + c = hh(c, d, a, b, buffer.getUint32(12, true), 16, 0xd4ef3085); + b = hh(b, 
c, d, a, buffer.getUint32(24, true), 23, 0x04881d05); + a = hh(a, b, c, d, buffer.getUint32(36, true), 4, 0xd9d4d039); + d = hh(d, a, b, c, buffer.getUint32(48, true), 11, 0xe6db99e5); + c = hh(c, d, a, b, buffer.getUint32(60, true), 16, 0x1fa27cf8); + b = hh(b, c, d, a, buffer.getUint32(8, true), 23, 0xc4ac5665); + a = ii(a, b, c, d, buffer.getUint32(0, true), 6, 0xf4292244); + d = ii(d, a, b, c, buffer.getUint32(28, true), 10, 0x432aff97); + c = ii(c, d, a, b, buffer.getUint32(56, true), 15, 0xab9423a7); + b = ii(b, c, d, a, buffer.getUint32(20, true), 21, 0xfc93a039); + a = ii(a, b, c, d, buffer.getUint32(48, true), 6, 0x655b59c3); + d = ii(d, a, b, c, buffer.getUint32(12, true), 10, 0x8f0ccc92); + c = ii(c, d, a, b, buffer.getUint32(40, true), 15, 0xffeff47d); + b = ii(b, c, d, a, buffer.getUint32(4, true), 21, 0x85845dd1); + a = ii(a, b, c, d, buffer.getUint32(32, true), 6, 0x6fa87e4f); + d = ii(d, a, b, c, buffer.getUint32(60, true), 10, 0xfe2ce6e0); + c = ii(c, d, a, b, buffer.getUint32(24, true), 15, 0xa3014314); + b = ii(b, c, d, a, buffer.getUint32(52, true), 21, 0x4e0811a1); + a = ii(a, b, c, d, buffer.getUint32(16, true), 6, 0xf7537e82); + d = ii(d, a, b, c, buffer.getUint32(44, true), 10, 0xbd3af235); + c = ii(c, d, a, b, buffer.getUint32(8, true), 15, 0x2ad7d2bb); + b = ii(b, c, d, a, buffer.getUint32(36, true), 21, 0xeb86d391); + state[0] = (a + state[0]) & 0xffffffff; + state[1] = (b + state[1]) & 0xffffffff; + state[2] = (c + state[2]) & 0xffffffff; + state[3] = (d + state[3]) & 0xffffffff; + } + reset() { + this.state = Uint32Array.from(INIT); + this.buffer = new DataView(new ArrayBuffer(BLOCK_SIZE)); + this.bufferLength = 0; + this.bytesHashed = 0; + this.finished = false; + } +} +function cmn(q, a, b, x, s, t) { + a = (((a + q) & 0xffffffff) + ((x + t) & 0xffffffff)) & 0xffffffff; + return (((a << s) | (a >>> (32 - s))) + b) & 0xffffffff; +} +function ff(a, b, c, d, x, s, t) { + return cmn((b & c) | (~b & d), a, b, x, s, t); +} +function gg(a, b, c, d, x, s, t) { + return cmn((b & d) | (c & ~d), a, b, x, s, t); +} +function hh(a, b, c, d, x, s, t) { + return cmn(b ^ c ^ d, a, b, x, s, t); +} +function ii(a, b, c, d, x, s, t) { + return cmn(c ^ (b | ~d), a, b, x, s, t); +} +function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +function convertToBuffer(data) { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +} diff --git a/node_modules/@smithy/md5-js/dist-types/constants.d.ts b/node_modules/@smithy/md5-js/dist-types/constants.d.ts new file mode 100644 index 00000000..f1ae1d3d --- /dev/null +++ b/node_modules/@smithy/md5-js/dist-types/constants.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export declare const BLOCK_SIZE = 64; +/** + * @internal + */ +export declare const DIGEST_LENGTH = 16; +/** + * @internal + */ +export declare const INIT: number[]; diff --git a/node_modules/@smithy/md5-js/dist-types/index.d.ts b/node_modules/@smithy/md5-js/dist-types/index.d.ts new file mode 100644 index 00000000..c274177b --- /dev/null +++ b/node_modules/@smithy/md5-js/dist-types/index.d.ts @@ -0,0 +1,16 @@ +import { Checksum, SourceData } from "@smithy/types"; +/** + * @internal + */ +export declare class Md5 implements Checksum { + private state; + private buffer; + private bufferLength; + private bytesHashed; + private 
finished; + constructor(); + update(sourceData: SourceData): void; + digest(): Promise<Uint8Array>; + private hashBuffer; + reset(): void; +} diff --git a/node_modules/@smithy/md5-js/dist-types/ts3.4/constants.d.ts b/node_modules/@smithy/md5-js/dist-types/ts3.4/constants.d.ts new file mode 100644 index 00000000..2509454a --- /dev/null +++ b/node_modules/@smithy/md5-js/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export declare const BLOCK_SIZE = 64; +/** + * @internal + */ +export declare const DIGEST_LENGTH = 16; +/** + * @internal + */ +export declare const INIT: number[]; diff --git a/node_modules/@smithy/md5-js/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/md5-js/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..042d3a9b --- /dev/null +++ b/node_modules/@smithy/md5-js/dist-types/ts3.4/index.d.ts @@ -0,0 +1,16 @@ +import { Checksum, SourceData } from "@smithy/types"; +/** + * @internal + */ +export declare class Md5 implements Checksum { + private state; + private buffer; + private bufferLength; + private bytesHashed; + private finished; + constructor(); + update(sourceData: SourceData): void; + digest(): Promise<Uint8Array>; + private hashBuffer; + reset(): void; +} diff --git a/node_modules/@smithy/md5-js/package.json b/node_modules/@smithy/md5-js/package.json new file mode 100644 index 00000000..ea1df50a --- /dev/null +++ b/node_modules/@smithy/md5-js/package.json @@ -0,0 +1,65 @@ +{ + "name": "@smithy/md5-js", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline md5-js", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "devDependencies": { + "@smithy/util-base64": "^4.0.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "hash-test-vectors": "^1.3.2", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/md5-js", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/md5-js" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/middleware-content-length/LICENSE 
b/node_modules/@smithy/middleware-content-length/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/middleware-content-length/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@smithy/middleware-content-length/README.md b/node_modules/@smithy/middleware-content-length/README.md new file mode 100644 index 00000000..2d40d92a --- /dev/null +++ b/node_modules/@smithy/middleware-content-length/README.md @@ -0,0 +1,4 @@ +# @smithy/middleware-content-length + +[![NPM version](https://img.shields.io/npm/v/@smithy/middleware-content-length/latest.svg)](https://www.npmjs.com/package/@smithy/middleware-content-length) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/middleware-content-length.svg)](https://www.npmjs.com/package/@smithy/middleware-content-length) diff --git a/node_modules/@smithy/middleware-content-length/dist-cjs/index.js b/node_modules/@smithy/middleware-content-length/dist-cjs/index.js new file mode 100644 index 00000000..9585153e --- /dev/null +++ b/node_modules/@smithy/middleware-content-length/dist-cjs/index.js @@ -0,0 +1,71 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + contentLengthMiddleware: () => contentLengthMiddleware, + contentLengthMiddlewareOptions: () => contentLengthMiddlewareOptions, + getContentLengthPlugin: () => getContentLengthPlugin +}); +module.exports = __toCommonJS(src_exports); +var import_protocol_http = require("@smithy/protocol-http"); +var CONTENT_LENGTH_HEADER = "content-length"; +function contentLengthMiddleware(bodyLengthChecker) { + return (next) => async (args) => { + const request = args.request; + if (import_protocol_http.HttpRequest.isInstance(request)) { + const { body, headers } = request; + if (body && Object.keys(headers).map((str) => str.toLowerCase()).indexOf(CONTENT_LENGTH_HEADER) === -1) { + try { + const length = bodyLengthChecker(body); + request.headers = { + ...request.headers, + [CONTENT_LENGTH_HEADER]: String(length) + }; + } catch (error) { + } + } + } + return next({ + ...args, + request + }); + }; +} +__name(contentLengthMiddleware, "contentLengthMiddleware"); +var contentLengthMiddlewareOptions = { + step: "build", + tags: ["SET_CONTENT_LENGTH", "CONTENT_LENGTH"], + name: "contentLengthMiddleware", + override: true +}; +var getContentLengthPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: (clientStack) => { + 
clientStack.add(contentLengthMiddleware(options.bodyLengthChecker), contentLengthMiddlewareOptions); + } +}), "getContentLengthPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + contentLengthMiddleware, + contentLengthMiddlewareOptions, + getContentLengthPlugin +}); + diff --git a/node_modules/@smithy/middleware-content-length/dist-es/index.js b/node_modules/@smithy/middleware-content-length/dist-es/index.js new file mode 100644 index 00000000..fa18e710 --- /dev/null +++ b/node_modules/@smithy/middleware-content-length/dist-es/index.js @@ -0,0 +1,39 @@ +import { HttpRequest } from "@smithy/protocol-http"; +const CONTENT_LENGTH_HEADER = "content-length"; +export function contentLengthMiddleware(bodyLengthChecker) { + return (next) => async (args) => { + const request = args.request; + if (HttpRequest.isInstance(request)) { + const { body, headers } = request; + if (body && + Object.keys(headers) + .map((str) => str.toLowerCase()) + .indexOf(CONTENT_LENGTH_HEADER) === -1) { + try { + const length = bodyLengthChecker(body); + request.headers = { + ...request.headers, + [CONTENT_LENGTH_HEADER]: String(length), + }; + } + catch (error) { + } + } + } + return next({ + ...args, + request, + }); + }; +} +export const contentLengthMiddlewareOptions = { + step: "build", + tags: ["SET_CONTENT_LENGTH", "CONTENT_LENGTH"], + name: "contentLengthMiddleware", + override: true, +}; +export const getContentLengthPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(contentLengthMiddleware(options.bodyLengthChecker), contentLengthMiddlewareOptions); + }, +}); diff --git a/node_modules/@smithy/middleware-content-length/dist-types/index.d.ts b/node_modules/@smithy/middleware-content-length/dist-types/index.d.ts new file mode 100644 index 00000000..91a7000a --- /dev/null +++ b/node_modules/@smithy/middleware-content-length/dist-types/index.d.ts @@ -0,0 +1,6 @@ +import { BodyLengthCalculator, BuildHandlerOptions, BuildMiddleware, Pluggable } from "@smithy/types"; +export declare function contentLengthMiddleware(bodyLengthChecker: BodyLengthCalculator): BuildMiddleware<any, any>; +export declare const contentLengthMiddlewareOptions: BuildHandlerOptions; +export declare const getContentLengthPlugin: (options: { + bodyLengthChecker: BodyLengthCalculator; +}) => Pluggable<any, any>; diff --git a/node_modules/@smithy/middleware-content-length/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/middleware-content-length/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..10e1e18d --- /dev/null +++ b/node_modules/@smithy/middleware-content-length/dist-types/ts3.4/index.d.ts @@ -0,0 +1,6 @@ +import { BodyLengthCalculator, BuildHandlerOptions, BuildMiddleware, Pluggable } from "@smithy/types"; +export declare function contentLengthMiddleware(bodyLengthChecker: BodyLengthCalculator): BuildMiddleware<any, any>; +export declare const contentLengthMiddlewareOptions: BuildHandlerOptions; +export declare const getContentLengthPlugin: (options: { + bodyLengthChecker: BodyLengthCalculator; +}) => Pluggable<any, any>; diff --git a/node_modules/@smithy/middleware-content-length/package.json b/node_modules/@smithy/middleware-content-length/package.json new file mode 100644 index 00000000..807c95bc --- /dev/null +++ b/node_modules/@smithy/middleware-content-length/package.json @@ -0,0 +1,63 @@ +{ + "name": "@smithy/middleware-content-length", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", 
+ "build:cjs": "node ../../scripts/inline middleware-content-length", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "exit 0", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/middleware-content-length", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/middleware-content-length" + }, + "devDependencies": { + "@smithy/util-test": "^0.2.8", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/middleware-endpoint/LICENSE b/node_modules/@smithy/middleware-endpoint/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@smithy/middleware-endpoint/README.md b/node_modules/@smithy/middleware-endpoint/README.md new file mode 100644 index 00000000..e03cbb28 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/README.md @@ -0,0 +1,10 @@ +# @smithy/middleware-endpoint + +[![NPM version](https://img.shields.io/npm/v/@smithy/middleware-endpoint/latest.svg)](https://www.npmjs.com/package/@smithy/middleware-endpoint) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/middleware-endpoint.svg)](https://www.npmjs.com/package/@smithy/middleware-endpoint) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
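For context on how these internal packages are consumed: a Smithy-generated client registers the plugins above on its middleware stack during construction and supplies Md5 as its Checksum implementation, so application code rarely touches them directly (hence the README warning). Below is a minimal TypeScript sketch of that wiring, assuming an AWS SDK v3 S3 client as the consumer; the @aws-sdk/client-s3 and @smithy/util-body-length-node imports are illustrative assumptions and are not part of this diff.

// sketch.ts (ESM module, so top-level await is available)
import { S3Client } from "@aws-sdk/client-s3";
import { Md5 } from "@smithy/md5-js";
import { getContentLengthPlugin } from "@smithy/middleware-content-length";
import { calculateBodyLength } from "@smithy/util-body-length-node";

// Generated clients already apply this plugin internally; doing it by hand
// only illustrates the mechanism. The plugin returns a Pluggable whose
// applyToStack adds a "build"-step middleware that computes Content-Length
// from the request body whenever the header is not already present.
const client = new S3Client({ region: "us-east-1" });
client.middlewareStack.use(getContentLengthPlugin({ bodyLengthChecker: calculateBodyLength }));

// The Md5 class implements the Smithy Checksum interface: incremental
// update() calls followed by a one-shot 16-byte (DIGEST_LENGTH) digest().
const hash = new Md5();
hash.update("hello world");
const digest = await hash.digest(); // Uint8Array(16)
console.log(Buffer.from(digest).toString("hex")); // 5eb63bbbe01eeed093cb22bb8f5acdc3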
diff --git a/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/createConfigValueProvider.js b/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/createConfigValueProvider.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/createConfigValueProvider.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.browser.js b/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.browser.js new file mode 100644 index 00000000..9b578a7a --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.browser.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getEndpointFromConfig = void 0; +const getEndpointFromConfig = async (serviceId) => undefined; +exports.getEndpointFromConfig = getEndpointFromConfig; diff --git a/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.js b/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.js new file mode 100644 index 00000000..c7c302b8 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getEndpointFromConfig = void 0; +const node_config_provider_1 = require("@smithy/node-config-provider"); +const getEndpointUrlConfig_1 = require("./getEndpointUrlConfig"); +const getEndpointFromConfig = async (serviceId) => (0, node_config_provider_1.loadConfig)((0, getEndpointUrlConfig_1.getEndpointUrlConfig)(serviceId !== null && serviceId !== void 0 ? 
serviceId : ""))(); +exports.getEndpointFromConfig = getEndpointFromConfig; diff --git a/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromInstructions.js b/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromInstructions.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromInstructions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointUrlConfig.js b/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointUrlConfig.js new file mode 100644 index 00000000..fe5c010a --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointUrlConfig.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getEndpointUrlConfig = void 0; +const shared_ini_file_loader_1 = require("@smithy/shared-ini-file-loader"); +const ENV_ENDPOINT_URL = "AWS_ENDPOINT_URL"; +const CONFIG_ENDPOINT_URL = "endpoint_url"; +const getEndpointUrlConfig = (serviceId) => ({ + environmentVariableSelector: (env) => { + const serviceSuffixParts = serviceId.split(" ").map((w) => w.toUpperCase()); + const serviceEndpointUrl = env[[ENV_ENDPOINT_URL, ...serviceSuffixParts].join("_")]; + if (serviceEndpointUrl) + return serviceEndpointUrl; + const endpointUrl = env[ENV_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + configFileSelector: (profile, config) => { + if (config && profile.services) { + const servicesSection = config[["services", profile.services].join(shared_ini_file_loader_1.CONFIG_PREFIX_SEPARATOR)]; + if (servicesSection) { + const servicePrefixParts = serviceId.split(" ").map((w) => w.toLowerCase()); + const endpointUrl = servicesSection[[servicePrefixParts.join("_"), CONFIG_ENDPOINT_URL].join(shared_ini_file_loader_1.CONFIG_PREFIX_SEPARATOR)]; + if (endpointUrl) + return endpointUrl; + } + } + const endpointUrl = profile[CONFIG_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + default: undefined, +}); +exports.getEndpointUrlConfig = getEndpointUrlConfig; diff --git a/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/index.js b/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/toEndpointV1.js b/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/toEndpointV1.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/toEndpointV1.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-endpoint/dist-cjs/endpointMiddleware.js b/node_modules/@smithy/middleware-endpoint/dist-cjs/endpointMiddleware.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-cjs/endpointMiddleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-endpoint/dist-cjs/getEndpointPlugin.js 
b/node_modules/@smithy/middleware-endpoint/dist-cjs/getEndpointPlugin.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-cjs/getEndpointPlugin.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-endpoint/dist-cjs/index.js b/node_modules/@smithy/middleware-endpoint/dist-cjs/index.js new file mode 100644 index 00000000..177fdc3f --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-cjs/index.js @@ -0,0 +1,279 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + endpointMiddleware: () => endpointMiddleware, + endpointMiddlewareOptions: () => endpointMiddlewareOptions, + getEndpointFromInstructions: () => getEndpointFromInstructions, + getEndpointPlugin: () => getEndpointPlugin, + resolveEndpointConfig: () => resolveEndpointConfig, + resolveParams: () => resolveParams, + toEndpointV1: () => toEndpointV1 +}); +module.exports = __toCommonJS(src_exports); + +// src/service-customizations/s3.ts +var resolveParamsForS3 = /* @__PURE__ */ __name(async (endpointParams) => { + const bucket = endpointParams?.Bucket || ""; + if (typeof endpointParams.Bucket === "string") { + endpointParams.Bucket = bucket.replace(/#/g, encodeURIComponent("#")).replace(/\?/g, encodeURIComponent("?")); + } + if (isArnBucketName(bucket)) { + if (endpointParams.ForcePathStyle === true) { + throw new Error("Path-style addressing cannot be used with ARN buckets"); + } + } else if (!isDnsCompatibleBucketName(bucket) || bucket.indexOf(".") !== -1 && !String(endpointParams.Endpoint).startsWith("http:") || bucket.toLowerCase() !== bucket || bucket.length < 3) { + endpointParams.ForcePathStyle = true; + } + if (endpointParams.DisableMultiRegionAccessPoints) { + endpointParams.disableMultiRegionAccessPoints = true; + endpointParams.DisableMRAP = true; + } + return endpointParams; +}, "resolveParamsForS3"); +var DOMAIN_PATTERN = /^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$/; +var IP_ADDRESS_PATTERN = /(\d+\.){3}\d+/; +var DOTS_PATTERN = /\.\./; +var isDnsCompatibleBucketName = /* @__PURE__ */ __name((bucketName) => DOMAIN_PATTERN.test(bucketName) && !IP_ADDRESS_PATTERN.test(bucketName) && !DOTS_PATTERN.test(bucketName), "isDnsCompatibleBucketName"); +var isArnBucketName = /* @__PURE__ */ __name((bucketName) => { + const [arn, partition, service, , , bucket] = bucketName.split(":"); + const isArn = arn === "arn" && bucketName.split(":").length >= 6; + const isValidArn = Boolean(isArn && partition && service && bucket); + if (isArn && !isValidArn) { + throw new Error(`Invalid ARN: ${bucketName} was an invalid ARN.`); 
+ } + return isValidArn; +}, "isArnBucketName"); + +// src/adaptors/createConfigValueProvider.ts +var createConfigValueProvider = /* @__PURE__ */ __name((configKey, canonicalEndpointParamKey, config) => { + const configProvider = /* @__PURE__ */ __name(async () => { + const configValue = config[configKey] ?? config[canonicalEndpointParamKey]; + if (typeof configValue === "function") { + return configValue(); + } + return configValue; + }, "configProvider"); + if (configKey === "credentialScope" || canonicalEndpointParamKey === "CredentialScope") { + return async () => { + const credentials = typeof config.credentials === "function" ? await config.credentials() : config.credentials; + const configValue = credentials?.credentialScope ?? credentials?.CredentialScope; + return configValue; + }; + } + if (configKey === "accountId" || canonicalEndpointParamKey === "AccountId") { + return async () => { + const credentials = typeof config.credentials === "function" ? await config.credentials() : config.credentials; + const configValue = credentials?.accountId ?? credentials?.AccountId; + return configValue; + }; + } + if (configKey === "endpoint" || canonicalEndpointParamKey === "endpoint") { + return async () => { + const endpoint = await configProvider(); + if (endpoint && typeof endpoint === "object") { + if ("url" in endpoint) { + return endpoint.url.href; + } + if ("hostname" in endpoint) { + const { protocol, hostname, port, path } = endpoint; + return `${protocol}//${hostname}${port ? ":" + port : ""}${path}`; + } + } + return endpoint; + }; + } + return configProvider; +}, "createConfigValueProvider"); + +// src/adaptors/getEndpointFromInstructions.ts +var import_getEndpointFromConfig = require("./adaptors/getEndpointFromConfig"); + +// src/adaptors/toEndpointV1.ts +var import_url_parser = require("@smithy/url-parser"); +var toEndpointV1 = /* @__PURE__ */ __name((endpoint) => { + if (typeof endpoint === "object") { + if ("url" in endpoint) { + return (0, import_url_parser.parseUrl)(endpoint.url); + } + return endpoint; + } + return (0, import_url_parser.parseUrl)(endpoint); +}, "toEndpointV1"); + +// src/adaptors/getEndpointFromInstructions.ts +var getEndpointFromInstructions = /* @__PURE__ */ __name(async (commandInput, instructionsSupplier, clientConfig, context) => { + if (!clientConfig.endpoint) { + let endpointFromConfig; + if (clientConfig.serviceConfiguredEndpoint) { + endpointFromConfig = await clientConfig.serviceConfiguredEndpoint(); + } else { + endpointFromConfig = await (0, import_getEndpointFromConfig.getEndpointFromConfig)(clientConfig.serviceId); + } + if (endpointFromConfig) { + clientConfig.endpoint = () => Promise.resolve(toEndpointV1(endpointFromConfig)); + } + } + const endpointParams = await resolveParams(commandInput, instructionsSupplier, clientConfig); + if (typeof clientConfig.endpointProvider !== "function") { + throw new Error("config.endpointProvider is not set."); + } + const endpoint = clientConfig.endpointProvider(endpointParams, context); + return endpoint; +}, "getEndpointFromInstructions"); +var resolveParams = /* @__PURE__ */ __name(async (commandInput, instructionsSupplier, clientConfig) => { + const endpointParams = {}; + const instructions = instructionsSupplier?.getEndpointParameterInstructions?.() || {}; + for (const [name, instruction] of Object.entries(instructions)) { + switch (instruction.type) { + case "staticContextParams": + endpointParams[name] = instruction.value; + break; + case "contextParams": + endpointParams[name] = 
commandInput[instruction.name]; + break; + case "clientContextParams": + case "builtInParams": + endpointParams[name] = await createConfigValueProvider(instruction.name, name, clientConfig)(); + break; + case "operationContextParams": + endpointParams[name] = instruction.get(commandInput); + break; + default: + throw new Error("Unrecognized endpoint parameter instruction: " + JSON.stringify(instruction)); + } + } + if (Object.keys(instructions).length === 0) { + Object.assign(endpointParams, clientConfig); + } + if (String(clientConfig.serviceId).toLowerCase() === "s3") { + await resolveParamsForS3(endpointParams); + } + return endpointParams; +}, "resolveParams"); + +// src/endpointMiddleware.ts +var import_core = require("@smithy/core"); +var import_util_middleware = require("@smithy/util-middleware"); +var endpointMiddleware = /* @__PURE__ */ __name(({ + config, + instructions +}) => { + return (next, context) => async (args) => { + if (config.endpoint) { + (0, import_core.setFeature)(context, "ENDPOINT_OVERRIDE", "N"); + } + const endpoint = await getEndpointFromInstructions( + args.input, + { + getEndpointParameterInstructions() { + return instructions; + } + }, + { ...config }, + context + ); + context.endpointV2 = endpoint; + context.authSchemes = endpoint.properties?.authSchemes; + const authScheme = context.authSchemes?.[0]; + if (authScheme) { + context["signing_region"] = authScheme.signingRegion; + context["signing_service"] = authScheme.signingName; + const smithyContext = (0, import_util_middleware.getSmithyContext)(context); + const httpAuthOption = smithyContext?.selectedHttpAuthScheme?.httpAuthOption; + if (httpAuthOption) { + httpAuthOption.signingProperties = Object.assign( + httpAuthOption.signingProperties || {}, + { + signing_region: authScheme.signingRegion, + signingRegion: authScheme.signingRegion, + signing_service: authScheme.signingName, + signingName: authScheme.signingName, + signingRegionSet: authScheme.signingRegionSet + }, + authScheme.properties + ); + } + } + return next({ + ...args + }); + }; +}, "endpointMiddleware"); + +// src/getEndpointPlugin.ts +var import_middleware_serde = require("@smithy/middleware-serde"); +var endpointMiddlewareOptions = { + step: "serialize", + tags: ["ENDPOINT_PARAMETERS", "ENDPOINT_V2", "ENDPOINT"], + name: "endpointV2Middleware", + override: true, + relation: "before", + toMiddleware: import_middleware_serde.serializerMiddlewareOption.name +}; +var getEndpointPlugin = /* @__PURE__ */ __name((config, instructions) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo( + endpointMiddleware({ + config, + instructions + }), + endpointMiddlewareOptions + ); + } +}), "getEndpointPlugin"); + +// src/resolveEndpointConfig.ts + +var import_getEndpointFromConfig2 = require("./adaptors/getEndpointFromConfig"); +var resolveEndpointConfig = /* @__PURE__ */ __name((input) => { + const tls = input.tls ?? true; + const { endpoint, useDualstackEndpoint, useFipsEndpoint } = input; + const customEndpointProvider = endpoint != null ? async () => toEndpointV1(await (0, import_util_middleware.normalizeProvider)(endpoint)()) : void 0; + const isCustomEndpoint = !!endpoint; + const resolvedConfig = Object.assign(input, { + endpoint: customEndpointProvider, + tls, + isCustomEndpoint, + useDualstackEndpoint: (0, import_util_middleware.normalizeProvider)(useDualstackEndpoint ?? false), + useFipsEndpoint: (0, import_util_middleware.normalizeProvider)(useFipsEndpoint ?? 
false) + }); + let configuredEndpointPromise = void 0; + resolvedConfig.serviceConfiguredEndpoint = async () => { + if (input.serviceId && !configuredEndpointPromise) { + configuredEndpointPromise = (0, import_getEndpointFromConfig2.getEndpointFromConfig)(input.serviceId); + } + return configuredEndpointPromise; + }; + return resolvedConfig; +}, "resolveEndpointConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getEndpointFromInstructions, + resolveParams, + toEndpointV1, + endpointMiddleware, + endpointMiddlewareOptions, + getEndpointPlugin, + resolveEndpointConfig +}); + diff --git a/node_modules/@smithy/middleware-endpoint/dist-cjs/resolveEndpointConfig.js b/node_modules/@smithy/middleware-endpoint/dist-cjs/resolveEndpointConfig.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-cjs/resolveEndpointConfig.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/index.js b/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/s3.js b/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/s3.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/s3.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-endpoint/dist-cjs/types.js b/node_modules/@smithy/middleware-endpoint/dist-cjs/types.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/createConfigValueProvider.js b/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/createConfigValueProvider.js new file mode 100644 index 00000000..b468b83c --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/createConfigValueProvider.js @@ -0,0 +1,39 @@ +export const createConfigValueProvider = (configKey, canonicalEndpointParamKey, config) => { + const configProvider = async () => { + const configValue = config[configKey] ?? config[canonicalEndpointParamKey]; + if (typeof configValue === "function") { + return configValue(); + } + return configValue; + }; + if (configKey === "credentialScope" || canonicalEndpointParamKey === "CredentialScope") { + return async () => { + const credentials = typeof config.credentials === "function" ? await config.credentials() : config.credentials; + const configValue = credentials?.credentialScope ?? credentials?.CredentialScope; + return configValue; + }; + } + if (configKey === "accountId" || canonicalEndpointParamKey === "AccountId") { + return async () => { + const credentials = typeof config.credentials === "function" ? await config.credentials() : config.credentials; + const configValue = credentials?.accountId ?? 
credentials?.AccountId; + return configValue; + }; + } + if (configKey === "endpoint" || canonicalEndpointParamKey === "endpoint") { + return async () => { + const endpoint = await configProvider(); + if (endpoint && typeof endpoint === "object") { + if ("url" in endpoint) { + return endpoint.url.href; + } + if ("hostname" in endpoint) { + const { protocol, hostname, port, path } = endpoint; + return `${protocol}//${hostname}${port ? ":" + port : ""}${path}`; + } + } + return endpoint; + }; + } + return configProvider; +}; diff --git a/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.browser.js b/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.browser.js new file mode 100644 index 00000000..75fc1365 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.browser.js @@ -0,0 +1 @@ +export const getEndpointFromConfig = async (serviceId) => undefined; diff --git a/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.js b/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.js new file mode 100644 index 00000000..33c1d45a --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.js @@ -0,0 +1,3 @@ +import { loadConfig } from "@smithy/node-config-provider"; +import { getEndpointUrlConfig } from "./getEndpointUrlConfig"; +export const getEndpointFromConfig = async (serviceId) => loadConfig(getEndpointUrlConfig(serviceId ?? ""))(); diff --git a/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromInstructions.js b/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromInstructions.js new file mode 100644 index 00000000..e445646c --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromInstructions.js @@ -0,0 +1,54 @@ +import { resolveParamsForS3 } from "../service-customizations"; +import { createConfigValueProvider } from "./createConfigValueProvider"; +import { getEndpointFromConfig } from "./getEndpointFromConfig"; +import { toEndpointV1 } from "./toEndpointV1"; +export const getEndpointFromInstructions = async (commandInput, instructionsSupplier, clientConfig, context) => { + if (!clientConfig.endpoint) { + let endpointFromConfig; + if (clientConfig.serviceConfiguredEndpoint) { + endpointFromConfig = await clientConfig.serviceConfiguredEndpoint(); + } + else { + endpointFromConfig = await getEndpointFromConfig(clientConfig.serviceId); + } + if (endpointFromConfig) { + clientConfig.endpoint = () => Promise.resolve(toEndpointV1(endpointFromConfig)); + } + } + const endpointParams = await resolveParams(commandInput, instructionsSupplier, clientConfig); + if (typeof clientConfig.endpointProvider !== "function") { + throw new Error("config.endpointProvider is not set."); + } + const endpoint = clientConfig.endpointProvider(endpointParams, context); + return endpoint; +}; +export const resolveParams = async (commandInput, instructionsSupplier, clientConfig) => { + const endpointParams = {}; + const instructions = instructionsSupplier?.getEndpointParameterInstructions?.() || {}; + for (const [name, instruction] of Object.entries(instructions)) { + switch (instruction.type) { + case "staticContextParams": + endpointParams[name] = instruction.value; + break; + case "contextParams": + endpointParams[name] = commandInput[instruction.name]; + break; + case "clientContextParams": + case "builtInParams": + endpointParams[name] = await 
createConfigValueProvider(instruction.name, name, clientConfig)(); + break; + case "operationContextParams": + endpointParams[name] = instruction.get(commandInput); + break; + default: + throw new Error("Unrecognized endpoint parameter instruction: " + JSON.stringify(instruction)); + } + } + if (Object.keys(instructions).length === 0) { + Object.assign(endpointParams, clientConfig); + } + if (String(clientConfig.serviceId).toLowerCase() === "s3") { + await resolveParamsForS3(endpointParams); + } + return endpointParams; +}; diff --git a/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointUrlConfig.js b/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointUrlConfig.js new file mode 100644 index 00000000..82a15195 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointUrlConfig.js @@ -0,0 +1,31 @@ +import { CONFIG_PREFIX_SEPARATOR } from "@smithy/shared-ini-file-loader"; +const ENV_ENDPOINT_URL = "AWS_ENDPOINT_URL"; +const CONFIG_ENDPOINT_URL = "endpoint_url"; +export const getEndpointUrlConfig = (serviceId) => ({ + environmentVariableSelector: (env) => { + const serviceSuffixParts = serviceId.split(" ").map((w) => w.toUpperCase()); + const serviceEndpointUrl = env[[ENV_ENDPOINT_URL, ...serviceSuffixParts].join("_")]; + if (serviceEndpointUrl) + return serviceEndpointUrl; + const endpointUrl = env[ENV_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + configFileSelector: (profile, config) => { + if (config && profile.services) { + const servicesSection = config[["services", profile.services].join(CONFIG_PREFIX_SEPARATOR)]; + if (servicesSection) { + const servicePrefixParts = serviceId.split(" ").map((w) => w.toLowerCase()); + const endpointUrl = servicesSection[[servicePrefixParts.join("_"), CONFIG_ENDPOINT_URL].join(CONFIG_PREFIX_SEPARATOR)]; + if (endpointUrl) + return endpointUrl; + } + } + const endpointUrl = profile[CONFIG_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + default: undefined, +}); diff --git a/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/index.js b/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/index.js new file mode 100644 index 00000000..17752da2 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/index.js @@ -0,0 +1,2 @@ +export * from "./getEndpointFromInstructions"; +export * from "./toEndpointV1"; diff --git a/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/toEndpointV1.js b/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/toEndpointV1.js new file mode 100644 index 00000000..83f43244 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/toEndpointV1.js @@ -0,0 +1,10 @@ +import { parseUrl } from "@smithy/url-parser"; +export const toEndpointV1 = (endpoint) => { + if (typeof endpoint === "object") { + if ("url" in endpoint) { + return parseUrl(endpoint.url); + } + return endpoint; + } + return parseUrl(endpoint); +}; diff --git a/node_modules/@smithy/middleware-endpoint/dist-es/endpointMiddleware.js b/node_modules/@smithy/middleware-endpoint/dist-es/endpointMiddleware.js new file mode 100644 index 00000000..df257958 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-es/endpointMiddleware.js @@ -0,0 +1,36 @@ +import { setFeature } from "@smithy/core"; +import { getSmithyContext } from "@smithy/util-middleware"; +import { getEndpointFromInstructions } from "./adaptors/getEndpointFromInstructions"; +export const endpointMiddleware = 
({ config, instructions, }) => { + return (next, context) => async (args) => { + if (config.endpoint) { + setFeature(context, "ENDPOINT_OVERRIDE", "N"); + } + const endpoint = await getEndpointFromInstructions(args.input, { + getEndpointParameterInstructions() { + return instructions; + }, + }, { ...config }, context); + context.endpointV2 = endpoint; + context.authSchemes = endpoint.properties?.authSchemes; + const authScheme = context.authSchemes?.[0]; + if (authScheme) { + context["signing_region"] = authScheme.signingRegion; + context["signing_service"] = authScheme.signingName; + const smithyContext = getSmithyContext(context); + const httpAuthOption = smithyContext?.selectedHttpAuthScheme?.httpAuthOption; + if (httpAuthOption) { + httpAuthOption.signingProperties = Object.assign(httpAuthOption.signingProperties || {}, { + signing_region: authScheme.signingRegion, + signingRegion: authScheme.signingRegion, + signing_service: authScheme.signingName, + signingName: authScheme.signingName, + signingRegionSet: authScheme.signingRegionSet, + }, authScheme.properties); + } + } + return next({ + ...args, + }); + }; +}; diff --git a/node_modules/@smithy/middleware-endpoint/dist-es/getEndpointPlugin.js b/node_modules/@smithy/middleware-endpoint/dist-es/getEndpointPlugin.js new file mode 100644 index 00000000..e2335f4b --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-es/getEndpointPlugin.js @@ -0,0 +1,18 @@ +import { serializerMiddlewareOption } from "@smithy/middleware-serde"; +import { endpointMiddleware } from "./endpointMiddleware"; +export const endpointMiddlewareOptions = { + step: "serialize", + tags: ["ENDPOINT_PARAMETERS", "ENDPOINT_V2", "ENDPOINT"], + name: "endpointV2Middleware", + override: true, + relation: "before", + toMiddleware: serializerMiddlewareOption.name, +}; +export const getEndpointPlugin = (config, instructions) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(endpointMiddleware({ + config, + instructions, + }), endpointMiddlewareOptions); + }, +}); diff --git a/node_modules/@smithy/middleware-endpoint/dist-es/index.js b/node_modules/@smithy/middleware-endpoint/dist-es/index.js new file mode 100644 index 00000000..f89653ed --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-es/index.js @@ -0,0 +1,5 @@ +export * from "./adaptors"; +export * from "./endpointMiddleware"; +export * from "./getEndpointPlugin"; +export * from "./resolveEndpointConfig"; +export * from "./types"; diff --git a/node_modules/@smithy/middleware-endpoint/dist-es/resolveEndpointConfig.js b/node_modules/@smithy/middleware-endpoint/dist-es/resolveEndpointConfig.js new file mode 100644 index 00000000..c3a0eea2 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-es/resolveEndpointConfig.js @@ -0,0 +1,24 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +import { getEndpointFromConfig } from "./adaptors/getEndpointFromConfig"; +import { toEndpointV1 } from "./adaptors/toEndpointV1"; +export const resolveEndpointConfig = (input) => { + const tls = input.tls ?? true; + const { endpoint, useDualstackEndpoint, useFipsEndpoint } = input; + const customEndpointProvider = endpoint != null ? async () => toEndpointV1(await normalizeProvider(endpoint)()) : undefined; + const isCustomEndpoint = !!endpoint; + const resolvedConfig = Object.assign(input, { + endpoint: customEndpointProvider, + tls, + isCustomEndpoint, + useDualstackEndpoint: normalizeProvider(useDualstackEndpoint ?? 
false), + useFipsEndpoint: normalizeProvider(useFipsEndpoint ?? false), + }); + let configuredEndpointPromise = undefined; + resolvedConfig.serviceConfiguredEndpoint = async () => { + if (input.serviceId && !configuredEndpointPromise) { + configuredEndpointPromise = getEndpointFromConfig(input.serviceId); + } + return configuredEndpointPromise; + }; + return resolvedConfig; +}; diff --git a/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/index.js b/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/index.js new file mode 100644 index 00000000..e50e1079 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/index.js @@ -0,0 +1 @@ +export * from "./s3"; diff --git a/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/s3.js b/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/s3.js new file mode 100644 index 00000000..e993fc73 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/s3.js @@ -0,0 +1,37 @@ +export const resolveParamsForS3 = async (endpointParams) => { + const bucket = endpointParams?.Bucket || ""; + if (typeof endpointParams.Bucket === "string") { + endpointParams.Bucket = bucket.replace(/#/g, encodeURIComponent("#")).replace(/\?/g, encodeURIComponent("?")); + } + if (isArnBucketName(bucket)) { + if (endpointParams.ForcePathStyle === true) { + throw new Error("Path-style addressing cannot be used with ARN buckets"); + } + } + else if (!isDnsCompatibleBucketName(bucket) || + (bucket.indexOf(".") !== -1 && !String(endpointParams.Endpoint).startsWith("http:")) || + bucket.toLowerCase() !== bucket || + bucket.length < 3) { + endpointParams.ForcePathStyle = true; + } + if (endpointParams.DisableMultiRegionAccessPoints) { + endpointParams.disableMultiRegionAccessPoints = true; + endpointParams.DisableMRAP = true; + } + return endpointParams; +}; +const DOMAIN_PATTERN = /^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$/; +const IP_ADDRESS_PATTERN = /(\d+\.){3}\d+/; +const DOTS_PATTERN = /\.\./; +export const DOT_PATTERN = /\./; +export const S3_HOSTNAME_PATTERN = /^(.+\.)?s3(-fips)?(\.dualstack)?[.-]([a-z0-9-]+)\./; +export const isDnsCompatibleBucketName = (bucketName) => DOMAIN_PATTERN.test(bucketName) && !IP_ADDRESS_PATTERN.test(bucketName) && !DOTS_PATTERN.test(bucketName); +export const isArnBucketName = (bucketName) => { + const [arn, partition, service, , , bucket] = bucketName.split(":"); + const isArn = arn === "arn" && bucketName.split(":").length >= 6; + const isValidArn = Boolean(isArn && partition && service && bucket); + if (isArn && !isValidArn) { + throw new Error(`Invalid ARN: ${bucketName} was an invalid ARN.`); + } + return isValidArn; +}; diff --git a/node_modules/@smithy/middleware-endpoint/dist-es/types.js b/node_modules/@smithy/middleware-endpoint/dist-es/types.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/createConfigValueProvider.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/createConfigValueProvider.d.ts new file mode 100644 index 00000000..df659140 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/createConfigValueProvider.d.ts @@ -0,0 +1,13 @@ +/** + * Normalize some key of the client config to an async provider. + * @internal + * + * @param configKey - the key to look up in config. 
+ * @param canonicalEndpointParamKey - this is the name the EndpointRuleSet uses. + * it will most likely not contain the config + * value, but we use it as a fallback. + * @param config - container of the config values. + * + * @returns async function that will resolve with the value. + */ +export declare const createConfigValueProvider: >(configKey: string, canonicalEndpointParamKey: string, config: Config) => () => Promise; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.browser.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.browser.d.ts new file mode 100644 index 00000000..de05fa52 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.browser.d.ts @@ -0,0 +1 @@ +export declare const getEndpointFromConfig: (serviceId: string) => Promise; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.d.ts new file mode 100644 index 00000000..42a3566b --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getEndpointFromConfig: (serviceId?: string) => Promise; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromInstructions.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromInstructions.d.ts new file mode 100644 index 00000000..49cef2a6 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromInstructions.d.ts @@ -0,0 +1,28 @@ +import { EndpointParameters, EndpointV2, HandlerExecutionContext } from "@smithy/types"; +import { EndpointResolvedConfig } from "../resolveEndpointConfig"; +import { EndpointParameterInstructions } from "../types"; +/** + * @internal + */ +export type EndpointParameterInstructionsSupplier = Partial<{ + getEndpointParameterInstructions(): EndpointParameterInstructions; +}>; +/** + * This step in the endpoint resolution process is exposed as a function + * to allow packages such as signers, lib-upload, etc. to get + * the V2 Endpoint associated to an instance of some api operation command + * without needing to send it or resolve its middleware stack. + * + * @internal + * @param commandInput - the input of the Command in question. + * @param instructionsSupplier - this is typically a Command constructor. A static function supplying the + * endpoint parameter instructions will exist for commands in services + * having an endpoints ruleset trait. + * @param clientConfig - config of the service client. + * @param context - optional context. 
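+ *
+ * @example A hypothetical sketch (the S3 command and client names are illustrative,
+ * not part of this package):
+ * // const endpoint = await getEndpointFromInstructions(input, GetObjectCommand, s3Client.config);
+ * // endpoint.url.hostname; // e.g. "my-bucket.s3.us-east-1.amazonaws.com"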
+ */ +export declare const getEndpointFromInstructions: , Config extends Record>(commandInput: CommandInput, instructionsSupplier: EndpointParameterInstructionsSupplier, clientConfig: Partial> & Config, context?: HandlerExecutionContext) => Promise; +/** + * @internal + */ +export declare const resolveParams: , Config extends Record>(commandInput: CommandInput, instructionsSupplier: EndpointParameterInstructionsSupplier, clientConfig: Partial> & Config) => Promise; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointUrlConfig.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointUrlConfig.d.ts new file mode 100644 index 00000000..0971010f --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointUrlConfig.d.ts @@ -0,0 +1,2 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const getEndpointUrlConfig: (serviceId: string) => LoadedConfigSelectors; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/index.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/index.d.ts new file mode 100644 index 00000000..cc134883 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./getEndpointFromInstructions"; +/** + * @internal + */ +export * from "./toEndpointV1"; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/toEndpointV1.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/toEndpointV1.d.ts new file mode 100644 index 00000000..834aabb1 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/toEndpointV1.d.ts @@ -0,0 +1,5 @@ +import { Endpoint, EndpointV2 } from "@smithy/types"; +/** + * @internal + */ +export declare const toEndpointV1: (endpoint: string | Endpoint | EndpointV2) => Endpoint; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/endpointMiddleware.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/endpointMiddleware.d.ts new file mode 100644 index 00000000..67cee648 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/endpointMiddleware.d.ts @@ -0,0 +1,10 @@ +import { EndpointParameters, SerializeMiddleware } from "@smithy/types"; +import { EndpointResolvedConfig } from "./resolveEndpointConfig"; +import { EndpointParameterInstructions } from "./types"; +/** + * @internal + */ +export declare const endpointMiddleware: ({ config, instructions, }: { + config: EndpointResolvedConfig; + instructions: EndpointParameterInstructions; +}) => SerializeMiddleware; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/getEndpointPlugin.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/getEndpointPlugin.d.ts new file mode 100644 index 00000000..910f44d7 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/getEndpointPlugin.d.ts @@ -0,0 +1,11 @@ +import { EndpointParameters, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { EndpointResolvedConfig } from "./resolveEndpointConfig"; +import { EndpointParameterInstructions } from "./types"; +/** + * @internal + */ +export declare const endpointMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getEndpointPlugin: (config: EndpointResolvedConfig, instructions: EndpointParameterInstructions) => Pluggable; diff --git 
a/node_modules/@smithy/middleware-endpoint/dist-types/index.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/index.d.ts new file mode 100644 index 00000000..bea06cfd --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/index.d.ts @@ -0,0 +1,17 @@ +/** + * @internal + */ +export * from "./adaptors"; +/** + * @internal + */ +export * from "./endpointMiddleware"; +/** + * @internal + */ +export * from "./getEndpointPlugin"; +export * from "./resolveEndpointConfig"; +/** + * @internal + */ +export * from "./types"; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/resolveEndpointConfig.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/resolveEndpointConfig.d.ts new file mode 100644 index 00000000..ec7dc708 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/resolveEndpointConfig.d.ts @@ -0,0 +1,107 @@ +import { Endpoint, EndpointParameters, EndpointV2, Logger, Provider, UrlParser } from "@smithy/types"; +/** + * @public + * + * Endpoint config interfaces and resolver for Endpoint v2. They live in separate package to allow per-service onboarding. + * When all services onboard Endpoint v2, the resolver in config-resolver package can be removed. + * This interface includes all the endpoint parameters with built-in bindings of "AWS::*" and "SDK::*" + */ +export interface EndpointInputConfig { + /** + * The fully qualified endpoint of the webservice. This is only for using + * a custom endpoint (for example, when using a local version of S3). + * + * Endpoint transformations such as S3 applying a bucket to the hostname are + * still applicable to this custom endpoint. + */ + endpoint?: string | Endpoint | Provider | EndpointV2 | Provider; + /** + * Providing a custom endpointProvider will override + * built-in transformations of the endpoint such as S3 adding the bucket + * name to the hostname, since they are part of the default endpointProvider. + */ + endpointProvider?: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + /** + * Whether TLS is enabled for requests. + * @deprecated + */ + tls?: boolean; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | Provider; + /** + * @internal + * This field is used internally so you should not fill any value to this field. + */ + serviceConfiguredEndpoint?: never; +} +/** + * @internal + */ +interface PreviouslyResolved { + urlParser: UrlParser; + region: Provider; + endpointProvider: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + logger?: Logger; + serviceId?: string; +} +/** + * @internal + * + * This supercedes the similarly named EndpointsResolvedConfig (no parametric types) + * from resolveEndpointsConfig.ts in \@smithy/config-resolver. + */ +export interface EndpointResolvedConfig { + /** + * Custom endpoint provided by the user. + * This is normalized to a single interface from the various acceptable types. + * This field will be undefined if a custom endpoint is not provided. + */ + endpoint?: Provider; + endpointProvider: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + /** + * Whether TLS is enabled for requests. + * @deprecated + */ + tls: boolean; + /** + * Whether the endpoint is specified by caller. 
+ * @internal + * @deprecated + */ + isCustomEndpoint?: boolean; + /** + * Resolved value for input {@link EndpointsInputConfig.useDualstackEndpoint} + */ + useDualstackEndpoint: Provider; + /** + * Resolved value for input {@link EndpointsInputConfig.useFipsEndpoint} + */ + useFipsEndpoint: Provider; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * A configured endpoint global or specific to the service from ENV or AWS SDK configuration files. + * @internal + */ + serviceConfiguredEndpoint?: Provider; +} +/** + * @internal + */ +export declare const resolveEndpointConfig: (input: T & EndpointInputConfig
<P> & PreviouslyResolved<P>) => T & EndpointResolvedConfig<P>
; +export {}; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/index.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/index.d.ts new file mode 100644 index 00000000..716a15d4 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./s3"; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/s3.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/s3.d.ts new file mode 100644 index 00000000..80b2e6a1 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/s3.d.ts @@ -0,0 +1,26 @@ +import { EndpointParameters } from "@smithy/types"; +/** + * @internal + */ +export declare const resolveParamsForS3: (endpointParams: EndpointParameters) => Promise; +/** + * @internal + */ +export declare const DOT_PATTERN: RegExp; +/** + * @internal + */ +export declare const S3_HOSTNAME_PATTERN: RegExp; +/** + * Determines whether a given string is DNS compliant per the rules outlined by + * S3. Length, capitaization, and leading dot restrictions are enforced by the + * DOMAIN_PATTERN regular expression. + * @internal + * + * @see https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html + */ +export declare const isDnsCompatibleBucketName: (bucketName: string) => boolean; +/** + * @internal + */ +export declare const isArnBucketName: (bucketName: string) => boolean; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/createConfigValueProvider.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/createConfigValueProvider.d.ts new file mode 100644 index 00000000..842f8fa2 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/createConfigValueProvider.d.ts @@ -0,0 +1,13 @@ +/** + * Normalize some key of the client config to an async provider. + * @internal + * + * @param configKey - the key to look up in config. + * @param canonicalEndpointParamKey - this is the name the EndpointRuleSet uses. + * it will most likely not contain the config + * value, but we use it as a fallback. + * @param config - container of the config values. + * + * @returns async function that will resolve with the value. 
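+ *
+ * @example A hypothetical sketch (clientConfig is assumed to carry a region value):
+ * // const getRegion = createConfigValueProvider("region", "Region", clientConfig);
+ * // await getRegion(); // resolves config.region, falling back to config.Region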
+ */ +export declare const createConfigValueProvider: >(configKey: string, canonicalEndpointParamKey: string, config: Config) => () => Promise; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.browser.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.browser.d.ts new file mode 100644 index 00000000..1a4f6baa --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.browser.d.ts @@ -0,0 +1 @@ +export declare const getEndpointFromConfig: (serviceId: string) => Promise; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.d.ts new file mode 100644 index 00000000..641570c9 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getEndpointFromConfig: (serviceId?: string) => Promise; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromInstructions.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromInstructions.d.ts new file mode 100644 index 00000000..82dc8df5 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromInstructions.d.ts @@ -0,0 +1,28 @@ +import { EndpointParameters, EndpointV2, HandlerExecutionContext } from "@smithy/types"; +import { EndpointResolvedConfig } from "../resolveEndpointConfig"; +import { EndpointParameterInstructions } from "../types"; +/** + * @internal + */ +export type EndpointParameterInstructionsSupplier = Partial<{ + getEndpointParameterInstructions(): EndpointParameterInstructions; +}>; +/** + * This step in the endpoint resolution process is exposed as a function + * to allow packages such as signers, lib-upload, etc. to get + * the V2 Endpoint associated to an instance of some api operation command + * without needing to send it or resolve its middleware stack. + * + * @internal + * @param commandInput - the input of the Command in question. + * @param instructionsSupplier - this is typically a Command constructor. A static function supplying the + * endpoint parameter instructions will exist for commands in services + * having an endpoints ruleset trait. + * @param clientConfig - config of the service client. + * @param context - optional context. 
+ */ +export declare const getEndpointFromInstructions: , Config extends Record>(commandInput: CommandInput, instructionsSupplier: EndpointParameterInstructionsSupplier, clientConfig: Partial> & Config, context?: HandlerExecutionContext) => Promise; +/** + * @internal + */ +export declare const resolveParams: , Config extends Record>(commandInput: CommandInput, instructionsSupplier: EndpointParameterInstructionsSupplier, clientConfig: Partial> & Config) => Promise; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointUrlConfig.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointUrlConfig.d.ts new file mode 100644 index 00000000..7b9d0689 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointUrlConfig.d.ts @@ -0,0 +1,2 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const getEndpointUrlConfig: (serviceId: string) => LoadedConfigSelectors; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/index.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/index.d.ts new file mode 100644 index 00000000..ced05200 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./getEndpointFromInstructions"; +/** + * @internal + */ +export * from "./toEndpointV1"; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/toEndpointV1.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/toEndpointV1.d.ts new file mode 100644 index 00000000..047ded8f --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/toEndpointV1.d.ts @@ -0,0 +1,5 @@ +import { Endpoint, EndpointV2 } from "@smithy/types"; +/** + * @internal + */ +export declare const toEndpointV1: (endpoint: string | Endpoint | EndpointV2) => Endpoint; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/endpointMiddleware.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/endpointMiddleware.d.ts new file mode 100644 index 00000000..3f7e40a4 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/endpointMiddleware.d.ts @@ -0,0 +1,10 @@ +import { EndpointParameters, SerializeMiddleware } from "@smithy/types"; +import { EndpointResolvedConfig } from "./resolveEndpointConfig"; +import { EndpointParameterInstructions } from "./types"; +/** + * @internal + */ +export declare const endpointMiddleware: ({ config, instructions, }: { + config: EndpointResolvedConfig; + instructions: EndpointParameterInstructions; +}) => SerializeMiddleware; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/getEndpointPlugin.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/getEndpointPlugin.d.ts new file mode 100644 index 00000000..39f93a99 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/getEndpointPlugin.d.ts @@ -0,0 +1,11 @@ +import { EndpointParameters, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { EndpointResolvedConfig } from "./resolveEndpointConfig"; +import { EndpointParameterInstructions } from "./types"; +/** + * @internal + */ +export declare const endpointMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getEndpointPlugin: (config: EndpointResolvedConfig, 
instructions: EndpointParameterInstructions) => Pluggable; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..2ad75b98 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/index.d.ts @@ -0,0 +1,17 @@ +/** + * @internal + */ +export * from "./adaptors"; +/** + * @internal + */ +export * from "./endpointMiddleware"; +/** + * @internal + */ +export * from "./getEndpointPlugin"; +export * from "./resolveEndpointConfig"; +/** + * @internal + */ +export * from "./types"; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/resolveEndpointConfig.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/resolveEndpointConfig.d.ts new file mode 100644 index 00000000..875c9fc6 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/resolveEndpointConfig.d.ts @@ -0,0 +1,107 @@ +import { Endpoint, EndpointParameters, EndpointV2, Logger, Provider, UrlParser } from "@smithy/types"; +/** + * @public + * + * Endpoint config interfaces and resolver for Endpoint v2. They live in separate package to allow per-service onboarding. + * When all services onboard Endpoint v2, the resolver in config-resolver package can be removed. + * This interface includes all the endpoint parameters with built-in bindings of "AWS::*" and "SDK::*" + */ +export interface EndpointInputConfig { + /** + * The fully qualified endpoint of the webservice. This is only for using + * a custom endpoint (for example, when using a local version of S3). + * + * Endpoint transformations such as S3 applying a bucket to the hostname are + * still applicable to this custom endpoint. + */ + endpoint?: string | Endpoint | Provider | EndpointV2 | Provider; + /** + * Providing a custom endpointProvider will override + * built-in transformations of the endpoint such as S3 adding the bucket + * name to the hostname, since they are part of the default endpointProvider. + */ + endpointProvider?: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + /** + * Whether TLS is enabled for requests. + * @deprecated + */ + tls?: boolean; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | Provider; + /** + * @internal + * This field is used internally so you should not fill any value to this field. + */ + serviceConfiguredEndpoint?: never; +} +/** + * @internal + */ +interface PreviouslyResolved { + urlParser: UrlParser; + region: Provider; + endpointProvider: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + logger?: Logger; + serviceId?: string; +} +/** + * @internal + * + * This supercedes the similarly named EndpointsResolvedConfig (no parametric types) + * from resolveEndpointsConfig.ts in \@smithy/config-resolver. + */ +export interface EndpointResolvedConfig { + /** + * Custom endpoint provided by the user. + * This is normalized to a single interface from the various acceptable types. + * This field will be undefined if a custom endpoint is not provided. + */ + endpoint?: Provider; + endpointProvider: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + /** + * Whether TLS is enabled for requests. + * @deprecated + */ + tls: boolean; + /** + * Whether the endpoint is specified by caller. 
+ * @internal + * @deprecated + */ + isCustomEndpoint?: boolean; + /** + * Resolved value for input {@link EndpointsInputConfig.useDualstackEndpoint} + */ + useDualstackEndpoint: Provider; + /** + * Resolved value for input {@link EndpointsInputConfig.useFipsEndpoint} + */ + useFipsEndpoint: Provider; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * A configured endpoint global or specific to the service from ENV or AWS SDK configuration files. + * @internal + */ + serviceConfiguredEndpoint?: Provider; +} +/** + * @internal + */ +export declare const resolveEndpointConfig: (input: T & EndpointInputConfig
<P> & PreviouslyResolved<P>) => T & EndpointResolvedConfig<P>
; +export {}; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/index.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/index.d.ts new file mode 100644 index 00000000..6529752a --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./s3"; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/s3.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/s3.d.ts new file mode 100644 index 00000000..cace2272 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/s3.d.ts @@ -0,0 +1,26 @@ +import { EndpointParameters } from "@smithy/types"; +/** + * @internal + */ +export declare const resolveParamsForS3: (endpointParams: EndpointParameters) => Promise; +/** + * @internal + */ +export declare const DOT_PATTERN: RegExp; +/** + * @internal + */ +export declare const S3_HOSTNAME_PATTERN: RegExp; +/** + * Determines whether a given string is DNS compliant per the rules outlined by + * S3. Length, capitaization, and leading dot restrictions are enforced by the + * DOMAIN_PATTERN regular expression. + * @internal + * + * @see https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html + */ +export declare const isDnsCompatibleBucketName: (bucketName: string) => boolean; +/** + * @internal + */ +export declare const isArnBucketName: (bucketName: string) => boolean; diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/types.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/types.d.ts new file mode 100644 index 00000000..a6084c8a --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/types.d.ts @@ -0,0 +1,41 @@ +/** + * @internal + */ +export interface EndpointParameterInstructions { + [name: string]: BuiltInParamInstruction | ClientContextParamInstruction | StaticContextParamInstruction | ContextParamInstruction | OperationContextParamInstruction; +} +/** + * @internal + */ +export interface BuiltInParamInstruction { + type: "builtInParams"; + name: string; +} +/** + * @internal + */ +export interface ClientContextParamInstruction { + type: "clientContextParams"; + name: string; +} +/** + * @internal + */ +export interface StaticContextParamInstruction { + type: "staticContextParams"; + value: string | boolean; +} +/** + * @internal + */ +export interface ContextParamInstruction { + type: "contextParams"; + name: string; +} +/** + * @internal + */ +export interface OperationContextParamInstruction { + type: "operationContextParams"; + get(input: any): any; +} diff --git a/node_modules/@smithy/middleware-endpoint/dist-types/types.d.ts b/node_modules/@smithy/middleware-endpoint/dist-types/types.d.ts new file mode 100644 index 00000000..0d1d9e90 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/dist-types/types.d.ts @@ -0,0 +1,41 @@ +/** + * @internal + */ +export interface EndpointParameterInstructions { + [name: string]: BuiltInParamInstruction | ClientContextParamInstruction | StaticContextParamInstruction | ContextParamInstruction | OperationContextParamInstruction; +} +/** + * @internal + */ +export interface BuiltInParamInstruction { + type: "builtInParams"; + name: string; +} +/** + * @internal + */ +export interface ClientContextParamInstruction { + type: "clientContextParams"; + name: string; +} +/** + * @internal 
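+ * (illustrative: such an instruction looks like { type: "staticContextParams", value: true })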
+ */ +export interface StaticContextParamInstruction { + type: "staticContextParams"; + value: string | boolean; +} +/** + * @internal + */ +export interface ContextParamInstruction { + type: "contextParams"; + name: string; +} +/** + * @internal + */ +export interface OperationContextParamInstruction { + type: "operationContextParams"; + get(input: any): any; +} diff --git a/node_modules/@smithy/middleware-endpoint/package.json b/node_modules/@smithy/middleware-endpoint/package.json new file mode 100644 index 00000000..87083148 --- /dev/null +++ b/node_modules/@smithy/middleware-endpoint/package.json @@ -0,0 +1,74 @@ +{ + "name": "@smithy/middleware-endpoint", + "version": "4.1.1", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline middleware-endpoint", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "extract:docs": "api-extractor run --local", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.0", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/adaptors/getEndpointFromConfig": "./dist-es/adaptors/getEndpointFromConfig.browser" + }, + "react-native": { + "./dist-es/adaptors/getEndpointFromConfig": "./dist-es/adaptors/getEndpointFromConfig.browser", + "./dist-cjs/adaptors/getEndpointFromConfig": "./dist-cjs/adaptors/getEndpointFromConfig.browser" + }, + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/middleware-endpoint", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/middleware-endpoint" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/LICENSE b/node_modules/@smithy/middleware-retry/LICENSE new file mode 100644 index 00000000..dd65ae06 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. 
Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
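For orientation, the precedence that getEndpointUrlConfig above encodes can be summarized in a minimal sketch; lookupConfiguredEndpoint is a hypothetical helper name, and the shared config file's per-service "services" section is omitted for brevity:

```ts
// Sketch of the configured-endpoint lookup order: a service-specific environment
// variable beats the global AWS_ENDPOINT_URL, which beats the profile's endpoint_url.
const lookupConfiguredEndpoint = (
  serviceId: string,
  env: Record<string, string | undefined>,
  profile: Record<string, string | undefined>
): string | undefined => {
  // e.g. serviceId "S3" -> "AWS_ENDPOINT_URL_S3"
  const serviceKey = ["AWS_ENDPOINT_URL", ...serviceId.split(" ").map((w) => w.toUpperCase())].join("_");
  return env[serviceKey] ?? env["AWS_ENDPOINT_URL"] ?? profile["endpoint_url"];
};

// Pointing one service at a local S3-compatible endpoint:
lookupConfiguredEndpoint("S3", { AWS_ENDPOINT_URL_S3: "http://localhost:9000" }, {});
// => "http://localhost:9000"
```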
diff --git a/node_modules/@smithy/middleware-retry/README.md b/node_modules/@smithy/middleware-retry/README.md new file mode 100644 index 00000000..21ce9472 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/README.md @@ -0,0 +1,11 @@ +# @smithy/middleware-retry + +[![NPM version](https://img.shields.io/npm/v/@smithy/middleware-retry/latest.svg)](https://www.npmjs.com/package/@smithy/middleware-retry) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/middleware-retry.svg)](https://www.npmjs.com/package/@smithy/middleware-retry) + +## Usage + +See [@smithy/util-retry](https://github.com/smithy-lang/smithy-typescript/tree/main/packages/util-retry) +for retry behavior and configuration. + +See also: [AWS Documentation: Retry behavior](https://docs.aws.amazon.com/sdkref/latest/guide/feature-retry-behavior.html). diff --git a/node_modules/@smithy/middleware-retry/dist-cjs/AdaptiveRetryStrategy.js b/node_modules/@smithy/middleware-retry/dist-cjs/AdaptiveRetryStrategy.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-cjs/AdaptiveRetryStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/dist-cjs/StandardRetryStrategy.js b/node_modules/@smithy/middleware-retry/dist-cjs/StandardRetryStrategy.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-cjs/StandardRetryStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/dist-cjs/configurations.js b/node_modules/@smithy/middleware-retry/dist-cjs/configurations.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-cjs/configurations.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/dist-cjs/defaultRetryQuota.js b/node_modules/@smithy/middleware-retry/dist-cjs/defaultRetryQuota.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-cjs/defaultRetryQuota.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/dist-cjs/delayDecider.js b/node_modules/@smithy/middleware-retry/dist-cjs/delayDecider.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-cjs/delayDecider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/dist-cjs/index.js b/node_modules/@smithy/middleware-retry/dist-cjs/index.js new file mode 100644 index 00000000..c8375f0d --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-cjs/index.js @@ -0,0 +1,425 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if 
(!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AdaptiveRetryStrategy: () => AdaptiveRetryStrategy, + CONFIG_MAX_ATTEMPTS: () => CONFIG_MAX_ATTEMPTS, + CONFIG_RETRY_MODE: () => CONFIG_RETRY_MODE, + ENV_MAX_ATTEMPTS: () => ENV_MAX_ATTEMPTS, + ENV_RETRY_MODE: () => ENV_RETRY_MODE, + NODE_MAX_ATTEMPT_CONFIG_OPTIONS: () => NODE_MAX_ATTEMPT_CONFIG_OPTIONS, + NODE_RETRY_MODE_CONFIG_OPTIONS: () => NODE_RETRY_MODE_CONFIG_OPTIONS, + StandardRetryStrategy: () => StandardRetryStrategy, + defaultDelayDecider: () => defaultDelayDecider, + defaultRetryDecider: () => defaultRetryDecider, + getOmitRetryHeadersPlugin: () => getOmitRetryHeadersPlugin, + getRetryAfterHint: () => getRetryAfterHint, + getRetryPlugin: () => getRetryPlugin, + omitRetryHeadersMiddleware: () => omitRetryHeadersMiddleware, + omitRetryHeadersMiddlewareOptions: () => omitRetryHeadersMiddlewareOptions, + resolveRetryConfig: () => resolveRetryConfig, + retryMiddleware: () => retryMiddleware, + retryMiddlewareOptions: () => retryMiddlewareOptions +}); +module.exports = __toCommonJS(src_exports); + +// src/AdaptiveRetryStrategy.ts + + +// src/StandardRetryStrategy.ts +var import_protocol_http = require("@smithy/protocol-http"); + + +var import_uuid = require("uuid"); + +// src/defaultRetryQuota.ts +var import_util_retry = require("@smithy/util-retry"); +var getDefaultRetryQuota = /* @__PURE__ */ __name((initialRetryTokens, options) => { + const MAX_CAPACITY = initialRetryTokens; + const noRetryIncrement = options?.noRetryIncrement ?? import_util_retry.NO_RETRY_INCREMENT; + const retryCost = options?.retryCost ?? import_util_retry.RETRY_COST; + const timeoutRetryCost = options?.timeoutRetryCost ?? import_util_retry.TIMEOUT_RETRY_COST; + let availableCapacity = initialRetryTokens; + const getCapacityAmount = /* @__PURE__ */ __name((error) => error.name === "TimeoutError" ? timeoutRetryCost : retryCost, "getCapacityAmount"); + const hasRetryTokens = /* @__PURE__ */ __name((error) => getCapacityAmount(error) <= availableCapacity, "hasRetryTokens"); + const retrieveRetryTokens = /* @__PURE__ */ __name((error) => { + if (!hasRetryTokens(error)) { + throw new Error("No retry token available"); + } + const capacityAmount = getCapacityAmount(error); + availableCapacity -= capacityAmount; + return capacityAmount; + }, "retrieveRetryTokens"); + const releaseRetryTokens = /* @__PURE__ */ __name((capacityReleaseAmount) => { + availableCapacity += capacityReleaseAmount ?? 
noRetryIncrement; + availableCapacity = Math.min(availableCapacity, MAX_CAPACITY); + }, "releaseRetryTokens"); + return Object.freeze({ + hasRetryTokens, + retrieveRetryTokens, + releaseRetryTokens + }); +}, "getDefaultRetryQuota"); + +// src/delayDecider.ts + +var defaultDelayDecider = /* @__PURE__ */ __name((delayBase, attempts) => Math.floor(Math.min(import_util_retry.MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)), "defaultDelayDecider"); + +// src/retryDecider.ts +var import_service_error_classification = require("@smithy/service-error-classification"); +var defaultRetryDecider = /* @__PURE__ */ __name((error) => { + if (!error) { + return false; + } + return (0, import_service_error_classification.isRetryableByTrait)(error) || (0, import_service_error_classification.isClockSkewError)(error) || (0, import_service_error_classification.isThrottlingError)(error) || (0, import_service_error_classification.isTransientError)(error); +}, "defaultRetryDecider"); + +// src/util.ts +var asSdkError = /* @__PURE__ */ __name((error) => { + if (error instanceof Error) + return error; + if (error instanceof Object) + return Object.assign(new Error(), error); + if (typeof error === "string") + return new Error(error); + return new Error(`AWS SDK error wrapper for ${error}`); +}, "asSdkError"); + +// src/StandardRetryStrategy.ts +var StandardRetryStrategy = class { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = import_util_retry.RETRY_MODES.STANDARD; + this.retryDecider = options?.retryDecider ?? defaultRetryDecider; + this.delayDecider = options?.delayDecider ?? defaultDelayDecider; + this.retryQuota = options?.retryQuota ?? getDefaultRetryQuota(import_util_retry.INITIAL_RETRY_TOKENS); + } + static { + __name(this, "StandardRetryStrategy"); + } + shouldRetry(error, attempts, maxAttempts) { + return attempts < maxAttempts && this.retryDecider(error) && this.retryQuota.hasRetryTokens(error); + } + async getMaxAttempts() { + let maxAttempts; + try { + maxAttempts = await this.maxAttemptsProvider(); + } catch (error) { + maxAttempts = import_util_retry.DEFAULT_MAX_ATTEMPTS; + } + return maxAttempts; + } + async retry(next, args, options) { + let retryTokenAmount; + let attempts = 0; + let totalDelay = 0; + const maxAttempts = await this.getMaxAttempts(); + const { request } = args; + if (import_protocol_http.HttpRequest.isInstance(request)) { + request.headers[import_util_retry.INVOCATION_ID_HEADER] = (0, import_uuid.v4)(); + } + while (true) { + try { + if (import_protocol_http.HttpRequest.isInstance(request)) { + request.headers[import_util_retry.REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + if (options?.beforeRequest) { + await options.beforeRequest(); + } + const { response, output } = await next(args); + if (options?.afterRequest) { + options.afterRequest(response); + } + this.retryQuota.releaseRetryTokens(retryTokenAmount); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalDelay; + return { response, output }; + } catch (e) { + const err = asSdkError(e); + attempts++; + if (this.shouldRetry(err, attempts, maxAttempts)) { + retryTokenAmount = this.retryQuota.retrieveRetryTokens(err); + const delayFromDecider = this.delayDecider( + (0, import_service_error_classification.isThrottlingError)(err) ? 
import_util_retry.THROTTLING_RETRY_DELAY_BASE : import_util_retry.DEFAULT_RETRY_DELAY_BASE, + attempts + ); + const delayFromResponse = getDelayFromRetryAfterHeader(err.$response); + const delay = Math.max(delayFromResponse || 0, delayFromDecider); + totalDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + continue; + } + if (!err.$metadata) { + err.$metadata = {}; + } + err.$metadata.attempts = attempts; + err.$metadata.totalRetryDelay = totalDelay; + throw err; + } + } + } +}; +var getDelayFromRetryAfterHeader = /* @__PURE__ */ __name((response) => { + if (!import_protocol_http.HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return retryAfterSeconds * 1e3; + const retryAfterDate = new Date(retryAfter); + return retryAfterDate.getTime() - Date.now(); +}, "getDelayFromRetryAfterHeader"); + +// src/AdaptiveRetryStrategy.ts +var AdaptiveRetryStrategy = class extends StandardRetryStrategy { + static { + __name(this, "AdaptiveRetryStrategy"); + } + constructor(maxAttemptsProvider, options) { + const { rateLimiter, ...superOptions } = options ?? {}; + super(maxAttemptsProvider, superOptions); + this.rateLimiter = rateLimiter ?? new import_util_retry.DefaultRateLimiter(); + this.mode = import_util_retry.RETRY_MODES.ADAPTIVE; + } + async retry(next, args) { + return super.retry(next, args, { + beforeRequest: async () => { + return this.rateLimiter.getSendToken(); + }, + afterRequest: (response) => { + this.rateLimiter.updateClientSendingRate(response); + } + }); + } +}; + +// src/configurations.ts +var import_util_middleware = require("@smithy/util-middleware"); + +var ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +var CONFIG_MAX_ATTEMPTS = "max_attempts"; +var NODE_MAX_ATTEMPT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + const value = env[ENV_MAX_ATTEMPTS]; + if (!value) + return void 0; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Environment variable ${ENV_MAX_ATTEMPTS} must be a number, got "${value}"`); + } + return maxAttempt; + }, + configFileSelector: (profile) => { + const value = profile[CONFIG_MAX_ATTEMPTS]; + if (!value) + return void 0; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Shared config file entry ${CONFIG_MAX_ATTEMPTS} must be a number, got "${value}"`); + } + return maxAttempt; + }, + default: import_util_retry.DEFAULT_MAX_ATTEMPTS +}; +var resolveRetryConfig = /* @__PURE__ */ __name((input) => { + const { retryStrategy, retryMode: _retryMode, maxAttempts: _maxAttempts } = input; + const maxAttempts = (0, import_util_middleware.normalizeProvider)(_maxAttempts ??
import_util_retry.DEFAULT_MAX_ATTEMPTS); + return Object.assign(input, { + maxAttempts, + retryStrategy: async () => { + if (retryStrategy) { + return retryStrategy; + } + const retryMode = await (0, import_util_middleware.normalizeProvider)(_retryMode)(); + if (retryMode === import_util_retry.RETRY_MODES.ADAPTIVE) { + return new import_util_retry.AdaptiveRetryStrategy(maxAttempts); + } + return new import_util_retry.StandardRetryStrategy(maxAttempts); + } + }); +}, "resolveRetryConfig"); +var ENV_RETRY_MODE = "AWS_RETRY_MODE"; +var CONFIG_RETRY_MODE = "retry_mode"; +var NODE_RETRY_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_RETRY_MODE], + configFileSelector: (profile) => profile[CONFIG_RETRY_MODE], + default: import_util_retry.DEFAULT_RETRY_MODE +}; + +// src/omitRetryHeadersMiddleware.ts + + +var omitRetryHeadersMiddleware = /* @__PURE__ */ __name(() => (next) => async (args) => { + const { request } = args; + if (import_protocol_http.HttpRequest.isInstance(request)) { + delete request.headers[import_util_retry.INVOCATION_ID_HEADER]; + delete request.headers[import_util_retry.REQUEST_HEADER]; + } + return next(args); +}, "omitRetryHeadersMiddleware"); +var omitRetryHeadersMiddlewareOptions = { + name: "omitRetryHeadersMiddleware", + tags: ["RETRY", "HEADERS", "OMIT_RETRY_HEADERS"], + relation: "before", + toMiddleware: "awsAuthMiddleware", + override: true +}; +var getOmitRetryHeadersPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(omitRetryHeadersMiddleware(), omitRetryHeadersMiddlewareOptions); + } +}), "getOmitRetryHeadersPlugin"); + +// src/retryMiddleware.ts + + +var import_smithy_client = require("@smithy/smithy-client"); + + +var import_isStreamingPayload = require("./isStreamingPayload/isStreamingPayload"); +var retryMiddleware = /* @__PURE__ */ __name((options) => (next, context) => async (args) => { + let retryStrategy = await options.retryStrategy(); + const maxAttempts = await options.maxAttempts(); + if (isRetryStrategyV2(retryStrategy)) { + retryStrategy = retryStrategy; + let retryToken = await retryStrategy.acquireInitialRetryToken(context["partition_id"]); + let lastError = new Error(); + let attempts = 0; + let totalRetryDelay = 0; + const { request } = args; + const isRequest = import_protocol_http.HttpRequest.isInstance(request); + if (isRequest) { + request.headers[import_util_retry.INVOCATION_ID_HEADER] = (0, import_uuid.v4)(); + } + while (true) { + try { + if (isRequest) { + request.headers[import_util_retry.REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + const { response, output } = await next(args); + retryStrategy.recordSuccess(retryToken); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalRetryDelay; + return { response, output }; + } catch (e) { + const retryErrorInfo = getRetryErrorInfo(e); + lastError = asSdkError(e); + if (isRequest && (0, import_isStreamingPayload.isStreamingPayload)(request)) { + (context.logger instanceof import_smithy_client.NoOpLogger ? console : context.logger)?.warn( + "An error was encountered in a non-retryable streaming request." 
+ ); + throw lastError; + } + try { + retryToken = await retryStrategy.refreshRetryTokenForRetry(retryToken, retryErrorInfo); + } catch (refreshError) { + if (!lastError.$metadata) { + lastError.$metadata = {}; + } + lastError.$metadata.attempts = attempts + 1; + lastError.$metadata.totalRetryDelay = totalRetryDelay; + throw lastError; + } + attempts = retryToken.getRetryCount(); + const delay = retryToken.getRetryDelay(); + totalRetryDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + } + } + } else { + retryStrategy = retryStrategy; + if (retryStrategy?.mode) + context.userAgent = [...context.userAgent || [], ["cfg/retry-mode", retryStrategy.mode]]; + return retryStrategy.retry(next, args); + } +}, "retryMiddleware"); +var isRetryStrategyV2 = /* @__PURE__ */ __name((retryStrategy) => typeof retryStrategy.acquireInitialRetryToken !== "undefined" && typeof retryStrategy.refreshRetryTokenForRetry !== "undefined" && typeof retryStrategy.recordSuccess !== "undefined", "isRetryStrategyV2"); +var getRetryErrorInfo = /* @__PURE__ */ __name((error) => { + const errorInfo = { + error, + errorType: getRetryErrorType(error) + }; + const retryAfterHint = getRetryAfterHint(error.$response); + if (retryAfterHint) { + errorInfo.retryAfterHint = retryAfterHint; + } + return errorInfo; +}, "getRetryErrorInfo"); +var getRetryErrorType = /* @__PURE__ */ __name((error) => { + if ((0, import_service_error_classification.isThrottlingError)(error)) + return "THROTTLING"; + if ((0, import_service_error_classification.isTransientError)(error)) + return "TRANSIENT"; + if ((0, import_service_error_classification.isServerError)(error)) + return "SERVER_ERROR"; + return "CLIENT_ERROR"; +}, "getRetryErrorType"); +var retryMiddlewareOptions = { + name: "retryMiddleware", + tags: ["RETRY"], + step: "finalizeRequest", + priority: "high", + override: true +}; +var getRetryPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: (clientStack) => { + clientStack.add(retryMiddleware(options), retryMiddlewareOptions); + } +}), "getRetryPlugin"); +var getRetryAfterHint = /* @__PURE__ */ __name((response) => { + if (!import_protocol_http.HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return new Date(retryAfterSeconds * 1e3); + const retryAfterDate = new Date(retryAfter); + return retryAfterDate; +}, "getRetryAfterHint"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + AdaptiveRetryStrategy, + StandardRetryStrategy, + ENV_MAX_ATTEMPTS, + CONFIG_MAX_ATTEMPTS, + NODE_MAX_ATTEMPT_CONFIG_OPTIONS, + resolveRetryConfig, + ENV_RETRY_MODE, + CONFIG_RETRY_MODE, + NODE_RETRY_MODE_CONFIG_OPTIONS, + defaultDelayDecider, + omitRetryHeadersMiddleware, + omitRetryHeadersMiddlewareOptions, + getOmitRetryHeadersPlugin, + defaultRetryDecider, + retryMiddleware, + retryMiddlewareOptions, + getRetryPlugin, + getRetryAfterHint +}); + diff --git a/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.browser.js b/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.browser.js new file mode 100644 index 00000000..21fc19a1 --- /dev/null +++ 
b/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.browser.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isStreamingPayload = void 0; +const isStreamingPayload = (request) => (request === null || request === void 0 ? void 0 : request.body) instanceof ReadableStream; +exports.isStreamingPayload = isStreamingPayload; diff --git a/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.js b/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.js new file mode 100644 index 00000000..06f420b6 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isStreamingPayload = void 0; +const stream_1 = require("stream"); +const isStreamingPayload = (request) => (request === null || request === void 0 ? void 0 : request.body) instanceof stream_1.Readable || + (typeof ReadableStream !== "undefined" && (request === null || request === void 0 ? void 0 : request.body) instanceof ReadableStream); +exports.isStreamingPayload = isStreamingPayload; diff --git a/node_modules/@smithy/middleware-retry/dist-cjs/omitRetryHeadersMiddleware.js b/node_modules/@smithy/middleware-retry/dist-cjs/omitRetryHeadersMiddleware.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-cjs/omitRetryHeadersMiddleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/dist-cjs/retryDecider.js b/node_modules/@smithy/middleware-retry/dist-cjs/retryDecider.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-cjs/retryDecider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/dist-cjs/retryMiddleware.js b/node_modules/@smithy/middleware-retry/dist-cjs/retryMiddleware.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-cjs/retryMiddleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/dist-cjs/types.js b/node_modules/@smithy/middleware-retry/dist-cjs/types.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/dist-cjs/util.js b/node_modules/@smithy/middleware-retry/dist-cjs/util.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-cjs/util.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/dist-es/AdaptiveRetryStrategy.js b/node_modules/@smithy/middleware-retry/dist-es/AdaptiveRetryStrategy.js new file mode 100644 index 00000000..d349451f --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-es/AdaptiveRetryStrategy.js @@ -0,0 +1,20 @@ +import { DefaultRateLimiter, RETRY_MODES } from "@smithy/util-retry"; +import { StandardRetryStrategy } from "./StandardRetryStrategy"; +export class AdaptiveRetryStrategy extends StandardRetryStrategy { + 
constructor(maxAttemptsProvider, options) { + const { rateLimiter, ...superOptions } = options ?? {}; + super(maxAttemptsProvider, superOptions); + this.rateLimiter = rateLimiter ?? new DefaultRateLimiter(); + this.mode = RETRY_MODES.ADAPTIVE; + } + async retry(next, args) { + return super.retry(next, args, { + beforeRequest: async () => { + return this.rateLimiter.getSendToken(); + }, + afterRequest: (response) => { + this.rateLimiter.updateClientSendingRate(response); + }, + }); + } +} diff --git a/node_modules/@smithy/middleware-retry/dist-es/StandardRetryStrategy.js b/node_modules/@smithy/middleware-retry/dist-es/StandardRetryStrategy.js new file mode 100644 index 00000000..e718ad67 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-es/StandardRetryStrategy.js @@ -0,0 +1,90 @@ +import { HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { isThrottlingError } from "@smithy/service-error-classification"; +import { DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_DELAY_BASE, INITIAL_RETRY_TOKENS, INVOCATION_ID_HEADER, REQUEST_HEADER, RETRY_MODES, THROTTLING_RETRY_DELAY_BASE, } from "@smithy/util-retry"; +import { v4 } from "uuid"; +import { getDefaultRetryQuota } from "./defaultRetryQuota"; +import { defaultDelayDecider } from "./delayDecider"; +import { defaultRetryDecider } from "./retryDecider"; +import { asSdkError } from "./util"; +export class StandardRetryStrategy { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = RETRY_MODES.STANDARD; + this.retryDecider = options?.retryDecider ?? defaultRetryDecider; + this.delayDecider = options?.delayDecider ?? defaultDelayDecider; + this.retryQuota = options?.retryQuota ?? getDefaultRetryQuota(INITIAL_RETRY_TOKENS); + } + shouldRetry(error, attempts, maxAttempts) { + return attempts < maxAttempts && this.retryDecider(error) && this.retryQuota.hasRetryTokens(error); + } + async getMaxAttempts() { + let maxAttempts; + try { + maxAttempts = await this.maxAttemptsProvider(); + } + catch (error) { + maxAttempts = DEFAULT_MAX_ATTEMPTS; + } + return maxAttempts; + } + async retry(next, args, options) { + let retryTokenAmount; + let attempts = 0; + let totalDelay = 0; + const maxAttempts = await this.getMaxAttempts(); + const { request } = args; + if (HttpRequest.isInstance(request)) { + request.headers[INVOCATION_ID_HEADER] = v4(); + } + while (true) { + try { + if (HttpRequest.isInstance(request)) { + request.headers[REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + if (options?.beforeRequest) { + await options.beforeRequest(); + } + const { response, output } = await next(args); + if (options?.afterRequest) { + options.afterRequest(response); + } + this.retryQuota.releaseRetryTokens(retryTokenAmount); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalDelay; + return { response, output }; + } + catch (e) { + const err = asSdkError(e); + attempts++; + if (this.shouldRetry(err, attempts, maxAttempts)) { + retryTokenAmount = this.retryQuota.retrieveRetryTokens(err); + const delayFromDecider = this.delayDecider(isThrottlingError(err) ? 
THROTTLING_RETRY_DELAY_BASE : DEFAULT_RETRY_DELAY_BASE, attempts); + const delayFromResponse = getDelayFromRetryAfterHeader(err.$response); + const delay = Math.max(delayFromResponse || 0, delayFromDecider); + totalDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + continue; + } + if (!err.$metadata) { + err.$metadata = {}; + } + err.$metadata.attempts = attempts; + err.$metadata.totalRetryDelay = totalDelay; + throw err; + } + } + } +} +const getDelayFromRetryAfterHeader = (response) => { + if (!HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return retryAfterSeconds * 1000; + const retryAfterDate = new Date(retryAfter); + return retryAfterDate.getTime() - Date.now(); +}; diff --git a/node_modules/@smithy/middleware-retry/dist-es/configurations.js b/node_modules/@smithy/middleware-retry/dist-es/configurations.js new file mode 100644 index 00000000..ec375e31 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-es/configurations.js @@ -0,0 +1,51 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +import { AdaptiveRetryStrategy, DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_MODE, RETRY_MODES, StandardRetryStrategy, } from "@smithy/util-retry"; +export const ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +export const CONFIG_MAX_ATTEMPTS = "max_attempts"; +export const NODE_MAX_ATTEMPT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + const value = env[ENV_MAX_ATTEMPTS]; + if (!value) + return undefined; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Environment variable ${ENV_MAX_ATTEMPTS} must be a number, got "${value}"`); + } + return maxAttempt; + }, + configFileSelector: (profile) => { + const value = profile[CONFIG_MAX_ATTEMPTS]; + if (!value) + return undefined; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Shared config file entry ${CONFIG_MAX_ATTEMPTS} must be a number, got "${value}"`); + } + return maxAttempt; + }, + default: DEFAULT_MAX_ATTEMPTS, +}; +export const resolveRetryConfig = (input) => { + const { retryStrategy, retryMode: _retryMode, maxAttempts: _maxAttempts } = input; + const maxAttempts = normalizeProvider(_maxAttempts ??
DEFAULT_MAX_ATTEMPTS); + return Object.assign(input, { + maxAttempts, + retryStrategy: async () => { + if (retryStrategy) { + return retryStrategy; + } + const retryMode = await normalizeProvider(_retryMode)(); + if (retryMode === RETRY_MODES.ADAPTIVE) { + return new AdaptiveRetryStrategy(maxAttempts); + } + return new StandardRetryStrategy(maxAttempts); + }, + }); +}; +export const ENV_RETRY_MODE = "AWS_RETRY_MODE"; +export const CONFIG_RETRY_MODE = "retry_mode"; +export const NODE_RETRY_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_RETRY_MODE], + configFileSelector: (profile) => profile[CONFIG_RETRY_MODE], + default: DEFAULT_RETRY_MODE, +}; diff --git a/node_modules/@smithy/middleware-retry/dist-es/defaultRetryQuota.js b/node_modules/@smithy/middleware-retry/dist-es/defaultRetryQuota.js new file mode 100644 index 00000000..4bf67716 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-es/defaultRetryQuota.js @@ -0,0 +1,27 @@ +import { NO_RETRY_INCREMENT, RETRY_COST, TIMEOUT_RETRY_COST } from "@smithy/util-retry"; +export const getDefaultRetryQuota = (initialRetryTokens, options) => { + const MAX_CAPACITY = initialRetryTokens; + const noRetryIncrement = options?.noRetryIncrement ?? NO_RETRY_INCREMENT; + const retryCost = options?.retryCost ?? RETRY_COST; + const timeoutRetryCost = options?.timeoutRetryCost ?? TIMEOUT_RETRY_COST; + let availableCapacity = initialRetryTokens; + const getCapacityAmount = (error) => (error.name === "TimeoutError" ? timeoutRetryCost : retryCost); + const hasRetryTokens = (error) => getCapacityAmount(error) <= availableCapacity; + const retrieveRetryTokens = (error) => { + if (!hasRetryTokens(error)) { + throw new Error("No retry token available"); + } + const capacityAmount = getCapacityAmount(error); + availableCapacity -= capacityAmount; + return capacityAmount; + }; + const releaseRetryTokens = (capacityReleaseAmount) => { + availableCapacity += capacityReleaseAmount ?? 
noRetryIncrement; + availableCapacity = Math.min(availableCapacity, MAX_CAPACITY); + }; + return Object.freeze({ + hasRetryTokens, + retrieveRetryTokens, + releaseRetryTokens, + }); +}; diff --git a/node_modules/@smithy/middleware-retry/dist-es/delayDecider.js b/node_modules/@smithy/middleware-retry/dist-es/delayDecider.js new file mode 100644 index 00000000..2928506a --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-es/delayDecider.js @@ -0,0 +1,2 @@ +import { MAXIMUM_RETRY_DELAY } from "@smithy/util-retry"; +export const defaultDelayDecider = (delayBase, attempts) => Math.floor(Math.min(MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); diff --git a/node_modules/@smithy/middleware-retry/dist-es/index.js b/node_modules/@smithy/middleware-retry/dist-es/index.js new file mode 100644 index 00000000..9ebe326a --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-es/index.js @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./StandardRetryStrategy"; +export * from "./configurations"; +export * from "./delayDecider"; +export * from "./omitRetryHeadersMiddleware"; +export * from "./retryDecider"; +export * from "./retryMiddleware"; diff --git a/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.browser.js b/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.browser.js new file mode 100644 index 00000000..9569e924 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.browser.js @@ -0,0 +1 @@ +export const isStreamingPayload = (request) => request?.body instanceof ReadableStream; diff --git a/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.js b/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.js new file mode 100644 index 00000000..7dcc687b --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.js @@ -0,0 +1,3 @@ +import { Readable } from "stream"; +export const isStreamingPayload = (request) => request?.body instanceof Readable || + (typeof ReadableStream !== "undefined" && request?.body instanceof ReadableStream); diff --git a/node_modules/@smithy/middleware-retry/dist-es/omitRetryHeadersMiddleware.js b/node_modules/@smithy/middleware-retry/dist-es/omitRetryHeadersMiddleware.js new file mode 100644 index 00000000..cb3c3724 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-es/omitRetryHeadersMiddleware.js @@ -0,0 +1,22 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { INVOCATION_ID_HEADER, REQUEST_HEADER } from "@smithy/util-retry"; +export const omitRetryHeadersMiddleware = () => (next) => async (args) => { + const { request } = args; + if (HttpRequest.isInstance(request)) { + delete request.headers[INVOCATION_ID_HEADER]; + delete request.headers[REQUEST_HEADER]; + } + return next(args); +}; +export const omitRetryHeadersMiddlewareOptions = { + name: "omitRetryHeadersMiddleware", + tags: ["RETRY", "HEADERS", "OMIT_RETRY_HEADERS"], + relation: "before", + toMiddleware: "awsAuthMiddleware", + override: true, +}; +export const getOmitRetryHeadersPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(omitRetryHeadersMiddleware(), omitRetryHeadersMiddlewareOptions); + }, +}); diff --git a/node_modules/@smithy/middleware-retry/dist-es/retryDecider.js b/node_modules/@smithy/middleware-retry/dist-es/retryDecider.js new file mode 100644 index 
00000000..b965fba0 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-es/retryDecider.js @@ -0,0 +1,7 @@ +import { isClockSkewError, isRetryableByTrait, isThrottlingError, isTransientError, } from "@smithy/service-error-classification"; +export const defaultRetryDecider = (error) => { + if (!error) { + return false; + } + return isRetryableByTrait(error) || isClockSkewError(error) || isThrottlingError(error) || isTransientError(error); +}; diff --git a/node_modules/@smithy/middleware-retry/dist-es/retryMiddleware.js b/node_modules/@smithy/middleware-retry/dist-es/retryMiddleware.js new file mode 100644 index 00000000..a8977352 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-es/retryMiddleware.js @@ -0,0 +1,112 @@ +import { HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { isServerError, isThrottlingError, isTransientError } from "@smithy/service-error-classification"; +import { NoOpLogger } from "@smithy/smithy-client"; +import { INVOCATION_ID_HEADER, REQUEST_HEADER } from "@smithy/util-retry"; +import { v4 } from "uuid"; +import { isStreamingPayload } from "./isStreamingPayload/isStreamingPayload"; +import { asSdkError } from "./util"; +export const retryMiddleware = (options) => (next, context) => async (args) => { + let retryStrategy = await options.retryStrategy(); + const maxAttempts = await options.maxAttempts(); + if (isRetryStrategyV2(retryStrategy)) { + retryStrategy = retryStrategy; + let retryToken = await retryStrategy.acquireInitialRetryToken(context["partition_id"]); + let lastError = new Error(); + let attempts = 0; + let totalRetryDelay = 0; + const { request } = args; + const isRequest = HttpRequest.isInstance(request); + if (isRequest) { + request.headers[INVOCATION_ID_HEADER] = v4(); + } + while (true) { + try { + if (isRequest) { + request.headers[REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + const { response, output } = await next(args); + retryStrategy.recordSuccess(retryToken); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalRetryDelay; + return { response, output }; + } + catch (e) { + const retryErrorInfo = getRetryErrorInfo(e); + lastError = asSdkError(e); + if (isRequest && isStreamingPayload(request)) { + (context.logger instanceof NoOpLogger ? 
console : context.logger)?.warn("An error was encountered in a non-retryable streaming request."); + throw lastError; + } + try { + retryToken = await retryStrategy.refreshRetryTokenForRetry(retryToken, retryErrorInfo); + } + catch (refreshError) { + if (!lastError.$metadata) { + lastError.$metadata = {}; + } + lastError.$metadata.attempts = attempts + 1; + lastError.$metadata.totalRetryDelay = totalRetryDelay; + throw lastError; + } + attempts = retryToken.getRetryCount(); + const delay = retryToken.getRetryDelay(); + totalRetryDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + } + } + } + else { + retryStrategy = retryStrategy; + if (retryStrategy?.mode) + context.userAgent = [...(context.userAgent || []), ["cfg/retry-mode", retryStrategy.mode]]; + return retryStrategy.retry(next, args); + } +}; +const isRetryStrategyV2 = (retryStrategy) => typeof retryStrategy.acquireInitialRetryToken !== "undefined" && + typeof retryStrategy.refreshRetryTokenForRetry !== "undefined" && + typeof retryStrategy.recordSuccess !== "undefined"; +const getRetryErrorInfo = (error) => { + const errorInfo = { + error, + errorType: getRetryErrorType(error), + }; + const retryAfterHint = getRetryAfterHint(error.$response); + if (retryAfterHint) { + errorInfo.retryAfterHint = retryAfterHint; + } + return errorInfo; +}; +const getRetryErrorType = (error) => { + if (isThrottlingError(error)) + return "THROTTLING"; + if (isTransientError(error)) + return "TRANSIENT"; + if (isServerError(error)) + return "SERVER_ERROR"; + return "CLIENT_ERROR"; +}; +export const retryMiddlewareOptions = { + name: "retryMiddleware", + tags: ["RETRY"], + step: "finalizeRequest", + priority: "high", + override: true, +}; +export const getRetryPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(retryMiddleware(options), retryMiddlewareOptions); + }, +}); +export const getRetryAfterHint = (response) => { + if (!HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return new Date(retryAfterSeconds * 1000); + const retryAfterDate = new Date(retryAfter); + return retryAfterDate; +}; diff --git a/node_modules/@smithy/middleware-retry/dist-es/types.js b/node_modules/@smithy/middleware-retry/dist-es/types.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/middleware-retry/dist-es/util.js b/node_modules/@smithy/middleware-retry/dist-es/util.js new file mode 100644 index 00000000..f45e6b4d --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-es/util.js @@ -0,0 +1,9 @@ +export const asSdkError = (error) => { + if (error instanceof Error) + return error; + if (error instanceof Object) + return Object.assign(new Error(), error); + if (typeof error === "string") + return new Error(error); + return new Error(`AWS SDK error wrapper for ${error}`); +}; diff --git a/node_modules/@smithy/middleware-retry/dist-types/AdaptiveRetryStrategy.d.ts b/node_modules/@smithy/middleware-retry/dist-types/AdaptiveRetryStrategy.d.ts new file mode 100644 index 00000000..98a6a1da --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/AdaptiveRetryStrategy.d.ts @@ -0,0 +1,22 @@ 
+import { FinalizeHandler, FinalizeHandlerArguments, MetadataBearer, Provider } from "@smithy/types"; +import { RateLimiter } from "@smithy/util-retry"; +import { StandardRetryStrategy, StandardRetryStrategyOptions } from "./StandardRetryStrategy"; +/** + * @public + * Strategy options to be passed to AdaptiveRetryStrategy + */ +export interface AdaptiveRetryStrategyOptions extends StandardRetryStrategyOptions { + rateLimiter?: RateLimiter; +} +/** + * @public + * @deprecated use AdaptiveRetryStrategy from @smithy/util-retry + */ +export declare class AdaptiveRetryStrategy extends StandardRetryStrategy { + private rateLimiter; + constructor(maxAttemptsProvider: Provider<number>, options?: AdaptiveRetryStrategyOptions); + retry<Input extends object, Output extends MetadataBearer>(next: FinalizeHandler<Input, Output>, args: FinalizeHandlerArguments<Input>): Promise<{ + response: unknown; + output: Output; + }>; +} diff --git a/node_modules/@smithy/middleware-retry/dist-types/StandardRetryStrategy.d.ts b/node_modules/@smithy/middleware-retry/dist-types/StandardRetryStrategy.d.ts new file mode 100644 index 00000000..7007ac38 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/StandardRetryStrategy.d.ts @@ -0,0 +1,33 @@ +import { FinalizeHandler, FinalizeHandlerArguments, MetadataBearer, Provider, RetryStrategy } from "@smithy/types"; +import { DelayDecider, RetryDecider, RetryQuota } from "./types"; +/** + * Strategy options to be passed to StandardRetryStrategy + * @public + * @deprecated use StandardRetryStrategy from @smithy/util-retry + */ +export interface StandardRetryStrategyOptions { + retryDecider?: RetryDecider; + delayDecider?: DelayDecider; + retryQuota?: RetryQuota; +} +/** + * @public + * @deprecated use StandardRetryStrategy from @smithy/util-retry + */ +export declare class StandardRetryStrategy implements RetryStrategy { + private readonly maxAttemptsProvider; + private retryDecider; + private delayDecider; + private retryQuota; + mode: string; + constructor(maxAttemptsProvider: Provider<number>, options?: StandardRetryStrategyOptions); + private shouldRetry; + private getMaxAttempts; + retry<Input extends object, Output extends MetadataBearer>(next: FinalizeHandler<Input, Output>, args: FinalizeHandlerArguments<Input>, options?: { + beforeRequest: Function; + afterRequest: Function; + }): Promise<{ + response: unknown; + output: Output; + }>; +} diff --git a/node_modules/@smithy/middleware-retry/dist-types/configurations.d.ts b/node_modules/@smithy/middleware-retry/dist-types/configurations.d.ts new file mode 100644 index 00000000..150c2a36 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/configurations.d.ts @@ -0,0 +1,66 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { Provider, RetryStrategy, RetryStrategyV2 } from "@smithy/types"; +/** + * @internal + */ +export declare const ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +/** + * @internal + */ +export declare const CONFIG_MAX_ATTEMPTS = "max_attempts"; +/** + * @internal + */ +export declare const NODE_MAX_ATTEMPT_CONFIG_OPTIONS: LoadedConfigSelectors<number>; +/** + * @public + */ +export interface RetryInputConfig { + /** + * The maximum number of times requests that encounter retryable failures should be attempted. + */ + maxAttempts?: number | Provider<number>; + /** + * The strategy to retry the request. Using built-in exponential backoff strategy by default. + */ + retryStrategy?: RetryStrategy | RetryStrategyV2; +} +/** + * @internal + */ +export interface PreviouslyResolved { + /** + * Specifies provider for retry algorithm to use.
+ * @internal + */ + retryMode: string | Provider<string>; +} +/** + * @internal + */ +export interface RetryResolvedConfig { + /** + * Resolved value for input config {@link RetryInputConfig.maxAttempts} + */ + maxAttempts: Provider<number>; + /** + * Resolved value for input config {@link RetryInputConfig.retryStrategy} + */ + retryStrategy: Provider<RetryStrategy | RetryStrategyV2>; +} +/** + * @internal + */ +export declare const resolveRetryConfig: <T>(input: T & PreviouslyResolved & RetryInputConfig) => T & RetryResolvedConfig; +/** + * @internal + */ +export declare const ENV_RETRY_MODE = "AWS_RETRY_MODE"; +/** + * @internal + */ +export declare const CONFIG_RETRY_MODE = "retry_mode"; +/** + * @internal + */ +export declare const NODE_RETRY_MODE_CONFIG_OPTIONS: LoadedConfigSelectors<string>; diff --git a/node_modules/@smithy/middleware-retry/dist-types/defaultRetryQuota.d.ts b/node_modules/@smithy/middleware-retry/dist-types/defaultRetryQuota.d.ts new file mode 100644 index 00000000..332a4940 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/defaultRetryQuota.d.ts @@ -0,0 +1,24 @@ +import { RetryQuota } from "./types"; +/** + * @internal + */ +export interface DefaultRetryQuotaOptions { + /** + * The total amount of retry tokens to be incremented to the retry token balance + * if an SDK operation invocation succeeds without requiring a retry request. + */ + noRetryIncrement?: number; + /** + * The total amount of retry tokens to be decremented from retry token balance. + */ + retryCost?: number; + /** + * The total amount of retry tokens to be decremented from retry token balance + * when a timeout error is encountered. + */ + timeoutRetryCost?: number; +} +/** + * @internal + */ +export declare const getDefaultRetryQuota: (initialRetryTokens: number, options?: DefaultRetryQuotaOptions) => RetryQuota; diff --git a/node_modules/@smithy/middleware-retry/dist-types/delayDecider.d.ts b/node_modules/@smithy/middleware-retry/dist-types/delayDecider.d.ts new file mode 100644 index 00000000..986ff42a --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/delayDecider.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Calculate a capped, fully-jittered exponential backoff time.
+ */ +export declare const defaultDelayDecider: (delayBase: number, attempts: number) => number; diff --git a/node_modules/@smithy/middleware-retry/dist-types/index.d.ts b/node_modules/@smithy/middleware-retry/dist-types/index.d.ts new file mode 100644 index 00000000..9ebe326a --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/index.d.ts @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./StandardRetryStrategy"; +export * from "./configurations"; +export * from "./delayDecider"; +export * from "./omitRetryHeadersMiddleware"; +export * from "./retryDecider"; +export * from "./retryMiddleware"; diff --git a/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.browser.d.ts b/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.browser.d.ts new file mode 100644 index 00000000..48d70ba8 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.browser.d.ts @@ -0,0 +1,5 @@ +import type { HttpRequest } from "@smithy/protocol-http"; +/** + * @internal + */ +export declare const isStreamingPayload: (request: HttpRequest) => boolean; diff --git a/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.d.ts b/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.d.ts new file mode 100644 index 00000000..48d70ba8 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.d.ts @@ -0,0 +1,5 @@ +import type { HttpRequest } from "@smithy/protocol-http"; +/** + * @internal + */ +export declare const isStreamingPayload: (request: HttpRequest) => boolean; diff --git a/node_modules/@smithy/middleware-retry/dist-types/omitRetryHeadersMiddleware.d.ts b/node_modules/@smithy/middleware-retry/dist-types/omitRetryHeadersMiddleware.d.ts new file mode 100644 index 00000000..50c1ab67 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/omitRetryHeadersMiddleware.d.ts @@ -0,0 +1,13 @@ +import { FinalizeHandler, MetadataBearer, Pluggable, RelativeMiddlewareOptions } from "@smithy/types"; +/** + * @internal + */ +export declare const omitRetryHeadersMiddleware: () => <Output extends MetadataBearer = MetadataBearer>(next: FinalizeHandler<any, Output>) => FinalizeHandler<any, Output>; +/** + * @internal + */ +export declare const omitRetryHeadersMiddlewareOptions: RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getOmitRetryHeadersPlugin: (options: unknown) => Pluggable<any, any>; diff --git a/node_modules/@smithy/middleware-retry/dist-types/retryDecider.d.ts b/node_modules/@smithy/middleware-retry/dist-types/retryDecider.d.ts new file mode 100644 index 00000000..11a4a9c7 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/retryDecider.d.ts @@ -0,0 +1,6 @@ +import { SdkError } from "@smithy/types"; +/** + * @internal + * @deprecated this is only used in the deprecated StandardRetryStrategy. Do not use in new code.
+ */ +export declare const defaultRetryDecider: (error: SdkError) => boolean; diff --git a/node_modules/@smithy/middleware-retry/dist-types/retryMiddleware.d.ts b/node_modules/@smithy/middleware-retry/dist-types/retryMiddleware.d.ts new file mode 100644 index 00000000..93103017 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/retryMiddleware.d.ts @@ -0,0 +1,18 @@ +import { AbsoluteLocation, FinalizeHandler, FinalizeRequestHandlerOptions, HandlerExecutionContext, MetadataBearer, Pluggable } from "@smithy/types"; +import { RetryResolvedConfig } from "./configurations"; +/** + * @internal + */ +export declare const retryMiddleware: (options: RetryResolvedConfig) => <Output extends MetadataBearer = MetadataBearer>(next: FinalizeHandler<any, Output>, context: HandlerExecutionContext) => FinalizeHandler<any, Output>; +/** + * @internal + */ +export declare const retryMiddlewareOptions: FinalizeRequestHandlerOptions & AbsoluteLocation; +/** + * @internal + */ +export declare const getRetryPlugin: (options: RetryResolvedConfig) => Pluggable<any, any>; +/** + * @internal + */ +export declare const getRetryAfterHint: (response: unknown) => Date | undefined; diff --git a/node_modules/@smithy/middleware-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts new file mode 100644 index 00000000..33f04164 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts @@ -0,0 +1,22 @@ +import { FinalizeHandler, FinalizeHandlerArguments, MetadataBearer, Provider } from "@smithy/types"; +import { RateLimiter } from "@smithy/util-retry"; +import { StandardRetryStrategy, StandardRetryStrategyOptions } from "./StandardRetryStrategy"; +/** + * @public + * Strategy options to be passed to AdaptiveRetryStrategy + */ +export interface AdaptiveRetryStrategyOptions extends StandardRetryStrategyOptions { + rateLimiter?: RateLimiter; +} +/** + * @public + * @deprecated use AdaptiveRetryStrategy from @smithy/util-retry + */ +export declare class AdaptiveRetryStrategy extends StandardRetryStrategy { + private rateLimiter; + constructor(maxAttemptsProvider: Provider<number>, options?: AdaptiveRetryStrategyOptions); + retry<Input extends object, Output extends MetadataBearer>(next: FinalizeHandler<Input, Output>, args: FinalizeHandlerArguments<Input>): Promise<{ + response: unknown; + output: Output; + }>; +} diff --git a/node_modules/@smithy/middleware-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts new file mode 100644 index 00000000..b4656d25 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts @@ -0,0 +1,33 @@ +import { FinalizeHandler, FinalizeHandlerArguments, MetadataBearer, Provider, RetryStrategy } from "@smithy/types"; +import { DelayDecider, RetryDecider, RetryQuota } from "./types"; +/** + * Strategy options to be passed to StandardRetryStrategy + * @public + * @deprecated use StandardRetryStrategy from @smithy/util-retry + */ +export interface StandardRetryStrategyOptions { + retryDecider?: RetryDecider; + delayDecider?: DelayDecider; + retryQuota?: RetryQuota; +} +/** + * @public + * @deprecated use StandardRetryStrategy from @smithy/util-retry + */ +export declare class StandardRetryStrategy implements RetryStrategy { + private readonly maxAttemptsProvider; + private retryDecider; + private delayDecider; + private retryQuota; + mode: string; + constructor(maxAttemptsProvider: Provider<number>, options?: StandardRetryStrategyOptions); + private shouldRetry; + private getMaxAttempts; + retry<Input extends object, Output extends MetadataBearer>(next: FinalizeHandler<Input, Output>,
args: FinalizeHandlerArguments<Input>, options?: { + beforeRequest: Function; + afterRequest: Function; + }): Promise<{ + response: unknown; + output: Output; + }>; +} diff --git a/node_modules/@smithy/middleware-retry/dist-types/ts3.4/configurations.d.ts b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/configurations.d.ts new file mode 100644 index 00000000..79f86462 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/configurations.d.ts @@ -0,0 +1,66 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { Provider, RetryStrategy, RetryStrategyV2 } from "@smithy/types"; +/** + * @internal + */ +export declare const ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +/** + * @internal + */ +export declare const CONFIG_MAX_ATTEMPTS = "max_attempts"; +/** + * @internal + */ +export declare const NODE_MAX_ATTEMPT_CONFIG_OPTIONS: LoadedConfigSelectors<number>; +/** + * @public + */ +export interface RetryInputConfig { + /** + * The maximum number of times requests that encounter retryable failures should be attempted. + */ + maxAttempts?: number | Provider<number>; + /** + * The strategy to retry the request. Using built-in exponential backoff strategy by default. + */ + retryStrategy?: RetryStrategy | RetryStrategyV2; +} +/** + * @internal + */ +export interface PreviouslyResolved { + /** + * Specifies provider for retry algorithm to use. + * @internal + */ + retryMode: string | Provider<string>; +} +/** + * @internal + */ +export interface RetryResolvedConfig { + /** + * Resolved value for input config {@link RetryInputConfig.maxAttempts} + */ + maxAttempts: Provider<number>; + /** + * Resolved value for input config {@link RetryInputConfig.retryStrategy} + */ + retryStrategy: Provider<RetryStrategy | RetryStrategyV2>; +} +/** + * @internal + */ +export declare const resolveRetryConfig: <T>(input: T & PreviouslyResolved & RetryInputConfig) => T & RetryResolvedConfig; +/** + * @internal + */ +export declare const ENV_RETRY_MODE = "AWS_RETRY_MODE"; +/** + * @internal + */ +export declare const CONFIG_RETRY_MODE = "retry_mode"; +/** + * @internal + */ +export declare const NODE_RETRY_MODE_CONFIG_OPTIONS: LoadedConfigSelectors<string>; diff --git a/node_modules/@smithy/middleware-retry/dist-types/ts3.4/defaultRetryQuota.d.ts b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/defaultRetryQuota.d.ts new file mode 100644 index 00000000..704b5afe --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/defaultRetryQuota.d.ts @@ -0,0 +1,24 @@ +import { RetryQuota } from "./types"; +/** + * @internal + */ +export interface DefaultRetryQuotaOptions { + /** + * The total amount of retry tokens to be incremented to the retry token balance + * if an SDK operation invocation succeeds without requiring a retry request. + */ + noRetryIncrement?: number; + /** + * The total amount of retry tokens to be decremented from retry token balance. + */ + retryCost?: number; + /** + * The total amount of retry tokens to be decremented from retry token balance + * when a timeout error is encountered.
+ */ + timeoutRetryCost?: number; +} +/** + * @internal + */ +export declare const getDefaultRetryQuota: (initialRetryTokens: number, options?: DefaultRetryQuotaOptions) => RetryQuota; diff --git a/node_modules/@smithy/middleware-retry/dist-types/ts3.4/delayDecider.d.ts b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/delayDecider.d.ts new file mode 100644 index 00000000..7fa73ec0 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/delayDecider.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Calculate a capped, fully-jittered exponential backoff time. + */ +export declare const defaultDelayDecider: (delayBase: number, attempts: number) => number; diff --git a/node_modules/@smithy/middleware-retry/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..e366bbbd --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/index.d.ts @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./StandardRetryStrategy"; +export * from "./configurations"; +export * from "./delayDecider"; +export * from "./omitRetryHeadersMiddleware"; +export * from "./retryDecider"; +export * from "./retryMiddleware"; diff --git a/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.browser.d.ts b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.browser.d.ts new file mode 100644 index 00000000..2a4d542b --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.browser.d.ts @@ -0,0 +1,5 @@ +import { HttpRequest } from "@smithy/protocol-http"; +/** + * @internal + */ +export declare const isStreamingPayload: (request: HttpRequest) => boolean; diff --git a/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.d.ts b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.d.ts new file mode 100644 index 00000000..2a4d542b --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.d.ts @@ -0,0 +1,5 @@ +import { HttpRequest } from "@smithy/protocol-http"; +/** + * @internal + */ +export declare const isStreamingPayload: (request: HttpRequest) => boolean; diff --git a/node_modules/@smithy/middleware-retry/dist-types/ts3.4/omitRetryHeadersMiddleware.d.ts b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/omitRetryHeadersMiddleware.d.ts new file mode 100644 index 00000000..abd8f718 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/omitRetryHeadersMiddleware.d.ts @@ -0,0 +1,13 @@ +import { FinalizeHandler, MetadataBearer, Pluggable, RelativeMiddlewareOptions } from "@smithy/types"; +/** + * @internal + */ +export declare const omitRetryHeadersMiddleware: () => <Output extends MetadataBearer = MetadataBearer>(next: FinalizeHandler<any, Output>) => FinalizeHandler<any, Output>; +/** + * @internal + */ +export declare const omitRetryHeadersMiddlewareOptions: RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getOmitRetryHeadersPlugin: (options: unknown) => Pluggable<any, any>; diff --git a/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryDecider.d.ts b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryDecider.d.ts new file mode 100644 index 00000000..c00661a5 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryDecider.d.ts @@ -0,0 +1,6 @@ +import { SdkError } from "@smithy/types"; +/** + * @internal + *
@deprecated this is only used in the deprecated StandardRetryStrategy. Do not use in new code. + */ +export declare const defaultRetryDecider: (error: SdkError) => boolean; diff --git a/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryMiddleware.d.ts b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryMiddleware.d.ts new file mode 100644 index 00000000..137dbf1c --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryMiddleware.d.ts @@ -0,0 +1,18 @@ +import { AbsoluteLocation, FinalizeHandler, FinalizeRequestHandlerOptions, HandlerExecutionContext, MetadataBearer, Pluggable } from "@smithy/types"; +import { RetryResolvedConfig } from "./configurations"; +/** + * @internal + */ +export declare const retryMiddleware: (options: RetryResolvedConfig) => (next: FinalizeHandler, context: HandlerExecutionContext) => FinalizeHandler; +/** + * @internal + */ +export declare const retryMiddlewareOptions: FinalizeRequestHandlerOptions & AbsoluteLocation; +/** + * @internal + */ +export declare const getRetryPlugin: (options: RetryResolvedConfig) => Pluggable; +/** + * @internal + */ +export declare const getRetryAfterHint: (response: unknown) => Date | undefined; diff --git a/node_modules/@smithy/middleware-retry/dist-types/ts3.4/types.d.ts b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/types.d.ts new file mode 100644 index 00000000..06775c63 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/types.d.ts @@ -0,0 +1,65 @@ +import { SdkError } from "@smithy/types"; +/** + * Determines whether an error is retryable based on the number of retries + * already attempted, the HTTP status code, and the error received (if any). + * + * @param error - The error encountered. + * + * @deprecated + * @internal + */ +export interface RetryDecider { + (error: SdkError): boolean; +} +/** + * Determines the number of milliseconds to wait before retrying an action. + * + * @param delayBase - The base delay (in milliseconds). + * @param attempts - The number of times the action has already been tried. + * + * @deprecated + * @internal + */ +export interface DelayDecider { + (delayBase: number, attempts: number): number; +} +/** + * Interface that specifies the retry quota behavior. + * @deprecated + * @internal + */ +export interface RetryQuota { + /** + * returns true if retry tokens are available from the retry quota bucket. + */ + hasRetryTokens: (error: SdkError) => boolean; + /** + * returns token amount from the retry quota bucket. + * throws error is retry tokens are not available. + */ + retrieveRetryTokens: (error: SdkError) => number; + /** + * releases tokens back to the retry quota. + */ + releaseRetryTokens: (releaseCapacityAmount?: number) => void; +} +/** + * @deprecated + * @internal + */ +export interface RateLimiter { + /** + * If there is sufficient capacity (tokens) available, it immediately returns. + * If there is not sufficient capacity, it will either sleep a certain amount + * of time until the rate limiter can retrieve a token from its token bucket + * or raise an exception indicating there is insufficient capacity. + */ + getSendToken: () => Promise; + /** + * Updates the client sending rate based on response. + * If the response was successful, the capacity and fill rate are increased. + * If the response was a throttling response, the capacity and fill rate are + * decreased. Transient errors do not affect the rate limiter. 
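The `defaultDelayDecider` declared earlier computes a capped, fully-jittered exponential backoff, and the `DelayDecider` interface above is its contract. A minimal sketch of that formula (the cap constant is an assumption for illustration, not the package's exported value):

```typescript
// Full jitter: draw uniformly from [0, min(cap, delayBase * 2^attempts)).
const MAXIMUM_RETRY_DELAY_MS = 20_000; // assumed cap, for illustration only

const delayDecider = (delayBase: number, attempts: number): number =>
  Math.floor(Math.random() * Math.min(MAXIMUM_RETRY_DELAY_MS, delayBase * 2 ** attempts));

// With delayBase = 100 ms: attempt 1 draws from [0, 200), attempt 3 from [0, 800).
```

Full jitter trades a predictable delay for decorrelated retries, which avoids synchronized "thundering herd" retry storms across many clients.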
+ */ + updateClientSendingRate: (response: any) => void; +} diff --git a/node_modules/@smithy/middleware-retry/dist-types/ts3.4/util.d.ts b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/util.d.ts new file mode 100644 index 00000000..7684a9fe --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/ts3.4/util.d.ts @@ -0,0 +1,2 @@ +import { SdkError } from "@smithy/types"; +export declare const asSdkError: (error: unknown) => SdkError; diff --git a/node_modules/@smithy/middleware-retry/dist-types/types.d.ts b/node_modules/@smithy/middleware-retry/dist-types/types.d.ts new file mode 100644 index 00000000..8f227127 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/types.d.ts @@ -0,0 +1,65 @@ +import { SdkError } from "@smithy/types"; +/** + * Determines whether an error is retryable based on the number of retries + * already attempted, the HTTP status code, and the error received (if any). + * + * @param error - The error encountered. + * + * @deprecated + * @internal + */ +export interface RetryDecider { + (error: SdkError): boolean; +} +/** + * Determines the number of milliseconds to wait before retrying an action. + * + * @param delayBase - The base delay (in milliseconds). + * @param attempts - The number of times the action has already been tried. + * + * @deprecated + * @internal + */ +export interface DelayDecider { + (delayBase: number, attempts: number): number; +} +/** + * Interface that specifies the retry quota behavior. + * @deprecated + * @internal + */ +export interface RetryQuota { + /** + * returns true if retry tokens are available from the retry quota bucket. + */ + hasRetryTokens: (error: SdkError) => boolean; + /** + * returns token amount from the retry quota bucket. + * throws error is retry tokens are not available. + */ + retrieveRetryTokens: (error: SdkError) => number; + /** + * releases tokens back to the retry quota. + */ + releaseRetryTokens: (releaseCapacityAmount?: number) => void; +} +/** + * @deprecated + * @internal + */ +export interface RateLimiter { + /** + * If there is sufficient capacity (tokens) available, it immediately returns. + * If there is not sufficient capacity, it will either sleep a certain amount + * of time until the rate limiter can retrieve a token from its token bucket + * or raise an exception indicating there is insufficient capacity. + */ + getSendToken: () => Promise; + /** + * Updates the client sending rate based on response. + * If the response was successful, the capacity and fill rate are increased. + * If the response was a throttling response, the capacity and fill rate are + * decreased. Transient errors do not affect the rate limiter. 
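The `RateLimiter` contract above (duplicated in the non-downleveled `dist-types/types.d.ts` that follows) is essentially a token bucket whose fill rate adapts to throttling feedback. A self-contained sketch of those semantics — every internal detail here is an illustrative assumption, not the package's adaptive implementation:

```typescript
// Minimal token bucket honoring the RateLimiter contract sketched above.
class SimpleRateLimiter {
  private tokens: number;
  private lastRefill = Date.now();

  constructor(private capacity = 10, private fillRatePerSec = 1) {
    this.tokens = capacity;
  }

  private refill(): void {
    const now = Date.now();
    this.tokens = Math.min(
      this.capacity,
      this.tokens + ((now - this.lastRefill) / 1000) * this.fillRatePerSec
    );
    this.lastRefill = now;
  }

  // Resolves immediately if a token is available; otherwise sleeps until
  // the bucket refills enough to retrieve one.
  async getSendToken(): Promise<void> {
    this.refill();
    while (this.tokens < 1) {
      await new Promise((resolve) => setTimeout(resolve, 100));
      this.refill();
    }
    this.tokens -= 1;
  }

  // Throttling responses shrink the send rate; successes grow it slowly.
  // Transient errors would simply not call this method.
  updateClientSendingRate(response: { statusCode?: number }): void {
    if (response.statusCode === 429) {
      this.fillRatePerSec = Math.max(0.5, this.fillRatePerSec / 2);
    } else if ((response.statusCode ?? 500) < 300) {
      this.fillRatePerSec = Math.min(this.fillRatePerSec + 0.1, 100);
    }
  }
}
```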
+ */ + updateClientSendingRate: (response: any) => void; +} diff --git a/node_modules/@smithy/middleware-retry/dist-types/util.d.ts b/node_modules/@smithy/middleware-retry/dist-types/util.d.ts new file mode 100644 index 00000000..00939b8e --- /dev/null +++ b/node_modules/@smithy/middleware-retry/dist-types/util.d.ts @@ -0,0 +1,2 @@ +import { SdkError } from "@smithy/types"; +export declare const asSdkError: (error: unknown) => SdkError; diff --git a/node_modules/@smithy/middleware-retry/node_modules/.bin/uuid b/node_modules/@smithy/middleware-retry/node_modules/.bin/uuid new file mode 120000 index 00000000..588f70ec --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/.bin/uuid @@ -0,0 +1 @@ +../uuid/dist/bin/uuid \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/CHANGELOG.md b/node_modules/@smithy/middleware-retry/node_modules/uuid/CHANGELOG.md new file mode 100644 index 00000000..0412ad8a --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/CHANGELOG.md @@ -0,0 +1,274 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +## [9.0.1](https://github.com/uuidjs/uuid/compare/v9.0.0...v9.0.1) (2023-09-12) + +### build + +- Fix CI to work with Node.js 20.x + +## [9.0.0](https://github.com/uuidjs/uuid/compare/v8.3.2...v9.0.0) (2022-09-05) + +### ⚠ BREAKING CHANGES + +- Drop Node.js 10.x support. This library always aims at supporting one EOLed LTS release which by this time now is 12.x which has reached EOL 30 Apr 2022. + +- Remove the minified UMD build from the package. + + Minified code is hard to audit and since this is a widely used library it seems more appropriate nowadays to optimize for auditability than to ship a legacy module format that, at best, serves educational purposes nowadays. + + For production browser use cases, users should be using a bundler. For educational purposes, today's online sandboxes like replit.com offer convenient ways to load npm modules, so the use case for UMD through repos like UNPKG or jsDelivr has largely vanished. + +- Drop IE 11 and Safari 10 support. Drop support for browsers that don't correctly implement const/let and default arguments, and no longer transpile the browser build to ES2015. + + This also removes the fallback on msCrypto instead of the crypto API. + + Browser tests are run in the first supported version of each supported browser and in the latest (as of this commit) version available on Browserstack. 
+ +### Features + +- optimize uuid.v1 by 1.3x uuid.v4 by 4.3x (430%) ([#597](https://github.com/uuidjs/uuid/issues/597)) ([3a033f6](https://github.com/uuidjs/uuid/commit/3a033f6bab6bb3780ece6d645b902548043280bc)) +- remove UMD build ([#645](https://github.com/uuidjs/uuid/issues/645)) ([e948a0f](https://github.com/uuidjs/uuid/commit/e948a0f22bf22f4619b27bd913885e478e20fe6f)), closes [#620](https://github.com/uuidjs/uuid/issues/620) +- use native crypto.randomUUID when available ([#600](https://github.com/uuidjs/uuid/issues/600)) ([c9e076c](https://github.com/uuidjs/uuid/commit/c9e076c852edad7e9a06baaa1d148cf4eda6c6c4)) + +### Bug Fixes + +- add Jest/jsdom compatibility ([#642](https://github.com/uuidjs/uuid/issues/642)) ([16f9c46](https://github.com/uuidjs/uuid/commit/16f9c469edf46f0786164cdf4dc980743984a6fd)) +- change default export to named function ([#545](https://github.com/uuidjs/uuid/issues/545)) ([c57bc5a](https://github.com/uuidjs/uuid/commit/c57bc5a9a0653273aa639cda9177ce52efabe42a)) +- handle error when parameter is not set in v3 and v5 ([#622](https://github.com/uuidjs/uuid/issues/622)) ([fcd7388](https://github.com/uuidjs/uuid/commit/fcd73881692d9fabb63872576ba28e30ff852091)) +- run npm audit fix ([#644](https://github.com/uuidjs/uuid/issues/644)) ([04686f5](https://github.com/uuidjs/uuid/commit/04686f54c5fed2cfffc1b619f4970c4bb8532353)) +- upgrading from uuid3 broken link ([#568](https://github.com/uuidjs/uuid/issues/568)) ([1c849da](https://github.com/uuidjs/uuid/commit/1c849da6e164259e72e18636726345b13a7eddd6)) + +### build + +- drop Node.js 8.x from babel transpile target ([#603](https://github.com/uuidjs/uuid/issues/603)) ([aa11485](https://github.com/uuidjs/uuid/commit/aa114858260402107ec8a1e1a825dea0a259bcb5)) +- drop support for legacy browsers (IE11, Safari 10) ([#604](https://github.com/uuidjs/uuid/issues/604)) ([0f433e5](https://github.com/uuidjs/uuid/commit/0f433e5ec444edacd53016de67db021102f36148)) + +- drop node 10.x to upgrade dev dependencies ([#653](https://github.com/uuidjs/uuid/issues/653)) ([28a5712](https://github.com/uuidjs/uuid/commit/28a571283f8abda6b9d85e689f95b7d3ee9e282e)), closes [#643](https://github.com/uuidjs/uuid/issues/643) + +### [8.3.2](https://github.com/uuidjs/uuid/compare/v8.3.1...v8.3.2) (2020-12-08) + +### Bug Fixes + +- lazy load getRandomValues ([#537](https://github.com/uuidjs/uuid/issues/537)) ([16c8f6d](https://github.com/uuidjs/uuid/commit/16c8f6df2f6b09b4d6235602d6a591188320a82e)), closes [#536](https://github.com/uuidjs/uuid/issues/536) + +### [8.3.1](https://github.com/uuidjs/uuid/compare/v8.3.0...v8.3.1) (2020-10-04) + +### Bug Fixes + +- support expo>=39.0.0 ([#515](https://github.com/uuidjs/uuid/issues/515)) ([c65a0f3](https://github.com/uuidjs/uuid/commit/c65a0f3fa73b901959d638d1e3591dfacdbed867)), closes [#375](https://github.com/uuidjs/uuid/issues/375) + +## [8.3.0](https://github.com/uuidjs/uuid/compare/v8.2.0...v8.3.0) (2020-07-27) + +### Features + +- add parse/stringify/validate/version/NIL APIs ([#479](https://github.com/uuidjs/uuid/issues/479)) ([0e6c10b](https://github.com/uuidjs/uuid/commit/0e6c10ba1bf9517796ff23c052fc0468eedfd5f4)), closes [#475](https://github.com/uuidjs/uuid/issues/475) [#478](https://github.com/uuidjs/uuid/issues/478) [#480](https://github.com/uuidjs/uuid/issues/480) [#481](https://github.com/uuidjs/uuid/issues/481) [#180](https://github.com/uuidjs/uuid/issues/180) + +## [8.2.0](https://github.com/uuidjs/uuid/compare/v8.1.0...v8.2.0) (2020-06-23) + +### Features + +- improve performance of v1 
string representation ([#453](https://github.com/uuidjs/uuid/issues/453)) ([0ee0b67](https://github.com/uuidjs/uuid/commit/0ee0b67c37846529c66089880414d29f3ae132d5)) +- remove deprecated v4 string parameter ([#454](https://github.com/uuidjs/uuid/issues/454)) ([88ce3ca](https://github.com/uuidjs/uuid/commit/88ce3ca0ba046f60856de62c7ce03f7ba98ba46c)), closes [#437](https://github.com/uuidjs/uuid/issues/437) +- support jspm ([#473](https://github.com/uuidjs/uuid/issues/473)) ([e9f2587](https://github.com/uuidjs/uuid/commit/e9f2587a92575cac31bc1d4ae944e17c09756659)) + +### Bug Fixes + +- prepare package exports for webpack 5 ([#468](https://github.com/uuidjs/uuid/issues/468)) ([8d6e6a5](https://github.com/uuidjs/uuid/commit/8d6e6a5f8965ca9575eb4d92e99a43435f4a58a8)) + +## [8.1.0](https://github.com/uuidjs/uuid/compare/v8.0.0...v8.1.0) (2020-05-20) + +### Features + +- improve v4 performance by reusing random number array ([#435](https://github.com/uuidjs/uuid/issues/435)) ([bf4af0d](https://github.com/uuidjs/uuid/commit/bf4af0d711b4d2ed03d1f74fd12ad0baa87dc79d)) +- optimize V8 performance of bytesToUuid ([#434](https://github.com/uuidjs/uuid/issues/434)) ([e156415](https://github.com/uuidjs/uuid/commit/e156415448ec1af2351fa0b6660cfb22581971f2)) + +### Bug Fixes + +- export package.json required by react-native and bundlers ([#449](https://github.com/uuidjs/uuid/issues/449)) ([be1c8fe](https://github.com/uuidjs/uuid/commit/be1c8fe9a3206c358e0059b52fafd7213aa48a52)), closes [ai/nanoevents#44](https://github.com/ai/nanoevents/issues/44#issuecomment-602010343) [#444](https://github.com/uuidjs/uuid/issues/444) + +## [8.0.0](https://github.com/uuidjs/uuid/compare/v7.0.3...v8.0.0) (2020-04-29) + +### ⚠ BREAKING CHANGES + +- For native ECMAScript Module (ESM) usage in Node.js only named exports are exposed, there is no more default export. + + ```diff + -import uuid from 'uuid'; + -console.log(uuid.v4()); // -> 'cd6c3b08-0adc-4f4b-a6ef-36087a1c9869' + +import { v4 as uuidv4 } from 'uuid'; + +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d' + ``` + +- Deep requiring specific algorithms of this library like `require('uuid/v4')`, which has been deprecated in `uuid@7`, is no longer supported. + + Instead use the named exports that this module exports. 
+ + For ECMAScript Modules (ESM): + + ```diff + -import uuidv4 from 'uuid/v4'; + +import { v4 as uuidv4 } from 'uuid'; + uuidv4(); + ``` + + For CommonJS: + + ```diff + -const uuidv4 = require('uuid/v4'); + +const { v4: uuidv4 } = require('uuid'); + uuidv4(); + ``` + +### Features + +- native Node.js ES Modules (wrapper approach) ([#423](https://github.com/uuidjs/uuid/issues/423)) ([2d9f590](https://github.com/uuidjs/uuid/commit/2d9f590ad9701d692625c07ed62f0a0f91227991)), closes [#245](https://github.com/uuidjs/uuid/issues/245) [#419](https://github.com/uuidjs/uuid/issues/419) [#342](https://github.com/uuidjs/uuid/issues/342) +- remove deep requires ([#426](https://github.com/uuidjs/uuid/issues/426)) ([daf72b8](https://github.com/uuidjs/uuid/commit/daf72b84ceb20272a81bb5fbddb05dd95922cbba)) + +### Bug Fixes + +- add CommonJS syntax example to README quickstart section ([#417](https://github.com/uuidjs/uuid/issues/417)) ([e0ec840](https://github.com/uuidjs/uuid/commit/e0ec8402c7ad44b7ef0453036c612f5db513fda0)) + +### [7.0.3](https://github.com/uuidjs/uuid/compare/v7.0.2...v7.0.3) (2020-03-31) + +### Bug Fixes + +- make deep require deprecation warning work in browsers ([#409](https://github.com/uuidjs/uuid/issues/409)) ([4b71107](https://github.com/uuidjs/uuid/commit/4b71107d8c0d2ef56861ede6403fc9dc35a1e6bf)), closes [#408](https://github.com/uuidjs/uuid/issues/408) + +### [7.0.2](https://github.com/uuidjs/uuid/compare/v7.0.1...v7.0.2) (2020-03-04) + +### Bug Fixes + +- make access to msCrypto consistent ([#393](https://github.com/uuidjs/uuid/issues/393)) ([8bf2a20](https://github.com/uuidjs/uuid/commit/8bf2a20f3565df743da7215eebdbada9d2df118c)) +- simplify link in deprecation warning ([#391](https://github.com/uuidjs/uuid/issues/391)) ([bb2c8e4](https://github.com/uuidjs/uuid/commit/bb2c8e4e9f4c5f9c1eaaf3ea59710c633cd90cb7)) +- update links to match content in readme ([#386](https://github.com/uuidjs/uuid/issues/386)) ([44f2f86](https://github.com/uuidjs/uuid/commit/44f2f86e9d2bbf14ee5f0f00f72a3db1292666d4)) + +### [7.0.1](https://github.com/uuidjs/uuid/compare/v7.0.0...v7.0.1) (2020-02-25) + +### Bug Fixes + +- clean up esm builds for node and browser ([#383](https://github.com/uuidjs/uuid/issues/383)) ([59e6a49](https://github.com/uuidjs/uuid/commit/59e6a49e7ce7b3e8fb0f3ee52b9daae72af467dc)) +- provide browser versions independent from module system ([#380](https://github.com/uuidjs/uuid/issues/380)) ([4344a22](https://github.com/uuidjs/uuid/commit/4344a22e7aed33be8627eeaaf05360f256a21753)), closes [#378](https://github.com/uuidjs/uuid/issues/378) + +## [7.0.0](https://github.com/uuidjs/uuid/compare/v3.4.0...v7.0.0) (2020-02-24) + +### ⚠ BREAKING CHANGES + +- The default export, which used to be the v4() method but which was already discouraged in v3.x of this library, has been removed. +- Explicitly note that deep imports of the different uuid version functions are deprecated and no longer encouraged and that ECMAScript module named imports should be used instead. Emit a deprecation warning for people who deep-require the different algorithm variants. +- Remove builtin support for insecure random number generators in the browser. Users who want that will have to supply their own random number generator function. +- Remove support for generating v3 and v5 UUIDs in Node.js<4.x +- Convert code base to ECMAScript Modules (ESM) and release CommonJS build for node and ESM build for browser bundlers. 
+ +### Features + +- add UMD build to npm package ([#357](https://github.com/uuidjs/uuid/issues/357)) ([4e75adf](https://github.com/uuidjs/uuid/commit/4e75adf435196f28e3fbbe0185d654b5ded7ca2c)), closes [#345](https://github.com/uuidjs/uuid/issues/345) +- add various es module and CommonJS examples ([b238510](https://github.com/uuidjs/uuid/commit/b238510bf352463521f74bab175a3af9b7a42555)) +- ensure that docs are up-to-date in CI ([ee5e77d](https://github.com/uuidjs/uuid/commit/ee5e77db547474f5a8f23d6c857a6d399209986b)) +- hybrid CommonJS & ECMAScript modules build ([a3f078f](https://github.com/uuidjs/uuid/commit/a3f078faa0baff69ab41aed08e041f8f9c8993d0)) +- remove insecure fallback random number generator ([3a5842b](https://github.com/uuidjs/uuid/commit/3a5842b141a6e5de0ae338f391661e6b84b167c9)), closes [#173](https://github.com/uuidjs/uuid/issues/173) +- remove support for pre Node.js v4 Buffer API ([#356](https://github.com/uuidjs/uuid/issues/356)) ([b59b5c5](https://github.com/uuidjs/uuid/commit/b59b5c5ecad271c5453f1a156f011671f6d35627)) +- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([c37a518](https://github.com/uuidjs/uuid/commit/c37a518e367ac4b6d0aa62dba1bc6ce9e85020f7)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +### Bug Fixes + +- add deep-require proxies for local testing and adjust tests ([#365](https://github.com/uuidjs/uuid/issues/365)) ([7fedc79](https://github.com/uuidjs/uuid/commit/7fedc79ac8fda4bfd1c566c7f05ef4ac13b2db48)) +- add note about removal of default export ([#372](https://github.com/uuidjs/uuid/issues/372)) ([12749b7](https://github.com/uuidjs/uuid/commit/12749b700eb49db8a9759fd306d8be05dbfbd58c)), closes [#370](https://github.com/uuidjs/uuid/issues/370) +- deprecated deep requiring of the different algorithm versions ([#361](https://github.com/uuidjs/uuid/issues/361)) ([c0bdf15](https://github.com/uuidjs/uuid/commit/c0bdf15e417639b1aeb0b247b2fb11f7a0a26b23)) + +## [3.4.0](https://github.com/uuidjs/uuid/compare/v3.3.3...v3.4.0) (2020-01-16) + +### Features + +- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([e2d7314](https://github.com/uuidjs/uuid/commit/e2d7314)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +## [3.3.3](https://github.com/uuidjs/uuid/compare/v3.3.2...v3.3.3) (2019-08-19) + +### Bug Fixes + +- no longer run ci tests on node v4 +- upgrade dependencies + +## [3.3.2](https://github.com/uuidjs/uuid/compare/v3.3.1...v3.3.2) (2018-06-28) + +### Bug Fixes + +- typo ([305d877](https://github.com/uuidjs/uuid/commit/305d877)) + +## [3.3.1](https://github.com/uuidjs/uuid/compare/v3.3.0...v3.3.1) (2018-06-28) + +### Bug Fixes + +- fix [#284](https://github.com/uuidjs/uuid/issues/284) by setting function name in try-catch ([f2a60f2](https://github.com/uuidjs/uuid/commit/f2a60f2)) + +# [3.3.0](https://github.com/uuidjs/uuid/compare/v3.2.1...v3.3.0) (2018-06-22) + +### Bug Fixes + +- assignment to readonly property to allow running in strict mode ([#270](https://github.com/uuidjs/uuid/issues/270)) ([d062fdc](https://github.com/uuidjs/uuid/commit/d062fdc)) +- fix [#229](https://github.com/uuidjs/uuid/issues/229) ([c9684d4](https://github.com/uuidjs/uuid/commit/c9684d4)) +- Get correct version of IE11 crypto ([#274](https://github.com/uuidjs/uuid/issues/274)) ([153d331](https://github.com/uuidjs/uuid/commit/153d331)) +- mem issue when generating uuid ([#267](https://github.com/uuidjs/uuid/issues/267)) 
([c47702c](https://github.com/uuidjs/uuid/commit/c47702c)) + +### Features + +- enforce Conventional Commit style commit messages ([#282](https://github.com/uuidjs/uuid/issues/282)) ([cc9a182](https://github.com/uuidjs/uuid/commit/cc9a182)) + +## [3.2.1](https://github.com/uuidjs/uuid/compare/v3.2.0...v3.2.1) (2018-01-16) + +### Bug Fixes + +- use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + +# [3.2.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.2.0) (2018-01-16) + +### Bug Fixes + +- remove mistakenly added typescript dependency, rollback version (standard-version will auto-increment) ([09fa824](https://github.com/uuidjs/uuid/commit/09fa824)) +- use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + +### Features + +- Add v3 Support ([#217](https://github.com/uuidjs/uuid/issues/217)) ([d94f726](https://github.com/uuidjs/uuid/commit/d94f726)) + +# [3.1.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.0.1) (2017-06-17) + +### Bug Fixes + +- (fix) Add .npmignore file to exclude test/ and other non-essential files from packing. (#183) +- Fix typo (#178) +- Simple typo fix (#165) + +### Features + +- v5 support in CLI (#197) +- V5 support (#188) + +# 3.0.1 (2016-11-28) + +- split uuid versions into separate files + +# 3.0.0 (2016-11-17) + +- remove .parse and .unparse + +# 2.0.0 + +- Removed uuid.BufferClass + +# 1.4.0 + +- Improved module context detection +- Removed public RNG functions + +# 1.3.2 + +- Improve tests and handling of v1() options (Issue #24) +- Expose RNG option to allow for perf testing with different generators + +# 1.3.0 + +- Support for version 1 ids, thanks to [@ctavan](https://github.com/ctavan)! +- Support for node.js crypto API +- De-emphasizing performance in favor of a) cryptographic quality PRNGs where available and b) more manageable code diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/CONTRIBUTING.md b/node_modules/@smithy/middleware-retry/node_modules/uuid/CONTRIBUTING.md new file mode 100644 index 00000000..4a4503d0 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/CONTRIBUTING.md @@ -0,0 +1,18 @@ +# Contributing + +Please feel free to file GitHub Issues or propose Pull Requests. We're always happy to discuss improvements to this library! 
+ +## Testing + +```shell +npm test +``` + +## Releasing + +Releases are supposed to be done from master, version bumping is automated through [`standard-version`](https://github.com/conventional-changelog/standard-version): + +```shell +npm run release -- --dry-run # verify output manually +npm run release # follow the instructions from the output of this command +``` diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/LICENSE.md b/node_modules/@smithy/middleware-retry/node_modules/uuid/LICENSE.md new file mode 100644 index 00000000..39341683 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright (c) 2010-2020 Robert Kieffer and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/README.md b/node_modules/@smithy/middleware-retry/node_modules/uuid/README.md new file mode 100644 index 00000000..4f51e098 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/README.md @@ -0,0 +1,466 @@ + + + +# uuid [![CI](https://github.com/uuidjs/uuid/workflows/CI/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ACI) [![Browser](https://github.com/uuidjs/uuid/workflows/Browser/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ABrowser) + +For the creation of [RFC4122](https://www.ietf.org/rfc/rfc4122.txt) UUIDs + +- **Complete** - Support for RFC4122 version 1, 3, 4, and 5 UUIDs +- **Cross-platform** - Support for ... + - CommonJS, [ECMAScript Modules](#ecmascript-modules) and [CDN builds](#cdn-builds) + - NodeJS 12+ ([LTS releases](https://github.com/nodejs/Release)) + - Chrome, Safari, Firefox, Edge browsers + - Webpack and rollup.js module bundlers + - [React Native / Expo](#react-native--expo) +- **Secure** - Cryptographically-strong random values +- **Small** - Zero-dependency, small footprint, plays nice with "tree shaking" packagers +- **CLI** - Includes the [`uuid` command line](#command-line) utility + +> **Note** Upgrading from `uuid@3`? Your code is probably okay, but check out [Upgrading From `uuid@3`](#upgrading-from-uuid3) for details. + +> **Note** Only interested in creating a version 4 UUID? You might be able to use [`crypto.randomUUID()`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto/randomUUID), eliminating the need to install this library. + +## Quickstart + +To create a random UUID... + +**1. Install** + +```shell +npm install uuid +``` + +**2. 
Create a UUID** (ES6 module syntax) + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d' +``` + +... or using CommonJS syntax: + +```javascript +const { v4: uuidv4 } = require('uuid'); +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +For timestamp UUIDs, namespace UUIDs, and other options read on ... + +## API Summary + +| | | | +| --- | --- | --- | +| [`uuid.NIL`](#uuidnil) | The nil UUID string (all zeros) | New in `uuid@8.3` | +| [`uuid.parse()`](#uuidparsestr) | Convert UUID string to array of bytes | New in `uuid@8.3` | +| [`uuid.stringify()`](#uuidstringifyarr-offset) | Convert array of bytes to UUID string | New in `uuid@8.3` | +| [`uuid.v1()`](#uuidv1options-buffer-offset) | Create a version 1 (timestamp) UUID | | +| [`uuid.v3()`](#uuidv3name-namespace-buffer-offset) | Create a version 3 (namespace w/ MD5) UUID | | +| [`uuid.v4()`](#uuidv4options-buffer-offset) | Create a version 4 (random) UUID | | +| [`uuid.v5()`](#uuidv5name-namespace-buffer-offset) | Create a version 5 (namespace w/ SHA-1) UUID | | +| [`uuid.validate()`](#uuidvalidatestr) | Test a string to see if it is a valid UUID | New in `uuid@8.3` | +| [`uuid.version()`](#uuidversionstr) | Detect RFC version of a UUID | New in `uuid@8.3` | + +## API + +### uuid.NIL + +The nil UUID string (all zeros). + +Example: + +```javascript +import { NIL as NIL_UUID } from 'uuid'; + +NIL_UUID; // ⇨ '00000000-0000-0000-0000-000000000000' +``` + +### uuid.parse(str) + +Convert UUID string to array of bytes + +| | | +| --------- | ---------------------------------------- | +| `str` | A valid UUID `String` | +| _returns_ | `Uint8Array[16]` | +| _throws_ | `TypeError` if `str` is not a valid UUID | + +Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left ↠ right order of hex-pairs in UUID strings. As shown in the example below. + +Example: + +```javascript +import { parse as uuidParse } from 'uuid'; + +// Parse a UUID +const bytes = uuidParse('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); + +// Convert to hex strings to show byte order (for documentation purposes) +[...bytes].map((v) => v.toString(16).padStart(2, '0')); // ⇨ + // [ + // '6e', 'c0', 'bd', '7f', + // '11', 'c0', '43', 'da', + // '97', '5e', '2a', '8a', + // 'd9', 'eb', 'ae', '0b' + // ] +``` + +### uuid.stringify(arr[, offset]) + +Convert array of bytes to UUID string + +| | | +| -------------- | ---------------------------------------------------------------------------- | +| `arr` | `Array`-like collection of 16 values (starting from `offset`) between 0-255. | +| [`offset` = 0] | `Number` Starting index in the Array | +| _returns_ | `String` | +| _throws_ | `TypeError` if a valid UUID string cannot be generated | + +Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left ↠ right order of hex-pairs in UUID strings. As shown in the example below. 
+ +Example: + +```javascript +import { stringify as uuidStringify } from 'uuid'; + +const uuidBytes = [ + 0x6e, 0xc0, 0xbd, 0x7f, 0x11, 0xc0, 0x43, 0xda, 0x97, 0x5e, 0x2a, 0x8a, 0xd9, 0xeb, 0xae, 0x0b, +]; + +uuidStringify(uuidBytes); // ⇨ '6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b' +``` + +### uuid.v1([options[, buffer[, offset]]]) + +Create an RFC version 1 (timestamp) UUID + +| | | +| --- | --- | +| [`options`] | `Object` with one or more of the following properties: | +| [`options.node` ] | RFC "node" field as an `Array[6]` of byte values (per 4.1.6) | +| [`options.clockseq`] | RFC "clock sequence" as a `Number` between 0 - 0x3fff | +| [`options.msecs`] | RFC "timestamp" field (`Number` of milliseconds, unix epoch) | +| [`options.nsecs`] | RFC "timestamp" field (`Number` of nanoseconds to add to `msecs`, should be 0-10,000) | +| [`options.random`] | `Array` of 16 random bytes (0-255) | +| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | +| _throws_ | `Error` if more than 10M UUIDs/sec are requested | + +Note: The default [node id](https://tools.ietf.org/html/rfc4122#section-4.1.6) (the last 12 digits in the UUID) is generated once, randomly, on process startup, and then remains unchanged for the duration of the process. + +Note: `options.random` and `options.rng` are only meaningful on the very first call to `v1()`, where they may be passed to initialize the internal `node` and `clockseq` fields. + +Example: + +```javascript +import { v1 as uuidv1 } from 'uuid'; + +uuidv1(); // ⇨ '2c5ea4c0-4067-11e9-8bad-9b1deb4d3b7d' +``` + +Example using `options`: + +```javascript +import { v1 as uuidv1 } from 'uuid'; + +const v1options = { + node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab], + clockseq: 0x1234, + msecs: new Date('2011-11-01').getTime(), + nsecs: 5678, +}; +uuidv1(v1options); // ⇨ '710b962e-041c-11e1-9234-0123456789ab' +``` + +### uuid.v3(name, namespace[, buffer[, offset]]) + +Create an RFC version 3 (namespace w/ MD5) UUID + +API is identical to `v5()`, but uses "v3" instead. + +⚠️ Note: Per the RFC, "_If backward compatibility is not an issue, SHA-1 [Version 5] is preferred_." 
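Since the v3 section above defers entirely to the v5 docs and ships no example of its own, here is a short usage sketch (output omitted; like v5, v3 is deterministic for a given name + namespace pair):

```typescript
import { v3 as uuidv3 } from 'uuid';

// The RFC DNS and URL namespaces are exposed as v3.DNS and v3.URL,
// mirroring the v5.DNS / v5.URL noted in the v5 section below.
uuidv3('example.com', uuidv3.DNS); // same inputs always yield the same UUID
```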
+ +### uuid.v4([options[, buffer[, offset]]]) + +Create an RFC version 4 (random) UUID + +| | | +| --- | --- | +| [`options`] | `Object` with one or more of the following properties: | +| [`options.random`] | `Array` of 16 random bytes (0-255) | +| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | + +Example: + +```javascript +import { v4 as uuidv4 } from 'uuid'; + +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +Example using predefined `random` values: + +```javascript +import { v4 as uuidv4 } from 'uuid'; + +const v4options = { + random: [ + 0x10, 0x91, 0x56, 0xbe, 0xc4, 0xfb, 0xc1, 0xea, 0x71, 0xb4, 0xef, 0xe1, 0x67, 0x1c, 0x58, 0x36, + ], +}; +uuidv4(v4options); // ⇨ '109156be-c4fb-41ea-b1b4-efe1671c5836' +``` + +### uuid.v5(name, namespace[, buffer[, offset]]) + +Create an RFC version 5 (namespace w/ SHA-1) UUID + +| | | +| --- | --- | +| `name` | `String \| Array` | +| `namespace` | `String \| Array[16]` Namespace UUID | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | + +Note: The RFC `DNS` and `URL` namespaces are available as `v5.DNS` and `v5.URL`. + +Example with custom namespace: + +```javascript +import { v5 as uuidv5 } from 'uuid'; + +// Define a custom namespace. Readers, create your own using something like +// https://www.uuidgenerator.net/ +const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341'; + +uuidv5('Hello, World!', MY_NAMESPACE); // ⇨ '630eb68f-e0fa-5ecc-887a-7c7a62614681' +``` + +Example with RFC `URL` namespace: + +```javascript +import { v5 as uuidv5 } from 'uuid'; + +uuidv5('https://www.w3.org/', uuidv5.URL); // ⇨ 'c106a26a-21bb-5538-8bf2-57095d1976c1' +``` + +### uuid.validate(str) + +Test a string to see if it is a valid UUID + +| | | +| --------- | --------------------------------------------------- | +| `str` | `String` to validate | +| _returns_ | `true` if string is a valid UUID, `false` otherwise | + +Example: + +```javascript +import { validate as uuidValidate } from 'uuid'; + +uuidValidate('not a UUID'); // ⇨ false +uuidValidate('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ true +``` + +Using `validate` and `version` together it is possible to do per-version validation, e.g. validate for only v4 UUIds. 
+ +```javascript +import { version as uuidVersion } from 'uuid'; +import { validate as uuidValidate } from 'uuid'; + +function uuidValidateV4(uuid) { + return uuidValidate(uuid) && uuidVersion(uuid) === 4; +} + +const v1Uuid = 'd9428888-122b-11e1-b85c-61cd3cbb3210'; +const v4Uuid = '109156be-c4fb-41ea-b1b4-efe1671c5836'; + +uuidValidateV4(v4Uuid); // ⇨ true +uuidValidateV4(v1Uuid); // ⇨ false +``` + +### uuid.version(str) + +Detect RFC version of a UUID + +| | | +| --------- | ---------------------------------------- | +| `str` | A valid UUID `String` | +| _returns_ | `Number` The RFC version of the UUID | +| _throws_ | `TypeError` if `str` is not a valid UUID | + +Example: + +```javascript +import { version as uuidVersion } from 'uuid'; + +uuidVersion('45637ec4-c85f-11ea-87d0-0242ac130003'); // ⇨ 1 +uuidVersion('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ 4 +``` + +## Command Line + +UUIDs can be generated from the command line using `uuid`. + +```shell +$ npx uuid +ddeb27fb-d9a0-4624-be4d-4615062daed4 +``` + +The default is to generate version 4 UUIDS, however the other versions are supported. Type `uuid --help` for details: + +```shell +$ npx uuid --help + +Usage: + uuid + uuid v1 + uuid v3 + uuid v4 + uuid v5 + uuid --help + +Note: may be "URL" or "DNS" to use the corresponding UUIDs +defined by RFC4122 +``` + +## ECMAScript Modules + +This library comes with [ECMAScript Modules](https://www.ecma-international.org/ecma-262/6.0/#sec-modules) (ESM) support for Node.js versions that support it ([example](./examples/node-esmodules/)) as well as bundlers like [rollup.js](https://rollupjs.org/guide/en/#tree-shaking) ([example](./examples/browser-rollup/)) and [webpack](https://webpack.js.org/guides/tree-shaking/) ([example](./examples/browser-webpack/)) (targeting both, Node.js and browser environments). + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +To run the examples you must first create a dist build of this library in the module root: + +```shell +npm run build +``` + +## CDN Builds + +### ECMAScript Modules + +To load this module directly into modern browsers that [support loading ECMAScript Modules](https://caniuse.com/#feat=es6-module) you can make use of [jspm](https://jspm.org/): + +```html + +``` + +### UMD + +As of `uuid@9` [UMD (Universal Module Definition)](https://github.com/umdjs/umd) builds are no longer shipped with this library. + +If you need a UMD build of this library, use a bundler like Webpack or Rollup. Alternatively, refer to the documentation of [`uuid@8.3.2`](https://github.com/uuidjs/uuid/blob/v8.3.2/README.md#umd) which was the last version that shipped UMD builds. + +## Known issues + +### Duplicate UUIDs (Googlebot) + +This module may generate duplicate UUIDs when run in clients with _deterministic_ random number generators, such as [Googlebot crawlers](https://developers.google.com/search/docs/advanced/crawling/overview-google-crawlers). This can cause problems for apps that expect client-generated UUIDs to always be unique. Developers should be prepared for this and have a strategy for dealing with possible collisions, such as: + +- Check for duplicate UUIDs, fail gracefully +- Disable write operations for Googlebot clients + +### "getRandomValues() not supported" + +This error occurs in environments where the standard [`crypto.getRandomValues()`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto/getRandomValues) API is not supported. 
This issue can be resolved by adding an appropriate polyfill: + +### React Native / Expo + +1. Install [`react-native-get-random-values`](https://github.com/LinusU/react-native-get-random-values#readme) +1. Import it _before_ `uuid`. Since `uuid` might also appear as a transitive dependency of some other imports it's safest to just import `react-native-get-random-values` as the very first thing in your entry point: + +```javascript +import 'react-native-get-random-values'; +import { v4 as uuidv4 } from 'uuid'; +``` + +Note: If you are using Expo, you must be using at least `react-native-get-random-values@1.5.0` and `expo@39.0.0`. + +### Web Workers / Service Workers (Edge <= 18) + +[In Edge <= 18, Web Crypto is not supported in Web Workers or Service Workers](https://caniuse.com/#feat=cryptography) and we are not aware of a polyfill (let us know if you find one, please). + +### IE 11 (Internet Explorer) + +Support for IE11 and other legacy browsers has been dropped as of `uuid@9`. If you need to support legacy browsers, you can always transpile the uuid module source yourself (e.g. using [Babel](https://babeljs.io/)). + +## Upgrading From `uuid@7` + +### Only Named Exports Supported When Using with Node.js ESM + +`uuid@7` did not come with native ECMAScript Module (ESM) support for Node.js. Importing it in Node.js ESM consequently imported the CommonJS source with a default export. This library now comes with true Node.js ESM support and only provides named exports. + +Instead of doing: + +```javascript +import uuid from 'uuid'; +uuid.v4(); +``` + +you will now have to use the named exports: + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); +``` + +### Deep Requires No Longer Supported + +Deep requires like `require('uuid/v4')` [which have been deprecated in `uuid@7`](#deep-requires-now-deprecated) are no longer supported. + +## Upgrading From `uuid@3` + +"_Wait... what happened to `uuid@4` thru `uuid@6`?!?_" + +In order to avoid confusion with RFC [version 4](#uuidv4options-buffer-offset) and [version 5](#uuidv5name-namespace-buffer-offset) UUIDs, and a possible [version 6](http://gh.peabody.io/uuidv6/), releases 4 thru 6 of this module have been skipped. + +### Deep Requires Now Deprecated + +`uuid@3` encouraged the use of deep requires to minimize the bundle size of browser builds: + +```javascript +const uuidv4 = require('uuid/v4'); // <== NOW DEPRECATED! +uuidv4(); +``` + +As of `uuid@7` this library now provides ECMAScript modules builds, which allow packagers like Webpack and Rollup to do "tree-shaking" to remove dead code. Instead, use the `import` syntax: + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); +``` + +... or for CommonJS: + +```javascript +const { v4: uuidv4 } = require('uuid'); +uuidv4(); +``` + +### Default Export Removed + +`uuid@3` was exporting the Version 4 UUID method as a default export: + +```javascript +const uuid = require('uuid'); // <== REMOVED! +``` + +This usage pattern was already discouraged in `uuid@3` and has been removed in `uuid@7`. 
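As the note near the top of this README suggests, applications that only ever need random (version 4) IDs can often skip the library entirely on modern runtimes; a tiny sketch, with runtime availability being the only assumption:

```typescript
// crypto.randomUUID() is available natively in recent browsers and in
// Node.js (via require('crypto') since 14.17, and on the global crypto
// object in current releases). It only covers the v4 use case.
const id = crypto.randomUUID(); // e.g. '36b8f84d-df4e-4d49-b662-bcde71a8764f'
```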
+ +--- + +Markdown generated from [README_js.md](README_js.md) by diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/bin/uuid b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/bin/uuid new file mode 100755 index 00000000..f38d2ee1 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/bin/uuid @@ -0,0 +1,2 @@ +#!/usr/bin/env node +require('../uuid-bin'); diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/index.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/index.js new file mode 100644 index 00000000..5586dd3d --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/index.js @@ -0,0 +1,79 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "NIL", { + enumerable: true, + get: function get() { + return _nil.default; + } +}); +Object.defineProperty(exports, "parse", { + enumerable: true, + get: function get() { + return _parse.default; + } +}); +Object.defineProperty(exports, "stringify", { + enumerable: true, + get: function get() { + return _stringify.default; + } +}); +Object.defineProperty(exports, "v1", { + enumerable: true, + get: function get() { + return _v.default; + } +}); +Object.defineProperty(exports, "v3", { + enumerable: true, + get: function get() { + return _v2.default; + } +}); +Object.defineProperty(exports, "v4", { + enumerable: true, + get: function get() { + return _v3.default; + } +}); +Object.defineProperty(exports, "v5", { + enumerable: true, + get: function get() { + return _v4.default; + } +}); +Object.defineProperty(exports, "validate", { + enumerable: true, + get: function get() { + return _validate.default; + } +}); +Object.defineProperty(exports, "version", { + enumerable: true, + get: function get() { + return _version.default; + } +}); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +var _nil = _interopRequireDefault(require("./nil.js")); + +var _version = _interopRequireDefault(require("./version.js")); + +var _validate = _interopRequireDefault(require("./validate.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/md5.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/md5.js new file mode 100644 index 00000000..7a4582ac --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/md5.js @@ -0,0 +1,223 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. 
+ * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. + */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. + */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, -378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = 
md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. + */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. 
+ */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/native.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/native.js new file mode 100644 index 00000000..c2eea59d --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/native.js @@ -0,0 +1,11 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto); +var _default = { + randomUUID +}; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/nil.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/nil.js new file mode 100644 index 00000000..7ade577b --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/nil.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/parse.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/parse.js new file mode 100644 index 00000000..4c69fc39 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/parse.js @@ -0,0 +1,45 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/regex.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/regex.js new file mode 100644 index 00000000..1ef91d64 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/regex.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/rng.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/rng.js new file mode 100644 index 00000000..d067cdb0 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/rng.js @@ -0,0 +1,25 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). +let getRandomValues; +const rnds8 = new Uint8Array(16); + +function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. 
See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/sha1.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/sha1.js new file mode 100644 index 00000000..24cbcedc --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/sha1.js @@ -0,0 +1,104 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + const M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/stringify.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/stringify.js new file mode 100644 index 00000000..390bf891 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/stringify.js @@ -0,0 +1,44 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; 
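+ // Usage sketch (added for clarity; illustrative only): stringify(parse(u))
+ // round-trips a canonical UUID string, while unsafeStringify produces the
+ // same string but skips the final validate() consistency check.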
+exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/v1.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/v1.js new file mode 100644 index 00000000..125bc58f --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/v1.js @@ -0,0 +1,107 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. 
See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/v3.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/v3.js new file mode 100644 index 00000000..6b47ff51 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/v3.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _md = _interopRequireDefault(require("./md5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/v35.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/v35.js new file mode 100644 index 00000000..7c522d97 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/v35.js @@ -0,0 +1,80 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.URL = exports.DNS = void 0; +exports.default = v35; + +var _stringify = require("./stringify.js"); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/v4.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/v4.js new file mode 100644 index 00000000..959d6986 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/v4.js @@ -0,0 +1,43 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _native = _interopRequireDefault(require("./native.js")); + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/v5.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/v5.js new file mode 100644 index 00000000..99d615e0 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/v5.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _sha = _interopRequireDefault(require("./sha1.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/validate.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/validate.js new file mode 100644 index 00000000..fd052157 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/validate.js @@ -0,0 +1,17 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _regex = _interopRequireDefault(require("./regex.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/version.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/version.js new file mode 100644 index 00000000..f63af01a --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/commonjs-browser/version.js @@ -0,0 +1,21 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/index.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/index.js new file mode 100644 index 00000000..1db6f6d2 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/index.js @@ -0,0 +1,9 @@ +export { default as v1 } from './v1.js'; +export { default as v3 } from './v3.js'; +export { default as v4 } from './v4.js'; +export { default as v5 } from './v5.js'; +export { default as NIL } from './nil.js'; +export { default as version } from './version.js'; +export { default as validate } from './validate.js'; +export { default as stringify } from './stringify.js'; +export { default as parse } from './parse.js'; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/md5.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/md5.js new file mode 100644 index 00000000..f12212ea --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/md5.js @@ -0,0 +1,215 @@ +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. + */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. 
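+ * (Orientation note, added for clarity: x[] holds little-endian 32-bit
+ * words and len is the message length in bits; the function first appends
+ * the 0x80 padding bit and the bit length, per RFC 1321.)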
+ */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, -378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, 
a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. + */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +export default md5; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/native.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/native.js new file mode 100644 index 00000000..b22292cd --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/native.js @@ -0,0 +1,4 @@ +const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto); +export default { + randomUUID +}; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/nil.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/nil.js new file mode 100644 index 00000000..b36324c2 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/nil.js @@ -0,0 +1 @@ +export default '00000000-0000-0000-0000-000000000000'; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/parse.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/parse.js new file mode 100644 index 00000000..6421c5d5 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/parse.js @@ -0,0 +1,35 @@ +import validate from './validate.js'; + +function parse(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ 
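+ // Note (added for clarity): each 4-hex-digit group is parsed as a single
+ // 16-bit value, then split into two bytes via '>>> 8' and '& 0xff'.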
+ + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +export default parse; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/regex.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/regex.js new file mode 100644 index 00000000..3da8673a --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/regex.js @@ -0,0 +1 @@ +export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/rng.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/rng.js new file mode 100644 index 00000000..6e652346 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/rng.js @@ -0,0 +1,18 @@ +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). +let getRandomValues; +const rnds8 = new Uint8Array(16); +export default function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. 
See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/sha1.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/sha1.js new file mode 100644 index 00000000..d3c25659 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/sha1.js @@ -0,0 +1,96 @@ +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + const M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +export default sha1; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/stringify.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/stringify.js new file mode 100644 index 00000000..a6e4c886 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/stringify.js @@ -0,0 +1,33 @@ +import validate from './validate.js'; +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +export function 
unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +export default stringify; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/v1.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/v1.js new file mode 100644 index 00000000..382e5d79 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/v1.js @@ -0,0 +1,95 @@ +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; // **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || rng)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || unsafeStringify(b); +} + +export default v1; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/v3.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/v3.js new file mode 100644 index 00000000..09063b86 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/v3.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import md5 from './md5.js'; +const v3 = v35('v3', 0x30, md5); +export default v3; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/v35.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/v35.js new file mode 100644 index 00000000..3355e1f5 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/v35.js @@ -0,0 +1,66 @@ +import { unsafeStringify } from './stringify.js'; +import parse from './parse.js'; + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +export const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +export const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +export default function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = parse(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return unsafeStringify(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/v4.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/v4.js new file mode 100644 index 00000000..95ea8799 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/v4.js @@ -0,0 +1,29 @@ +import native from './native.js'; +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; + +function v4(options, buf, offset) { + if (native.randomUUID && !buf && !options) { + return native.randomUUID(); + } + + options = options || {}; + const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return unsafeStringify(rnds); +} + +export default v4; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/v5.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/v5.js new file mode 100644 index 00000000..e87fe317 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/v5.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import sha1 from './sha1.js'; +const v5 = v35('v5', 0x50, sha1); +export default v5; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/validate.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/validate.js new file mode 100644 index 00000000..f1cdc7af --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/validate.js @@ -0,0 +1,7 @@ +import REGEX from './regex.js'; + +function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); +} + +export default validate; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/version.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/version.js new file mode 100644 index 00000000..93630763 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-browser/version.js @@ -0,0 +1,11 @@ +import validate from './validate.js'; + +function version(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +export default version; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/index.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/index.js new file mode 100644 index 00000000..1db6f6d2 --- 
/dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/index.js @@ -0,0 +1,9 @@ +export { default as v1 } from './v1.js'; +export { default as v3 } from './v3.js'; +export { default as v4 } from './v4.js'; +export { default as v5 } from './v5.js'; +export { default as NIL } from './nil.js'; +export { default as version } from './version.js'; +export { default as validate } from './validate.js'; +export { default as stringify } from './stringify.js'; +export { default as parse } from './parse.js'; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/md5.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/md5.js new file mode 100644 index 00000000..4d68b040 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/md5.js @@ -0,0 +1,13 @@ +import crypto from 'crypto'; + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return crypto.createHash('md5').update(bytes).digest(); +} + +export default md5; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/native.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/native.js new file mode 100644 index 00000000..f0d19926 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/native.js @@ -0,0 +1,4 @@ +import crypto from 'crypto'; +export default { + randomUUID: crypto.randomUUID +}; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/nil.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/nil.js new file mode 100644 index 00000000..b36324c2 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/nil.js @@ -0,0 +1 @@ +export default '00000000-0000-0000-0000-000000000000'; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/parse.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/parse.js new file mode 100644 index 00000000..6421c5d5 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/parse.js @@ -0,0 +1,35 @@ +import validate from './validate.js'; + +function parse(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
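+ // Note (added for clarity): the trailing 12-digit node field below is
+ // split out with '/' division because its 48 bits exceed the 32-bit
+ // range of JavaScript's shift operators.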
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +export default parse; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/regex.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/regex.js new file mode 100644 index 00000000..3da8673a --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/regex.js @@ -0,0 +1 @@ +export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/rng.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/rng.js new file mode 100644 index 00000000..80062449 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/rng.js @@ -0,0 +1,12 @@ +import crypto from 'crypto'; +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; +export default function rng() { + if (poolPtr > rnds8Pool.length - 16) { + crypto.randomFillSync(rnds8Pool); + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/sha1.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/sha1.js new file mode 100644 index 00000000..e23850b4 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/sha1.js @@ -0,0 +1,13 @@ +import crypto from 'crypto'; + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return crypto.createHash('sha1').update(bytes).digest(); +} + +export default sha1; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/stringify.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/stringify.js new file mode 100644 index 00000000..a6e4c886 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/stringify.js @@ -0,0 +1,33 @@ +import validate from './validate.js'; +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +export function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +export default stringify; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/v1.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/v1.js new file mode 100644 index 00000000..382e5d79 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/v1.js @@ -0,0 +1,95 @@ +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; // **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || rng)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || unsafeStringify(b); +} + +export default v1; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/v3.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/v3.js new file mode 100644 index 00000000..09063b86 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/v3.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import md5 from './md5.js'; +const v3 = v35('v3', 0x30, md5); +export default v3; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/v35.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/v35.js new file mode 100644 index 00000000..3355e1f5 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/v35.js @@ -0,0 +1,66 @@ +import { unsafeStringify } from './stringify.js'; +import parse from './parse.js'; + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +export const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +export const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +export default function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = parse(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return unsafeStringify(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/v4.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/v4.js new file mode 100644 index 00000000..95ea8799 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/v4.js @@ -0,0 +1,29 @@ +import native from './native.js'; +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; + +function v4(options, buf, offset) { + if (native.randomUUID && !buf && !options) { + return native.randomUUID(); + } + + options = options || {}; + const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return unsafeStringify(rnds); +} + +export default v4; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/v5.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/v5.js new file mode 100644 index 00000000..e87fe317 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/v5.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import sha1 from './sha1.js'; +const v5 = v35('v5', 0x50, sha1); +export default v5; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/validate.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/validate.js new file mode 100644 index 00000000..f1cdc7af --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/validate.js @@ -0,0 +1,7 @@ +import REGEX from './regex.js'; + +function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); +} + +export default validate; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/version.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/version.js new file mode 100644 index 00000000..93630763 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/esm-node/version.js @@ -0,0 +1,11 @@ +import validate from './validate.js'; + +function version(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +export default version; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/index.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/index.js new file mode 100644 index 00000000..88d676a2 --- /dev/null +++ 
b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/index.js @@ -0,0 +1,79 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "NIL", { + enumerable: true, + get: function () { + return _nil.default; + } +}); +Object.defineProperty(exports, "parse", { + enumerable: true, + get: function () { + return _parse.default; + } +}); +Object.defineProperty(exports, "stringify", { + enumerable: true, + get: function () { + return _stringify.default; + } +}); +Object.defineProperty(exports, "v1", { + enumerable: true, + get: function () { + return _v.default; + } +}); +Object.defineProperty(exports, "v3", { + enumerable: true, + get: function () { + return _v2.default; + } +}); +Object.defineProperty(exports, "v4", { + enumerable: true, + get: function () { + return _v3.default; + } +}); +Object.defineProperty(exports, "v5", { + enumerable: true, + get: function () { + return _v4.default; + } +}); +Object.defineProperty(exports, "validate", { + enumerable: true, + get: function () { + return _validate.default; + } +}); +Object.defineProperty(exports, "version", { + enumerable: true, + get: function () { + return _version.default; + } +}); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +var _nil = _interopRequireDefault(require("./nil.js")); + +var _version = _interopRequireDefault(require("./version.js")); + +var _validate = _interopRequireDefault(require("./validate.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/md5-browser.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/md5-browser.js new file mode 100644 index 00000000..7a4582ac --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/md5-browser.js @@ -0,0 +1,223 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. 
+ */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. + */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, -378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, 
d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. + */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/md5.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/md5.js new file mode 100644 index 00000000..824d4816 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/md5.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/native-browser.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/native-browser.js new file mode 100644 index 00000000..c2eea59d --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/native-browser.js @@ -0,0 +1,11 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto); +var _default = { + randomUUID +}; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/native.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/native.js new file mode 100644 index 00000000..de804691 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/native.js @@ -0,0 +1,15 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var _default = { + randomUUID: _crypto.default.randomUUID +}; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/nil.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/nil.js new file mode 100644 index 00000000..7ade577b --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/nil.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/parse.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/parse.js new file mode 100644 index 00000000..4c69fc39 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/parse.js @@ -0,0 +1,45 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/regex.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/regex.js new file mode 100644 index 00000000..1ef91d64 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/regex.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/rng-browser.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/rng-browser.js new file mode 100644 index 00000000..d067cdb0 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/rng-browser.js @@ -0,0 +1,25 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). +let getRandomValues; +const rnds8 = new Uint8Array(16); + +function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/rng.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/rng.js new file mode 100644 index 00000000..3507f937 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/rng.js @@ -0,0 +1,24 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/sha1-browser.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/sha1-browser.js new file mode 100644 index 00000000..24cbcedc --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/sha1-browser.js @@ -0,0 +1,104 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + const M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/sha1.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/sha1.js new file mode 100644 index 00000000..03bdd63c --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/sha1.js @@ -0,0 +1,23 @@ +"use strict"; 
+ +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/stringify.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/stringify.js new file mode 100644 index 00000000..390bf891 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/stringify.js @@ -0,0 +1,44 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/uuid-bin.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/uuid-bin.js new file mode 100644 index 00000000..50a7a9f1 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/uuid-bin.js @@ -0,0 +1,85 @@ +"use strict"; + +var _assert = _interopRequireDefault(require("assert")); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function usage() { + console.log('Usage:'); + console.log(' uuid'); + console.log(' uuid v1'); + console.log(' uuid v3 <name> <namespace uuid>'); + console.log(' uuid v4'); + console.log(' uuid v5 <name> <namespace uuid>'); + console.log(' uuid --help'); + console.log('\nNote: <namespace uuid> may be "URL" or "DNS" to use the corresponding UUIDs defined by RFC4122'); +} + +const args = process.argv.slice(2); + +if (args.indexOf('--help') >= 0) { + usage(); + process.exit(0); +} + +const version = args.shift() || 'v4'; + +switch (version) { + case 'v1': + console.log((0, _v.default)()); + break; + + case 'v3': + { + const name = args.shift(); + let namespace = args.shift(); + (0, _assert.default)(name != null, 'v3 name not specified'); + (0, _assert.default)(namespace != null, 'v3 namespace not specified'); + + if (namespace === 'URL') { + namespace = _v2.default.URL; + } + + if (namespace === 'DNS') { + namespace = _v2.default.DNS; + } + + console.log((0, _v2.default)(name, namespace)); + break; + } + + case 'v4': + console.log((0, _v3.default)()); + break; + + case 'v5': + { + const name = args.shift(); + let namespace = args.shift(); + (0, _assert.default)(name != null, 'v5 name not specified'); + (0, _assert.default)(namespace != null, 'v5 namespace not specified'); + + if (namespace === 'URL') { + namespace = _v4.default.URL; + } + + if (namespace === 'DNS') { + namespace = _v4.default.DNS; + } + + console.log((0, _v4.default)(name, namespace)); + break; + } + + default: + usage(); + process.exit(1); +} \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/v1.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/v1.js new file mode 100644 index 00000000..125bc58f --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/v1.js @@ -0,0 +1,107 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). 
JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/v3.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/v3.js new file mode 100644 index 00000000..6b47ff51 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/v3.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _md = _interopRequireDefault(require("./md5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/v35.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/v35.js new file mode 100644 index 00000000..7c522d97 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/v35.js @@ -0,0 +1,80 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.URL = exports.DNS = void 0; +exports.default = v35; + +var _stringify = require("./stringify.js"); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/v4.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/v4.js new file mode 100644 index 00000000..959d6986 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/v4.js @@ -0,0 +1,43 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _native = _interopRequireDefault(require("./native.js")); + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/v5.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/v5.js new file mode 100644 index 00000000..99d615e0 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/v5.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _sha = _interopRequireDefault(require("./sha1.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/validate.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/validate.js new file mode 100644 index 00000000..fd052157 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/validate.js @@ -0,0 +1,17 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _regex = _interopRequireDefault(require("./regex.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/version.js b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/version.js new file mode 100644 index 00000000..f63af01a --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/dist/version.js @@ -0,0 +1,21 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/package.json b/node_modules/@smithy/middleware-retry/node_modules/uuid/package.json new file mode 100644 index 00000000..6cc33618 --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/package.json @@ -0,0 +1,135 @@ +{ + "name": "uuid", + "version": "9.0.1", + "description": "RFC4122 (v1, v4, and v5) UUIDs", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "commitlint": { + "extends": [ + "@commitlint/config-conventional" + ] + }, + "keywords": [ + "uuid", + "guid", + "rfc4122" + ], + "license": "MIT", + "bin": { + "uuid": "./dist/bin/uuid" + }, + "sideEffects": false, + "main": "./dist/index.js", + "exports": { + ".": { + "node": { + "module": "./dist/esm-node/index.js", + "require": "./dist/index.js", + "import": "./wrapper.mjs" + }, + "browser": { + "import": "./dist/esm-browser/index.js", + "require": "./dist/commonjs-browser/index.js" + }, + "default": "./dist/esm-browser/index.js" + }, + "./package.json": "./package.json" + }, + "module": "./dist/esm-node/index.js", + "browser": { + "./dist/md5.js": "./dist/md5-browser.js", + "./dist/native.js": "./dist/native-browser.js", + "./dist/rng.js": "./dist/rng-browser.js", + "./dist/sha1.js": "./dist/sha1-browser.js", + "./dist/esm-node/index.js": "./dist/esm-browser/index.js" + }, + "files": [ + "CHANGELOG.md", + "CONTRIBUTING.md", + "LICENSE.md", + "README.md", + "dist", + "wrapper.mjs" + ], + "devDependencies": { + "@babel/cli": "7.18.10", + "@babel/core": "7.18.10", + "@babel/eslint-parser": "7.18.9", + "@babel/preset-env": "7.18.10", + "@commitlint/cli": "17.0.3", + "@commitlint/config-conventional": "17.0.3", + "bundlewatch": "0.3.3", + "eslint": "8.21.0", + "eslint-config-prettier": "8.5.0", + "eslint-config-standard": "17.0.0", + "eslint-plugin-import": "2.26.0", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-prettier": "4.2.1", + "eslint-plugin-promise": "6.0.0", + "husky": "8.0.1", + "jest": "28.1.3", + "lint-staged": "13.0.3", + "npm-run-all": "4.1.5", + "optional-dev-dependency": "2.0.1", + "prettier": "2.7.1", + "random-seed": "0.3.0", + "runmd": "1.3.9", + "standard-version": "9.5.0" + }, + "optionalDevDependencies": { + "@wdio/browserstack-service": "7.16.10", + "@wdio/cli": "7.16.10", + "@wdio/jasmine-framework": "7.16.6", + "@wdio/local-runner": "7.16.10", + "@wdio/spec-reporter": "7.16.9", + "@wdio/static-server-service": "7.16.6" + }, + "scripts": { + "examples:browser:webpack:build": "cd examples/browser-webpack && npm install && npm run build", + "examples:browser:rollup:build": "cd examples/browser-rollup && npm install && npm run build", + "examples:node:commonjs:test": "cd examples/node-commonjs && npm install && npm test", + "examples:node:esmodules:test": "cd examples/node-esmodules && npm install && npm test", + "examples:node:jest:test": "cd examples/node-jest && npm install && npm test", + "prepare": "cd $( git rev-parse --show-toplevel ) && husky install", + "lint": "npm run eslint:check && npm run prettier:check", + "eslint:check": "eslint src/ test/ examples/ *.js", + "eslint:fix": "eslint --fix src/ test/ examples/ *.js", + "pretest": "[ -n $CI ] || npm run build", + "test": "BABEL_ENV=commonjsNode node 
--throw-deprecation node_modules/.bin/jest test/unit/", + "pretest:browser": "optional-dev-dependency && npm run build && npm-run-all --parallel examples:browser:**", + "test:browser": "wdio run ./wdio.conf.js", + "pretest:node": "npm run build", + "test:node": "npm-run-all --parallel examples:node:**", + "test:pack": "./scripts/testpack.sh", + "pretest:benchmark": "npm run build", + "test:benchmark": "cd examples/benchmark && npm install && npm test", + "prettier:check": "prettier --check '**/*.{js,jsx,json,md}'", + "prettier:fix": "prettier --write '**/*.{js,jsx,json,md}'", + "bundlewatch": "npm run pretest:browser && bundlewatch --config bundlewatch.config.json", + "md": "runmd --watch --output=README.md README_js.md", + "docs": "( node --version | grep -q 'v18' ) && ( npm run build && npx runmd --output=README.md README_js.md )", + "docs:diff": "npm run docs && git diff --quiet README.md", + "build": "./scripts/build.sh", + "prepack": "npm run build", + "release": "standard-version --no-verify" + }, + "repository": { + "type": "git", + "url": "https://github.com/uuidjs/uuid.git" + }, + "lint-staged": { + "*.{js,jsx,json,md}": [ + "prettier --write" + ], + "*.{js,jsx}": [ + "eslint --fix" + ] + }, + "standard-version": { + "scripts": { + "postchangelog": "prettier --write CHANGELOG.md" + } + } +} diff --git a/node_modules/@smithy/middleware-retry/node_modules/uuid/wrapper.mjs b/node_modules/@smithy/middleware-retry/node_modules/uuid/wrapper.mjs new file mode 100644 index 00000000..c31e9cef --- /dev/null +++ b/node_modules/@smithy/middleware-retry/node_modules/uuid/wrapper.mjs @@ -0,0 +1,10 @@ +import uuid from './dist/index.js'; +export const v1 = uuid.v1; +export const v3 = uuid.v3; +export const v4 = uuid.v4; +export const v5 = uuid.v5; +export const NIL = uuid.NIL; +export const version = uuid.version; +export const validate = uuid.validate; +export const stringify = uuid.stringify; +export const parse = uuid.parse; diff --git a/node_modules/@smithy/middleware-retry/package.json b/node_modules/@smithy/middleware-retry/package.json new file mode 100644 index 00000000..4dfba9aa --- /dev/null +++ b/node_modules/@smithy/middleware-retry/package.json @@ -0,0 +1,79 @@ +{ + "name": "@smithy/middleware-retry", + "version": "4.1.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline middleware-retry", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "browser": { + "./dist-es/isStreamingPayload/isStreamingPayload": "./dist-es/isStreamingPayload/isStreamingPayload.browser" + }, + 
"react-native": { + "./dist-cjs/isStreamingPayload/isStreamingPayload": "./dist-cjs/isStreamingPayload/isStreamingPayload.browser", + "./dist-es/isStreamingPayload/isStreamingPayload": "./dist-es/isStreamingPayload/isStreamingPayload.browser" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/service-error-classification": "^4.0.3", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "devDependencies": { + "@smithy/util-test": "^0.2.8", + "@types/uuid": "^8.3.0", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/middleware-retry", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/middleware-retry" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/middleware-serde/LICENSE b/node_modules/@smithy/middleware-serde/LICENSE new file mode 100644 index 00000000..e907b586 --- /dev/null +++ b/node_modules/@smithy/middleware-serde/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
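Review aside, not part of the patch: the vendored files above are the complete uuid@9.0.1 build that @smithy/middleware-retry resolves against (uuid appears in its dependencies, presumably for per-request invocation ids). As orientation, here is a minimal, illustrative sketch of the public API those dist files implement, assuming the package resolves as `uuid` through the wrapper.mjs / dist entry points shown above:

import { v1, v4, v5, NIL, parse, stringify, validate, version } from 'uuid';

const timeBased = v1();                  // time-based: Gregorian-epoch timestamp + clock sequence + node id
const random = v4();                     // random; takes the crypto.randomUUID() fast path when called with no options/buffer
const named = v5('example.com', v5.DNS); // name-based: the same name + namespace always yield the same UUID

console.log(validate(random));                    // true
console.log(version(named));                      // 5
console.log(stringify(parse(random)) === random); // true -- parse/stringify round-trip the 16-byte form
console.log(NIL);                                 // '00000000-0000-0000-0000-000000000000'

Note that the v3 and v5 modules are four lines each because they only choose the hash function (MD5 vs. SHA-1) plugged into the shared v35 factory.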
diff --git a/node_modules/@smithy/middleware-serde/README.md b/node_modules/@smithy/middleware-serde/README.md new file mode 100644 index 00000000..d2bbfa63 --- /dev/null +++ b/node_modules/@smithy/middleware-serde/README.md @@ -0,0 +1,4 @@ +# @smithy/middleware-serde + +[![NPM version](https://img.shields.io/npm/v/@smithy/middleware-serde/latest.svg)](https://www.npmjs.com/package/@smithy/middleware-serde) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/middleware-serde.svg)](https://www.npmjs.com/package/@smithy/middleware-serde) diff --git a/node_modules/@smithy/middleware-serde/dist-cjs/deserializerMiddleware.js b/node_modules/@smithy/middleware-serde/dist-cjs/deserializerMiddleware.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/middleware-serde/dist-cjs/deserializerMiddleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-serde/dist-cjs/index.js b/node_modules/@smithy/middleware-serde/dist-cjs/index.js new file mode 100644 index 00000000..04fa6f31 --- /dev/null +++ b/node_modules/@smithy/middleware-serde/dist-cjs/index.js @@ -0,0 +1,109 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + deserializerMiddleware: () => deserializerMiddleware, + deserializerMiddlewareOption: () => deserializerMiddlewareOption, + getSerdePlugin: () => getSerdePlugin, + serializerMiddleware: () => serializerMiddleware, + serializerMiddlewareOption: () => serializerMiddlewareOption +}); +module.exports = __toCommonJS(src_exports); + +// src/deserializerMiddleware.ts +var deserializerMiddleware = /* @__PURE__ */ __name((options, deserializer) => (next, context) => async (args) => { + const { response } = await next(args); + try { + const parsed = await deserializer(response, options); + return { + response, + output: parsed + }; + } catch (error) { + Object.defineProperty(error, "$response", { + value: response + }); + if (!("$metadata" in error)) { + const hint = `Deserialization error: to see the raw response, inspect the hidden field {error}.$response on this object.`; + try { + error.message += "\n " + hint; + } catch (e) { + if (!context.logger || context.logger?.constructor?.name === "NoOpLogger") { + console.warn(hint); + } else { + context.logger?.warn?.(hint); + } + } + if (typeof error.$responseBodyText !== "undefined") { + if (error.$response) { + error.$response.body = error.$responseBodyText; + } + } + } + throw error; + } +}, "deserializerMiddleware"); + +// src/serializerMiddleware.ts +var serializerMiddleware = /* @__PURE__ */ __name((options, serializer) => (next, context) => async 
(args) => { + const endpoint = context.endpointV2?.url && options.urlParser ? async () => options.urlParser(context.endpointV2.url) : options.endpoint; + if (!endpoint) { + throw new Error("No valid endpoint provider available."); + } + const request = await serializer(args.input, { ...options, endpoint }); + return next({ + ...args, + request + }); +}, "serializerMiddleware"); + +// src/serdePlugin.ts +var deserializerMiddlewareOption = { + name: "deserializerMiddleware", + step: "deserialize", + tags: ["DESERIALIZER"], + override: true +}; +var serializerMiddlewareOption = { + name: "serializerMiddleware", + step: "serialize", + tags: ["SERIALIZER"], + override: true +}; +function getSerdePlugin(config, serializer, deserializer) { + return { + applyToStack: (commandStack) => { + commandStack.add(deserializerMiddleware(config, deserializer), deserializerMiddlewareOption); + commandStack.add(serializerMiddleware(config, serializer), serializerMiddlewareOption); + } + }; +} +__name(getSerdePlugin, "getSerdePlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + deserializerMiddleware, + deserializerMiddlewareOption, + serializerMiddlewareOption, + getSerdePlugin, + serializerMiddleware +}); + diff --git a/node_modules/@smithy/middleware-serde/dist-cjs/serdePlugin.js b/node_modules/@smithy/middleware-serde/dist-cjs/serdePlugin.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/middleware-serde/dist-cjs/serdePlugin.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-serde/dist-cjs/serializerMiddleware.js b/node_modules/@smithy/middleware-serde/dist-cjs/serializerMiddleware.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/middleware-serde/dist-cjs/serializerMiddleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-serde/dist-es/deserializerMiddleware.js b/node_modules/@smithy/middleware-serde/dist-es/deserializerMiddleware.js new file mode 100644 index 00000000..19c0c278 --- /dev/null +++ b/node_modules/@smithy/middleware-serde/dist-es/deserializerMiddleware.js @@ -0,0 +1,35 @@ +export const deserializerMiddleware = (options, deserializer) => (next, context) => async (args) => { + const { response } = await next(args); + try { + const parsed = await deserializer(response, options); + return { + response, + output: parsed, + }; + } + catch (error) { + Object.defineProperty(error, "$response", { + value: response, + }); + if (!("$metadata" in error)) { + const hint = `Deserialization error: to see the raw response, inspect the hidden field {error}.$response on this object.`; + try { + error.message += "\n " + hint; + } + catch (e) { + if (!context.logger || context.logger?.constructor?.name === "NoOpLogger") { + console.warn(hint); + } + else { + context.logger?.warn?.(hint); + } + } + if (typeof error.$responseBodyText !== "undefined") { + if (error.$response) { + error.$response.body = error.$responseBodyText; + } + } + } + throw error; + } +}; diff --git a/node_modules/@smithy/middleware-serde/dist-es/index.js b/node_modules/@smithy/middleware-serde/dist-es/index.js new file mode 100644 index 00000000..166a2be2 --- /dev/null +++ b/node_modules/@smithy/middleware-serde/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./deserializerMiddleware"; +export * from "./serdePlugin"; +export * from 
"./serializerMiddleware"; diff --git a/node_modules/@smithy/middleware-serde/dist-es/serdePlugin.js b/node_modules/@smithy/middleware-serde/dist-es/serdePlugin.js new file mode 100644 index 00000000..be2a06ef --- /dev/null +++ b/node_modules/@smithy/middleware-serde/dist-es/serdePlugin.js @@ -0,0 +1,22 @@ +import { deserializerMiddleware } from "./deserializerMiddleware"; +import { serializerMiddleware } from "./serializerMiddleware"; +export const deserializerMiddlewareOption = { + name: "deserializerMiddleware", + step: "deserialize", + tags: ["DESERIALIZER"], + override: true, +}; +export const serializerMiddlewareOption = { + name: "serializerMiddleware", + step: "serialize", + tags: ["SERIALIZER"], + override: true, +}; +export function getSerdePlugin(config, serializer, deserializer) { + return { + applyToStack: (commandStack) => { + commandStack.add(deserializerMiddleware(config, deserializer), deserializerMiddlewareOption); + commandStack.add(serializerMiddleware(config, serializer), serializerMiddlewareOption); + }, + }; +} diff --git a/node_modules/@smithy/middleware-serde/dist-es/serializerMiddleware.js b/node_modules/@smithy/middleware-serde/dist-es/serializerMiddleware.js new file mode 100644 index 00000000..b02b93d7 --- /dev/null +++ b/node_modules/@smithy/middleware-serde/dist-es/serializerMiddleware.js @@ -0,0 +1,13 @@ +export const serializerMiddleware = (options, serializer) => (next, context) => async (args) => { + const endpoint = context.endpointV2?.url && options.urlParser + ? async () => options.urlParser(context.endpointV2.url) + : options.endpoint; + if (!endpoint) { + throw new Error("No valid endpoint provider available."); + } + const request = await serializer(args.input, { ...options, endpoint }); + return next({ + ...args, + request, + }); +}; diff --git a/node_modules/@smithy/middleware-serde/dist-types/deserializerMiddleware.d.ts b/node_modules/@smithy/middleware-serde/dist-types/deserializerMiddleware.d.ts new file mode 100644 index 00000000..4d811418 --- /dev/null +++ b/node_modules/@smithy/middleware-serde/dist-types/deserializerMiddleware.d.ts @@ -0,0 +1,5 @@ +import { DeserializeMiddleware, ResponseDeserializer, SerdeContext, SerdeFunctions } from "@smithy/types"; +/** + * @internal + */ +export declare const deserializerMiddleware: (options: SerdeFunctions, deserializer: ResponseDeserializer) => DeserializeMiddleware; diff --git a/node_modules/@smithy/middleware-serde/dist-types/index.d.ts b/node_modules/@smithy/middleware-serde/dist-types/index.d.ts new file mode 100644 index 00000000..166a2be2 --- /dev/null +++ b/node_modules/@smithy/middleware-serde/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./deserializerMiddleware"; +export * from "./serdePlugin"; +export * from "./serializerMiddleware"; diff --git a/node_modules/@smithy/middleware-serde/dist-types/serdePlugin.d.ts b/node_modules/@smithy/middleware-serde/dist-types/serdePlugin.d.ts new file mode 100644 index 00000000..bf1091af --- /dev/null +++ b/node_modules/@smithy/middleware-serde/dist-types/serdePlugin.d.ts @@ -0,0 +1,12 @@ +import { DeserializeHandlerOptions, Endpoint, MetadataBearer, Pluggable, Provider, RequestSerializer, ResponseDeserializer, SerdeContext, SerdeFunctions, SerializeHandlerOptions, UrlParser } from "@smithy/types"; +export declare const deserializerMiddlewareOption: DeserializeHandlerOptions; +export declare const serializerMiddlewareOption: SerializeHandlerOptions; +export type V1OrV2Endpoint = { + urlParser?: UrlParser; + endpoint?: Provider; +}; +/** + * 
@internal + * + */ +export declare function getSerdePlugin(config: V1OrV2Endpoint & SerdeFunctions, serializer: RequestSerializer, deserializer: ResponseDeserializer): Pluggable; diff --git a/node_modules/@smithy/middleware-serde/dist-types/serializerMiddleware.d.ts b/node_modules/@smithy/middleware-serde/dist-types/serializerMiddleware.d.ts new file mode 100644 index 00000000..5437298a --- /dev/null +++ b/node_modules/@smithy/middleware-serde/dist-types/serializerMiddleware.d.ts @@ -0,0 +1,6 @@ +import { RequestSerializer, SerdeContext, SerdeFunctions, SerializeMiddleware } from "@smithy/types"; +import type { V1OrV2Endpoint } from "./serdePlugin"; +/** + * @internal + */ +export declare const serializerMiddleware: (options: V1OrV2Endpoint & SerdeFunctions, serializer: RequestSerializer) => SerializeMiddleware; diff --git a/node_modules/@smithy/middleware-serde/dist-types/ts3.4/deserializerMiddleware.d.ts b/node_modules/@smithy/middleware-serde/dist-types/ts3.4/deserializerMiddleware.d.ts new file mode 100644 index 00000000..b0ed4924 --- /dev/null +++ b/node_modules/@smithy/middleware-serde/dist-types/ts3.4/deserializerMiddleware.d.ts @@ -0,0 +1,5 @@ +import { DeserializeMiddleware, ResponseDeserializer, SerdeContext, SerdeFunctions } from "@smithy/types"; +/** + * @internal + */ +export declare const deserializerMiddleware: (options: SerdeFunctions, deserializer: ResponseDeserializer) => DeserializeMiddleware; diff --git a/node_modules/@smithy/middleware-serde/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/middleware-serde/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..ec66df4d --- /dev/null +++ b/node_modules/@smithy/middleware-serde/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./deserializerMiddleware"; +export * from "./serdePlugin"; +export * from "./serializerMiddleware"; diff --git a/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serdePlugin.d.ts b/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serdePlugin.d.ts new file mode 100644 index 00000000..c3817212 --- /dev/null +++ b/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serdePlugin.d.ts @@ -0,0 +1,12 @@ +import { DeserializeHandlerOptions, Endpoint, MetadataBearer, Pluggable, Provider, RequestSerializer, ResponseDeserializer, SerdeContext, SerdeFunctions, SerializeHandlerOptions, UrlParser } from "@smithy/types"; +export declare const deserializerMiddlewareOption: DeserializeHandlerOptions; +export declare const serializerMiddlewareOption: SerializeHandlerOptions; +export type V1OrV2Endpoint = { + urlParser?: UrlParser; + endpoint?: Provider; +}; +/** + * @internal + * + */ +export declare function getSerdePlugin(config: V1OrV2Endpoint & SerdeFunctions, serializer: RequestSerializer, deserializer: ResponseDeserializer): Pluggable; diff --git a/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serializerMiddleware.d.ts b/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serializerMiddleware.d.ts new file mode 100644 index 00000000..914b3b64 --- /dev/null +++ b/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serializerMiddleware.d.ts @@ -0,0 +1,6 @@ +import { RequestSerializer, SerdeContext, SerdeFunctions, SerializeMiddleware } from "@smithy/types"; +import { V1OrV2Endpoint } from "./serdePlugin"; +/** + * @internal + */ +export declare const serializerMiddleware: (options: V1OrV2Endpoint & SerdeFunctions, serializer: RequestSerializer) => SerializeMiddleware; diff --git a/node_modules/@smithy/middleware-serde/package.json 
b/node_modules/@smithy/middleware-serde/package.json new file mode 100644 index 00000000..042be080 --- /dev/null +++ b/node_modules/@smithy/middleware-serde/package.json @@ -0,0 +1,63 @@ +{ + "name": "@smithy/middleware-serde", + "version": "4.0.3", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline middleware-serde", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/middleware-serde", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/middleware-serde" + }, + "devDependencies": { + "@smithy/util-test": "^0.2.8", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/middleware-stack/LICENSE b/node_modules/@smithy/middleware-stack/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/middleware-stack/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file
diff --git a/node_modules/@smithy/middleware-stack/README.md b/node_modules/@smithy/middleware-stack/README.md
new file mode 100644
index 00000000..c09d4d36
--- /dev/null
+++ b/node_modules/@smithy/middleware-stack/README.md
@@ -0,0 +1,78 @@
+# @smithy/middleware-stack
+
+[![NPM version](https://img.shields.io/npm/v/@smithy/middleware-stack/latest.svg)](https://www.npmjs.com/package/@smithy/middleware-stack)
+[![NPM downloads](https://img.shields.io/npm/dm/@smithy/middleware-stack.svg)](https://www.npmjs.com/package/@smithy/middleware-stack)
+
+This package contains an implementation of the middleware stack interface. A middleware
+stack is a structure that stores middleware in a specified order and resolves them
+into a single handler.
+
+A middleware stack has five `Step`s, each of which represents a distinct stage of the request life cycle:
+
+- **initialize**: The input is being prepared. Examples of typical initialization tasks include injecting default options and computing derived parameters.
+
+- **serialize**: The input is complete and ready to be serialized. Examples of typical serialization tasks include input validation and building an HTTP request from user input.
+
+- **build**: The input has been serialized into an HTTP request, but that request may require further modification. Any request alterations will be applied to all retries. Examples of typical build tasks include injecting HTTP headers that describe a stable aspect of the request, such as `Content-Length` or a body checksum.
+
+- **finalizeRequest**: The request is being prepared to be sent over the wire. The request in this stage should already be semantically complete and should therefore only be altered to match the recipient's expectations. Examples of typical finalization tasks include request signing and injecting hop-by-hop headers.
+
+- **deserialize**: The response has arrived; middleware in this step deserializes the raw response object into a structured response.
+
+## Adding Middleware
+
+There are two ways to add middleware to a middleware stack. Both add middleware to a specified `Step`, but they differ in how precisely they control the middleware's position within that step.
+
+### Absolute Location
+
+You can add middleware to a specified step with:
+
+```javascript
+stack.add(middleware, {
+  step: "finalizeRequest",
+});
+```
+
+This approach works for most cases. If you want your middleware to be executed at the front of the `Step`, set the `priority` to `high`; set the `priority` to `low` to have the middleware executed at the end of the `Step`:
+
+```javascript
+stack.add(middleware, {
+  step: "finalizeRequest",
+  priority: "high",
+});
+```
+
+If multiple middleware are added to the same `step` with the same `priority`, they are executed in the order in which they were added.
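+
+As an illustrative sketch (not part of the package's documented examples), the
+following shows how step and priority determine the resolved order. The `tag`
+middleware factory here is invented for the example; `constructStack` and
+`identify()` are this package's own exports:
+
+```javascript
+const { constructStack } = require("@smithy/middleware-stack");
+
+// A trivial middleware factory, used only to make the ordering observable.
+const tag = (label) => (next) => async (args) => {
+  console.log(label);
+  return next(args);
+};
+
+const stack = constructStack();
+stack.add(tag("first"), { step: "build", name: "first" });
+stack.add(tag("second"), { step: "build", name: "second", priority: "high" });
+
+// Prints ["second - build", "first - build"]: high priority runs before
+// normal priority within the same step, and equal-priority middleware
+// keep their insertion order.
+console.log(stack.identify());
+```
+
+Calling `stack.resolve(handler, context)` composes the middleware in that same
+order around a terminal handler.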
+
+### Relative Location
+
+In some cases, you might want to execute your middleware before some other known middleware; in that case, use `addRelativeTo()`:
+
+```javascript
+stack.add(middleware, {
+  step: "finalizeRequest",
+  name: "myMiddleware",
+});
+stack.addRelativeTo(anotherMiddleware, {
+  relation: "before", // or 'after'
+  toMiddleware: "myMiddleware",
+});
+```
+
+## Removing Middleware
+
+You can remove middleware by name, one at a time:
+
+```javascript
+stack.remove("Middleware1");
+```
+
+If you specify tags for middleware, you can remove multiple middleware at once by tag:
+
+```javascript
+stack.add(middleware, {
+  step: "finalizeRequest",
+  tags: ["final"],
+});
+stack.removeByTag("final");
+```
diff --git a/node_modules/@smithy/middleware-stack/dist-cjs/MiddlewareStack.js b/node_modules/@smithy/middleware-stack/dist-cjs/MiddlewareStack.js
new file mode 100644
index 00000000..532e610f
--- /dev/null
+++ b/node_modules/@smithy/middleware-stack/dist-cjs/MiddlewareStack.js
@@ -0,0 +1 @@
+module.exports = require("./index.js");
\ No newline at end of file
diff --git a/node_modules/@smithy/middleware-stack/dist-cjs/index.js b/node_modules/@smithy/middleware-stack/dist-cjs/index.js
new file mode 100644
index 00000000..4c785970
--- /dev/null
+++ b/node_modules/@smithy/middleware-stack/dist-cjs/index.js
@@ -0,0 +1,313 @@
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// src/index.ts
+var src_exports = {};
+__export(src_exports, {
+  constructStack: () => constructStack
+});
+module.exports = __toCommonJS(src_exports);
+
+// src/MiddlewareStack.ts
+var getAllAliases = /* @__PURE__ */ __name((name, aliases) => {
+  const _aliases = [];
+  if (name) {
+    _aliases.push(name);
+  }
+  if (aliases) {
+    for (const alias of aliases) {
+      _aliases.push(alias);
+    }
+  }
+  return _aliases;
+}, "getAllAliases");
+var getMiddlewareNameWithAliases = /* @__PURE__ */ __name((name, aliases) => {
+  return `${name || "anonymous"}${aliases && aliases.length > 0 ? ` (a.k.a.
${aliases.join(",")})` : ""}`; +}, "getMiddlewareNameWithAliases"); +var constructStack = /* @__PURE__ */ __name(() => { + let absoluteEntries = []; + let relativeEntries = []; + let identifyOnResolve = false; + const entriesNameSet = /* @__PURE__ */ new Set(); + const sort = /* @__PURE__ */ __name((entries) => entries.sort( + (a, b) => stepWeights[b.step] - stepWeights[a.step] || priorityWeights[b.priority || "normal"] - priorityWeights[a.priority || "normal"] + ), "sort"); + const removeByName = /* @__PURE__ */ __name((toRemove) => { + let isRemoved = false; + const filterCb = /* @__PURE__ */ __name((entry) => { + const aliases = getAllAliases(entry.name, entry.aliases); + if (aliases.includes(toRemove)) { + isRemoved = true; + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }, "filterCb"); + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, "removeByName"); + const removeByReference = /* @__PURE__ */ __name((toRemove) => { + let isRemoved = false; + const filterCb = /* @__PURE__ */ __name((entry) => { + if (entry.middleware === toRemove) { + isRemoved = true; + for (const alias of getAllAliases(entry.name, entry.aliases)) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }, "filterCb"); + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, "removeByReference"); + const cloneTo = /* @__PURE__ */ __name((toStack) => { + absoluteEntries.forEach((entry) => { + toStack.add(entry.middleware, { ...entry }); + }); + relativeEntries.forEach((entry) => { + toStack.addRelativeTo(entry.middleware, { ...entry }); + }); + toStack.identifyOnResolve?.(stack.identifyOnResolve()); + return toStack; + }, "cloneTo"); + const expandRelativeMiddlewareList = /* @__PURE__ */ __name((from) => { + const expandedMiddlewareList = []; + from.before.forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + expandedMiddlewareList.push(from); + from.after.reverse().forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + return expandedMiddlewareList; + }, "expandRelativeMiddlewareList"); + const getMiddlewareList = /* @__PURE__ */ __name((debug = false) => { + const normalizedAbsoluteEntries = []; + const normalizedRelativeEntries = []; + const normalizedEntriesNameMap = {}; + absoluteEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [] + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedAbsoluteEntries.push(normalizedEntry); + }); + relativeEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [] + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedRelativeEntries.push(normalizedEntry); + }); + normalizedRelativeEntries.forEach((entry) => { + if (entry.toMiddleware) { + const toMiddleware = normalizedEntriesNameMap[entry.toMiddleware]; + if (toMiddleware === void 0) { + if 
(debug) { + return; + } + throw new Error( + `${entry.toMiddleware} is not found when adding ${getMiddlewareNameWithAliases(entry.name, entry.aliases)} middleware ${entry.relation} ${entry.toMiddleware}` + ); + } + if (entry.relation === "after") { + toMiddleware.after.push(entry); + } + if (entry.relation === "before") { + toMiddleware.before.push(entry); + } + } + }); + const mainChain = sort(normalizedAbsoluteEntries).map(expandRelativeMiddlewareList).reduce( + (wholeList, expandedMiddlewareList) => { + wholeList.push(...expandedMiddlewareList); + return wholeList; + }, + [] + ); + return mainChain; + }, "getMiddlewareList"); + const stack = { + add: (middleware, options = {}) => { + const { name, override, aliases: _aliases } = options; + const entry = { + step: "initialize", + priority: "normal", + middleware, + ...options + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = absoluteEntries.findIndex( + (entry2) => entry2.name === alias || entry2.aliases?.some((a) => a === alias) + ); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = absoluteEntries[toOverrideIndex]; + if (toOverride.step !== entry.step || entry.priority !== toOverride.priority) { + throw new Error( + `"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware with ${toOverride.priority} priority in ${toOverride.step} step cannot be overridden by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware with ${entry.priority} priority in ${entry.step} step.` + ); + } + absoluteEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + absoluteEntries.push(entry); + }, + addRelativeTo: (middleware, options) => { + const { name, override, aliases: _aliases } = options; + const entry = { + middleware, + ...options + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = relativeEntries.findIndex( + (entry2) => entry2.name === alias || entry2.aliases?.some((a) => a === alias) + ); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = relativeEntries[toOverrideIndex]; + if (toOverride.toMiddleware !== entry.toMiddleware || toOverride.relation !== entry.relation) { + throw new Error( + `"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware ${toOverride.relation} "${toOverride.toMiddleware}" middleware cannot be overridden by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware ${entry.relation} "${entry.toMiddleware}" middleware.` + ); + } + relativeEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + relativeEntries.push(entry); + }, + clone: () => cloneTo(constructStack()), + use: (plugin) => { + plugin.applyToStack(stack); + }, + remove: (toRemove) => { + if (typeof toRemove === "string") + return removeByName(toRemove); + else + return removeByReference(toRemove); + }, + removeByTag: (toRemove) => { + let isRemoved = false; + const filterCb = /* @__PURE__ */ __name((entry) => { + const { tags, 
name, aliases: _aliases } = entry; + if (tags && tags.includes(toRemove)) { + const aliases = getAllAliases(name, _aliases); + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + isRemoved = true; + return false; + } + return true; + }, "filterCb"); + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, + concat: (from) => { + const cloned = cloneTo(constructStack()); + cloned.use(from); + cloned.identifyOnResolve( + identifyOnResolve || cloned.identifyOnResolve() || (from.identifyOnResolve?.() ?? false) + ); + return cloned; + }, + applyToStack: cloneTo, + identify: () => { + return getMiddlewareList(true).map((mw) => { + const step = mw.step ?? mw.relation + " " + mw.toMiddleware; + return getMiddlewareNameWithAliases(mw.name, mw.aliases) + " - " + step; + }); + }, + identifyOnResolve(toggle) { + if (typeof toggle === "boolean") + identifyOnResolve = toggle; + return identifyOnResolve; + }, + resolve: (handler, context) => { + for (const middleware of getMiddlewareList().map((entry) => entry.middleware).reverse()) { + handler = middleware(handler, context); + } + if (identifyOnResolve) { + console.log(stack.identify()); + } + return handler; + } + }; + return stack; +}, "constructStack"); +var stepWeights = { + initialize: 5, + serialize: 4, + build: 3, + finalizeRequest: 2, + deserialize: 1 +}; +var priorityWeights = { + high: 3, + normal: 2, + low: 1 +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + constructStack +}); + diff --git a/node_modules/@smithy/middleware-stack/dist-cjs/types.js b/node_modules/@smithy/middleware-stack/dist-cjs/types.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/middleware-stack/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/middleware-stack/dist-es/MiddlewareStack.js b/node_modules/@smithy/middleware-stack/dist-es/MiddlewareStack.js new file mode 100644 index 00000000..2e02c73d --- /dev/null +++ b/node_modules/@smithy/middleware-stack/dist-es/MiddlewareStack.js @@ -0,0 +1,281 @@ +const getAllAliases = (name, aliases) => { + const _aliases = []; + if (name) { + _aliases.push(name); + } + if (aliases) { + for (const alias of aliases) { + _aliases.push(alias); + } + } + return _aliases; +}; +const getMiddlewareNameWithAliases = (name, aliases) => { + return `${name || "anonymous"}${aliases && aliases.length > 0 ? ` (a.k.a. 
${aliases.join(",")})` : ""}`; +}; +export const constructStack = () => { + let absoluteEntries = []; + let relativeEntries = []; + let identifyOnResolve = false; + const entriesNameSet = new Set(); + const sort = (entries) => entries.sort((a, b) => stepWeights[b.step] - stepWeights[a.step] || + priorityWeights[b.priority || "normal"] - priorityWeights[a.priority || "normal"]); + const removeByName = (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + const aliases = getAllAliases(entry.name, entry.aliases); + if (aliases.includes(toRemove)) { + isRemoved = true; + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }; + const removeByReference = (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + if (entry.middleware === toRemove) { + isRemoved = true; + for (const alias of getAllAliases(entry.name, entry.aliases)) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }; + const cloneTo = (toStack) => { + absoluteEntries.forEach((entry) => { + toStack.add(entry.middleware, { ...entry }); + }); + relativeEntries.forEach((entry) => { + toStack.addRelativeTo(entry.middleware, { ...entry }); + }); + toStack.identifyOnResolve?.(stack.identifyOnResolve()); + return toStack; + }; + const expandRelativeMiddlewareList = (from) => { + const expandedMiddlewareList = []; + from.before.forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } + else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + expandedMiddlewareList.push(from); + from.after.reverse().forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } + else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + return expandedMiddlewareList; + }; + const getMiddlewareList = (debug = false) => { + const normalizedAbsoluteEntries = []; + const normalizedRelativeEntries = []; + const normalizedEntriesNameMap = {}; + absoluteEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [], + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedAbsoluteEntries.push(normalizedEntry); + }); + relativeEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [], + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedRelativeEntries.push(normalizedEntry); + }); + normalizedRelativeEntries.forEach((entry) => { + if (entry.toMiddleware) { + const toMiddleware = normalizedEntriesNameMap[entry.toMiddleware]; + if (toMiddleware === undefined) { + if (debug) { + return; + } + throw new Error(`${entry.toMiddleware} is not found when adding ` + + `${getMiddlewareNameWithAliases(entry.name, entry.aliases)} ` + + `middleware ${entry.relation} ${entry.toMiddleware}`); + } + if (entry.relation === "after") { + toMiddleware.after.push(entry); + } + if (entry.relation === "before") { + toMiddleware.before.push(entry); 
+ } + } + }); + const mainChain = sort(normalizedAbsoluteEntries) + .map(expandRelativeMiddlewareList) + .reduce((wholeList, expandedMiddlewareList) => { + wholeList.push(...expandedMiddlewareList); + return wholeList; + }, []); + return mainChain; + }; + const stack = { + add: (middleware, options = {}) => { + const { name, override, aliases: _aliases } = options; + const entry = { + step: "initialize", + priority: "normal", + middleware, + ...options, + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = absoluteEntries.findIndex((entry) => entry.name === alias || entry.aliases?.some((a) => a === alias)); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = absoluteEntries[toOverrideIndex]; + if (toOverride.step !== entry.step || entry.priority !== toOverride.priority) { + throw new Error(`"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware with ` + + `${toOverride.priority} priority in ${toOverride.step} step cannot ` + + `be overridden by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware with ` + + `${entry.priority} priority in ${entry.step} step.`); + } + absoluteEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + absoluteEntries.push(entry); + }, + addRelativeTo: (middleware, options) => { + const { name, override, aliases: _aliases } = options; + const entry = { + middleware, + ...options, + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = relativeEntries.findIndex((entry) => entry.name === alias || entry.aliases?.some((a) => a === alias)); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = relativeEntries[toOverrideIndex]; + if (toOverride.toMiddleware !== entry.toMiddleware || toOverride.relation !== entry.relation) { + throw new Error(`"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware ` + + `${toOverride.relation} "${toOverride.toMiddleware}" middleware cannot be overridden ` + + `by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware ${entry.relation} ` + + `"${entry.toMiddleware}" middleware.`); + } + relativeEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + relativeEntries.push(entry); + }, + clone: () => cloneTo(constructStack()), + use: (plugin) => { + plugin.applyToStack(stack); + }, + remove: (toRemove) => { + if (typeof toRemove === "string") + return removeByName(toRemove); + else + return removeByReference(toRemove); + }, + removeByTag: (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + const { tags, name, aliases: _aliases } = entry; + if (tags && tags.includes(toRemove)) { + const aliases = getAllAliases(name, _aliases); + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + isRemoved = true; + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, + 
concat: (from) => { + const cloned = cloneTo(constructStack()); + cloned.use(from); + cloned.identifyOnResolve(identifyOnResolve || cloned.identifyOnResolve() || (from.identifyOnResolve?.() ?? false)); + return cloned; + }, + applyToStack: cloneTo, + identify: () => { + return getMiddlewareList(true).map((mw) => { + const step = mw.step ?? + mw.relation + + " " + + mw.toMiddleware; + return getMiddlewareNameWithAliases(mw.name, mw.aliases) + " - " + step; + }); + }, + identifyOnResolve(toggle) { + if (typeof toggle === "boolean") + identifyOnResolve = toggle; + return identifyOnResolve; + }, + resolve: (handler, context) => { + for (const middleware of getMiddlewareList() + .map((entry) => entry.middleware) + .reverse()) { + handler = middleware(handler, context); + } + if (identifyOnResolve) { + console.log(stack.identify()); + } + return handler; + }, + }; + return stack; +}; +const stepWeights = { + initialize: 5, + serialize: 4, + build: 3, + finalizeRequest: 2, + deserialize: 1, +}; +const priorityWeights = { + high: 3, + normal: 2, + low: 1, +}; diff --git a/node_modules/@smithy/middleware-stack/dist-es/index.js b/node_modules/@smithy/middleware-stack/dist-es/index.js new file mode 100644 index 00000000..16f56ce9 --- /dev/null +++ b/node_modules/@smithy/middleware-stack/dist-es/index.js @@ -0,0 +1 @@ +export * from "./MiddlewareStack"; diff --git a/node_modules/@smithy/middleware-stack/dist-es/types.js b/node_modules/@smithy/middleware-stack/dist-es/types.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/middleware-stack/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/middleware-stack/dist-types/MiddlewareStack.d.ts b/node_modules/@smithy/middleware-stack/dist-types/MiddlewareStack.d.ts new file mode 100644 index 00000000..2aa088bb --- /dev/null +++ b/node_modules/@smithy/middleware-stack/dist-types/MiddlewareStack.d.ts @@ -0,0 +1,5 @@ +import { MiddlewareStack } from "@smithy/types"; +/** + * @internal + */ +export declare const constructStack: () => MiddlewareStack; diff --git a/node_modules/@smithy/middleware-stack/dist-types/index.d.ts b/node_modules/@smithy/middleware-stack/dist-types/index.d.ts new file mode 100644 index 00000000..16f56ce9 --- /dev/null +++ b/node_modules/@smithy/middleware-stack/dist-types/index.d.ts @@ -0,0 +1 @@ +export * from "./MiddlewareStack"; diff --git a/node_modules/@smithy/middleware-stack/dist-types/ts3.4/MiddlewareStack.d.ts b/node_modules/@smithy/middleware-stack/dist-types/ts3.4/MiddlewareStack.d.ts new file mode 100644 index 00000000..d93ce936 --- /dev/null +++ b/node_modules/@smithy/middleware-stack/dist-types/ts3.4/MiddlewareStack.d.ts @@ -0,0 +1,5 @@ +import { MiddlewareStack } from "@smithy/types"; +/** + * @internal + */ +export declare const constructStack: () => MiddlewareStack; diff --git a/node_modules/@smithy/middleware-stack/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/middleware-stack/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..d906b7dd --- /dev/null +++ b/node_modules/@smithy/middleware-stack/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./MiddlewareStack"; diff --git a/node_modules/@smithy/middleware-stack/dist-types/ts3.4/types.d.ts b/node_modules/@smithy/middleware-stack/dist-types/ts3.4/types.d.ts new file mode 100644 index 00000000..38eb54c9 --- /dev/null +++ b/node_modules/@smithy/middleware-stack/dist-types/ts3.4/types.d.ts @@ -0,0 +1,22 @@ +import { AbsoluteLocation, HandlerOptions, MiddlewareType, Priority, 
RelativeLocation, Step } from "@smithy/types";
+export interface MiddlewareEntry<Input extends object, Output extends object> extends HandlerOptions {
+    middleware: MiddlewareType<Input, Output>;
+}
+export interface AbsoluteMiddlewareEntry<Input extends object, Output extends object> extends MiddlewareEntry<Input, Output>, AbsoluteLocation {
+    step: Step;
+    priority: Priority;
+}
+export interface RelativeMiddlewareEntry<Input extends object, Output extends object> extends MiddlewareEntry<Input, Output>, RelativeLocation {
+}
+export type Normalized<T extends MiddlewareEntry<Input, Output>, Input extends object = {}, Output extends object = {}> = T & {
+    after: Normalized<MiddlewareEntry<Input, Output>, Input, Output>[];
+    before: Normalized<MiddlewareEntry<Input, Output>, Input, Output>[];
+};
+export interface NormalizedRelativeEntry<Input extends object, Output extends object> extends HandlerOptions {
+    step: Step;
+    middleware: MiddlewareType<Input, Output>;
+    next?: NormalizedRelativeEntry<Input, Output>;
+    prev?: NormalizedRelativeEntry<Input, Output>;
+    priority: null;
+}
+export type NamedMiddlewareEntriesMap<Input extends object, Output extends object> = Record<string, Normalized<MiddlewareEntry<Input, Output>, Input, Output>>;
diff --git a/node_modules/@smithy/middleware-stack/dist-types/types.d.ts b/node_modules/@smithy/middleware-stack/dist-types/types.d.ts
new file mode 100644
index 00000000..4aa5fc64
--- /dev/null
+++ b/node_modules/@smithy/middleware-stack/dist-types/types.d.ts
@@ -0,0 +1,22 @@
+import { AbsoluteLocation, HandlerOptions, MiddlewareType, Priority, RelativeLocation, Step } from "@smithy/types";
+export interface MiddlewareEntry<Input extends object, Output extends object> extends HandlerOptions {
+    middleware: MiddlewareType<Input, Output>;
+}
+export interface AbsoluteMiddlewareEntry<Input extends object, Output extends object> extends MiddlewareEntry<Input, Output>, AbsoluteLocation {
+    step: Step;
+    priority: Priority;
+}
+export interface RelativeMiddlewareEntry<Input extends object, Output extends object> extends MiddlewareEntry<Input, Output>, RelativeLocation {
+}
+export type Normalized<T extends MiddlewareEntry<Input, Output>, Input extends object = {}, Output extends object = {}> = T & {
+    after: Normalized<MiddlewareEntry<Input, Output>, Input, Output>[];
+    before: Normalized<MiddlewareEntry<Input, Output>, Input, Output>[];
+};
+export interface NormalizedRelativeEntry<Input extends object, Output extends object> extends HandlerOptions {
+    step: Step;
+    middleware: MiddlewareType<Input, Output>;
+    next?: NormalizedRelativeEntry<Input, Output>;
+    prev?: NormalizedRelativeEntry<Input, Output>;
+    priority: null;
+}
+export type NamedMiddlewareEntriesMap<Input extends object, Output extends object> = Record<string, Normalized<MiddlewareEntry<Input, Output>, Input, Output>>;
diff --git a/node_modules/@smithy/middleware-stack/package.json b/node_modules/@smithy/middleware-stack/package.json
new file mode 100644
index 00000000..57077ab0
--- /dev/null
+++ b/node_modules/@smithy/middleware-stack/package.json
@@ -0,0 +1,63 @@
+{
+  "name": "@smithy/middleware-stack",
+  "version": "4.0.2",
+  "description": "Provides a means for composing multiple middleware functions into a single handler",
+  "scripts": {
+    "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'",
+    "build:cjs": "node ../../scripts/inline middleware-stack",
+    "build:es": "yarn g:tsc -p tsconfig.es.json",
+    "build:types": "yarn g:tsc -p tsconfig.types.json",
+    "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4",
+    "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz",
+    "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0",
+    "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"",
+    "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"",
+    "extract:docs": "api-extractor run --local",
+    "test": "yarn g:vitest run",
+    "test:watch": "yarn g:vitest watch"
+  },
+  "author": {
+    "name": "AWS SDK for JavaScript Team",
+    "email": "",
+    "url": "https://aws.amazon.com/javascript/"
+  },
+  "license": "Apache-2.0",
+  "main": "./dist-cjs/index.js",
+  "module": "./dist-es/index.js",
+  "types": "./dist-types/index.d.ts",
+  "dependencies": {
+    "@smithy/types": "^4.2.0",
+    "tslib": "^2.6.2"
+  },
"devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/middleware-stack", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/middleware-stack" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/node-config-provider/LICENSE b/node_modules/@smithy/node-config-provider/LICENSE new file mode 100644 index 00000000..74d4e5c3 --- /dev/null +++ b/node_modules/@smithy/node-config-provider/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@smithy/node-config-provider/README.md b/node_modules/@smithy/node-config-provider/README.md new file mode 100644 index 00000000..af591d2b --- /dev/null +++ b/node_modules/@smithy/node-config-provider/README.md @@ -0,0 +1,4 @@ +# @smithy/node-config-provider + +[![NPM version](https://img.shields.io/npm/v/@smithy/node-config-provider/latest.svg)](https://www.npmjs.com/package/@smithy/node-config-provider) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/node-config-provider.svg)](https://www.npmjs.com/package/@smithy/node-config-provider) diff --git a/node_modules/@smithy/node-config-provider/dist-cjs/configLoader.js b/node_modules/@smithy/node-config-provider/dist-cjs/configLoader.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-cjs/configLoader.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-config-provider/dist-cjs/fromEnv.js b/node_modules/@smithy/node-config-provider/dist-cjs/fromEnv.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-cjs/fromEnv.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-config-provider/dist-cjs/fromSharedConfigFiles.js b/node_modules/@smithy/node-config-provider/dist-cjs/fromSharedConfigFiles.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-cjs/fromSharedConfigFiles.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-config-provider/dist-cjs/fromStatic.js b/node_modules/@smithy/node-config-provider/dist-cjs/fromStatic.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-cjs/fromStatic.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-config-provider/dist-cjs/getSelectorName.js b/node_modules/@smithy/node-config-provider/dist-cjs/getSelectorName.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ 
b/node_modules/@smithy/node-config-provider/dist-cjs/getSelectorName.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-config-provider/dist-cjs/index.js b/node_modules/@smithy/node-config-provider/dist-cjs/index.js new file mode 100644 index 00000000..8a98b1b3 --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-cjs/index.js @@ -0,0 +1,105 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + loadConfig: () => loadConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/configLoader.ts + + +// src/fromEnv.ts +var import_property_provider = require("@smithy/property-provider"); + +// src/getSelectorName.ts +function getSelectorName(functionString) { + try { + const constants = new Set(Array.from(functionString.match(/([A-Z_]){3,}/g) ?? [])); + constants.delete("CONFIG"); + constants.delete("CONFIG_PREFIX_SEPARATOR"); + constants.delete("ENV"); + return [...constants].join(", "); + } catch (e) { + return functionString; + } +} +__name(getSelectorName, "getSelectorName"); + +// src/fromEnv.ts +var fromEnv = /* @__PURE__ */ __name((envVarSelector, logger) => async () => { + try { + const config = envVarSelector(process.env); + if (config === void 0) { + throw new Error(); + } + return config; + } catch (e) { + throw new import_property_provider.CredentialsProviderError( + e.message || `Not found in ENV: ${getSelectorName(envVarSelector.toString())}`, + { logger } + ); + } +}, "fromEnv"); + +// src/fromSharedConfigFiles.ts + +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); +var fromSharedConfigFiles = /* @__PURE__ */ __name((configSelector, { preferredFile = "config", ...init } = {}) => async () => { + const profile = (0, import_shared_ini_file_loader.getProfileName)(init); + const { configFile, credentialsFile } = await (0, import_shared_ini_file_loader.loadSharedConfigFiles)(init); + const profileFromCredentials = credentialsFile[profile] || {}; + const profileFromConfig = configFile[profile] || {}; + const mergedProfile = preferredFile === "config" ? { ...profileFromCredentials, ...profileFromConfig } : { ...profileFromConfig, ...profileFromCredentials }; + try { + const cfgFile = preferredFile === "config" ? 
configFile : credentialsFile; + const configValue = configSelector(mergedProfile, cfgFile); + if (configValue === void 0) { + throw new Error(); + } + return configValue; + } catch (e) { + throw new import_property_provider.CredentialsProviderError( + e.message || `Not found in config files w/ profile [${profile}]: ${getSelectorName(configSelector.toString())}`, + { logger: init.logger } + ); + } +}, "fromSharedConfigFiles"); + +// src/fromStatic.ts + +var isFunction = /* @__PURE__ */ __name((func) => typeof func === "function", "isFunction"); +var fromStatic = /* @__PURE__ */ __name((defaultValue) => isFunction(defaultValue) ? async () => await defaultValue() : (0, import_property_provider.fromStatic)(defaultValue), "fromStatic"); + +// src/configLoader.ts +var loadConfig = /* @__PURE__ */ __name(({ environmentVariableSelector, configFileSelector, default: defaultValue }, configuration = {}) => (0, import_property_provider.memoize)( + (0, import_property_provider.chain)( + fromEnv(environmentVariableSelector), + fromSharedConfigFiles(configFileSelector, configuration), + fromStatic(defaultValue) + ) +), "loadConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + loadConfig +}); + diff --git a/node_modules/@smithy/node-config-provider/dist-es/configLoader.js b/node_modules/@smithy/node-config-provider/dist-es/configLoader.js new file mode 100644 index 00000000..db044dd2 --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-es/configLoader.js @@ -0,0 +1,5 @@ +import { chain, memoize } from "@smithy/property-provider"; +import { fromEnv } from "./fromEnv"; +import { fromSharedConfigFiles } from "./fromSharedConfigFiles"; +import { fromStatic } from "./fromStatic"; +export const loadConfig = ({ environmentVariableSelector, configFileSelector, default: defaultValue }, configuration = {}) => memoize(chain(fromEnv(environmentVariableSelector), fromSharedConfigFiles(configFileSelector, configuration), fromStatic(defaultValue))); diff --git a/node_modules/@smithy/node-config-provider/dist-es/fromEnv.js b/node_modules/@smithy/node-config-provider/dist-es/fromEnv.js new file mode 100644 index 00000000..d43edbde --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-es/fromEnv.js @@ -0,0 +1,14 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { getSelectorName } from "./getSelectorName"; +export const fromEnv = (envVarSelector, logger) => async () => { + try { + const config = envVarSelector(process.env); + if (config === undefined) { + throw new Error(); + } + return config; + } + catch (e) { + throw new CredentialsProviderError(e.message || `Not found in ENV: ${getSelectorName(envVarSelector.toString())}`, { logger }); + } +}; diff --git a/node_modules/@smithy/node-config-provider/dist-es/fromSharedConfigFiles.js b/node_modules/@smithy/node-config-provider/dist-es/fromSharedConfigFiles.js new file mode 100644 index 00000000..b6435eda --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-es/fromSharedConfigFiles.js @@ -0,0 +1,23 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { getProfileName, loadSharedConfigFiles } from "@smithy/shared-ini-file-loader"; +import { getSelectorName } from "./getSelectorName"; +export const fromSharedConfigFiles = (configSelector, { preferredFile = "config", ...init } = {}) => async () => { + const profile = getProfileName(init); + const { configFile, credentialsFile } = await loadSharedConfigFiles(init); + const 
profileFromCredentials = credentialsFile[profile] || {}; + const profileFromConfig = configFile[profile] || {}; + const mergedProfile = preferredFile === "config" + ? { ...profileFromCredentials, ...profileFromConfig } + : { ...profileFromConfig, ...profileFromCredentials }; + try { + const cfgFile = preferredFile === "config" ? configFile : credentialsFile; + const configValue = configSelector(mergedProfile, cfgFile); + if (configValue === undefined) { + throw new Error(); + } + return configValue; + } + catch (e) { + throw new CredentialsProviderError(e.message || `Not found in config files w/ profile [${profile}]: ${getSelectorName(configSelector.toString())}`, { logger: init.logger }); + } +}; diff --git a/node_modules/@smithy/node-config-provider/dist-es/fromStatic.js b/node_modules/@smithy/node-config-provider/dist-es/fromStatic.js new file mode 100644 index 00000000..c9f91ffb --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-es/fromStatic.js @@ -0,0 +1,3 @@ +import { fromStatic as convertToProvider } from "@smithy/property-provider"; +const isFunction = (func) => typeof func === "function"; +export const fromStatic = (defaultValue) => isFunction(defaultValue) ? async () => await defaultValue() : convertToProvider(defaultValue); diff --git a/node_modules/@smithy/node-config-provider/dist-es/getSelectorName.js b/node_modules/@smithy/node-config-provider/dist-es/getSelectorName.js new file mode 100644 index 00000000..d5e0f782 --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-es/getSelectorName.js @@ -0,0 +1,12 @@ +export function getSelectorName(functionString) { + try { + const constants = new Set(Array.from(functionString.match(/([A-Z_]){3,}/g) ?? [])); + constants.delete("CONFIG"); + constants.delete("CONFIG_PREFIX_SEPARATOR"); + constants.delete("ENV"); + return [...constants].join(", "); + } + catch (e) { + return functionString; + } +} diff --git a/node_modules/@smithy/node-config-provider/dist-es/index.js b/node_modules/@smithy/node-config-provider/dist-es/index.js new file mode 100644 index 00000000..2d035d91 --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-es/index.js @@ -0,0 +1 @@ +export * from "./configLoader"; diff --git a/node_modules/@smithy/node-config-provider/dist-types/configLoader.d.ts b/node_modules/@smithy/node-config-provider/dist-types/configLoader.d.ts new file mode 100644 index 00000000..0d0b232b --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-types/configLoader.d.ts @@ -0,0 +1,31 @@ +import { Provider } from "@smithy/types"; +import { GetterFromEnv } from "./fromEnv"; +import { GetterFromConfig, SharedConfigInit } from "./fromSharedConfigFiles"; +import { FromStaticConfig } from "./fromStatic"; +/** + * @internal + */ +export type LocalConfigOptions = SharedConfigInit; +/** + * @internal + */ +export interface LoadedConfigSelectors<T> { + /** + * A getter function getting the config values from all the environment + * variables.
+ */ + environmentVariableSelector: GetterFromEnv<T>; + /** + * A getter function getting config values associated with the inferred + * profile from shared INI files + */ + configFileSelector: GetterFromConfig<T>; + /** + * Default value or getter + */ + default: FromStaticConfig<T>; +} +/** + * @internal + */ +export declare const loadConfig: <T = string>({ environmentVariableSelector, configFileSelector, default: defaultValue }: LoadedConfigSelectors<T>, configuration?: LocalConfigOptions) => Provider<T>; diff --git a/node_modules/@smithy/node-config-provider/dist-types/fromEnv.d.ts b/node_modules/@smithy/node-config-provider/dist-types/fromEnv.d.ts new file mode 100644 index 00000000..b2454c68 --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-types/fromEnv.d.ts @@ -0,0 +1,7 @@ +import { Logger, Provider } from "@smithy/types"; +export type GetterFromEnv<T> = (env: Record<string, string | undefined>) => T | undefined; +/** + * Get config value given the environment variable name or getter from + * environment variable. + */ +export declare const fromEnv: <T>(envVarSelector: GetterFromEnv<T>, logger?: Logger) => Provider<T>; diff --git a/node_modules/@smithy/node-config-provider/dist-types/fromSharedConfigFiles.d.ts b/node_modules/@smithy/node-config-provider/dist-types/fromSharedConfigFiles.d.ts new file mode 100644 index 00000000..89a8eac2 --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-types/fromSharedConfigFiles.d.ts @@ -0,0 +1,22 @@ +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import { ParsedIniData, Profile, Provider } from "@smithy/types"; +/** + * @internal + */ +export interface SharedConfigInit extends SourceProfileInit { + /** + * The preferred shared ini file to load the config. "config" option refers to + * the shared config file (defaults to `~/.aws/config`). "credentials" option + * refers to the shared credentials file (defaults to `~/.aws/credentials`) + */ + preferredFile?: "config" | "credentials"; +} +/** + * @internal + */ +export type GetterFromConfig<T> = (profile: Profile, configFile?: ParsedIniData) => T | undefined; +/** + * Get config value from the shared config files with inferred profile name. + * @internal + */ +export declare const fromSharedConfigFiles: <T>(configSelector: GetterFromConfig<T>, { preferredFile, ...init }?: SharedConfigInit) => Provider<T>; diff --git a/node_modules/@smithy/node-config-provider/dist-types/fromStatic.d.ts b/node_modules/@smithy/node-config-provider/dist-types/fromStatic.d.ts new file mode 100644 index 00000000..d2c32a44 --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-types/fromStatic.d.ts @@ -0,0 +1,9 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export type FromStaticConfig<T> = T | (() => T) | Provider<T>; +/** + * @internal + */ +export declare const fromStatic: <T>(defaultValue: FromStaticConfig<T>) => Provider<T>; diff --git a/node_modules/@smithy/node-config-provider/dist-types/getSelectorName.d.ts b/node_modules/@smithy/node-config-provider/dist-types/getSelectorName.d.ts new file mode 100644 index 00000000..b5f1a1b5 --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-types/getSelectorName.d.ts @@ -0,0 +1,9 @@ +/** + * Attempts to extract the name of the variable that the functional selector is looking for. + * Improves readability over the raw Function.toString() value. + * @internal + * @param functionString - function's string representation. + * + * @returns constant value used within the function.
+ */ +export declare function getSelectorName(functionString: string): string; diff --git a/node_modules/@smithy/node-config-provider/dist-types/index.d.ts b/node_modules/@smithy/node-config-provider/dist-types/index.d.ts new file mode 100644 index 00000000..2d035d91 --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-types/index.d.ts @@ -0,0 +1 @@ +export * from "./configLoader"; diff --git a/node_modules/@smithy/node-config-provider/dist-types/ts3.4/configLoader.d.ts b/node_modules/@smithy/node-config-provider/dist-types/ts3.4/configLoader.d.ts new file mode 100644 index 00000000..e8777317 --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-types/ts3.4/configLoader.d.ts @@ -0,0 +1,31 @@ +import { Provider } from "@smithy/types"; +import { GetterFromEnv } from "./fromEnv"; +import { GetterFromConfig, SharedConfigInit } from "./fromSharedConfigFiles"; +import { FromStaticConfig } from "./fromStatic"; +/** + * @internal + */ +export type LocalConfigOptions = SharedConfigInit; +/** + * @internal + */ +export interface LoadedConfigSelectors<T> { + /** + * A getter function getting the config values from all the environment + * variables. + */ + environmentVariableSelector: GetterFromEnv<T>; + /** + * A getter function getting config values associated with the inferred + * profile from shared INI files + */ + configFileSelector: GetterFromConfig<T>; + /** + * Default value or getter + */ + default: FromStaticConfig<T>; +} +/** + * @internal + */ +export declare const loadConfig: <T = string>({ environmentVariableSelector, configFileSelector, default: defaultValue }: LoadedConfigSelectors<T>, configuration?: LocalConfigOptions) => Provider<T>; diff --git a/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromEnv.d.ts b/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromEnv.d.ts new file mode 100644 index 00000000..e0a4cc7c --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromEnv.d.ts @@ -0,0 +1,7 @@ +import { Logger, Provider } from "@smithy/types"; +export type GetterFromEnv<T> = (env: Record<string, string | undefined>) => T | undefined; +/** + * Get config value given the environment variable name or getter from + * environment variable. + */ +export declare const fromEnv: <T>(envVarSelector: GetterFromEnv<T>, logger?: Logger) => Provider<T>; diff --git a/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromSharedConfigFiles.d.ts b/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromSharedConfigFiles.d.ts new file mode 100644 index 00000000..aa0efa0d --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromSharedConfigFiles.d.ts @@ -0,0 +1,22 @@ +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import { ParsedIniData, Profile, Provider } from "@smithy/types"; +/** + * @internal + */ +export interface SharedConfigInit extends SourceProfileInit { + /** + * The preferred shared ini file to load the config. "config" option refers to + * the shared config file (defaults to `~/.aws/config`). "credentials" option + * refers to the shared credentials file (defaults to `~/.aws/credentials`) + */ + preferredFile?: "config" | "credentials"; +} +/** + * @internal + */ +export type GetterFromConfig<T> = (profile: Profile, configFile?: ParsedIniData) => T | undefined; +/** + * Get config value from the shared config files with inferred profile name.
+ * @internal + */ +export declare const fromSharedConfigFiles: <T>(configSelector: GetterFromConfig<T>, { preferredFile, ...init }?: SharedConfigInit) => Provider<T>; diff --git a/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromStatic.d.ts b/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromStatic.d.ts new file mode 100644 index 00000000..a4bab2dd --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromStatic.d.ts @@ -0,0 +1,9 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export type FromStaticConfig<T> = T | (() => T) | Provider<T>; +/** + * @internal + */ +export declare const fromStatic: <T>(defaultValue: FromStaticConfig<T>) => Provider<T>; diff --git a/node_modules/@smithy/node-config-provider/dist-types/ts3.4/getSelectorName.d.ts b/node_modules/@smithy/node-config-provider/dist-types/ts3.4/getSelectorName.d.ts new file mode 100644 index 00000000..11c5da24 --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-types/ts3.4/getSelectorName.d.ts @@ -0,0 +1,9 @@ +/** + * Attempts to extract the name of the variable that the functional selector is looking for. + * Improves readability over the raw Function.toString() value. + * @internal + * @param functionString - function's string representation. + * + * @returns constant value used within the function. + */ +export declare function getSelectorName(functionString: string): string; diff --git a/node_modules/@smithy/node-config-provider/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/node-config-provider/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..74a76f57 --- /dev/null +++ b/node_modules/@smithy/node-config-provider/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./configLoader"; diff --git a/node_modules/@smithy/node-config-provider/package.json b/node_modules/@smithy/node-config-provider/package.json new file mode 100644 index 00000000..3002d8ea --- /dev/null +++ b/node_modules/@smithy/node-config-provider/package.json @@ -0,0 +1,65 @@ +{ + "name": "@smithy/node-config-provider", + "version": "4.0.2", + "description": "Load config default values from ini config files and environment variables", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline node-config-provider", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "email": "", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts":
"0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/node-config-provider", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/node-config-provider" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/node-http-handler/LICENSE b/node_modules/@smithy/node-http-handler/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/node-http-handler/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@smithy/node-http-handler/README.md b/node_modules/@smithy/node-http-handler/README.md new file mode 100644 index 00000000..214719f3 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/README.md @@ -0,0 +1,9 @@ +# @smithy/node-http-handler + +[![NPM version](https://img.shields.io/npm/v/@smithy/node-http-handler/latest.svg)](https://www.npmjs.com/package/@smithy/node-http-handler) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/node-http-handler.svg)](https://www.npmjs.com/package/@smithy/node-http-handler) + +This package implements the default `requestHandler` for Node.js using `node:http`, `node:https`, and `node:http2`. + +For an example of how `requestHandler`s are used by Smithy generated SDK clients, refer to +the [AWS SDK for JavaScript (v3) supplemental docs](https://github.com/aws/aws-sdk-js-v3/blob/main/supplemental-docs/CLIENTS.md#request-handler-requesthandler).
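A minimal configuration sketch, assuming a generated AWS SDK v3 consumer such as S3Client (the client name is illustrative, not something this package exports): the options connectionTimeout, requestTimeout, httpAgent, and httpsAgent are the ones read by resolveDefaultConfig in the bundled dist-cjs source below, which falls back to an agent with keepAlive: true and maxSockets: 50 when none is supplied.

import { Agent } from "https";
import { NodeHttpHandler } from "@smithy/node-http-handler";

// connectionTimeout bounds socket establishment (setConnectionTimeout below);
// requestTimeout bounds socket inactivity (setSocketTimeout below).
// Both reject the in-flight request with a TimeoutError when exceeded.
const requestHandler = new NodeHttpHandler({
  connectionTimeout: 1_000,
  requestTimeout: 5_000,
  // An explicit agent replaces the handler's default keepAlive/maxSockets agent.
  httpsAgent: new Agent({ keepAlive: true, maxSockets: 25 }),
});

// A generated client would then accept it as, e.g., new S3Client({ requestHandler }).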
diff --git a/node_modules/@smithy/node-http-handler/dist-cjs/constants.js b/node_modules/@smithy/node-http-handler/dist-cjs/constants.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-http-handler/dist-cjs/get-transformed-headers.js b/node_modules/@smithy/node-http-handler/dist-cjs/get-transformed-headers.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-cjs/get-transformed-headers.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-http-handler/dist-cjs/index.js b/node_modules/@smithy/node-http-handler/dist-cjs/index.js new file mode 100644 index 00000000..e31976f7 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-cjs/index.js @@ -0,0 +1,806 @@ +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + DEFAULT_REQUEST_TIMEOUT: () => DEFAULT_REQUEST_TIMEOUT, + NodeHttp2Handler: () => NodeHttp2Handler, + NodeHttpHandler: () => NodeHttpHandler, + streamCollector: () => streamCollector +}); +module.exports = __toCommonJS(src_exports); + +// src/node-http-handler.ts +var import_protocol_http = require("@smithy/protocol-http"); +var import_querystring_builder = require("@smithy/querystring-builder"); +var import_http = require("http"); +var import_https = require("https"); + +// src/constants.ts +var NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "EPIPE", "ETIMEDOUT"]; + +// src/get-transformed-headers.ts +var getTransformedHeaders = /* @__PURE__ */ __name((headers) => { + const transformedHeaders = {}; + for (const name of Object.keys(headers)) { + const headerValues = headers[name]; + transformedHeaders[name] = Array.isArray(headerValues) ? 
headerValues.join(",") : headerValues; + } + return transformedHeaders; +}, "getTransformedHeaders"); + +// src/timing.ts +var timing = { + setTimeout: (cb, ms) => setTimeout(cb, ms), + clearTimeout: (timeoutId) => clearTimeout(timeoutId) +}; + +// src/set-connection-timeout.ts +var DEFER_EVENT_LISTENER_TIME = 1e3; +var setConnectionTimeout = /* @__PURE__ */ __name((request, reject, timeoutInMs = 0) => { + if (!timeoutInMs) { + return -1; + } + const registerTimeout = /* @__PURE__ */ __name((offset) => { + const timeoutId = timing.setTimeout(() => { + request.destroy(); + reject( + Object.assign(new Error(`Socket timed out without establishing a connection within ${timeoutInMs} ms`), { + name: "TimeoutError" + }) + ); + }, timeoutInMs - offset); + const doWithSocket = /* @__PURE__ */ __name((socket) => { + if (socket?.connecting) { + socket.on("connect", () => { + timing.clearTimeout(timeoutId); + }); + } else { + timing.clearTimeout(timeoutId); + } + }, "doWithSocket"); + if (request.socket) { + doWithSocket(request.socket); + } else { + request.on("socket", doWithSocket); + } + }, "registerTimeout"); + if (timeoutInMs < 2e3) { + registerTimeout(0); + return 0; + } + return timing.setTimeout(registerTimeout.bind(null, DEFER_EVENT_LISTENER_TIME), DEFER_EVENT_LISTENER_TIME); +}, "setConnectionTimeout"); + +// src/set-socket-keep-alive.ts +var DEFER_EVENT_LISTENER_TIME2 = 3e3; +var setSocketKeepAlive = /* @__PURE__ */ __name((request, { keepAlive, keepAliveMsecs }, deferTimeMs = DEFER_EVENT_LISTENER_TIME2) => { + if (keepAlive !== true) { + return -1; + } + const registerListener = /* @__PURE__ */ __name(() => { + if (request.socket) { + request.socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + } else { + request.on("socket", (socket) => { + socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + }); + } + }, "registerListener"); + if (deferTimeMs === 0) { + registerListener(); + return 0; + } + return timing.setTimeout(registerListener, deferTimeMs); +}, "setSocketKeepAlive"); + +// src/set-socket-timeout.ts +var DEFER_EVENT_LISTENER_TIME3 = 3e3; +var setSocketTimeout = /* @__PURE__ */ __name((request, reject, timeoutInMs = DEFAULT_REQUEST_TIMEOUT) => { + const registerTimeout = /* @__PURE__ */ __name((offset) => { + const timeout = timeoutInMs - offset; + const onTimeout = /* @__PURE__ */ __name(() => { + request.destroy(); + reject(Object.assign(new Error(`Connection timed out after ${timeoutInMs} ms`), { name: "TimeoutError" })); + }, "onTimeout"); + if (request.socket) { + request.socket.setTimeout(timeout, onTimeout); + request.on("close", () => request.socket?.removeListener("timeout", onTimeout)); + } else { + request.setTimeout(timeout, onTimeout); + } + }, "registerTimeout"); + if (0 < timeoutInMs && timeoutInMs < 6e3) { + registerTimeout(0); + return 0; + } + return timing.setTimeout( + registerTimeout.bind(null, timeoutInMs === 0 ? 0 : DEFER_EVENT_LISTENER_TIME3), + DEFER_EVENT_LISTENER_TIME3 + ); +}, "setSocketTimeout"); + +// src/write-request-body.ts +var import_stream = require("stream"); +var MIN_WAIT_TIME = 6e3; +async function writeRequestBody(httpRequest, request, maxContinueTimeoutMs = MIN_WAIT_TIME) { + const headers = request.headers ?? 
{}; + const expect = headers["Expect"] || headers["expect"]; + let timeoutId = -1; + let sendBody = true; + if (expect === "100-continue") { + sendBody = await Promise.race([ + new Promise((resolve) => { + timeoutId = Number(timing.setTimeout(() => resolve(true), Math.max(MIN_WAIT_TIME, maxContinueTimeoutMs))); + }), + new Promise((resolve) => { + httpRequest.on("continue", () => { + timing.clearTimeout(timeoutId); + resolve(true); + }); + httpRequest.on("response", () => { + timing.clearTimeout(timeoutId); + resolve(false); + }); + httpRequest.on("error", () => { + timing.clearTimeout(timeoutId); + resolve(false); + }); + }) + ]); + } + if (sendBody) { + writeBody(httpRequest, request.body); + } +} +__name(writeRequestBody, "writeRequestBody"); +function writeBody(httpRequest, body) { + if (body instanceof import_stream.Readable) { + body.pipe(httpRequest); + return; + } + if (body) { + if (Buffer.isBuffer(body) || typeof body === "string") { + httpRequest.end(body); + return; + } + const uint8 = body; + if (typeof uint8 === "object" && uint8.buffer && typeof uint8.byteOffset === "number" && typeof uint8.byteLength === "number") { + httpRequest.end(Buffer.from(uint8.buffer, uint8.byteOffset, uint8.byteLength)); + return; + } + httpRequest.end(Buffer.from(body)); + return; + } + httpRequest.end(); +} +__name(writeBody, "writeBody"); + +// src/node-http-handler.ts +var DEFAULT_REQUEST_TIMEOUT = 0; +var NodeHttpHandler = class _NodeHttpHandler { + constructor(options) { + this.socketWarningTimestamp = 0; + // Node http handler is hard-coded to http/1.1: https://github.com/nodejs/node/blob/ff5664b83b89c55e4ab5d5f60068fb457f1f5872/lib/_http_server.js#L286 + this.metadata = { handlerProtocol: "http/1.1" }; + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options().then((_options) => { + resolve(this.resolveDefaultConfig(_options)); + }).catch(reject); + } else { + resolve(this.resolveDefaultConfig(options)); + } + }); + } + static { + __name(this, "NodeHttpHandler"); + } + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new _NodeHttpHandler(instanceOrOptions); + } + /** + * @internal + * + * @param agent - http(s) agent in use by the NodeHttpHandler instance. + * @param socketWarningTimestamp - last socket usage check timestamp. + * @param logger - channel for the warning. + * @returns timestamp of last emitted warning. + */ + static checkSocketUsage(agent, socketWarningTimestamp, logger = console) { + const { sockets, requests, maxSockets } = agent; + if (typeof maxSockets !== "number" || maxSockets === Infinity) { + return socketWarningTimestamp; + } + const interval = 15e3; + if (Date.now() - interval < socketWarningTimestamp) { + return socketWarningTimestamp; + } + if (sockets && requests) { + for (const origin in sockets) { + const socketsInUse = sockets[origin]?.length ?? 0; + const requestsEnqueued = requests[origin]?.length ?? 0; + if (socketsInUse >= maxSockets && requestsEnqueued >= 2 * maxSockets) { + logger?.warn?.( + `@smithy/node-http-handler:WARN - socket usage at capacity=${socketsInUse} and ${requestsEnqueued} additional requests are enqueued. 
+See https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/node-configuring-maxsockets.html +or increase socketAcquisitionWarningTimeout=(millis) in the NodeHttpHandler config.` + ); + return Date.now(); + } + } + } + return socketWarningTimestamp; + } + resolveDefaultConfig(options) { + const { requestTimeout, connectionTimeout, socketTimeout, socketAcquisitionWarningTimeout, httpAgent, httpsAgent } = options || {}; + const keepAlive = true; + const maxSockets = 50; + return { + connectionTimeout, + requestTimeout: requestTimeout ?? socketTimeout, + socketAcquisitionWarningTimeout, + httpAgent: (() => { + if (httpAgent instanceof import_http.Agent || typeof httpAgent?.destroy === "function") { + return httpAgent; + } + return new import_http.Agent({ keepAlive, maxSockets, ...httpAgent }); + })(), + httpsAgent: (() => { + if (httpsAgent instanceof import_https.Agent || typeof httpsAgent?.destroy === "function") { + return httpsAgent; + } + return new import_https.Agent({ keepAlive, maxSockets, ...httpsAgent }); + })(), + logger: console + }; + } + destroy() { + this.config?.httpAgent?.destroy(); + this.config?.httpsAgent?.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + return new Promise((_resolve, _reject) => { + let writeRequestBodyPromise = void 0; + const timeouts = []; + const resolve = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + timeouts.forEach(timing.clearTimeout); + _resolve(arg); + }, "resolve"); + const reject = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + timeouts.forEach(timing.clearTimeout); + _reject(arg); + }, "reject"); + if (!this.config) { + throw new Error("Node HTTP request handler config is not resolved"); + } + if (abortSignal?.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const isSSL = request.protocol === "https:"; + const agent = isSSL ? this.config.httpsAgent : this.config.httpAgent; + timeouts.push( + timing.setTimeout( + () => { + this.socketWarningTimestamp = _NodeHttpHandler.checkSocketUsage( + agent, + this.socketWarningTimestamp, + this.config.logger + ); + }, + this.config.socketAcquisitionWarningTimeout ?? (this.config.requestTimeout ?? 2e3) + (this.config.connectionTimeout ?? 1e3) + ) + ); + const queryString = (0, import_querystring_builder.buildQueryString)(request.query || {}); + let auth = void 0; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}`; + } + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + let hostname = request.hostname ?? ""; + if (hostname[0] === "[" && hostname.endsWith("]")) { + hostname = request.hostname.slice(1, -1); + } else { + hostname = request.hostname; + } + const nodeHttpsOptions = { + headers: request.headers, + host: hostname, + method: request.method, + path, + port: request.port, + agent, + auth + }; + const requestFunc = isSSL ? 
import_https.request : import_http.request; + const req = requestFunc(nodeHttpsOptions, (res) => { + const httpResponse = new import_protocol_http.HttpResponse({ + statusCode: res.statusCode || -1, + reason: res.statusMessage, + headers: getTransformedHeaders(res.headers), + body: res + }); + resolve({ response: httpResponse }); + }); + req.on("error", (err) => { + if (NODEJS_TIMEOUT_ERROR_CODES.includes(err.code)) { + reject(Object.assign(err, { name: "TimeoutError" })); + } else { + reject(err); + } + }); + if (abortSignal) { + const onAbort = /* @__PURE__ */ __name(() => { + req.destroy(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }, "onAbort"); + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + req.once("close", () => signal.removeEventListener("abort", onAbort)); + } else { + abortSignal.onabort = onAbort; + } + } + timeouts.push(setConnectionTimeout(req, reject, this.config.connectionTimeout)); + timeouts.push(setSocketTimeout(req, reject, this.config.requestTimeout)); + const httpAgent = nodeHttpsOptions.agent; + if (typeof httpAgent === "object" && "keepAlive" in httpAgent) { + timeouts.push( + setSocketKeepAlive(req, { + // @ts-expect-error keepAlive is not public on httpAgent. + keepAlive: httpAgent.keepAlive, + // @ts-expect-error keepAliveMsecs is not public on httpAgent. + keepAliveMsecs: httpAgent.keepAliveMsecs + }) + ); + } + writeRequestBodyPromise = writeRequestBody(req, request, this.config.requestTimeout).catch((e) => { + timeouts.forEach(timing.clearTimeout); + return _reject(e); + }); + }); + } + updateHttpClientConfig(key, value) { + this.config = void 0; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value + }; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } +}; + +// src/node-http2-handler.ts + + +var import_http22 = require("http2"); + +// src/node-http2-connection-manager.ts +var import_http2 = __toESM(require("http2")); + +// src/node-http2-connection-pool.ts +var NodeHttp2ConnectionPool = class { + constructor(sessions) { + this.sessions = []; + this.sessions = sessions ?? 
[]; + } + static { + __name(this, "NodeHttp2ConnectionPool"); + } + poll() { + if (this.sessions.length > 0) { + return this.sessions.shift(); + } + } + offerLast(session) { + this.sessions.push(session); + } + contains(session) { + return this.sessions.includes(session); + } + remove(session) { + this.sessions = this.sessions.filter((s) => s !== session); + } + [Symbol.iterator]() { + return this.sessions[Symbol.iterator](); + } + destroy(connection) { + for (const session of this.sessions) { + if (session === connection) { + if (!session.destroyed) { + session.destroy(); + } + } + } + } +}; + +// src/node-http2-connection-manager.ts +var NodeHttp2ConnectionManager = class { + constructor(config) { + this.sessionCache = /* @__PURE__ */ new Map(); + this.config = config; + if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { + throw new RangeError("maxConcurrency must be greater than zero."); + } + } + static { + __name(this, "NodeHttp2ConnectionManager"); + } + lease(requestContext, connectionConfiguration) { + const url = this.getUrlString(requestContext); + const existingPool = this.sessionCache.get(url); + if (existingPool) { + const existingSession = existingPool.poll(); + if (existingSession && !this.config.disableConcurrency) { + return existingSession; + } + } + const session = import_http2.default.connect(url); + if (this.config.maxConcurrency) { + session.settings({ maxConcurrentStreams: this.config.maxConcurrency }, (err) => { + if (err) { + throw new Error( + "Failed to set maxConcurrentStreams to " + this.config.maxConcurrency + " when creating new session for " + requestContext.destination.toString() + ); + } + }); + } + session.unref(); + const destroySessionCb = /* @__PURE__ */ __name(() => { + session.destroy(); + this.deleteSession(url, session); + }, "destroySessionCb"); + session.on("goaway", destroySessionCb); + session.on("error", destroySessionCb); + session.on("frameError", destroySessionCb); + session.on("close", () => this.deleteSession(url, session)); + if (connectionConfiguration.requestTimeout) { + session.setTimeout(connectionConfiguration.requestTimeout, destroySessionCb); + } + const connectionPool = this.sessionCache.get(url) || new NodeHttp2ConnectionPool(); + connectionPool.offerLast(session); + this.sessionCache.set(url, connectionPool); + return session; + } + /** + * Delete a session from the connection pool. + * @param authority The authority of the session to delete. + * @param session The session to delete.
+ */ + deleteSession(authority, session) { + const existingConnectionPool = this.sessionCache.get(authority); + if (!existingConnectionPool) { + return; + } + if (!existingConnectionPool.contains(session)) { + return; + } + existingConnectionPool.remove(session); + this.sessionCache.set(authority, existingConnectionPool); + } + release(requestContext, session) { + const cacheKey = this.getUrlString(requestContext); + this.sessionCache.get(cacheKey)?.offerLast(session); + } + destroy() { + for (const [key, connectionPool] of this.sessionCache) { + for (const session of connectionPool) { + if (!session.destroyed) { + session.destroy(); + } + connectionPool.remove(session); + } + this.sessionCache.delete(key); + } + } + setMaxConcurrentStreams(maxConcurrentStreams) { + if (maxConcurrentStreams && maxConcurrentStreams <= 0) { + throw new RangeError("maxConcurrentStreams must be greater than zero."); + } + this.config.maxConcurrency = maxConcurrentStreams; + } + setDisableConcurrentStreams(disableConcurrentStreams) { + this.config.disableConcurrency = disableConcurrentStreams; + } + getUrlString(request) { + return request.destination.toString(); + } +}; + +// src/node-http2-handler.ts +var NodeHttp2Handler = class _NodeHttp2Handler { + constructor(options) { + this.metadata = { handlerProtocol: "h2" }; + this.connectionManager = new NodeHttp2ConnectionManager({}); + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options().then((opts) => { + resolve(opts || {}); + }).catch(reject); + } else { + resolve(options || {}); + } + }); + } + static { + __name(this, "NodeHttp2Handler"); + } + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new _NodeHttp2Handler(instanceOrOptions); + } + destroy() { + this.connectionManager.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + this.connectionManager.setDisableConcurrentStreams(this.config.disableConcurrentStreams || false); + if (this.config.maxConcurrentStreams) { + this.connectionManager.setMaxConcurrentStreams(this.config.maxConcurrentStreams); + } + } + const { requestTimeout, disableConcurrentStreams } = this.config; + return new Promise((_resolve, _reject) => { + let fulfilled = false; + let writeRequestBodyPromise = void 0; + const resolve = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + _resolve(arg); + }, "resolve"); + const reject = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + _reject(arg); + }, "reject"); + if (abortSignal?.aborted) { + fulfilled = true; + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const { hostname, method, port, protocol, query } = request; + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}@`; + } + const authority = `${protocol}//${auth}${hostname}${port ? 
`:${port}` : ""}`; + const requestContext = { destination: new URL(authority) }; + const session = this.connectionManager.lease(requestContext, { + requestTimeout: this.config?.sessionTimeout, + disableConcurrentStreams: disableConcurrentStreams || false + }); + const rejectWithDestroy = /* @__PURE__ */ __name((err) => { + if (disableConcurrentStreams) { + this.destroySession(session); + } + fulfilled = true; + reject(err); + }, "rejectWithDestroy"); + const queryString = (0, import_querystring_builder.buildQueryString)(query || {}); + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + const req = session.request({ + ...request.headers, + [import_http22.constants.HTTP2_HEADER_PATH]: path, + [import_http22.constants.HTTP2_HEADER_METHOD]: method + }); + session.ref(); + req.on("response", (headers) => { + const httpResponse = new import_protocol_http.HttpResponse({ + statusCode: headers[":status"] || -1, + headers: getTransformedHeaders(headers), + body: req + }); + fulfilled = true; + resolve({ response: httpResponse }); + if (disableConcurrentStreams) { + session.close(); + this.connectionManager.deleteSession(authority, session); + } + }); + if (requestTimeout) { + req.setTimeout(requestTimeout, () => { + req.close(); + const timeoutError = new Error(`Stream timed out because of no activity for ${requestTimeout} ms`); + timeoutError.name = "TimeoutError"; + rejectWithDestroy(timeoutError); + }); + } + if (abortSignal) { + const onAbort = /* @__PURE__ */ __name(() => { + req.close(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + rejectWithDestroy(abortError); + }, "onAbort"); + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + req.once("close", () => signal.removeEventListener("abort", onAbort)); + } else { + abortSignal.onabort = onAbort; + } + } + req.on("frameError", (type, code, id) => { + rejectWithDestroy(new Error(`Frame type id ${type} in stream id ${id} has failed with code ${code}.`)); + }); + req.on("error", rejectWithDestroy); + req.on("aborted", () => { + rejectWithDestroy( + new Error(`HTTP/2 stream is abnormally aborted in mid-communication with result code ${req.rstCode}.`) + ); + }); + req.on("close", () => { + session.unref(); + if (disableConcurrentStreams) { + session.destroy(); + } + if (!fulfilled) { + rejectWithDestroy(new Error("Unexpected error: http2 request did not get a response")); + } + }); + writeRequestBodyPromise = writeRequestBody(req, request, requestTimeout); + }); + } + updateHttpClientConfig(key, value) { + this.config = void 0; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value + }; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } + /** + * Destroys a session. + * @param session - the session to destroy. 
+ */ + destroySession(session) { + if (!session.destroyed) { + session.destroy(); + } + } +}; + +// src/stream-collector/collector.ts + +var Collector = class extends import_stream.Writable { + constructor() { + super(...arguments); + this.bufferedBytes = []; + } + static { + __name(this, "Collector"); + } + _write(chunk, encoding, callback) { + this.bufferedBytes.push(chunk); + callback(); + } +}; + +// src/stream-collector/index.ts +var streamCollector = /* @__PURE__ */ __name((stream) => { + if (isReadableStreamInstance(stream)) { + return collectReadableStream(stream); + } + return new Promise((resolve, reject) => { + const collector = new Collector(); + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function() { + const bytes = new Uint8Array(Buffer.concat(this.bufferedBytes)); + resolve(bytes); + }); + }); +}, "streamCollector"); +var isReadableStreamInstance = /* @__PURE__ */ __name((stream) => typeof ReadableStream === "function" && stream instanceof ReadableStream, "isReadableStreamInstance"); +async function collectReadableStream(stream) { + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + let length = 0; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + length += value.length; + } + isDone = done; + } + const collected = new Uint8Array(length); + let offset = 0; + for (const chunk of chunks) { + collected.set(chunk, offset); + offset += chunk.length; + } + return collected; +} +__name(collectReadableStream, "collectReadableStream"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + DEFAULT_REQUEST_TIMEOUT, + NodeHttpHandler, + NodeHttp2Handler, + streamCollector +}); + diff --git a/node_modules/@smithy/node-http-handler/dist-cjs/node-http-handler.js b/node_modules/@smithy/node-http-handler/dist-cjs/node-http-handler.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-cjs/node-http-handler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-manager.js b/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-manager.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-manager.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-pool.js b/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-pool.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-pool.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-handler.js b/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-handler.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-handler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-http-handler/dist-cjs/readable.mock.js b/node_modules/@smithy/node-http-handler/dist-cjs/readable.mock.js new file mode 
100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-cjs/readable.mock.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-http-handler/dist-cjs/server.mock.js b/node_modules/@smithy/node-http-handler/dist-cjs/server.mock.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-cjs/server.mock.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-http-handler/dist-cjs/set-connection-timeout.js b/node_modules/@smithy/node-http-handler/dist-cjs/set-connection-timeout.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-cjs/set-connection-timeout.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-keep-alive.js b/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-keep-alive.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-keep-alive.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-timeout.js b/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-timeout.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-timeout.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/collector.js b/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/collector.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/collector.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/index.js b/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/readable.mock.js b/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/readable.mock.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/readable.mock.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-http-handler/dist-cjs/timing.js b/node_modules/@smithy/node-http-handler/dist-cjs/timing.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-cjs/timing.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-http-handler/dist-cjs/write-request-body.js b/node_modules/@smithy/node-http-handler/dist-cjs/write-request-body.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-cjs/write-request-body.js @@ -0,0 +1 @@ +module.exports = 
require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/node-http-handler/dist-es/constants.js b/node_modules/@smithy/node-http-handler/dist-es/constants.js new file mode 100644 index 00000000..0619d286 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-es/constants.js @@ -0,0 +1 @@ +export const NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "EPIPE", "ETIMEDOUT"]; diff --git a/node_modules/@smithy/node-http-handler/dist-es/get-transformed-headers.js b/node_modules/@smithy/node-http-handler/dist-es/get-transformed-headers.js new file mode 100644 index 00000000..562883c6 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-es/get-transformed-headers.js @@ -0,0 +1,9 @@ +const getTransformedHeaders = (headers) => { + const transformedHeaders = {}; + for (const name of Object.keys(headers)) { + const headerValues = headers[name]; + transformedHeaders[name] = Array.isArray(headerValues) ? headerValues.join(",") : headerValues; + } + return transformedHeaders; +}; +export { getTransformedHeaders }; diff --git a/node_modules/@smithy/node-http-handler/dist-es/index.js b/node_modules/@smithy/node-http-handler/dist-es/index.js new file mode 100644 index 00000000..09c0b9a5 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./node-http-handler"; +export * from "./node-http2-handler"; +export * from "./stream-collector"; diff --git a/node_modules/@smithy/node-http-handler/dist-es/node-http-handler.js b/node_modules/@smithy/node-http-handler/dist-es/node-http-handler.js new file mode 100644 index 00000000..f0ca1e7f --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-es/node-http-handler.js @@ -0,0 +1,209 @@ +import { HttpResponse } from "@smithy/protocol-http"; +import { buildQueryString } from "@smithy/querystring-builder"; +import { Agent as hAgent, request as hRequest } from "http"; +import { Agent as hsAgent, request as hsRequest } from "https"; +import { NODEJS_TIMEOUT_ERROR_CODES } from "./constants"; +import { getTransformedHeaders } from "./get-transformed-headers"; +import { setConnectionTimeout } from "./set-connection-timeout"; +import { setSocketKeepAlive } from "./set-socket-keep-alive"; +import { setSocketTimeout } from "./set-socket-timeout"; +import { timing } from "./timing"; +import { writeRequestBody } from "./write-request-body"; +export const DEFAULT_REQUEST_TIMEOUT = 0; +export class NodeHttpHandler { + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new NodeHttpHandler(instanceOrOptions); + } + static checkSocketUsage(agent, socketWarningTimestamp, logger = console) { + const { sockets, requests, maxSockets } = agent; + if (typeof maxSockets !== "number" || maxSockets === Infinity) { + return socketWarningTimestamp; + } + const interval = 15000; + if (Date.now() - interval < socketWarningTimestamp) { + return socketWarningTimestamp; + } + if (sockets && requests) { + for (const origin in sockets) { + const socketsInUse = sockets[origin]?.length ?? 0; + const requestsEnqueued = requests[origin]?.length ?? 0; + if (socketsInUse >= maxSockets && requestsEnqueued >= 2 * maxSockets) { + logger?.warn?.(`@smithy/node-http-handler:WARN - socket usage at capacity=${socketsInUse} and ${requestsEnqueued} additional requests are enqueued. 
+See https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/node-configuring-maxsockets.html +or increase socketAcquisitionWarningTimeout=(millis) in the NodeHttpHandler config.`); + return Date.now(); + } + } + } + return socketWarningTimestamp; + } + constructor(options) { + this.socketWarningTimestamp = 0; + this.metadata = { handlerProtocol: "http/1.1" }; + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options() + .then((_options) => { + resolve(this.resolveDefaultConfig(_options)); + }) + .catch(reject); + } + else { + resolve(this.resolveDefaultConfig(options)); + } + }); + } + resolveDefaultConfig(options) { + const { requestTimeout, connectionTimeout, socketTimeout, socketAcquisitionWarningTimeout, httpAgent, httpsAgent } = options || {}; + const keepAlive = true; + const maxSockets = 50; + return { + connectionTimeout, + requestTimeout: requestTimeout ?? socketTimeout, + socketAcquisitionWarningTimeout, + httpAgent: (() => { + if (httpAgent instanceof hAgent || typeof httpAgent?.destroy === "function") { + return httpAgent; + } + return new hAgent({ keepAlive, maxSockets, ...httpAgent }); + })(), + httpsAgent: (() => { + if (httpsAgent instanceof hsAgent || typeof httpsAgent?.destroy === "function") { + return httpsAgent; + } + return new hsAgent({ keepAlive, maxSockets, ...httpsAgent }); + })(), + logger: console, + }; + } + destroy() { + this.config?.httpAgent?.destroy(); + this.config?.httpsAgent?.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + return new Promise((_resolve, _reject) => { + let writeRequestBodyPromise = undefined; + const timeouts = []; + const resolve = async (arg) => { + await writeRequestBodyPromise; + timeouts.forEach(timing.clearTimeout); + _resolve(arg); + }; + const reject = async (arg) => { + await writeRequestBodyPromise; + timeouts.forEach(timing.clearTimeout); + _reject(arg); + }; + if (!this.config) { + throw new Error("Node HTTP request handler config is not resolved"); + } + if (abortSignal?.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const isSSL = request.protocol === "https:"; + const agent = isSSL ? this.config.httpsAgent : this.config.httpAgent; + timeouts.push(timing.setTimeout(() => { + this.socketWarningTimestamp = NodeHttpHandler.checkSocketUsage(agent, this.socketWarningTimestamp, this.config.logger); + }, this.config.socketAcquisitionWarningTimeout ?? + (this.config.requestTimeout ?? 2000) + (this.config.connectionTimeout ?? 1000))); + const queryString = buildQueryString(request.query || {}); + let auth = undefined; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}`; + } + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + let hostname = request.hostname ?? ""; + if (hostname[0] === "[" && hostname.endsWith("]")) { + hostname = request.hostname.slice(1, -1); + } + else { + hostname = request.hostname; + } + const nodeHttpsOptions = { + headers: request.headers, + host: hostname, + method: request.method, + path, + port: request.port, + agent, + auth, + }; + const requestFunc = isSSL ? 
hsRequest : hRequest; + const req = requestFunc(nodeHttpsOptions, (res) => { + const httpResponse = new HttpResponse({ + statusCode: res.statusCode || -1, + reason: res.statusMessage, + headers: getTransformedHeaders(res.headers), + body: res, + }); + resolve({ response: httpResponse }); + }); + req.on("error", (err) => { + if (NODEJS_TIMEOUT_ERROR_CODES.includes(err.code)) { + reject(Object.assign(err, { name: "TimeoutError" })); + } + else { + reject(err); + } + }); + if (abortSignal) { + const onAbort = () => { + req.destroy(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }; + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + req.once("close", () => signal.removeEventListener("abort", onAbort)); + } + else { + abortSignal.onabort = onAbort; + } + } + timeouts.push(setConnectionTimeout(req, reject, this.config.connectionTimeout)); + timeouts.push(setSocketTimeout(req, reject, this.config.requestTimeout)); + const httpAgent = nodeHttpsOptions.agent; + if (typeof httpAgent === "object" && "keepAlive" in httpAgent) { + timeouts.push(setSocketKeepAlive(req, { + keepAlive: httpAgent.keepAlive, + keepAliveMsecs: httpAgent.keepAliveMsecs, + })); + } + writeRequestBodyPromise = writeRequestBody(req, request, this.config.requestTimeout).catch((e) => { + timeouts.forEach(timing.clearTimeout); + return _reject(e); + }); + }); + } + updateHttpClientConfig(key, value) { + this.config = undefined; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value, + }; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } +} diff --git a/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-manager.js b/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-manager.js new file mode 100644 index 00000000..206d94f5 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-manager.js @@ -0,0 +1,86 @@ +import http2 from "http2"; +import { NodeHttp2ConnectionPool } from "./node-http2-connection-pool"; +export class NodeHttp2ConnectionManager { + constructor(config) { + this.sessionCache = new Map(); + this.config = config; + if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { + throw new RangeError("maxConcurrency must be greater than zero."); + } + } + lease(requestContext, connectionConfiguration) { + const url = this.getUrlString(requestContext); + const existingPool = this.sessionCache.get(url); + if (existingPool) { + const existingSession = existingPool.poll(); + if (existingSession && !this.config.disableConcurrency) { + return existingSession; + } + } + const session = http2.connect(url); + if (this.config.maxConcurrency) { + session.settings({ maxConcurrentStreams: this.config.maxConcurrency }, (err) => { + if (err) { + throw new Error("Fail to set maxConcurrentStreams to " + + this.config.maxConcurrency + + "when creating new session for " + + requestContext.destination.toString()); + } + }); + } + session.unref(); + const destroySessionCb = () => { + session.destroy(); + this.deleteSession(url, session); + }; + session.on("goaway", destroySessionCb); + session.on("error", destroySessionCb); + session.on("frameError", destroySessionCb); + session.on("close", () => this.deleteSession(url, session)); + if (connectionConfiguration.requestTimeout) { + session.setTimeout(connectionConfiguration.requestTimeout, 
destroySessionCb); + } + const connectionPool = this.sessionCache.get(url) || new NodeHttp2ConnectionPool(); + connectionPool.offerLast(session); + this.sessionCache.set(url, connectionPool); + return session; + } + deleteSession(authority, session) { + const existingConnectionPool = this.sessionCache.get(authority); + if (!existingConnectionPool) { + return; + } + if (!existingConnectionPool.contains(session)) { + return; + } + existingConnectionPool.remove(session); + this.sessionCache.set(authority, existingConnectionPool); + } + release(requestContext, session) { + const cacheKey = this.getUrlString(requestContext); + this.sessionCache.get(cacheKey)?.offerLast(session); + } + destroy() { + for (const [key, connectionPool] of this.sessionCache) { + for (const session of connectionPool) { + if (!session.destroyed) { + session.destroy(); + } + connectionPool.remove(session); + } + this.sessionCache.delete(key); + } + } + setMaxConcurrentStreams(maxConcurrentStreams) { + if (maxConcurrentStreams && maxConcurrentStreams <= 0) { + throw new RangeError("maxConcurrentStreams must be greater than zero."); + } + this.config.maxConcurrency = maxConcurrentStreams; + } + setDisableConcurrentStreams(disableConcurrentStreams) { + this.config.disableConcurrency = disableConcurrentStreams; + } + getUrlString(request) { + return request.destination.toString(); + } +} diff --git a/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-pool.js b/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-pool.js new file mode 100644 index 00000000..429eb49c --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-pool.js @@ -0,0 +1,32 @@ +export class NodeHttp2ConnectionPool { + constructor(sessions) { + this.sessions = []; + this.sessions = sessions ?? 
[]; + } + poll() { + if (this.sessions.length > 0) { + return this.sessions.shift(); + } + } + offerLast(session) { + this.sessions.push(session); + } + contains(session) { + return this.sessions.includes(session); + } + remove(session) { + this.sessions = this.sessions.filter((s) => s !== session); + } + [Symbol.iterator]() { + return this.sessions[Symbol.iterator](); + } + destroy(connection) { + for (const session of this.sessions) { + if (session === connection) { + if (!session.destroyed) { + session.destroy(); + } + } + } + } +} diff --git a/node_modules/@smithy/node-http-handler/dist-es/node-http2-handler.js b/node_modules/@smithy/node-http-handler/dist-es/node-http2-handler.js new file mode 100644 index 00000000..b68601ea --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-es/node-http2-handler.js @@ -0,0 +1,167 @@ +import { HttpResponse } from "@smithy/protocol-http"; +import { buildQueryString } from "@smithy/querystring-builder"; +import { constants } from "http2"; +import { getTransformedHeaders } from "./get-transformed-headers"; +import { NodeHttp2ConnectionManager } from "./node-http2-connection-manager"; +import { writeRequestBody } from "./write-request-body"; +export class NodeHttp2Handler { + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new NodeHttp2Handler(instanceOrOptions); + } + constructor(options) { + this.metadata = { handlerProtocol: "h2" }; + this.connectionManager = new NodeHttp2ConnectionManager({}); + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options() + .then((opts) => { + resolve(opts || {}); + }) + .catch(reject); + } + else { + resolve(options || {}); + } + }); + } + destroy() { + this.connectionManager.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + this.connectionManager.setDisableConcurrentStreams(this.config.disableConcurrentStreams || false); + if (this.config.maxConcurrentStreams) { + this.connectionManager.setMaxConcurrentStreams(this.config.maxConcurrentStreams); + } + } + const { requestTimeout, disableConcurrentStreams } = this.config; + return new Promise((_resolve, _reject) => { + let fulfilled = false; + let writeRequestBodyPromise = undefined; + const resolve = async (arg) => { + await writeRequestBodyPromise; + _resolve(arg); + }; + const reject = async (arg) => { + await writeRequestBodyPromise; + _reject(arg); + }; + if (abortSignal?.aborted) { + fulfilled = true; + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const { hostname, method, port, protocol, query } = request; + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}@`; + } + const authority = `${protocol}//${auth}${hostname}${port ? 
`:${port}` : ""}`; + const requestContext = { destination: new URL(authority) }; + const session = this.connectionManager.lease(requestContext, { + requestTimeout: this.config?.sessionTimeout, + disableConcurrentStreams: disableConcurrentStreams || false, + }); + const rejectWithDestroy = (err) => { + if (disableConcurrentStreams) { + this.destroySession(session); + } + fulfilled = true; + reject(err); + }; + const queryString = buildQueryString(query || {}); + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + const req = session.request({ + ...request.headers, + [constants.HTTP2_HEADER_PATH]: path, + [constants.HTTP2_HEADER_METHOD]: method, + }); + session.ref(); + req.on("response", (headers) => { + const httpResponse = new HttpResponse({ + statusCode: headers[":status"] || -1, + headers: getTransformedHeaders(headers), + body: req, + }); + fulfilled = true; + resolve({ response: httpResponse }); + if (disableConcurrentStreams) { + session.close(); + this.connectionManager.deleteSession(authority, session); + } + }); + if (requestTimeout) { + req.setTimeout(requestTimeout, () => { + req.close(); + const timeoutError = new Error(`Stream timed out because of no activity for ${requestTimeout} ms`); + timeoutError.name = "TimeoutError"; + rejectWithDestroy(timeoutError); + }); + } + if (abortSignal) { + const onAbort = () => { + req.close(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + rejectWithDestroy(abortError); + }; + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + req.once("close", () => signal.removeEventListener("abort", onAbort)); + } + else { + abortSignal.onabort = onAbort; + } + } + req.on("frameError", (type, code, id) => { + rejectWithDestroy(new Error(`Frame type id ${type} in stream id ${id} has failed with code ${code}.`)); + }); + req.on("error", rejectWithDestroy); + req.on("aborted", () => { + rejectWithDestroy(new Error(`HTTP/2 stream is abnormally aborted in mid-communication with result code ${req.rstCode}.`)); + }); + req.on("close", () => { + session.unref(); + if (disableConcurrentStreams) { + session.destroy(); + } + if (!fulfilled) { + rejectWithDestroy(new Error("Unexpected error: http2 request did not get a response")); + } + }); + writeRequestBodyPromise = writeRequestBody(req, request, requestTimeout); + }); + } + updateHttpClientConfig(key, value) { + this.config = undefined; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value, + }; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } + destroySession(session) { + if (!session.destroyed) { + session.destroy(); + } + } +} diff --git a/node_modules/@smithy/node-http-handler/dist-es/readable.mock.js b/node_modules/@smithy/node-http-handler/dist-es/readable.mock.js new file mode 100644 index 00000000..41fb0b67 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-es/readable.mock.js @@ -0,0 +1,19 @@ +import { Readable } from "stream"; +export class ReadFromBuffers extends Readable { + constructor(options) { + super(options); + this.numBuffersRead = 0; + this.buffersToRead = options.buffers; + this.errorAfter = typeof options.errorAfter === "number" ? 
options.errorAfter : -1; + } + _read() { + if (this.errorAfter !== -1 && this.errorAfter === this.numBuffersRead) { + this.emit("error", new Error("Mock Error")); + return; + } + if (this.numBuffersRead >= this.buffersToRead.length) { + return this.push(null); + } + return this.push(this.buffersToRead[this.numBuffersRead++]); + } +} diff --git a/node_modules/@smithy/node-http-handler/dist-es/server.mock.js b/node_modules/@smithy/node-http-handler/dist-es/server.mock.js new file mode 100644 index 00000000..6a31adf1 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-es/server.mock.js @@ -0,0 +1,88 @@ +import { readFileSync } from "fs"; +import { createServer as createHttpServer } from "http"; +import { createServer as createHttp2Server } from "http2"; +import { createServer as createHttpsServer } from "https"; +import { join } from "path"; +import { Readable } from "stream"; +import { timing } from "./timing"; +const fixturesDir = join(__dirname, "..", "fixtures"); +const setResponseHeaders = (response, headers) => { + for (const [key, value] of Object.entries(headers)) { + response.setHeader(key, value); + } +}; +const setResponseBody = (response, body) => { + if (body instanceof Readable) { + body.pipe(response); + } + else { + response.end(body); + } +}; +export const createResponseFunction = (httpResp) => (request, response) => { + response.statusCode = httpResp.statusCode; + if (httpResp.reason) { + response.statusMessage = httpResp.reason; + } + setResponseHeaders(response, httpResp.headers); + setResponseBody(response, httpResp.body); +}; +export const createResponseFunctionWithDelay = (httpResp, delay) => (request, response) => { + response.statusCode = httpResp.statusCode; + if (httpResp.reason) { + response.statusMessage = httpResp.reason; + } + setResponseHeaders(response, httpResp.headers); + timing.setTimeout(() => setResponseBody(response, httpResp.body), delay); +}; +export const createContinueResponseFunction = (httpResp) => (request, response) => { + response.writeContinue(); + timing.setTimeout(() => { + createResponseFunction(httpResp)(request, response); + }, 100); +}; +export const createMockHttpsServer = () => { + const server = createHttpsServer({ + key: readFileSync(join(fixturesDir, "test-server-key.pem")), + cert: readFileSync(join(fixturesDir, "test-server-cert.pem")), + }); + return server; +}; +export const createMockHttpServer = () => { + const server = createHttpServer(); + return server; +}; +export const createMockHttp2Server = () => { + const server = createHttp2Server(); + return server; +}; +export const createMirrorResponseFunction = (httpResp) => (request, response) => { + const bufs = []; + request.on("data", (chunk) => { + bufs.push(chunk); + }); + request.on("end", () => { + response.statusCode = httpResp.statusCode; + setResponseHeaders(response, httpResp.headers); + setResponseBody(response, Buffer.concat(bufs)); + }); + request.on("error", (err) => { + response.statusCode = 500; + setResponseHeaders(response, httpResp.headers); + setResponseBody(response, err.message); + }); +}; +export const getResponseBody = (response) => { + return new Promise((resolve, reject) => { + const bufs = []; + response.body.on("data", function (d) { + bufs.push(d); + }); + response.body.on("end", function () { + resolve(Buffer.concat(bufs).toString()); + }); + response.body.on("error", (err) => { + reject(err); + }); + }); +}; diff --git a/node_modules/@smithy/node-http-handler/dist-es/set-connection-timeout.js 
b/node_modules/@smithy/node-http-handler/dist-es/set-connection-timeout.js new file mode 100644 index 00000000..587532e7 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-es/set-connection-timeout.js @@ -0,0 +1,36 @@ +import { timing } from "./timing"; +const DEFER_EVENT_LISTENER_TIME = 1000; +export const setConnectionTimeout = (request, reject, timeoutInMs = 0) => { + if (!timeoutInMs) { + return -1; + } + const registerTimeout = (offset) => { + const timeoutId = timing.setTimeout(() => { + request.destroy(); + reject(Object.assign(new Error(`Socket timed out without establishing a connection within ${timeoutInMs} ms`), { + name: "TimeoutError", + })); + }, timeoutInMs - offset); + const doWithSocket = (socket) => { + if (socket?.connecting) { + socket.on("connect", () => { + timing.clearTimeout(timeoutId); + }); + } + else { + timing.clearTimeout(timeoutId); + } + }; + if (request.socket) { + doWithSocket(request.socket); + } + else { + request.on("socket", doWithSocket); + } + }; + if (timeoutInMs < 2000) { + registerTimeout(0); + return 0; + } + return timing.setTimeout(registerTimeout.bind(null, DEFER_EVENT_LISTENER_TIME), DEFER_EVENT_LISTENER_TIME); +}; diff --git a/node_modules/@smithy/node-http-handler/dist-es/set-socket-keep-alive.js b/node_modules/@smithy/node-http-handler/dist-es/set-socket-keep-alive.js new file mode 100644 index 00000000..18391a83 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-es/set-socket-keep-alive.js @@ -0,0 +1,22 @@ +import { timing } from "./timing"; +const DEFER_EVENT_LISTENER_TIME = 3000; +export const setSocketKeepAlive = (request, { keepAlive, keepAliveMsecs }, deferTimeMs = DEFER_EVENT_LISTENER_TIME) => { + if (keepAlive !== true) { + return -1; + } + const registerListener = () => { + if (request.socket) { + request.socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + } + else { + request.on("socket", (socket) => { + socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + }); + } + }; + if (deferTimeMs === 0) { + registerListener(); + return 0; + } + return timing.setTimeout(registerListener, deferTimeMs); +}; diff --git a/node_modules/@smithy/node-http-handler/dist-es/set-socket-timeout.js b/node_modules/@smithy/node-http-handler/dist-es/set-socket-timeout.js new file mode 100644 index 00000000..5c4456c9 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-es/set-socket-timeout.js @@ -0,0 +1,24 @@ +import { DEFAULT_REQUEST_TIMEOUT } from "./node-http-handler"; +import { timing } from "./timing"; +const DEFER_EVENT_LISTENER_TIME = 3000; +export const setSocketTimeout = (request, reject, timeoutInMs = DEFAULT_REQUEST_TIMEOUT) => { + const registerTimeout = (offset) => { + const timeout = timeoutInMs - offset; + const onTimeout = () => { + request.destroy(); + reject(Object.assign(new Error(`Connection timed out after ${timeoutInMs} ms`), { name: "TimeoutError" })); + }; + if (request.socket) { + request.socket.setTimeout(timeout, onTimeout); + request.on("close", () => request.socket?.removeListener("timeout", onTimeout)); + } + else { + request.setTimeout(timeout, onTimeout); + } + }; + if (0 < timeoutInMs && timeoutInMs < 6000) { + registerTimeout(0); + return 0; + } + return timing.setTimeout(registerTimeout.bind(null, timeoutInMs === 0 ? 
0 : DEFER_EVENT_LISTENER_TIME), DEFER_EVENT_LISTENER_TIME); +}; diff --git a/node_modules/@smithy/node-http-handler/dist-es/stream-collector/collector.js b/node_modules/@smithy/node-http-handler/dist-es/stream-collector/collector.js new file mode 100644 index 00000000..c3737e9f --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-es/stream-collector/collector.js @@ -0,0 +1,11 @@ +import { Writable } from "stream"; +export class Collector extends Writable { + constructor() { + super(...arguments); + this.bufferedBytes = []; + } + _write(chunk, encoding, callback) { + this.bufferedBytes.push(chunk); + callback(); + } +} diff --git a/node_modules/@smithy/node-http-handler/dist-es/stream-collector/index.js b/node_modules/@smithy/node-http-handler/dist-es/stream-collector/index.js new file mode 100644 index 00000000..8ff09c01 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-es/stream-collector/index.js @@ -0,0 +1,41 @@ +import { Collector } from "./collector"; +export const streamCollector = (stream) => { + if (isReadableStreamInstance(stream)) { + return collectReadableStream(stream); + } + return new Promise((resolve, reject) => { + const collector = new Collector(); + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function () { + const bytes = new Uint8Array(Buffer.concat(this.bufferedBytes)); + resolve(bytes); + }); + }); +}; +const isReadableStreamInstance = (stream) => typeof ReadableStream === "function" && stream instanceof ReadableStream; +async function collectReadableStream(stream) { + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + let length = 0; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + length += value.length; + } + isDone = done; + } + const collected = new Uint8Array(length); + let offset = 0; + for (const chunk of chunks) { + collected.set(chunk, offset); + offset += chunk.length; + } + return collected; +} diff --git a/node_modules/@smithy/node-http-handler/dist-es/stream-collector/readable.mock.js b/node_modules/@smithy/node-http-handler/dist-es/stream-collector/readable.mock.js new file mode 100644 index 00000000..2f653c50 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-es/stream-collector/readable.mock.js @@ -0,0 +1,19 @@ +import { Readable } from "stream"; +export class ReadFromBuffers extends Readable { + constructor(options) { + super(options); + this.numBuffersRead = 0; + this.buffersToRead = options.buffers; + this.errorAfter = typeof options.errorAfter === "number" ? 
options.errorAfter : -1; + } + _read(size) { + if (this.errorAfter !== -1 && this.errorAfter === this.numBuffersRead) { + this.emit("error", new Error("Mock Error")); + return; + } + if (this.numBuffersRead >= this.buffersToRead.length) { + return this.push(null); + } + return this.push(this.buffersToRead[this.numBuffersRead++]); + } +} diff --git a/node_modules/@smithy/node-http-handler/dist-es/timing.js b/node_modules/@smithy/node-http-handler/dist-es/timing.js new file mode 100644 index 00000000..792ba484 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-es/timing.js @@ -0,0 +1,4 @@ +export const timing = { + setTimeout: (cb, ms) => setTimeout(cb, ms), + clearTimeout: (timeoutId) => clearTimeout(timeoutId), +}; diff --git a/node_modules/@smithy/node-http-handler/dist-es/write-request-body.js b/node_modules/@smithy/node-http-handler/dist-es/write-request-body.js new file mode 100644 index 00000000..36e15f9d --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-es/write-request-body.js @@ -0,0 +1,56 @@ +import { Readable } from "stream"; +import { timing } from "./timing"; +const MIN_WAIT_TIME = 6000; +export async function writeRequestBody(httpRequest, request, maxContinueTimeoutMs = MIN_WAIT_TIME) { + const headers = request.headers ?? {}; + const expect = headers["Expect"] || headers["expect"]; + let timeoutId = -1; + let sendBody = true; + if (expect === "100-continue") { + sendBody = await Promise.race([ + new Promise((resolve) => { + timeoutId = Number(timing.setTimeout(() => resolve(true), Math.max(MIN_WAIT_TIME, maxContinueTimeoutMs))); + }), + new Promise((resolve) => { + httpRequest.on("continue", () => { + timing.clearTimeout(timeoutId); + resolve(true); + }); + httpRequest.on("response", () => { + timing.clearTimeout(timeoutId); + resolve(false); + }); + httpRequest.on("error", () => { + timing.clearTimeout(timeoutId); + resolve(false); + }); + }), + ]); + } + if (sendBody) { + writeBody(httpRequest, request.body); + } +} +function writeBody(httpRequest, body) { + if (body instanceof Readable) { + body.pipe(httpRequest); + return; + } + if (body) { + if (Buffer.isBuffer(body) || typeof body === "string") { + httpRequest.end(body); + return; + } + const uint8 = body; + if (typeof uint8 === "object" && + uint8.buffer && + typeof uint8.byteOffset === "number" && + typeof uint8.byteLength === "number") { + httpRequest.end(Buffer.from(uint8.buffer, uint8.byteOffset, uint8.byteLength)); + return; + } + httpRequest.end(Buffer.from(body)); + return; + } + httpRequest.end(); +} diff --git a/node_modules/@smithy/node-http-handler/dist-types/constants.d.ts b/node_modules/@smithy/node-http-handler/dist-types/constants.d.ts new file mode 100644 index 00000000..35404617 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-types/constants.d.ts @@ -0,0 +1,5 @@ +/** + * Node.js system error codes that indicate timeout. 
+ * @deprecated use NODEJS_TIMEOUT_ERROR_CODES from @smithy/service-error-classification/constants
+ */
+export declare const NODEJS_TIMEOUT_ERROR_CODES: string[];
diff --git a/node_modules/@smithy/node-http-handler/dist-types/get-transformed-headers.d.ts b/node_modules/@smithy/node-http-handler/dist-types/get-transformed-headers.d.ts
new file mode 100644
index 00000000..bb7cd4e8
--- /dev/null
+++ b/node_modules/@smithy/node-http-handler/dist-types/get-transformed-headers.d.ts
@@ -0,0 +1,4 @@
+import { HeaderBag } from "@smithy/types";
+import { IncomingHttpHeaders } from "http2";
+declare const getTransformedHeaders: (headers: IncomingHttpHeaders) => HeaderBag;
+export { getTransformedHeaders };
diff --git a/node_modules/@smithy/node-http-handler/dist-types/index.d.ts b/node_modules/@smithy/node-http-handler/dist-types/index.d.ts
new file mode 100644
index 00000000..09c0b9a5
--- /dev/null
+++ b/node_modules/@smithy/node-http-handler/dist-types/index.d.ts
@@ -0,0 +1,3 @@
+export * from "./node-http-handler";
+export * from "./node-http2-handler";
+export * from "./stream-collector";
diff --git a/node_modules/@smithy/node-http-handler/dist-types/node-http-handler.d.ts b/node_modules/@smithy/node-http-handler/dist-types/node-http-handler.d.ts
new file mode 100644
index 00000000..b120313b
--- /dev/null
+++ b/node_modules/@smithy/node-http-handler/dist-types/node-http-handler.d.ts
@@ -0,0 +1,47 @@
+/// <reference types="node" />
+/// <reference types="node" />
+import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http";
+import type { Logger, NodeHttpHandlerOptions } from "@smithy/types";
+import { HttpHandlerOptions, Provider } from "@smithy/types";
+import { Agent as hAgent } from "http";
+import { Agent as hsAgent } from "https";
+export { NodeHttpHandlerOptions };
+/**
+ * @public
+ * A default of 0 means no timeout.
+ */
+export declare const DEFAULT_REQUEST_TIMEOUT = 0;
+/**
+ * @public
+ * A request handler that uses the Node.js http and https modules.
+ */
+export declare class NodeHttpHandler implements HttpHandler<NodeHttpHandlerOptions> {
+    private config?;
+    private configProvider;
+    private socketWarningTimestamp;
+    readonly metadata: {
+        handlerProtocol: string;
+    };
+    /**
+     * @returns the input if it is an HttpHandler of any class,
+     * or instantiates a new instance of this handler.
+     */
+    static create(instanceOrOptions?: HttpHandler<any> | NodeHttpHandlerOptions | Provider<NodeHttpHandlerOptions | void>): NodeHttpHandler | HttpHandler<any>;
+    /**
+     * @internal
+     *
+     * @param agent - http(s) agent in use by the NodeHttpHandler instance.
+     * @param socketWarningTimestamp - last socket usage check timestamp.
+     * @param logger - channel for the warning.
+     * @returns timestamp of last emitted warning.
+     */
+    static checkSocketUsage(agent: hAgent | hsAgent, socketWarningTimestamp: number, logger?: Logger): number;
+    constructor(options?: NodeHttpHandlerOptions | Provider<NodeHttpHandlerOptions | void>);
+    private resolveDefaultConfig;
+    destroy(): void;
+    handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{
+        response: HttpResponse;
+    }>;
+    updateHttpClientConfig(key: keyof NodeHttpHandlerOptions, value: NodeHttpHandlerOptions[typeof key]): void;
+    httpHandlerConfigs(): NodeHttpHandlerOptions;
+}
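
For reference, the options declared above map onto the resolveDefaultConfig logic in the dist-es source earlier in this diff (keepAlive: true and maxSockets: 50 unless overridden). A minimal tuning sketch follows; the numbers and agent settings are illustrative assumptions, only the option names come from the declaration:

    // Sketch: tuning NodeHttpHandler. All values below are illustrative, not defaults.
    const { NodeHttpHandler } = require("@smithy/node-http-handler");
    const { Agent } = require("https");

    const handler = new NodeHttpHandler({
      connectionTimeout: 1000,               // ms allowed to establish a socket
      requestTimeout: 5000,                  // ms of socket inactivity before TimeoutError
      httpsAgent: new Agent({ keepAlive: true, maxSockets: 25 }),
      socketAcquisitionWarningTimeout: 3000, // when checkSocketUsage may start warning
    });
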
diff --git a/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-manager.d.ts b/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-manager.d.ts
new file mode 100644
index 00000000..24bc3b5f
--- /dev/null
+++ b/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-manager.d.ts
@@ -0,0 +1,25 @@
+/// <reference types="node" />
+import { RequestContext } from "@smithy/types";
+import { ConnectConfiguration } from "@smithy/types";
+import { ConnectionManager, ConnectionManagerConfiguration } from "@smithy/types";
+import { ClientHttp2Session } from "http2";
+/**
+ * @public
+ */
+export declare class NodeHttp2ConnectionManager implements ConnectionManager<ClientHttp2Session> {
+    constructor(config: ConnectionManagerConfiguration);
+    private config;
+    private readonly sessionCache;
+    lease(requestContext: RequestContext, connectionConfiguration: ConnectConfiguration): ClientHttp2Session;
+    /**
+     * Delete a session from the connection pool.
+     * @param authority The authority of the session to delete.
+     * @param session The session to delete.
+     */
+    deleteSession(authority: string, session: ClientHttp2Session): void;
+    release(requestContext: RequestContext, session: ClientHttp2Session): void;
+    destroy(): void;
+    setMaxConcurrentStreams(maxConcurrentStreams: number): void;
+    setDisableConcurrentStreams(disableConcurrentStreams: boolean): void;
+    private getUrlString;
+}
diff --git a/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-pool.d.ts b/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-pool.d.ts
new file mode 100644
index 00000000..6695893a
--- /dev/null
+++ b/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-pool.d.ts
@@ -0,0 +1,13 @@
+/// <reference types="node" />
+import { ConnectionPool } from "@smithy/types";
+import { ClientHttp2Session } from "http2";
+export declare class NodeHttp2ConnectionPool implements ConnectionPool<ClientHttp2Session> {
+    private sessions;
+    constructor(sessions?: ClientHttp2Session[]);
+    poll(): ClientHttp2Session | void;
+    offerLast(session: ClientHttp2Session): void;
+    contains(session: ClientHttp2Session): boolean;
+    remove(session: ClientHttp2Session): void;
+    [Symbol.iterator](): IterableIterator<ClientHttp2Session>;
+    destroy(connection: ClientHttp2Session): void;
+}
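
NodeHttp2ConnectionPool above is a plain FIFO of ClientHttp2Session objects, and NodeHttp2ConnectionManager keys one pool per destination authority. Neither class is re-exported from the package index, so the leasing contract is easiest to see in a stand-alone mimic; the Map, the stub session objects, and the lease function below are illustrative stand-ins, not package API:

    // Stand-alone mimic of lease()/offerLast(): one FIFO pool per authority.
    // Stub objects stand in for real http2 ClientHttp2Session instances.
    const sessionCache = new Map();

    function lease(authority) {
      const pool = sessionCache.get(authority) ?? [];
      const existing = pool.shift();                              // poll(): take the oldest session
      const session = existing ?? { authority, destroyed: false } // stub for http2.connect()
      pool.push(session);                                         // offerLast(): keep it leasable
      sessionCache.set(authority, pool);
      return session;
    }

    const a = lease("https://example.com:443");
    const b = lease("https://example.com:443");
    console.log(a === b); // true: the same session is reused for one authority
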
diff --git a/node_modules/@smithy/node-http-handler/dist-types/node-http2-handler.d.ts b/node_modules/@smithy/node-http-handler/dist-types/node-http2-handler.d.ts
new file mode 100644
index 00000000..68610a9b
--- /dev/null
+++ b/node_modules/@smithy/node-http-handler/dist-types/node-http2-handler.d.ts
@@ -0,0 +1,62 @@
+import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http";
+import { HttpHandlerOptions, Provider } from "@smithy/types";
+/**
+ * Represents the http2 options that can be passed to a node http2 client.
+ * @public
+ */
+export interface NodeHttp2HandlerOptions {
+    /**
+     * The maximum time in milliseconds that a stream may remain idle before it
+     * is closed.
+     */
+    requestTimeout?: number;
+    /**
+     * The maximum time in milliseconds that a session or socket may remain idle
+     * before it is closed.
+     * https://nodejs.org/docs/latest-v12.x/api/http2.html#http2_http2session_and_sockets
+     */
+    sessionTimeout?: number;
+    /**
+     * Disables processing concurrent streams on a ClientHttp2Session instance. When set
+     * to true, a new session instance is created for each request to a URL.
+     * **Default:** false.
+     * https://nodejs.org/api/http2.html#http2_class_clienthttp2session
+     */
+    disableConcurrentStreams?: boolean;
+    /**
+     * Maximum number of concurrent Http2Stream instances per ClientHttp2Session. Each session
+     * may have up to 2^31-1 Http2Stream instances over its lifetime.
+     * This value must be greater than or equal to 0.
+     * https://nodejs.org/api/http2.html#class-http2stream
+     */
+    maxConcurrentStreams?: number;
+}
+/**
+ * A request handler using the node:http2 package.
+ * @public
+ */
+export declare class NodeHttp2Handler implements HttpHandler<NodeHttp2HandlerOptions> {
+    private config?;
+    private configProvider;
+    readonly metadata: {
+        handlerProtocol: string;
+    };
+    private readonly connectionManager;
+    /**
+     * @returns the input if it is an HttpHandler of any class,
+     * or instantiates a new instance of this handler.
+     */
+    static create(instanceOrOptions?: HttpHandler<any> | NodeHttp2HandlerOptions | Provider<NodeHttp2HandlerOptions | void>): HttpHandler<any> | NodeHttp2Handler;
+    constructor(options?: NodeHttp2HandlerOptions | Provider<NodeHttp2HandlerOptions | void>);
+    destroy(): void;
+    handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{
+        response: HttpResponse;
+    }>;
+    updateHttpClientConfig(key: keyof NodeHttp2HandlerOptions, value: NodeHttp2HandlerOptions[typeof key]): void;
+    httpHandlerConfigs(): NodeHttp2HandlerOptions;
+    /**
+     * Destroys a session.
+     * @param session - the session to destroy.
+     */
+    private destroySession;
+}
diff --git a/node_modules/@smithy/node-http-handler/dist-types/readable.mock.d.ts b/node_modules/@smithy/node-http-handler/dist-types/readable.mock.d.ts
new file mode 100644
index 00000000..cd7e77f0
--- /dev/null
+++ b/node_modules/@smithy/node-http-handler/dist-types/readable.mock.d.ts
@@ -0,0 +1,15 @@
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+import { Readable, ReadableOptions } from "stream";
+export interface ReadFromBuffersOptions extends ReadableOptions {
+    buffers: Buffer[];
+    errorAfter?: number;
+}
+export declare class ReadFromBuffers extends Readable {
+    private buffersToRead;
+    private numBuffersRead;
+    private errorAfter;
+    constructor(options: ReadFromBuffersOptions);
+    _read(): boolean | undefined;
+}
diff --git a/node_modules/@smithy/node-http-handler/dist-types/server.mock.d.ts b/node_modules/@smithy/node-http-handler/dist-types/server.mock.d.ts
new file mode 100644
index 00000000..585a6773
--- /dev/null
+++ b/node_modules/@smithy/node-http-handler/dist-types/server.mock.d.ts
@@ -0,0 +1,12 @@
+import { HttpResponse } from "@smithy/types";
+import { IncomingMessage, Server as HttpServer, ServerResponse } from "http";
+import { Http2Server } from "http2";
+import { Server as HttpsServer } from "https";
+export declare const createResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void;
+export declare const createResponseFunctionWithDelay: (httpResp: HttpResponse, delay: number) => (request: IncomingMessage, response: ServerResponse) => void;
+export declare const createContinueResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void;
+export declare const createMockHttpsServer: () => HttpsServer;
+export declare const createMockHttpServer: () => HttpServer;
+export declare const createMockHttp2Server: () => Http2Server;
+export declare const createMirrorResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void;
+export declare const getResponseBody: (response: HttpResponse) => Promise<string>;
diff --git a/node_modules/@smithy/node-http-handler/dist-types/set-connection-timeout.d.ts b/node_modules/@smithy/node-http-handler/dist-types/set-connection-timeout.d.ts
new file mode 100644
index 00000000..57b811f7
--- /dev/null
+++ b/node_modules/@smithy/node-http-handler/dist-types/set-connection-timeout.d.ts
@@ -0,0 +1,9 @@
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+import { ClientRequest } from "http";
+export declare const setConnectionTimeout: (request: ClientRequest, reject: (err: Error) => void, timeoutInMs?: number) => NodeJS.Timeout | number;
diff --git a/node_modules/@smithy/node-http-handler/dist-types/set-socket-keep-alive.d.ts b/node_modules/@smithy/node-http-handler/dist-types/set-socket-keep-alive.d.ts
new file mode 100644
index 00000000..80507d8b
--- /dev/null
+++ b/node_modules/@smithy/node-http-handler/dist-types/set-socket-keep-alive.d.ts
@@ -0,0 +1,13 @@
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+import { ClientRequest } from "http";
+export interface SocketKeepAliveOptions {
+    keepAlive: boolean;
+    keepAliveMsecs?: number;
+}
+export declare const setSocketKeepAlive: (request: ClientRequest, { keepAlive, keepAliveMsecs }: SocketKeepAliveOptions, deferTimeMs?: number) => NodeJS.Timeout | number;
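
Putting the NodeHttp2Handler declaration above to work, here is a hedged end-to-end sketch of driving the HTTP/2 handler directly; the endpoint and all option values are illustrative assumptions:

    // Sketch: NodeHttp2Handler with the options documented above.
    const { NodeHttp2Handler } = require("@smithy/node-http-handler");
    const { HttpRequest } = require("@smithy/protocol-http");

    const handler = new NodeHttp2Handler({
      requestTimeout: 5000,            // idle ms allowed per stream
      sessionTimeout: 60000,           // idle ms allowed per session/socket
      disableConcurrentStreams: false, // share one session per authority
      maxConcurrentStreams: 100,
    });

    const request = new HttpRequest({
      protocol: "https:",
      hostname: "example.com",         // illustrative endpoint
      method: "GET",
      path: "/",
      headers: { host: "example.com" },
    });

    handler.handle(request).then(({ response }) => {
      console.log(response.statusCode);
    });
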
diff --git a/node_modules/@smithy/node-http-handler/dist-types/set-socket-timeout.d.ts b/node_modules/@smithy/node-http-handler/dist-types/set-socket-timeout.d.ts
new file mode 100644
index 00000000..019a62ba
--- /dev/null
+++ b/node_modules/@smithy/node-http-handler/dist-types/set-socket-timeout.d.ts
@@ -0,0 +1,9 @@
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+import { ClientRequest } from "http";
+export declare const setSocketTimeout: (request: ClientRequest, reject: (err: Error) => void, timeoutInMs?: number) => NodeJS.Timeout | number;
diff --git a/node_modules/@smithy/node-http-handler/dist-types/stream-collector/collector.d.ts b/node_modules/@smithy/node-http-handler/dist-types/stream-collector/collector.d.ts
new file mode 100644
index 00000000..b7d4d125
--- /dev/null
+++ b/node_modules/@smithy/node-http-handler/dist-types/stream-collector/collector.d.ts
@@ -0,0 +1,8 @@
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+import { Writable } from "stream";
+export declare class Collector extends Writable {
+    readonly bufferedBytes: Buffer[];
+    _write(chunk: Buffer, encoding: string, callback: (err?: Error) => void): void;
+}
diff --git a/node_modules/@smithy/node-http-handler/dist-types/stream-collector/index.d.ts b/node_modules/@smithy/node-http-handler/dist-types/stream-collector/index.d.ts
new file mode 100644
index 00000000..a9a9498d
--- /dev/null
+++ b/node_modules/@smithy/node-http-handler/dist-types/stream-collector/index.d.ts
@@ -0,0 +1,6 @@
+import { StreamCollector } from "@smithy/types";
+/**
+ * @internal
+ * Converts a stream to a byte array.
+ */
+export declare const streamCollector: StreamCollector;
diff --git a/node_modules/@smithy/node-http-handler/dist-types/stream-collector/readable.mock.d.ts b/node_modules/@smithy/node-http-handler/dist-types/stream-collector/readable.mock.d.ts
new file mode 100644
index 00000000..2543a286
--- /dev/null
+++ b/node_modules/@smithy/node-http-handler/dist-types/stream-collector/readable.mock.d.ts
@@ -0,0 +1,15 @@
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+import { Readable, ReadableOptions } from "stream";
+export interface ReadFromBuffersOptions extends ReadableOptions {
+    buffers: Buffer[];
+    errorAfter?: number;
+}
+export declare class ReadFromBuffers extends Readable {
+    private buffersToRead;
+    private numBuffersRead;
+    private errorAfter;
+    constructor(options: ReadFromBuffersOptions);
+    _read(size: number): boolean | undefined;
+}
diff --git a/node_modules/@smithy/node-http-handler/dist-types/timing.d.ts b/node_modules/@smithy/node-http-handler/dist-types/timing.d.ts
new file mode 100644
index 00000000..de5b695c
--- /dev/null
+++ b/node_modules/@smithy/node-http-handler/dist-types/timing.d.ts
@@ -0,0 +1,8 @@
+/**
+ * @internal
+ * For test spies.
+ */
+export declare const timing: {
+    setTimeout: (cb: (...ignored: any[]) => void | unknown, ms?: number) => number;
+    clearTimeout: (timeoutId: string | number | undefined | unknown) => void;
+};
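
streamCollector, re-exported from the package index, drains either a Node Readable or a web ReadableStream into a single Uint8Array (see the dist-es implementation earlier in this diff). A small usage sketch with an illustrative input:

    // Sketch: collect a stream into one Uint8Array, then decode it.
    const { streamCollector } = require("@smithy/node-http-handler");
    const { Readable } = require("stream");

    const body = Readable.from([Buffer.from("hello "), Buffer.from("world")]);
    streamCollector(body).then((bytes) => {
      console.log(Buffer.from(bytes).toString("utf8")); // "hello world"
    });
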
diff --git a/node_modules/@smithy/node-http-handler/dist-types/ts3.4/constants.d.ts b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/constants.d.ts new file mode 100644 index 00000000..b02b0b6c --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,5 @@ +/** + * Node.js system error codes that indicate timeout. + * @deprecated use NODEJS_TIMEOUT_ERROR_CODES from @smithy/service-error-classification/constants + */ +export declare const NODEJS_TIMEOUT_ERROR_CODES: string[]; diff --git a/node_modules/@smithy/node-http-handler/dist-types/ts3.4/get-transformed-headers.d.ts b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/get-transformed-headers.d.ts new file mode 100644 index 00000000..c6f5a8b5 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/get-transformed-headers.d.ts @@ -0,0 +1,4 @@ +import { HeaderBag } from "@smithy/types"; +import { IncomingHttpHeaders } from "http2"; +declare const getTransformedHeaders: (headers: IncomingHttpHeaders) => HeaderBag; +export { getTransformedHeaders }; diff --git a/node_modules/@smithy/node-http-handler/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..055c48c2 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./node-http-handler"; +export * from "./node-http2-handler"; +export * from "./stream-collector"; diff --git a/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http-handler.d.ts b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http-handler.d.ts new file mode 100644 index 00000000..eb1da7be --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http-handler.d.ts @@ -0,0 +1,46 @@ +/// <reference types="node" /> +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { Logger, NodeHttpHandlerOptions } from "@smithy/types"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +import { Agent as hAgent } from "http"; +import { Agent as hsAgent } from "https"; +export { NodeHttpHandlerOptions }; +/** + * @public + * A default of 0 means no timeout. + */ +export declare const DEFAULT_REQUEST_TIMEOUT = 0; +/** + * @public + * A request handler that uses the Node.js http and https modules. + */ +export declare class NodeHttpHandler implements HttpHandler<NodeHttpHandlerOptions> { + private config?; + private configProvider; + private socketWarningTimestamp; + readonly metadata: { + handlerProtocol: string; + }; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler<any> | NodeHttpHandlerOptions | Provider<NodeHttpHandlerOptions | void>): NodeHttpHandler | HttpHandler<any>; + /** + * @internal + * + * @param agent - http(s) agent in use by the NodeHttpHandler instance. + * @param socketWarningTimestamp - last socket usage check timestamp. + * @param logger - channel for the warning. + * @returns timestamp of last emitted warning. + */ + static checkSocketUsage(agent: hAgent | hsAgent, socketWarningTimestamp: number, logger?: Logger): number; + constructor(options?: NodeHttpHandlerOptions | Provider<NodeHttpHandlerOptions | void>); + private resolveDefaultConfig; + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof NodeHttpHandlerOptions, value: NodeHttpHandlerOptions[typeof key]): void; + httpHandlerConfigs(): NodeHttpHandlerOptions; +}
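The declarations above give enough to sketch how the HTTP/1.1 handler is typically configured; the option names come from NodeHttpHandlerOptions in @smithy/types, and the timeout values and agent settings here are illustrative:

import { NodeHttpHandler } from "@smithy/node-http-handler";
import { Agent } from "https";

const handler = new NodeHttpHandler({
  connectionTimeout: 3_000, // ms to wait for the socket to connect
  requestTimeout: 10_000,   // ms of request inactivity before rejecting
  httpsAgent: new Agent({ keepAlive: true, maxSockets: 50 }),
});

// Per the JSDoc on create(), an existing handler is passed through unchanged,
// so wrapping code can accept either options or a finished handler.
const same = NodeHttpHandler.create(handler);
console.log(same === handler); // true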
diff --git a/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-manager.d.ts b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-manager.d.ts new file mode 100644 index 00000000..8aa87c1e --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-manager.d.ts @@ -0,0 +1,25 @@ +/// <reference types="node" /> +import { RequestContext } from "@smithy/types"; +import { ConnectConfiguration } from "@smithy/types"; +import { ConnectionManager, ConnectionManagerConfiguration } from "@smithy/types"; +import { ClientHttp2Session } from "http2"; +/** + * @public + */ +export declare class NodeHttp2ConnectionManager implements ConnectionManager<ClientHttp2Session> { + constructor(config: ConnectionManagerConfiguration); + private config; + private readonly sessionCache; + lease(requestContext: RequestContext, connectionConfiguration: ConnectConfiguration): ClientHttp2Session; + /** + * Delete a session from the connection pool. + * @param authority The authority of the session to delete. + * @param session The session to delete. + */ + deleteSession(authority: string, session: ClientHttp2Session): void; + release(requestContext: RequestContext, session: ClientHttp2Session): void; + destroy(): void; + setMaxConcurrentStreams(maxConcurrentStreams: number): void; + setDisableConcurrentStreams(disableConcurrentStreams: boolean): void; + private getUrlString; +} diff --git a/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-pool.d.ts b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-pool.d.ts new file mode 100644 index 00000000..e9116cb3 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-pool.d.ts @@ -0,0 +1,13 @@ +/// <reference types="node" /> +import { ConnectionPool } from "@smithy/types"; +import { ClientHttp2Session } from "http2"; +export declare class NodeHttp2ConnectionPool implements ConnectionPool<ClientHttp2Session> { + private sessions; + constructor(sessions?: ClientHttp2Session[]); + poll(): ClientHttp2Session | void; + offerLast(session: ClientHttp2Session): void; + contains(session: ClientHttp2Session): boolean; + remove(session: ClientHttp2Session): void; + [Symbol.iterator](): IterableIterator<ClientHttp2Session>; + destroy(connection: ClientHttp2Session): void; +} diff --git a/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-handler.d.ts b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-handler.d.ts new file mode 100644 index 00000000..eaa24bdf --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-handler.d.ts @@ -0,0 +1,62 @@ +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +/** + * Represents the http2 options that can be passed to a node http2 client. + * @public + */ +export interface NodeHttp2HandlerOptions { + /** + * The maximum time in milliseconds that a stream may remain idle before it + * is closed.
 + */ + requestTimeout?: number; + /** + * The maximum time in milliseconds that a session or socket may remain idle + * before it is closed. + * https://nodejs.org/docs/latest-v12.x/api/http2.html#http2_http2session_and_sockets + */ + sessionTimeout?: number; + /** + * Disables processing concurrent streams on a ClientHttp2Session instance. When set + * to true, a new session instance is created for each request to a URL. + * **Default:** false. + * https://nodejs.org/api/http2.html#http2_class_clienthttp2session + */ + disableConcurrentStreams?: boolean; + /** + * Maximum number of concurrent Http2Stream instances per ClientHttp2Session. Each session + * may have up to 2^31-1 Http2Stream instances over its lifetime. + * This value must be greater than or equal to 0. + * https://nodejs.org/api/http2.html#class-http2stream + */ + maxConcurrentStreams?: number; +} +/** + * A request handler using the node:http2 package. + * @public + */ +export declare class NodeHttp2Handler implements HttpHandler<NodeHttp2HandlerOptions> { + private config?; + private configProvider; + readonly metadata: { + handlerProtocol: string; + }; + private readonly connectionManager; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler<any> | NodeHttp2HandlerOptions | Provider<NodeHttp2HandlerOptions | void>): HttpHandler<any> | NodeHttp2Handler; + constructor(options?: NodeHttp2HandlerOptions | Provider<NodeHttp2HandlerOptions | void>); + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof NodeHttp2HandlerOptions, value: NodeHttp2HandlerOptions[typeof key]): void; + httpHandlerConfigs(): NodeHttp2HandlerOptions; + /** + * Destroys a session. + * @param session - the session to destroy. + */ + private destroySession; +}
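The four NodeHttp2HandlerOptions documented above map directly onto the constructor. A hedged sketch; every value below is illustrative:

import { NodeHttp2Handler } from "@smithy/node-http-handler";

const h2Handler = new NodeHttp2Handler({
  requestTimeout: 5_000,           // close a stream idle for 5 s
  sessionTimeout: 60_000,          // close a session/socket idle for 60 s
  disableConcurrentStreams: false, // share one session per origin
  maxConcurrentStreams: 100,       // cap Http2Streams per ClientHttp2Session
});

// As with the HTTP/1.1 handler, create() accepts options or a finished handler.
const fromOptions = NodeHttp2Handler.create({ sessionTimeout: 60_000 });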
diff --git a/node_modules/@smithy/node-http-handler/dist-types/ts3.4/readable.mock.d.ts b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/readable.mock.d.ts new file mode 100644 index 00000000..f0492d42 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/readable.mock.d.ts @@ -0,0 +1,13 @@ +/// <reference types="node" /> +import { Readable, ReadableOptions } from "stream"; +export interface ReadFromBuffersOptions extends ReadableOptions { + buffers: Buffer[]; + errorAfter?: number; +} +export declare class ReadFromBuffers extends Readable { + private buffersToRead; + private numBuffersRead; + private errorAfter; + constructor(options: ReadFromBuffersOptions); + _read(): boolean | undefined; +} diff --git a/node_modules/@smithy/node-http-handler/dist-types/ts3.4/server.mock.d.ts b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/server.mock.d.ts new file mode 100644 index 00000000..6a7e350f --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/server.mock.d.ts @@ -0,0 +1,12 @@ +import { HttpResponse } from "@smithy/types"; +import { IncomingMessage, Server as HttpServer, ServerResponse } from "http"; +import { Http2Server } from "http2"; +import { Server as HttpsServer } from "https"; +export declare const createResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createResponseFunctionWithDelay: (httpResp: HttpResponse, delay: number) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createContinueResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createMockHttpsServer: () => HttpsServer; +export declare const createMockHttpServer: () => HttpServer; +export declare const createMockHttp2Server: () => Http2Server; +export declare const createMirrorResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const getResponseBody: (response: HttpResponse) => Promise<string>; diff --git a/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-connection-timeout.d.ts b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-connection-timeout.d.ts new file mode 100644 index 00000000..96cdb665 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-connection-timeout.d.ts @@ -0,0 +1,3 @@ +/// <reference types="node" /> +import { ClientRequest } from "http"; +export declare const setConnectionTimeout: (request: ClientRequest, reject: (err: Error) => void, timeoutInMs?: number) => NodeJS.Timeout | number; diff --git a/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-keep-alive.d.ts b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-keep-alive.d.ts new file mode 100644 index 00000000..3bb6ec07 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-keep-alive.d.ts @@ -0,0 +1,7 @@ +/// <reference types="node" /> +import { ClientRequest } from "http"; +export interface SocketKeepAliveOptions { + keepAlive: boolean; + keepAliveMsecs?: number; +} +export declare const setSocketKeepAlive: (request: ClientRequest, { keepAlive, keepAliveMsecs }: SocketKeepAliveOptions, deferTimeMs?: number) => NodeJS.Timeout | number; diff --git a/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-timeout.d.ts b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-timeout.d.ts new file mode 100644 index 00000000..57f87434 --- /dev/null 
+++ b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-timeout.d.ts @@ -0,0 +1,3 @@ +/// <reference types="node" /> +import { ClientRequest } from "http"; +export declare const setSocketTimeout: (request: ClientRequest, reject: (err: Error) => void, timeoutInMs?: number) => NodeJS.Timeout | number; diff --git a/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/collector.d.ts b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/collector.d.ts new file mode 100644 index 00000000..c329bd4a --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/collector.d.ts @@ -0,0 +1,6 @@ +/// <reference types="node" /> +import { Writable } from "stream"; +export declare class Collector extends Writable { + readonly bufferedBytes: Buffer[]; + _write(chunk: Buffer, encoding: string, callback: (err?: Error) => void): void; +} diff --git a/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/index.d.ts b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/index.d.ts new file mode 100644 index 00000000..1022a17f --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/index.d.ts @@ -0,0 +1,6 @@ +import { StreamCollector } from "@smithy/types"; +/** + * @internal + * Converts a stream to a byte array. + */ +export declare const streamCollector: StreamCollector; diff --git a/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/readable.mock.d.ts b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/readable.mock.d.ts new file mode 100644 index 00000000..e2c0a4c8 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/readable.mock.d.ts @@ -0,0 +1,13 @@ +/// <reference types="node" /> +import { Readable, ReadableOptions } from "stream"; +export interface ReadFromBuffersOptions extends ReadableOptions { + buffers: Buffer[]; + errorAfter?: number; +} +export declare class ReadFromBuffers extends Readable { + private buffersToRead; + private numBuffersRead; + private errorAfter; + constructor(options: ReadFromBuffersOptions); + _read(size: number): boolean | undefined; +} diff --git a/node_modules/@smithy/node-http-handler/dist-types/ts3.4/timing.d.ts b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/timing.d.ts new file mode 100644 index 00000000..c88dd2fa --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/timing.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * For test spies. + */ +export declare const timing: { + setTimeout: (cb: (...ignored: any[]) => void | unknown, ms?: number) => number; + clearTimeout: (timeoutId: string | number | undefined | unknown) => void; +}; diff --git a/node_modules/@smithy/node-http-handler/dist-types/ts3.4/write-request-body.d.ts b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/write-request-body.d.ts new file mode 100644 index 00000000..0f13e96b --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-types/ts3.4/write-request-body.d.ts @@ -0,0 +1,12 @@ +/// <reference types="node" /> +import { HttpRequest } from "@smithy/types"; +import { ClientRequest } from "http"; +import { ClientHttp2Stream } from "http2"; +/** + * This resolves when writeBody has been called. + * + * @param httpRequest - opened Node.js request. + * @param request - container with the request body. + * @param maxContinueTimeoutMs - time to wait for the continue event. + */ +export declare function writeRequestBody(httpRequest: ClientRequest | ClientHttp2Stream, request: HttpRequest, maxContinueTimeoutMs?: number): Promise<void>; diff --git a/node_modules/@smithy/node-http-handler/dist-types/write-request-body.d.ts b/node_modules/@smithy/node-http-handler/dist-types/write-request-body.d.ts new file mode 100644 index 00000000..0c49e327 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/dist-types/write-request-body.d.ts @@ -0,0 +1,13 @@ +/// <reference types="node" /> +/// <reference types="node" /> +import { HttpRequest } from "@smithy/types"; +import { ClientRequest } from "http"; +import { ClientHttp2Stream } from "http2"; +/** + * This resolves when writeBody has been called. + * + * @param httpRequest - opened Node.js request. + * @param request - container with the request body. + * @param maxContinueTimeoutMs - time to wait for the continue event. + */ +export declare function writeRequestBody(httpRequest: ClientRequest | ClientHttp2Stream, request: HttpRequest, maxContinueTimeoutMs?: number): Promise<void>; diff --git a/node_modules/@smithy/node-http-handler/package.json b/node_modules/@smithy/node-http-handler/package.json new file mode 100644 index 00000000..2e4e1e33 --- /dev/null +++ b/node_modules/@smithy/node-http-handler/package.json @@ -0,0 +1,67 @@ +{ + "name": "@smithy/node-http-handler", + "version": "4.0.4", + "description": "Provides a way to make requests", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline node-http-handler", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "email": "", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/node-http-handler", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/node-http-handler" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/property-provider/LICENSE b/node_modules/@smithy/property-provider/LICENSE new file 
mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/property-provider/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@smithy/property-provider/README.md b/node_modules/@smithy/property-provider/README.md new file mode 100644 index 00000000..b35fafb7 --- /dev/null +++ b/node_modules/@smithy/property-provider/README.md @@ -0,0 +1,10 @@ +# @smithy/property-provider + +[![NPM version](https://img.shields.io/npm/v/@smithy/property-provider/latest.svg)](https://www.npmjs.com/package/@smithy/property-provider) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/property-provider.svg)](https://www.npmjs.com/package/@smithy/property-provider) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@smithy/property-provider/dist-cjs/CredentialsProviderError.js b/node_modules/@smithy/property-provider/dist-cjs/CredentialsProviderError.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-cjs/CredentialsProviderError.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/property-provider/dist-cjs/ProviderError.js b/node_modules/@smithy/property-provider/dist-cjs/ProviderError.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-cjs/ProviderError.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/property-provider/dist-cjs/TokenProviderError.js b/node_modules/@smithy/property-provider/dist-cjs/TokenProviderError.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-cjs/TokenProviderError.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/property-provider/dist-cjs/chain.js b/node_modules/@smithy/property-provider/dist-cjs/chain.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-cjs/chain.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/property-provider/dist-cjs/fromStatic.js b/node_modules/@smithy/property-provider/dist-cjs/fromStatic.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-cjs/fromStatic.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/property-provider/dist-cjs/index.js b/node_modules/@smithy/property-provider/dist-cjs/index.js new file mode 100644 index 00000000..b0fa627d --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-cjs/index.js @@ -0,0 +1,170 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], 
enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + CredentialsProviderError: () => CredentialsProviderError, + ProviderError: () => ProviderError, + TokenProviderError: () => TokenProviderError, + chain: () => chain, + fromStatic: () => fromStatic, + memoize: () => memoize +}); +module.exports = __toCommonJS(src_exports); + +// src/ProviderError.ts +var ProviderError = class _ProviderError extends Error { + constructor(message, options = true) { + let logger; + let tryNextLink = true; + if (typeof options === "boolean") { + logger = void 0; + tryNextLink = options; + } else if (options != null && typeof options === "object") { + logger = options.logger; + tryNextLink = options.tryNextLink ?? true; + } + super(message); + this.name = "ProviderError"; + this.tryNextLink = tryNextLink; + Object.setPrototypeOf(this, _ProviderError.prototype); + logger?.debug?.(`@smithy/property-provider ${tryNextLink ? "->" : "(!)"} ${message}`); + } + static { + __name(this, "ProviderError"); + } + /** + * @deprecated use new operator. + */ + static from(error, options = true) { + return Object.assign(new this(error.message, options), error); + } +}; + +// src/CredentialsProviderError.ts +var CredentialsProviderError = class _CredentialsProviderError extends ProviderError { + /** + * @override + */ + constructor(message, options = true) { + super(message, options); + this.name = "CredentialsProviderError"; + Object.setPrototypeOf(this, _CredentialsProviderError.prototype); + } + static { + __name(this, "CredentialsProviderError"); + } +}; + +// src/TokenProviderError.ts +var TokenProviderError = class _TokenProviderError extends ProviderError { + /** + * @override + */ + constructor(message, options = true) { + super(message, options); + this.name = "TokenProviderError"; + Object.setPrototypeOf(this, _TokenProviderError.prototype); + } + static { + __name(this, "TokenProviderError"); + } +}; + +// src/chain.ts +var chain = /* @__PURE__ */ __name((...providers) => async () => { + if (providers.length === 0) { + throw new ProviderError("No providers in chain"); + } + let lastProviderError; + for (const provider of providers) { + try { + const credentials = await provider(); + return credentials; + } catch (err) { + lastProviderError = err; + if (err?.tryNextLink) { + continue; + } + throw err; + } + } + throw lastProviderError; +}, "chain"); + +// src/fromStatic.ts +var fromStatic = /* @__PURE__ */ __name((staticValue) => () => Promise.resolve(staticValue), "fromStatic"); + +// src/memoize.ts +var memoize = /* @__PURE__ */ __name((provider, isExpired, requiresRefresh) => { + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = /* @__PURE__ */ __name(async () => { + if (!pending) { + pending = provider(); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } finally { + pending = void 0; + } + return resolved; + }, "coalesceProvider"); + if (isExpired === void 0) { + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = 
await coalesceProvider(); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(); + } + if (isConstant) { + return resolved; + } + if (requiresRefresh && !requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(); + return resolved; + } + return resolved; + }; +}, "memoize"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + CredentialsProviderError, + ProviderError, + TokenProviderError, + chain, + fromStatic, + memoize +}); + diff --git a/node_modules/@smithy/property-provider/dist-cjs/memoize.js b/node_modules/@smithy/property-provider/dist-cjs/memoize.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-cjs/memoize.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/property-provider/dist-es/CredentialsProviderError.js b/node_modules/@smithy/property-provider/dist-es/CredentialsProviderError.js new file mode 100644 index 00000000..cec1f9e0 --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-es/CredentialsProviderError.js @@ -0,0 +1,8 @@ +import { ProviderError } from "./ProviderError"; +export class CredentialsProviderError extends ProviderError { + constructor(message, options = true) { + super(message, options); + this.name = "CredentialsProviderError"; + Object.setPrototypeOf(this, CredentialsProviderError.prototype); + } +} diff --git a/node_modules/@smithy/property-provider/dist-es/ProviderError.js b/node_modules/@smithy/property-provider/dist-es/ProviderError.js new file mode 100644 index 00000000..e0db2b81 --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-es/ProviderError.js @@ -0,0 +1,22 @@ +export class ProviderError extends Error { + constructor(message, options = true) { + let logger; + let tryNextLink = true; + if (typeof options === "boolean") { + logger = undefined; + tryNextLink = options; + } + else if (options != null && typeof options === "object") { + logger = options.logger; + tryNextLink = options.tryNextLink ?? true; + } + super(message); + this.name = "ProviderError"; + this.tryNextLink = tryNextLink; + Object.setPrototypeOf(this, ProviderError.prototype); + logger?.debug?.(`@smithy/property-provider ${tryNextLink ? 
"->" : "(!)"} ${message}`); + } + static from(error, options = true) { + return Object.assign(new this(error.message, options), error); + } +} diff --git a/node_modules/@smithy/property-provider/dist-es/TokenProviderError.js b/node_modules/@smithy/property-provider/dist-es/TokenProviderError.js new file mode 100644 index 00000000..f0e75b1c --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-es/TokenProviderError.js @@ -0,0 +1,8 @@ +import { ProviderError } from "./ProviderError"; +export class TokenProviderError extends ProviderError { + constructor(message, options = true) { + super(message, options); + this.name = "TokenProviderError"; + Object.setPrototypeOf(this, TokenProviderError.prototype); + } +} diff --git a/node_modules/@smithy/property-provider/dist-es/chain.js b/node_modules/@smithy/property-provider/dist-es/chain.js new file mode 100644 index 00000000..c389f7fe --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-es/chain.js @@ -0,0 +1,21 @@ +import { ProviderError } from "./ProviderError"; +export const chain = (...providers) => async () => { + if (providers.length === 0) { + throw new ProviderError("No providers in chain"); + } + let lastProviderError; + for (const provider of providers) { + try { + const credentials = await provider(); + return credentials; + } + catch (err) { + lastProviderError = err; + if (err?.tryNextLink) { + continue; + } + throw err; + } + } + throw lastProviderError; +}; diff --git a/node_modules/@smithy/property-provider/dist-es/fromStatic.js b/node_modules/@smithy/property-provider/dist-es/fromStatic.js new file mode 100644 index 00000000..67da7a75 --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-es/fromStatic.js @@ -0,0 +1 @@ +export const fromStatic = (staticValue) => () => Promise.resolve(staticValue); diff --git a/node_modules/@smithy/property-provider/dist-es/index.js b/node_modules/@smithy/property-provider/dist-es/index.js new file mode 100644 index 00000000..15d14e5b --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-es/index.js @@ -0,0 +1,6 @@ +export * from "./CredentialsProviderError"; +export * from "./ProviderError"; +export * from "./TokenProviderError"; +export * from "./chain"; +export * from "./fromStatic"; +export * from "./memoize"; diff --git a/node_modules/@smithy/property-provider/dist-es/memoize.js b/node_modules/@smithy/property-provider/dist-es/memoize.js new file mode 100644 index 00000000..e04839ab --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-es/memoize.js @@ -0,0 +1,45 @@ +export const memoize = (provider, isExpired, requiresRefresh) => { + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = async () => { + if (!pending) { + pending = provider(); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } + finally { + pending = undefined; + } + return resolved; + }; + if (isExpired === undefined) { + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(); + } + if (isConstant) { + return resolved; + } + if (requiresRefresh && !requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(); + return resolved; + } + return resolved; + }; +}; diff --git 
a/node_modules/@smithy/property-provider/dist-types/CredentialsProviderError.d.ts b/node_modules/@smithy/property-provider/dist-types/CredentialsProviderError.d.ts new file mode 100644 index 00000000..7955dc16 --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-types/CredentialsProviderError.d.ts @@ -0,0 +1,30 @@ +import { ProviderError, ProviderErrorOptionsType } from "./ProviderError"; +/** + * @public + * + * An error representing a failure of an individual credential provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class CredentialsProviderError extends ProviderError { + name: string; + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * @override + * This signature is preferred for logging capability. + */ + constructor(message: string, options: ProviderErrorOptionsType); +} diff --git a/node_modules/@smithy/property-provider/dist-types/ProviderError.d.ts b/node_modules/@smithy/property-provider/dist-types/ProviderError.d.ts new file mode 100644 index 00000000..b87b014f --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-types/ProviderError.d.ts @@ -0,0 +1,39 @@ +import { Logger } from "@smithy/types"; +/** + * @public + */ +export type ProviderErrorOptionsType = { + tryNextLink?: boolean | undefined; + logger?: Logger; +}; +/** + * @public + * + * An error representing a failure of an individual provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class ProviderError extends Error { + name: string; + readonly tryNextLink: boolean; + /** + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * This signature is preferred for logging capability. + */ + constructor(message: string, options: ProviderErrorOptionsType); + /** + * @deprecated use new operator. + */ + static from(error: Error, options?: boolean | ProviderErrorOptionsType): ProviderError; +} diff --git a/node_modules/@smithy/property-provider/dist-types/TokenProviderError.d.ts b/node_modules/@smithy/property-provider/dist-types/TokenProviderError.d.ts new file mode 100644 index 00000000..a2f9dd6d --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-types/TokenProviderError.d.ts @@ -0,0 +1,30 @@ +import { ProviderError, ProviderErrorOptionsType } from "./ProviderError"; +/** + * @public + * + * An error representing a failure of an individual token provider. + * + * This error class has special meaning to the {@link chain} method. 
If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class TokenProviderError extends ProviderError { + name: string; + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * @override + * This signature is preferred for logging capability. + */ + constructor(message: string, options: ProviderErrorOptionsType); +} diff --git a/node_modules/@smithy/property-provider/dist-types/chain.d.ts b/node_modules/@smithy/property-provider/dist-types/chain.d.ts new file mode 100644 index 00000000..168df5c0 --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-types/chain.d.ts @@ -0,0 +1,13 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * Compose a single credential provider function from multiple credential + * providers. The first provider in the argument list will always be invoked; + * subsequent providers in the list will be invoked in the order in which they + * were received if the preceding provider did not successfully resolve. + * + * If no providers were received or no provider resolves successfully, the + * returned promise will be rejected. + */ +export declare const chain: <T>(...providers: Provider<T>[]) => Provider<T>; diff --git a/node_modules/@smithy/property-provider/dist-types/fromStatic.d.ts b/node_modules/@smithy/property-provider/dist-types/fromStatic.d.ts new file mode 100644 index 00000000..f58bece9 --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-types/fromStatic.d.ts @@ -0,0 +1,5 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const fromStatic: <T>(staticValue: T) => Provider<T>; diff --git a/node_modules/@smithy/property-provider/dist-types/index.d.ts b/node_modules/@smithy/property-provider/dist-types/index.d.ts new file mode 100644 index 00000000..6326994c --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-types/index.d.ts @@ -0,0 +1,24 @@ +/** + * @internal + */ +export * from "./CredentialsProviderError"; +/** + * @internal + */ +export * from "./ProviderError"; +/** + * @internal + */ +export * from "./TokenProviderError"; +/** + * @internal + */ +export * from "./chain"; +/** + * @internal + */ +export * from "./fromStatic"; +/** + * @internal + */ +export * from "./memoize"; diff --git a/node_modules/@smithy/property-provider/dist-types/memoize.d.ts b/node_modules/@smithy/property-provider/dist-types/memoize.d.ts new file mode 100644 index 00000000..ce197c0f --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-types/memoize.d.ts @@ -0,0 +1,40 @@ +import { MemoizedProvider, Provider } from "@smithy/types"; +interface MemoizeOverload { + /** + * + * Decorates a provider function with static memoization. + * + * To create a statically memoized provider, supply a provider as the only + * argument to this function. The provider will be invoked once, and all + * invocations of the provider returned by `memoize` will return the same + * promise object. + * + * @param provider The provider whose result should be cached indefinitely. + */ + <T>(provider: Provider<T>): MemoizedProvider<T>; + /** + * Decorates a provider function with refreshing memoization. + * + * @param provider The provider whose result should be cached. + * @param isExpired A function that will evaluate the resolved value and + * determine if it is expired. For example, when + * memoizing AWS credential providers, this function + * should return `true` when the credential's + * expiration is in the past (or very near future) and + * `false` otherwise. + * @param requiresRefresh A function that will evaluate the resolved value and + * determine if it represents a static value or one that + * will eventually need to be refreshed. For example, + * AWS credentials that have no defined expiration will + * never need to be refreshed, so this function would + * return `true` if the credentials resolved by the + * underlying provider had an expiration and `false` + * otherwise. + */ + <T>(provider: Provider<T>, isExpired: (resolved: T) => boolean, requiresRefresh?: (resolved: T) => boolean): MemoizedProvider<T>; +} +/** + * @internal + */ +export declare const memoize: MemoizeOverload; +export {};
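The two overloads read most clearly side by side. A sketch under stated assumptions: fetchCredentials and its Creds shape are invented for illustration, and the expiry thresholds are arbitrary. The forceRefresh option comes from the MemoizedProvider type, which the compiled memoize in dist-cjs/index.js above checks via options?.forceRefresh.

import { memoize } from "@smithy/property-provider";

interface Creds { token: string; expiration?: Date; } // hypothetical shape

const fetchCredentials = async (): Promise<Creds> => ({
  token: "abc",
  expiration: new Date(Date.now() + 3_600_000),
});

// Static memoization: the provider runs once; all callers share the result.
const once = memoize(fetchCredentials);

// Refreshing memoization: re-resolve when expiry is near; values without an
// expiration never need refreshing and become constant.
const refreshing = memoize(
  fetchCredentials,
  (creds) => creds.expiration !== undefined &&
    creds.expiration.getTime() - Date.now() < 300_000, // isExpired
  (creds) => creds.expiration !== undefined             // requiresRefresh
);

async function credsDemo(): Promise<void> {
  await once();                             // resolves and caches
  await refreshing({ forceRefresh: true }); // bypasses the cache once
}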
diff --git a/node_modules/@smithy/property-provider/dist-types/ts3.4/CredentialsProviderError.d.ts b/node_modules/@smithy/property-provider/dist-types/ts3.4/CredentialsProviderError.d.ts new file mode 100644 index 00000000..11e4aea4 --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-types/ts3.4/CredentialsProviderError.d.ts @@ -0,0 +1,30 @@ +import { ProviderError, ProviderErrorOptionsType } from "./ProviderError"; +/** + * @public + * + * An error representing a failure of an individual credential provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class CredentialsProviderError extends ProviderError { + name: string; + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * @override + * This signature is preferred for logging capability. + */ + constructor(message: string, options: ProviderErrorOptionsType); +}
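The preferred constructor signature takes ProviderErrorOptionsType, so a provider can mark a failure as fatal and route the package's debug line to a logger in one go. A minimal sketch with an invented provider name:

import { CredentialsProviderError } from "@smithy/property-provider";

// Hypothetical provider for illustration. tryNextLink: false tells chain()
// to stop here rather than fall through; logger receives the debug trace.
const fromCiProfile = async (): Promise<string> => {
  throw new CredentialsProviderError("profile 'ci' has no key material", {
    tryNextLink: false,
    logger: console,
  });
};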
diff --git a/node_modules/@smithy/property-provider/dist-types/ts3.4/ProviderError.d.ts b/node_modules/@smithy/property-provider/dist-types/ts3.4/ProviderError.d.ts new file mode 100644 index 00000000..daf499ce --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-types/ts3.4/ProviderError.d.ts @@ -0,0 +1,39 @@ +import { Logger } from "@smithy/types"; +/** + * @public + */ +export type ProviderErrorOptionsType = { + tryNextLink?: boolean | undefined; + logger?: Logger; +}; +/** + * @public + * + * An error representing a failure of an individual provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class ProviderError extends Error { + name: string; + readonly tryNextLink: boolean; + /** + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * This signature is preferred for logging capability. + */ + constructor(message: string, options: ProviderErrorOptionsType); + /** + * @deprecated use new operator. + */ + static from(error: Error, options?: boolean | ProviderErrorOptionsType): ProviderError; +} diff --git a/node_modules/@smithy/property-provider/dist-types/ts3.4/TokenProviderError.d.ts b/node_modules/@smithy/property-provider/dist-types/ts3.4/TokenProviderError.d.ts new file mode 100644 index 00000000..6f67fd5b --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-types/ts3.4/TokenProviderError.d.ts @@ -0,0 +1,30 @@ +import { ProviderError, ProviderErrorOptionsType } from "./ProviderError"; +/** + * @public + * + * An error representing a failure of an individual token provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class TokenProviderError extends ProviderError { + name: string; + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * @override + * This signature is preferred for logging capability. + */ + constructor(message: string, options: ProviderErrorOptionsType); +} diff --git a/node_modules/@smithy/property-provider/dist-types/ts3.4/chain.d.ts b/node_modules/@smithy/property-provider/dist-types/ts3.4/chain.d.ts new file mode 100644 index 00000000..44390b8a --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-types/ts3.4/chain.d.ts @@ -0,0 +1,13 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * Compose a single credential provider function from multiple credential + * providers. The first provider in the argument list will always be invoked; + * subsequent providers in the list will be invoked in the order in which they + * were received if the preceding provider did not successfully resolve. + * + * If no providers were received or no provider resolves successfully, the + * returned promise will be rejected. + */ +export declare const chain: <T>(...providers: Provider<T>[]) => Provider<T>;
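Putting the pieces together: chain() walks its providers in order, and the tryNextLink flag on a thrown ProviderError decides whether the walk continues. A sketch with hypothetical providers; fromStatic supplies the fallback:

import { CredentialsProviderError, chain, fromStatic } from "@smithy/property-provider";

// Recoverable failure: tryNextLink defaults to true, so chain() moves on.
const fromEnv = async (): Promise<string> => {
  throw new CredentialsProviderError("TOKEN is not set in the environment");
};

const provider = chain(fromEnv, fromStatic("fallback-token"));

async function chainDemo(): Promise<void> {
  console.log(await provider()); // "fallback-token"
}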
+ */ +export declare const chain: (...providers: Provider[]) => Provider; diff --git a/node_modules/@smithy/property-provider/dist-types/ts3.4/fromStatic.d.ts b/node_modules/@smithy/property-provider/dist-types/ts3.4/fromStatic.d.ts new file mode 100644 index 00000000..0df63094 --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-types/ts3.4/fromStatic.d.ts @@ -0,0 +1,5 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const fromStatic: (staticValue: T) => Provider; diff --git a/node_modules/@smithy/property-provider/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/property-provider/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..e28099db --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-types/ts3.4/index.d.ts @@ -0,0 +1,24 @@ +/** + * @internal + */ +export * from "./CredentialsProviderError"; +/** + * @internal + */ +export * from "./ProviderError"; +/** + * @internal + */ +export * from "./TokenProviderError"; +/** + * @internal + */ +export * from "./chain"; +/** + * @internal + */ +export * from "./fromStatic"; +/** + * @internal + */ +export * from "./memoize"; diff --git a/node_modules/@smithy/property-provider/dist-types/ts3.4/memoize.d.ts b/node_modules/@smithy/property-provider/dist-types/ts3.4/memoize.d.ts new file mode 100644 index 00000000..29ce53d5 --- /dev/null +++ b/node_modules/@smithy/property-provider/dist-types/ts3.4/memoize.d.ts @@ -0,0 +1,40 @@ +import { MemoizedProvider, Provider } from "@smithy/types"; +interface MemoizeOverload { + /** + * + * Decorates a provider function with either static memoization. + * + * To create a statically memoized provider, supply a provider as the only + * argument to this function. The provider will be invoked once, and all + * invocations of the provider returned by `memoize` will return the same + * promise object. + * + * @param provider The provider whose result should be cached indefinitely. + */ + (provider: Provider): MemoizedProvider; + /** + * Decorates a provider function with refreshing memoization. + * + * @param provider The provider whose result should be cached. + * @param isExpired A function that will evaluate the resolved value and + * determine if it is expired. For example, when + * memoizing AWS credential providers, this function + * should return `true` when the credential's + * expiration is in the past (or very near future) and + * `false` otherwise. + * @param requiresRefresh A function that will evaluate the resolved value and + * determine if it represents static value or one that + * will eventually need to be refreshed. For example, + * AWS credentials that have no defined expiration will + * never need to be refreshed, so this function would + * return `true` if the credentials resolved by the + * underlying provider had an expiration and `false` + * otherwise. 
+ */ + <T>(provider: Provider<T>, isExpired: (resolved: T) => boolean, requiresRefresh?: (resolved: T) => boolean): MemoizedProvider<T>; +} +/** + * @internal + */ +export declare const memoize: MemoizeOverload; +export {}; diff --git a/node_modules/@smithy/property-provider/package.json b/node_modules/@smithy/property-provider/package.json new file mode 100644 index 00000000..b2e7fc6e --- /dev/null +++ b/node_modules/@smithy/property-provider/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/property-provider", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline property-provider", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/property-provider", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/property-provider" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/protocol-http/LICENSE b/node_modules/@smithy/protocol-http/LICENSE new file mode 100644 index 00000000..e907b586 --- /dev/null +++ b/node_modules/@smithy/protocol-http/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity.
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/node_modules/@smithy/protocol-http/README.md b/node_modules/@smithy/protocol-http/README.md new file mode 100644 index 00000000..a547ab08 --- /dev/null +++ b/node_modules/@smithy/protocol-http/README.md @@ -0,0 +1,4 @@ +# @smithy/protocol-http + +[![NPM version](https://img.shields.io/npm/v/@smithy/protocol-http/latest.svg)](https://www.npmjs.com/package/@smithy/protocol-http) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/protocol-http.svg)](https://www.npmjs.com/package/@smithy/protocol-http) diff --git a/node_modules/@smithy/protocol-http/dist-cjs/Field.js b/node_modules/@smithy/protocol-http/dist-cjs/Field.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-cjs/Field.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/protocol-http/dist-cjs/Fields.js b/node_modules/@smithy/protocol-http/dist-cjs/Fields.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-cjs/Fields.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/protocol-http/dist-cjs/extensions/httpExtensionConfiguration.js b/node_modules/@smithy/protocol-http/dist-cjs/extensions/httpExtensionConfiguration.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-cjs/extensions/httpExtensionConfiguration.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/protocol-http/dist-cjs/extensions/index.js b/node_modules/@smithy/protocol-http/dist-cjs/extensions/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-cjs/extensions/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/protocol-http/dist-cjs/httpHandler.js b/node_modules/@smithy/protocol-http/dist-cjs/httpHandler.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-cjs/httpHandler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/protocol-http/dist-cjs/httpRequest.js b/node_modules/@smithy/protocol-http/dist-cjs/httpRequest.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-cjs/httpRequest.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/protocol-http/dist-cjs/httpResponse.js b/node_modules/@smithy/protocol-http/dist-cjs/httpResponse.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-cjs/httpResponse.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/protocol-http/dist-cjs/index.js b/node_modules/@smithy/protocol-http/dist-cjs/index.js new file mode 100644 index 00000000..df371090 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-cjs/index.js @@ -0,0 +1,262 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, 
all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Field: () => Field, + Fields: () => Fields, + HttpRequest: () => HttpRequest, + HttpResponse: () => HttpResponse, + IHttpRequest: () => import_types.HttpRequest, + getHttpHandlerExtensionConfiguration: () => getHttpHandlerExtensionConfiguration, + isValidHostname: () => isValidHostname, + resolveHttpHandlerRuntimeConfig: () => resolveHttpHandlerRuntimeConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/extensions/httpExtensionConfiguration.ts +var getHttpHandlerExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return { + setHttpHandler(handler) { + runtimeConfig.httpHandler = handler; + }, + httpHandler() { + return runtimeConfig.httpHandler; + }, + updateHttpClientConfig(key, value) { + runtimeConfig.httpHandler?.updateHttpClientConfig(key, value); + }, + httpHandlerConfigs() { + return runtimeConfig.httpHandler.httpHandlerConfigs(); + } + }; +}, "getHttpHandlerExtensionConfiguration"); +var resolveHttpHandlerRuntimeConfig = /* @__PURE__ */ __name((httpHandlerExtensionConfiguration) => { + return { + httpHandler: httpHandlerExtensionConfiguration.httpHandler() + }; +}, "resolveHttpHandlerRuntimeConfig"); + +// src/Field.ts +var import_types = require("@smithy/types"); +var Field = class { + static { + __name(this, "Field"); + } + constructor({ name, kind = import_types.FieldPosition.HEADER, values = [] }) { + this.name = name; + this.kind = kind; + this.values = values; + } + /** + * Appends a value to the field. + * + * @param value The value to append. + */ + add(value) { + this.values.push(value); + } + /** + * Overwrite existing field values. + * + * @param values The new field values. + */ + set(values) { + this.values = values; + } + /** + * Remove all matching entries from list. + * + * @param value Value to remove. + */ + remove(value) { + this.values = this.values.filter((v) => v !== value); + } + /** + * Get comma-delimited string. + * + * @returns String representation of {@link Field}. + */ + toString() { + return this.values.map((v) => v.includes(",") || v.includes(" ") ? `"${v}"` : v).join(", "); + } + /** + * Get string values as a list + * + * @returns Values in {@link Field} as a list. + */ + get() { + return this.values; + } +}; + +// src/Fields.ts +var Fields = class { + constructor({ fields = [], encoding = "utf-8" }) { + this.entries = {}; + fields.forEach(this.setField.bind(this)); + this.encoding = encoding; + } + static { + __name(this, "Fields"); + } + /** + * Set entry for a {@link Field} name. The `name` + * attribute will be used to key the collection. + * + * @param field The {@link Field} to set. + */ + setField(field) { + this.entries[field.name.toLowerCase()] = field; + } + /** + * Retrieve {@link Field} entry by name. + * + * @param name The name of the {@link Field} entry + * to retrieve + * @returns The {@link Field} if it exists. 
+ */ + getField(name) { + return this.entries[name.toLowerCase()]; + } + /** + * Delete entry from collection. + * + * @param name Name of the entry to delete. + */ + removeField(name) { + delete this.entries[name.toLowerCase()]; + } + /** + * Helper function for retrieving specific types of fields. + * Used to grab all headers or all trailers. + * + * @param kind {@link FieldPosition} of entries to retrieve. + * @returns The {@link Field} entries with the specified + * {@link FieldPosition}. + */ + getByType(kind) { + return Object.values(this.entries).filter((field) => field.kind === kind); + } +}; + +// src/httpRequest.ts + +var HttpRequest = class _HttpRequest { + static { + __name(this, "HttpRequest"); + } + constructor(options) { + this.method = options.method || "GET"; + this.hostname = options.hostname || "localhost"; + this.port = options.port; + this.query = options.query || {}; + this.headers = options.headers || {}; + this.body = options.body; + this.protocol = options.protocol ? options.protocol.slice(-1) !== ":" ? `${options.protocol}:` : options.protocol : "https:"; + this.path = options.path ? options.path.charAt(0) !== "/" ? `/${options.path}` : options.path : "/"; + this.username = options.username; + this.password = options.password; + this.fragment = options.fragment; + } + /** + * Note: this does not deep-clone the body. + */ + static clone(request) { + const cloned = new _HttpRequest({ + ...request, + headers: { ...request.headers } + }); + if (cloned.query) { + cloned.query = cloneQuery(cloned.query); + } + return cloned; + } + /** + * This method only actually asserts that request is the interface {@link IHttpRequest}, + * and not necessarily this concrete class. Left in place for API stability. + * + * Do not call instance methods on the input of this function, and + * do not assume it has the HttpRequest prototype. + */ + static isInstance(request) { + if (!request) { + return false; + } + const req = request; + return "method" in req && "protocol" in req && "hostname" in req && "path" in req && typeof req["query"] === "object" && typeof req["headers"] === "object"; + } + /** + * @deprecated use static HttpRequest.clone(request) instead. It's not safe to call + * this method because {@link HttpRequest.isInstance} incorrectly + * asserts that IHttpRequest (interface) objects are of type HttpRequest (class). + */ + clone() { + return _HttpRequest.clone(this); + } +}; +function cloneQuery(query) { + return Object.keys(query).reduce((carry, paramName) => { + const param = query[paramName]; + return { + ...carry, + [paramName]: Array.isArray(param) ? 
[...param] : param + }; + }, {}); +} +__name(cloneQuery, "cloneQuery"); + +// src/httpResponse.ts +var HttpResponse = class { + static { + __name(this, "HttpResponse"); + } + constructor(options) { + this.statusCode = options.statusCode; + this.reason = options.reason; + this.headers = options.headers || {}; + this.body = options.body; + } + static isInstance(response) { + if (!response) + return false; + const resp = response; + return typeof resp.statusCode === "number" && typeof resp.headers === "object"; + } +}; + +// src/isValidHostname.ts +function isValidHostname(hostname) { + const hostPattern = /^[a-z0-9][a-z0-9\.\-]*[a-z0-9]$/; + return hostPattern.test(hostname); +} +__name(isValidHostname, "isValidHostname"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getHttpHandlerExtensionConfiguration, + resolveHttpHandlerRuntimeConfig, + Field, + Fields, + HttpRequest, + HttpResponse, + isValidHostname +}); + diff --git a/node_modules/@smithy/protocol-http/dist-cjs/isValidHostname.js b/node_modules/@smithy/protocol-http/dist-cjs/isValidHostname.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-cjs/isValidHostname.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/protocol-http/dist-cjs/types.js b/node_modules/@smithy/protocol-http/dist-cjs/types.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/protocol-http/dist-es/Field.js b/node_modules/@smithy/protocol-http/dist-es/Field.js new file mode 100644 index 00000000..918c883f --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-es/Field.js @@ -0,0 +1,23 @@ +import { FieldPosition } from "@smithy/types"; +export class Field { + constructor({ name, kind = FieldPosition.HEADER, values = [] }) { + this.name = name; + this.kind = kind; + this.values = values; + } + add(value) { + this.values.push(value); + } + set(values) { + this.values = values; + } + remove(value) { + this.values = this.values.filter((v) => v !== value); + } + toString() { + return this.values.map((v) => (v.includes(",") || v.includes(" ") ? 
`"${v}"` : v)).join(", "); + } + get() { + return this.values; + } +} diff --git a/node_modules/@smithy/protocol-http/dist-es/Fields.js b/node_modules/@smithy/protocol-http/dist-es/Fields.js new file mode 100644 index 00000000..efa591fa --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-es/Fields.js @@ -0,0 +1,19 @@ +export class Fields { + constructor({ fields = [], encoding = "utf-8" }) { + this.entries = {}; + fields.forEach(this.setField.bind(this)); + this.encoding = encoding; + } + setField(field) { + this.entries[field.name.toLowerCase()] = field; + } + getField(name) { + return this.entries[name.toLowerCase()]; + } + removeField(name) { + delete this.entries[name.toLowerCase()]; + } + getByType(kind) { + return Object.values(this.entries).filter((field) => field.kind === kind); + } +} diff --git a/node_modules/@smithy/protocol-http/dist-es/extensions/httpExtensionConfiguration.js b/node_modules/@smithy/protocol-http/dist-es/extensions/httpExtensionConfiguration.js new file mode 100644 index 00000000..1a5aa0c9 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-es/extensions/httpExtensionConfiguration.js @@ -0,0 +1,21 @@ +export const getHttpHandlerExtensionConfiguration = (runtimeConfig) => { + return { + setHttpHandler(handler) { + runtimeConfig.httpHandler = handler; + }, + httpHandler() { + return runtimeConfig.httpHandler; + }, + updateHttpClientConfig(key, value) { + runtimeConfig.httpHandler?.updateHttpClientConfig(key, value); + }, + httpHandlerConfigs() { + return runtimeConfig.httpHandler.httpHandlerConfigs(); + }, + }; +}; +export const resolveHttpHandlerRuntimeConfig = (httpHandlerExtensionConfiguration) => { + return { + httpHandler: httpHandlerExtensionConfiguration.httpHandler(), + }; +}; diff --git a/node_modules/@smithy/protocol-http/dist-es/extensions/index.js b/node_modules/@smithy/protocol-http/dist-es/extensions/index.js new file mode 100644 index 00000000..a215a4a8 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-es/extensions/index.js @@ -0,0 +1 @@ +export * from "./httpExtensionConfiguration"; diff --git a/node_modules/@smithy/protocol-http/dist-es/httpHandler.js b/node_modules/@smithy/protocol-http/dist-es/httpHandler.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-es/httpHandler.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/protocol-http/dist-es/httpRequest.js b/node_modules/@smithy/protocol-http/dist-es/httpRequest.js new file mode 100644 index 00000000..fd426ab8 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-es/httpRequest.js @@ -0,0 +1,53 @@ +export class HttpRequest { + constructor(options) { + this.method = options.method || "GET"; + this.hostname = options.hostname || "localhost"; + this.port = options.port; + this.query = options.query || {}; + this.headers = options.headers || {}; + this.body = options.body; + this.protocol = options.protocol + ? options.protocol.slice(-1) !== ":" + ? `${options.protocol}:` + : options.protocol + : "https:"; + this.path = options.path ? (options.path.charAt(0) !== "/" ? 
`/${options.path}` : options.path) : "/"; + this.username = options.username; + this.password = options.password; + this.fragment = options.fragment; + } + static clone(request) { + const cloned = new HttpRequest({ + ...request, + headers: { ...request.headers }, + }); + if (cloned.query) { + cloned.query = cloneQuery(cloned.query); + } + return cloned; + } + static isInstance(request) { + if (!request) { + return false; + } + const req = request; + return ("method" in req && + "protocol" in req && + "hostname" in req && + "path" in req && + typeof req["query"] === "object" && + typeof req["headers"] === "object"); + } + clone() { + return HttpRequest.clone(this); + } +} +function cloneQuery(query) { + return Object.keys(query).reduce((carry, paramName) => { + const param = query[paramName]; + return { + ...carry, + [paramName]: Array.isArray(param) ? [...param] : param, + }; + }, {}); +} diff --git a/node_modules/@smithy/protocol-http/dist-es/httpResponse.js b/node_modules/@smithy/protocol-http/dist-es/httpResponse.js new file mode 100644 index 00000000..75f470f1 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-es/httpResponse.js @@ -0,0 +1,14 @@ +export class HttpResponse { + constructor(options) { + this.statusCode = options.statusCode; + this.reason = options.reason; + this.headers = options.headers || {}; + this.body = options.body; + } + static isInstance(response) { + if (!response) + return false; + const resp = response; + return typeof resp.statusCode === "number" && typeof resp.headers === "object"; + } +} diff --git a/node_modules/@smithy/protocol-http/dist-es/index.js b/node_modules/@smithy/protocol-http/dist-es/index.js new file mode 100644 index 00000000..8ff7f269 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-es/index.js @@ -0,0 +1,8 @@ +export * from "./extensions"; +export * from "./Field"; +export * from "./Fields"; +export * from "./httpHandler"; +export * from "./httpRequest"; +export * from "./httpResponse"; +export * from "./isValidHostname"; +export * from "./types"; diff --git a/node_modules/@smithy/protocol-http/dist-es/isValidHostname.js b/node_modules/@smithy/protocol-http/dist-es/isValidHostname.js new file mode 100644 index 00000000..464c7db5 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-es/isValidHostname.js @@ -0,0 +1,4 @@ +export function isValidHostname(hostname) { + const hostPattern = /^[a-z0-9][a-z0-9\.\-]*[a-z0-9]$/; + return hostPattern.test(hostname); +} diff --git a/node_modules/@smithy/protocol-http/dist-es/types.js b/node_modules/@smithy/protocol-http/dist-es/types.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/protocol-http/dist-types/Field.d.ts b/node_modules/@smithy/protocol-http/dist-types/Field.d.ts new file mode 100644 index 00000000..2d1613af --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-types/Field.d.ts @@ -0,0 +1,49 @@ +import { FieldOptions, FieldPosition } from "@smithy/types"; +/** + * A name-value pair representing a single field + * transmitted in an HTTP Request or Response. + * + * The kind will dictate metadata placement within + * an HTTP message. + * + * All field names are case insensitive and + * case-variance must be treated as equivalent. + * Names MAY be normalized but SHOULD be preserved + * for accuracy during transmission. 
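+ * + * Illustrative sketch (not part of the upstream file): + * + * const field = new Field({ name: "X-Trace", values: ["a"] }); // kind defaults to FieldPosition.HEADER + * field.add("b, c"); + * field.get(); // ["a", "b, c"] + * field.toString(); // 'a, "b, c"' (values containing a comma or space are quoted)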
+ */ +export declare class Field { + readonly name: string; + readonly kind: FieldPosition; + values: string[]; + constructor({ name, kind, values }: FieldOptions); + /** + * Appends a value to the field. + * + * @param value The value to append. + */ + add(value: string): void; + /** + * Overwrite existing field values. + * + * @param values The new field values. + */ + set(values: string[]): void; + /** + * Remove all matching entries from list. + * + * @param value Value to remove. + */ + remove(value: string): void; + /** + * Get comma-delimited string. + * + * @returns String representation of {@link Field}. + */ + toString(): string; + /** + * Get string values as a list + * + * @returns Values in {@link Field} as a list. + */ + get(): string[]; +} diff --git a/node_modules/@smithy/protocol-http/dist-types/Fields.d.ts b/node_modules/@smithy/protocol-http/dist-types/Fields.d.ts new file mode 100644 index 00000000..8915826d --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-types/Fields.d.ts @@ -0,0 +1,44 @@ +import { FieldPosition } from "@smithy/types"; +import { Field } from "./Field"; +export type FieldsOptions = { + fields?: Field[]; + encoding?: string; +}; +/** + * Collection of Field entries mapped by name. + */ +export declare class Fields { + private readonly entries; + private readonly encoding; + constructor({ fields, encoding }: FieldsOptions); + /** + * Set entry for a {@link Field} name. The `name` + * attribute will be used to key the collection. + * + * @param field The {@link Field} to set. + */ + setField(field: Field): void; + /** + * Retrieve {@link Field} entry by name. + * + * @param name The name of the {@link Field} entry + * to retrieve + * @returns The {@link Field} if it exists. + */ + getField(name: string): Field | undefined; + /** + * Delete entry from collection. + * + * @param name Name of the entry to delete. + */ + removeField(name: string): void; + /** + * Helper function for retrieving specific types of fields. + * Used to grab all headers or all trailers. + * + * @param kind {@link FieldPosition} of entries to retrieve. + * @returns The {@link Field} entries with the specified + * {@link FieldPosition}. 
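+ * + * Illustrative sketch (not part of the upstream file): + * + * const fields = new Fields({ fields: [new Field({ name: "X-Id", values: ["1"] })] }); + * fields.getField("x-id"); // lookups are case-insensitive; names are lowercased as keys + * fields.getByType(FieldPosition.HEADER); // every entry whose kind is HEADER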
+ */ + getByType(kind: FieldPosition): Field[]; +} diff --git a/node_modules/@smithy/protocol-http/dist-types/extensions/httpExtensionConfiguration.d.ts b/node_modules/@smithy/protocol-http/dist-types/extensions/httpExtensionConfiguration.d.ts new file mode 100644 index 00000000..bfe452dc --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-types/extensions/httpExtensionConfiguration.d.ts @@ -0,0 +1,37 @@ +import { HttpHandler } from "../httpHandler"; +/** + * @internal + */ +export interface HttpHandlerExtensionConfiguration<HandlerConfig extends object = {}> { + setHttpHandler(handler: HttpHandler<HandlerConfig>): void; + httpHandler(): HttpHandler<HandlerConfig>; + updateHttpClientConfig(key: keyof HandlerConfig, value: HandlerConfig[typeof key]): void; + httpHandlerConfigs(): HandlerConfig; +} +/** + * @internal + */ +export type HttpHandlerExtensionConfigType = Partial<{ + httpHandler: HttpHandler; +}>; +/** + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getHttpHandlerExtensionConfiguration: <HandlerConfig extends object = {}>(runtimeConfig: Partial<{ + httpHandler: HttpHandler<HandlerConfig>; +}>) => { + setHttpHandler(handler: HttpHandler<HandlerConfig>): void; + httpHandler(): HttpHandler<HandlerConfig>; + updateHttpClientConfig(key: keyof HandlerConfig, value: HandlerConfig[keyof HandlerConfig]): void; + httpHandlerConfigs(): HandlerConfig; +}; +/** + * @internal + * + * Helper function to resolve runtime config from default extension configuration + */ +export declare const resolveHttpHandlerRuntimeConfig: <HandlerConfig extends object = {}>(httpHandlerExtensionConfiguration: HttpHandlerExtensionConfiguration<HandlerConfig>) => Partial<{ + httpHandler: HttpHandler<HandlerConfig>; +}>; diff --git a/node_modules/@smithy/protocol-http/dist-types/extensions/index.d.ts b/node_modules/@smithy/protocol-http/dist-types/extensions/index.d.ts new file mode 100644 index 00000000..a215a4a8 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-types/extensions/index.d.ts @@ -0,0 +1 @@ +export * from "./httpExtensionConfiguration"; diff --git a/node_modules/@smithy/protocol-http/dist-types/httpHandler.d.ts b/node_modules/@smithy/protocol-http/dist-types/httpHandler.d.ts new file mode 100644 index 00000000..8dc8d32b --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-types/httpHandler.d.ts @@ -0,0 +1,35 @@ +import type { FetchHttpHandlerOptions, HttpHandlerOptions, NodeHttpHandlerOptions, RequestHandler } from "@smithy/types"; +import type { HttpRequest } from "./httpRequest"; +import type { HttpResponse } from "./httpResponse"; +/** + * @internal + */ +export type HttpHandler<HttpHandlerConfig = {}> = RequestHandler<HttpRequest, HttpResponse, HttpHandlerOptions> & { + /** + * @internal + */ + updateHttpClientConfig(key: keyof HttpHandlerConfig, value: HttpHandlerConfig[typeof key]): void; + /** + * @internal + */ + httpHandlerConfigs(): HttpHandlerConfig; +}; +/** + * @public + * + * A type representing the accepted user inputs for the `requestHandler` field + * of a client's constructor object. + * + * You may provide an instance of an HttpHandler, or alternatively + * provide the constructor arguments as an object which will be passed + * to the constructor of the default request handler. + * + * The default class constructor to which your arguments will be passed + * varies. The Node.js default is the NodeHttpHandler and the browser/react-native + * default is the FetchHttpHandler. In rarer cases specific clients may be + * configured to use other default implementations such as Websocket or HTTP2. + * + * The fallback type Record<string, unknown> is part of the union to allow + * passing constructor params to an unknown requestHandler type.
+ */ +export type HttpHandlerUserInput = HttpHandler | NodeHttpHandlerOptions | FetchHttpHandlerOptions | Record<string, unknown>; diff --git a/node_modules/@smithy/protocol-http/dist-types/httpRequest.d.ts b/node_modules/@smithy/protocol-http/dist-types/httpRequest.d.ts new file mode 100644 index 00000000..8b64ff66 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-types/httpRequest.d.ts @@ -0,0 +1,55 @@ +import { HeaderBag, HttpMessage, HttpRequest as IHttpRequest, QueryParameterBag, URI } from "@smithy/types"; +type HttpRequestOptions = Partial<HttpMessage> & Partial<URI> & { + method?: string; +}; +/** + * Use the distinct IHttpRequest interface from \@smithy/types instead. + * This should not be used due to + * overlapping with the concrete class' name. + * + * This is not marked deprecated since that would mark the concrete class + * deprecated as well. + * + * @internal + */ +export interface HttpRequest extends IHttpRequest { +} +/** + * @public + */ +export { IHttpRequest }; +/** + * @public + */ +export declare class HttpRequest implements HttpMessage, URI { + method: string; + protocol: string; + hostname: string; + port?: number; + path: string; + query: QueryParameterBag; + headers: HeaderBag; + username?: string; + password?: string; + fragment?: string; + body?: any; + constructor(options: HttpRequestOptions); + /** + * Note: this does not deep-clone the body. + */ + static clone(request: IHttpRequest): HttpRequest; + /** + * This method only actually asserts that request is the interface {@link IHttpRequest}, + * and not necessarily this concrete class. Left in place for API stability. + * + * Do not call instance methods on the input of this function, and + * do not assume it has the HttpRequest prototype. + */ + static isInstance(request: unknown): request is HttpRequest; + /** + * @deprecated use static HttpRequest.clone(request) instead. It's not safe to call + * this method because {@link HttpRequest.isInstance} incorrectly + * asserts that IHttpRequest (interface) objects are of type HttpRequest (class). + */ + clone(): HttpRequest; +} diff --git a/node_modules/@smithy/protocol-http/dist-types/httpResponse.d.ts b/node_modules/@smithy/protocol-http/dist-types/httpResponse.d.ts new file mode 100644 index 00000000..e51f18b9 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-types/httpResponse.d.ts @@ -0,0 +1,29 @@ +import { HeaderBag, HttpMessage, HttpResponse as IHttpResponse } from "@smithy/types"; +type HttpResponseOptions = Partial<HttpMessage> & { + statusCode: number; + reason?: string; +}; +/** + * Use the distinct IHttpResponse interface from \@smithy/types instead. + * This should not be used due to + * overlapping with the concrete class' name. + * + * This is not marked deprecated since that would mark the concrete class + * deprecated as well.
+ * + * @internal + */ +export interface HttpResponse extends IHttpResponse { +} +/** + * @public + */ +export declare class HttpResponse { + statusCode: number; + reason?: string; + headers: HeaderBag; + body?: any; + constructor(options: HttpResponseOptions); + static isInstance(response: unknown): response is HttpResponse; +} +export {}; diff --git a/node_modules/@smithy/protocol-http/dist-types/index.d.ts b/node_modules/@smithy/protocol-http/dist-types/index.d.ts new file mode 100644 index 00000000..8ff7f269 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-types/index.d.ts @@ -0,0 +1,8 @@ +export * from "./extensions"; +export * from "./Field"; +export * from "./Fields"; +export * from "./httpHandler"; +export * from "./httpRequest"; +export * from "./httpResponse"; +export * from "./isValidHostname"; +export * from "./types"; diff --git a/node_modules/@smithy/protocol-http/dist-types/isValidHostname.d.ts b/node_modules/@smithy/protocol-http/dist-types/isValidHostname.d.ts new file mode 100644 index 00000000..6fb5bcb3 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-types/isValidHostname.d.ts @@ -0,0 +1 @@ +export declare function isValidHostname(hostname: string): boolean; diff --git a/node_modules/@smithy/protocol-http/dist-types/ts3.4/Field.d.ts b/node_modules/@smithy/protocol-http/dist-types/ts3.4/Field.d.ts new file mode 100644 index 00000000..faa4b706 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-types/ts3.4/Field.d.ts @@ -0,0 +1,49 @@ +import { FieldOptions, FieldPosition } from "@smithy/types"; +/** + * A name-value pair representing a single field + * transmitted in an HTTP Request or Response. + * + * The kind will dictate metadata placement within + * an HTTP message. + * + * All field names are case insensitive and + * case-variance must be treated as equivalent. + * Names MAY be normalized but SHOULD be preserved + * for accuracy during transmission. + */ +export declare class Field { + readonly name: string; + readonly kind: FieldPosition; + values: string[]; + constructor({ name, kind, values }: FieldOptions); + /** + * Appends a value to the field. + * + * @param value The value to append. + */ + add(value: string): void; + /** + * Overwrite existing field values. + * + * @param values The new field values. + */ + set(values: string[]): void; + /** + * Remove all matching entries from list. + * + * @param value Value to remove. + */ + remove(value: string): void; + /** + * Get comma-delimited string. + * + * @returns String representation of {@link Field}. + */ + toString(): string; + /** + * Get string values as a list + * + * @returns Values in {@link Field} as a list. + */ + get(): string[]; +} diff --git a/node_modules/@smithy/protocol-http/dist-types/ts3.4/Fields.d.ts b/node_modules/@smithy/protocol-http/dist-types/ts3.4/Fields.d.ts new file mode 100644 index 00000000..616f55eb --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-types/ts3.4/Fields.d.ts @@ -0,0 +1,44 @@ +import { FieldPosition } from "@smithy/types"; +import { Field } from "./Field"; +export type FieldsOptions = { + fields?: Field[]; + encoding?: string; +}; +/** + * Collection of Field entries mapped by name. + */ +export declare class Fields { + private readonly entries; + private readonly encoding; + constructor({ fields, encoding }: FieldsOptions); + /** + * Set entry for a {@link Field} name. The `name` + * attribute will be used to key the collection. + * + * @param field The {@link Field} to set. 
+ */ + setField(field: Field): void; + /** + * Retrieve {@link Field} entry by name. + * + * @param name The name of the {@link Field} entry + * to retrieve + * @returns The {@link Field} if it exists. + */ + getField(name: string): Field | undefined; + /** + * Delete entry from collection. + * + * @param name Name of the entry to delete. + */ + removeField(name: string): void; + /** + * Helper function for retrieving specific types of fields. + * Used to grab all headers or all trailers. + * + * @param kind {@link FieldPosition} of entries to retrieve. + * @returns The {@link Field} entries with the specified + * {@link FieldPosition}. + */ + getByType(kind: FieldPosition): Field[]; +} diff --git a/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/httpExtensionConfiguration.d.ts b/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/httpExtensionConfiguration.d.ts new file mode 100644 index 00000000..3cd2cf65 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/httpExtensionConfiguration.d.ts @@ -0,0 +1,37 @@ +import { HttpHandler } from "../httpHandler"; +/** + * @internal + */ +export interface HttpHandlerExtensionConfiguration<HandlerConfig extends object = {}> { + setHttpHandler(handler: HttpHandler<HandlerConfig>): void; + httpHandler(): HttpHandler<HandlerConfig>; + updateHttpClientConfig(key: keyof HandlerConfig, value: HandlerConfig[typeof key]): void; + httpHandlerConfigs(): HandlerConfig; +} +/** + * @internal + */ +export type HttpHandlerExtensionConfigType = Partial<{ + httpHandler: HttpHandler; +}>; +/** + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getHttpHandlerExtensionConfiguration: <HandlerConfig extends object = {}>(runtimeConfig: Partial<{ + httpHandler: HttpHandler<HandlerConfig>; +}>) => { + setHttpHandler(handler: HttpHandler<HandlerConfig>): void; + httpHandler(): HttpHandler<HandlerConfig>; + updateHttpClientConfig(key: keyof HandlerConfig, value: HandlerConfig[keyof HandlerConfig]): void; + httpHandlerConfigs(): HandlerConfig; +}; +/** + * @internal + * + * Helper function to resolve runtime config from default extension configuration + */ +export declare const resolveHttpHandlerRuntimeConfig: <HandlerConfig extends object = {}>(httpHandlerExtensionConfiguration: HttpHandlerExtensionConfiguration<HandlerConfig>) => Partial<{ + httpHandler: HttpHandler<HandlerConfig>; +}>; diff --git a/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/index.d.ts b/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/index.d.ts new file mode 100644 index 00000000..e0f765b5 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/index.d.ts @@ -0,0 +1 @@ +export * from "./httpExtensionConfiguration"; diff --git a/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpHandler.d.ts b/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpHandler.d.ts new file mode 100644 index 00000000..b8f1978d --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpHandler.d.ts @@ -0,0 +1,35 @@ +import { FetchHttpHandlerOptions, HttpHandlerOptions, NodeHttpHandlerOptions, RequestHandler } from "@smithy/types"; +import { HttpRequest } from "./httpRequest"; +import { HttpResponse } from "./httpResponse"; +/** + * @internal + */ +export type HttpHandler<HttpHandlerConfig = {}> = RequestHandler<HttpRequest, HttpResponse, HttpHandlerOptions> & { + /** + * @internal + */ + updateHttpClientConfig(key: keyof HttpHandlerConfig, value: HttpHandlerConfig[typeof key]): void; + /** + * @internal + */ + httpHandlerConfigs(): HttpHandlerConfig; +}; +/** + * @public + * + * A type representing the accepted user inputs for the `requestHandler` field + * of a client's constructor object.
+ * + * You may provide an instance of an HttpHandler, or alternatively + * provide the constructor arguments as an object which will be passed + * to the constructor of the default request handler. + * + * The default class constructor to which your arguments will be passed + * varies. The Node.js default is the NodeHttpHandler and the browser/react-native + * default is the FetchHttpHandler. In rarer cases specific clients may be + * configured to use other default implementations such as Websocket or HTTP2. + * + * The fallback type Record<string, unknown> is part of the union to allow + * passing constructor params to an unknown requestHandler type. + */ +export type HttpHandlerUserInput = HttpHandler | NodeHttpHandlerOptions | FetchHttpHandlerOptions | Record<string, unknown>; diff --git a/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpRequest.d.ts b/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpRequest.d.ts new file mode 100644 index 00000000..cdcf38b8 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpRequest.d.ts @@ -0,0 +1,55 @@ +import { HeaderBag, HttpMessage, HttpRequest as IHttpRequest, QueryParameterBag, URI } from "@smithy/types"; +type HttpRequestOptions = Partial<HttpMessage> & Partial<URI> & { + method?: string; +}; +/** + * Use the distinct IHttpRequest interface from \@smithy/types instead. + * This should not be used due to + * overlapping with the concrete class' name. + * + * This is not marked deprecated since that would mark the concrete class + * deprecated as well. + * + * @internal + */ +export interface HttpRequest extends IHttpRequest { +} +/** + * @public + */ +export { IHttpRequest }; +/** + * @public + */ +export declare class HttpRequest implements HttpMessage, URI { + method: string; + protocol: string; + hostname: string; + port?: number; + path: string; + query: QueryParameterBag; + headers: HeaderBag; + username?: string; + password?: string; + fragment?: string; + body?: any; + constructor(options: HttpRequestOptions); + /** + * Note: this does not deep-clone the body. + */ + static clone(request: IHttpRequest): HttpRequest; + /** + * This method only actually asserts that request is the interface {@link IHttpRequest}, + * and not necessarily this concrete class. Left in place for API stability. + * + * Do not call instance methods on the input of this function, and + * do not assume it has the HttpRequest prototype. + */ + static isInstance(request: unknown): request is HttpRequest; + /** + * @deprecated use static HttpRequest.clone(request) instead. It's not safe to call + * this method because {@link HttpRequest.isInstance} incorrectly + * asserts that IHttpRequest (interface) objects are of type HttpRequest (class). + */ + clone(): HttpRequest; +} diff --git a/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpResponse.d.ts b/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpResponse.d.ts new file mode 100644 index 00000000..8babc912 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpResponse.d.ts @@ -0,0 +1,29 @@ +import { HeaderBag, HttpMessage, HttpResponse as IHttpResponse } from "@smithy/types"; +type HttpResponseOptions = Partial<HttpMessage> & { + statusCode: number; + reason?: string; +}; +/** + * Use the distinct IHttpResponse interface from \@smithy/types instead. + * This should not be used due to + * overlapping with the concrete class' name. + * + * This is not marked deprecated since that would mark the concrete class + * deprecated as well.
+ * + * @internal + */ +export interface HttpResponse extends IHttpResponse { +} +/** + * @public + */ +export declare class HttpResponse { + statusCode: number; + reason?: string; + headers: HeaderBag; + body?: any; + constructor(options: HttpResponseOptions); + static isInstance(response: unknown): response is HttpResponse; +} +export {}; diff --git a/node_modules/@smithy/protocol-http/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/protocol-http/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..08feffab --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +export * from "./extensions"; +export * from "./Field"; +export * from "./Fields"; +export * from "./httpHandler"; +export * from "./httpRequest"; +export * from "./httpResponse"; +export * from "./isValidHostname"; +export * from "./types"; diff --git a/node_modules/@smithy/protocol-http/dist-types/ts3.4/isValidHostname.d.ts b/node_modules/@smithy/protocol-http/dist-types/ts3.4/isValidHostname.d.ts new file mode 100644 index 00000000..7b85b36e --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-types/ts3.4/isValidHostname.d.ts @@ -0,0 +1 @@ +export declare function isValidHostname(hostname: string): boolean; diff --git a/node_modules/@smithy/protocol-http/dist-types/ts3.4/types.d.ts b/node_modules/@smithy/protocol-http/dist-types/ts3.4/types.d.ts new file mode 100644 index 00000000..42e3c660 --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-types/ts3.4/types.d.ts @@ -0,0 +1,21 @@ +import { FieldOptions as __FieldOptions, FieldPosition as __FieldPosition, HeaderBag as __HeaderBag, HttpHandlerOptions as __HttpHandlerOptions, HttpMessage as __HttpMessage } from "@smithy/types"; +/** + * @deprecated Use FieldOptions from `@smithy/types` instead + */ +export type FieldOptions = __FieldOptions; +/** + * @deprecated Use FieldPosition from `@smithy/types` instead + */ +export type FieldPosition = __FieldPosition; +/** + * @deprecated Use HeaderBag from `@smithy/types` instead + */ +export type HeaderBag = __HeaderBag; +/** + * @deprecated Use HttpMessage from `@smithy/types` instead + */ +export type HttpMessage = __HttpMessage; +/** + * @deprecated Use HttpHandlerOptions from `@smithy/types` instead + */ +export type HttpHandlerOptions = __HttpHandlerOptions; diff --git a/node_modules/@smithy/protocol-http/dist-types/types.d.ts b/node_modules/@smithy/protocol-http/dist-types/types.d.ts new file mode 100644 index 00000000..0d597b9c --- /dev/null +++ b/node_modules/@smithy/protocol-http/dist-types/types.d.ts @@ -0,0 +1,21 @@ +import { FieldOptions as __FieldOptions, FieldPosition as __FieldPosition, HeaderBag as __HeaderBag, HttpHandlerOptions as __HttpHandlerOptions, HttpMessage as __HttpMessage } from "@smithy/types"; +/** + * @deprecated Use FieldOptions from `@smithy/types` instead + */ +export type FieldOptions = __FieldOptions; +/** + * @deprecated Use FieldPosition from `@smithy/types` instead + */ +export type FieldPosition = __FieldPosition; +/** + * @deprecated Use HeaderBag from `@smithy/types` instead + */ +export type HeaderBag = __HeaderBag; +/** + * @deprecated Use HttpMessage from `@smithy/types` instead + */ +export type HttpMessage = __HttpMessage; +/** + * @deprecated Use HttpHandlerOptions from `@smithy/types` instead + */ +export type HttpHandlerOptions = __HttpHandlerOptions; diff --git a/node_modules/@smithy/protocol-http/package.json b/node_modules/@smithy/protocol-http/package.json new file mode 100644 index 00000000..549711a2 --- 
/dev/null +++ b/node_modules/@smithy/protocol-http/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/protocol-http", + "version": "5.1.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline protocol-http", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS Smithy Team", + "email": "", + "url": "https://smithy.io" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/protocol-http", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/protocol-http" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/querystring-builder/LICENSE b/node_modules/@smithy/querystring-builder/LICENSE new file mode 100644 index 00000000..dd65ae06 --- /dev/null +++ b/node_modules/@smithy/querystring-builder/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@smithy/querystring-builder/README.md b/node_modules/@smithy/querystring-builder/README.md new file mode 100644 index 00000000..00275dad --- /dev/null +++ b/node_modules/@smithy/querystring-builder/README.md @@ -0,0 +1,10 @@ +# @smithy/querystring-builder + +[![NPM version](https://img.shields.io/npm/v/@smithy/querystring-builder/latest.svg)](https://www.npmjs.com/package/@smithy/querystring-builder) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/querystring-builder.svg)](https://www.npmjs.com/package/@smithy/querystring-builder) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
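The package's single export, `buildQueryString`, is defined in the dist-cjs/dist-es hunks that follow. A minimal usage sketch of its observable behavior, inferred from that source (the inputs are illustrative): keys are sorted and URI-escaped, array values expand to repeated `key=value` pairs, and falsy non-string values emit a bare key.

```ts
import { buildQueryString } from "@smithy/querystring-builder";

// Keys are sorted and URI-escaped; arrays expand to repeated pairs.
// An empty string keeps its "=", while a null value yields a bare key.
const qs = buildQueryString({ b: "2", a: ["1", "3"], flag: null, empty: "" });
// => "a=1&a=3&b=2&empty=&flag"
```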
diff --git a/node_modules/@smithy/querystring-builder/dist-cjs/index.js b/node_modules/@smithy/querystring-builder/dist-cjs/index.js new file mode 100644 index 00000000..70302425 --- /dev/null +++ b/node_modules/@smithy/querystring-builder/dist-cjs/index.js @@ -0,0 +1,52 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + buildQueryString: () => buildQueryString +}); +module.exports = __toCommonJS(src_exports); +var import_util_uri_escape = require("@smithy/util-uri-escape"); +function buildQueryString(query) { + const parts = []; + for (let key of Object.keys(query).sort()) { + const value = query[key]; + key = (0, import_util_uri_escape.escapeUri)(key); + if (Array.isArray(value)) { + for (let i = 0, iLen = value.length; i < iLen; i++) { + parts.push(`${key}=${(0, import_util_uri_escape.escapeUri)(value[i])}`); + } + } else { + let qsEntry = key; + if (value || typeof value === "string") { + qsEntry += `=${(0, import_util_uri_escape.escapeUri)(value)}`; + } + parts.push(qsEntry); + } + } + return parts.join("&"); +} +__name(buildQueryString, "buildQueryString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + buildQueryString +}); + diff --git a/node_modules/@smithy/querystring-builder/dist-es/index.js b/node_modules/@smithy/querystring-builder/dist-es/index.js new file mode 100644 index 00000000..fbc76840 --- /dev/null +++ b/node_modules/@smithy/querystring-builder/dist-es/index.js @@ -0,0 +1,21 @@ +import { escapeUri } from "@smithy/util-uri-escape"; +export function buildQueryString(query) { + const parts = []; + for (let key of Object.keys(query).sort()) { + const value = query[key]; + key = escapeUri(key); + if (Array.isArray(value)) { + for (let i = 0, iLen = value.length; i < iLen; i++) { + parts.push(`${key}=${escapeUri(value[i])}`); + } + } + else { + let qsEntry = key; + if (value || typeof value === "string") { + qsEntry += `=${escapeUri(value)}`; + } + parts.push(qsEntry); + } + } + return parts.join("&"); +} diff --git a/node_modules/@smithy/querystring-builder/dist-types/index.d.ts b/node_modules/@smithy/querystring-builder/dist-types/index.d.ts new file mode 100644 index 00000000..538b1b0e --- /dev/null +++ b/node_modules/@smithy/querystring-builder/dist-types/index.d.ts @@ -0,0 +1,5 @@ +import { QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare function buildQueryString(query: QueryParameterBag): string; diff --git a/node_modules/@smithy/querystring-builder/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/querystring-builder/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..1f866f3e --- /dev/null +++ 
b/node_modules/@smithy/querystring-builder/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +import { QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare function buildQueryString(query: QueryParameterBag): string; diff --git a/node_modules/@smithy/querystring-builder/package.json b/node_modules/@smithy/querystring-builder/package.json new file mode 100644 index 00000000..d144f0a6 --- /dev/null +++ b/node_modules/@smithy/querystring-builder/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/querystring-builder", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline querystring-builder", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "exit 0" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-uri-escape": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/querystring-builder", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/querystring-builder" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/querystring-parser/LICENSE b/node_modules/@smithy/querystring-parser/LICENSE new file mode 100644 index 00000000..dd65ae06 --- /dev/null +++ b/node_modules/@smithy/querystring-parser/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
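The counterpart package added next exposes a single `parseQueryString` export. A minimal sketch of its behavior, based on the dist-cjs implementation included below (the input is illustrative): a leading `?` is stripped, repeated keys collapse into arrays, and a key with no `=` maps to null.

```ts
import { parseQueryString } from "@smithy/querystring-parser";

// Repeated keys collapse into arrays; a key with no "=" maps to null.
const query = parseQueryString("?a=1&a=3&b=2&flag");
// => { a: ["1", "3"], b: "2", flag: null }
```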
diff --git a/node_modules/@smithy/querystring-parser/README.md b/node_modules/@smithy/querystring-parser/README.md new file mode 100644 index 00000000..02dcf51d --- /dev/null +++ b/node_modules/@smithy/querystring-parser/README.md @@ -0,0 +1,10 @@ +# @smithy/querystring-parser + +[![NPM version](https://img.shields.io/npm/v/@smithy/querystring-parser/latest.svg)](https://www.npmjs.com/package/@smithy/querystring-parser) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/querystring-parser.svg)](https://www.npmjs.com/package/@smithy/querystring-parser) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@smithy/querystring-parser/dist-cjs/index.js b/node_modules/@smithy/querystring-parser/dist-cjs/index.js new file mode 100644 index 00000000..924647c8 --- /dev/null +++ b/node_modules/@smithy/querystring-parser/dist-cjs/index.js @@ -0,0 +1,53 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + parseQueryString: () => parseQueryString +}); +module.exports = __toCommonJS(src_exports); +function parseQueryString(querystring) { + const query = {}; + querystring = querystring.replace(/^\?/, ""); + if (querystring) { + for (const pair of querystring.split("&")) { + let [key, value = null] = pair.split("="); + key = decodeURIComponent(key); + if (value) { + value = decodeURIComponent(value); + } + if (!(key in query)) { + query[key] = value; + } else if (Array.isArray(query[key])) { + query[key].push(value); + } else { + query[key] = [query[key], value]; + } + } + } + return query; +} +__name(parseQueryString, "parseQueryString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + parseQueryString +}); + diff --git a/node_modules/@smithy/querystring-parser/dist-es/index.js b/node_modules/@smithy/querystring-parser/dist-es/index.js new file mode 100644 index 00000000..bd7bf004 --- /dev/null +++ b/node_modules/@smithy/querystring-parser/dist-es/index.js @@ -0,0 +1,23 @@ +export function parseQueryString(querystring) { + const query = {}; + querystring = querystring.replace(/^\?/, ""); + if (querystring) { + for (const pair of querystring.split("&")) { + let [key, value = null] = pair.split("="); + key = decodeURIComponent(key); + if (value) { + value = decodeURIComponent(value); + } + if (!(key in query)) { + query[key] = value; + } + else if (Array.isArray(query[key])) { + query[key].push(value); + } + else { + query[key] = [query[key], value]; + } + } + } + return query; +} diff --git a/node_modules/@smithy/querystring-parser/dist-types/index.d.ts b/node_modules/@smithy/querystring-parser/dist-types/index.d.ts new file 
mode 100644 index 00000000..fdc1ba59 --- /dev/null +++ b/node_modules/@smithy/querystring-parser/dist-types/index.d.ts @@ -0,0 +1,5 @@ +import { QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare function parseQueryString(querystring: string): QueryParameterBag; diff --git a/node_modules/@smithy/querystring-parser/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/querystring-parser/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..8bb747dc --- /dev/null +++ b/node_modules/@smithy/querystring-parser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +import { QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare function parseQueryString(querystring: string): QueryParameterBag; diff --git a/node_modules/@smithy/querystring-parser/package.json b/node_modules/@smithy/querystring-parser/package.json new file mode 100644 index 00000000..9a27e7e4 --- /dev/null +++ b/node_modules/@smithy/querystring-parser/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/querystring-parser", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline querystring-parser", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/querystring-parser", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/querystring-parser" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/service-error-classification/LICENSE b/node_modules/@smithy/service-error-classification/LICENSE new file mode 100644 index 00000000..dd65ae06 --- /dev/null +++ b/node_modules/@smithy/service-error-classification/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
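The package added next exports retry-classification predicates (`isThrottlingError`, `isTransientError`, and friends; sources follow). A hedged sketch of how they classify errors, grounded in the constants and checks visible in the dist files below — the error objects here are hand-built stand-ins for real `SdkError` values, not actual SDK output:

```ts
import {
  isThrottlingError,
  isTransientError,
} from "@smithy/service-error-classification";

// Hand-built stand-in for an SdkError; real ones come from SDK clients.
const throttled = Object.assign(new Error("Rate exceeded"), {
  name: "ThrottlingException",
  $metadata: { httpStatusCode: 429 },
});
isThrottlingError(throttled as any); // true: 429 status and a known throttling code

// Node.js socket resets count as transient via NODEJS_TIMEOUT_ERROR_CODES.
const reset = Object.assign(new Error("socket hang up"), { code: "ECONNRESET" });
isTransientError(reset as any); // true
```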
diff --git a/node_modules/@smithy/service-error-classification/README.md b/node_modules/@smithy/service-error-classification/README.md new file mode 100644 index 00000000..902dd432 --- /dev/null +++ b/node_modules/@smithy/service-error-classification/README.md @@ -0,0 +1,4 @@ +# @smithy/service-error-classification + +[![NPM version](https://img.shields.io/npm/v/@smithy/service-error-classification/latest.svg)](https://www.npmjs.com/package/@smithy/service-error-classification) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/service-error-classification.svg)](https://www.npmjs.com/package/@smithy/service-error-classification) diff --git a/node_modules/@smithy/service-error-classification/dist-cjs/constants.js b/node_modules/@smithy/service-error-classification/dist-cjs/constants.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/service-error-classification/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/service-error-classification/dist-cjs/index.js b/node_modules/@smithy/service-error-classification/dist-cjs/index.js new file mode 100644 index 00000000..bcca2b3c --- /dev/null +++ b/node_modules/@smithy/service-error-classification/dist-cjs/index.js @@ -0,0 +1,109 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isBrowserNetworkError: () => isBrowserNetworkError, + isClockSkewCorrectedError: () => isClockSkewCorrectedError, + isClockSkewError: () => isClockSkewError, + isRetryableByTrait: () => isRetryableByTrait, + isServerError: () => isServerError, + isThrottlingError: () => isThrottlingError, + isTransientError: () => isTransientError +}); +module.exports = __toCommonJS(src_exports); + +// src/constants.ts +var CLOCK_SKEW_ERROR_CODES = [ + "AuthFailure", + "InvalidSignatureException", + "RequestExpired", + "RequestInTheFuture", + "RequestTimeTooSkewed", + "SignatureDoesNotMatch" +]; +var THROTTLING_ERROR_CODES = [ + "BandwidthLimitExceeded", + "EC2ThrottledException", + "LimitExceededException", + "PriorRequestNotComplete", + "ProvisionedThroughputExceededException", + "RequestLimitExceeded", + "RequestThrottled", + "RequestThrottledException", + "SlowDown", + "ThrottledException", + "Throttling", + "ThrottlingException", + "TooManyRequestsException", + "TransactionInProgressException" + // DynamoDB +]; +var TRANSIENT_ERROR_CODES = ["TimeoutError", "RequestTimeout", "RequestTimeoutException"]; +var TRANSIENT_ERROR_STATUS_CODES = [500, 502, 503, 504]; +var NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "ECONNREFUSED", "EPIPE", "ETIMEDOUT"]; + +// src/index.ts +var 
isRetryableByTrait = /* @__PURE__ */ __name((error) => error.$retryable !== void 0, "isRetryableByTrait"); +var isClockSkewError = /* @__PURE__ */ __name((error) => CLOCK_SKEW_ERROR_CODES.includes(error.name), "isClockSkewError"); +var isClockSkewCorrectedError = /* @__PURE__ */ __name((error) => error.$metadata?.clockSkewCorrected, "isClockSkewCorrectedError"); +var isBrowserNetworkError = /* @__PURE__ */ __name((error) => { + const errorMessages = /* @__PURE__ */ new Set([ + "Failed to fetch", + // Chrome + "NetworkError when attempting to fetch resource", + // Firefox + "The Internet connection appears to be offline", + // Safari 16 + "Load failed", + // Safari 17+ + "Network request failed" + // `cross-fetch` + ]); + const isValid = error && error instanceof TypeError; + if (!isValid) { + return false; + } + return errorMessages.has(error.message); +}, "isBrowserNetworkError"); +var isThrottlingError = /* @__PURE__ */ __name((error) => error.$metadata?.httpStatusCode === 429 || THROTTLING_ERROR_CODES.includes(error.name) || error.$retryable?.throttling == true, "isThrottlingError"); +var isTransientError = /* @__PURE__ */ __name((error, depth = 0) => isClockSkewCorrectedError(error) || TRANSIENT_ERROR_CODES.includes(error.name) || NODEJS_TIMEOUT_ERROR_CODES.includes(error?.code || "") || TRANSIENT_ERROR_STATUS_CODES.includes(error.$metadata?.httpStatusCode || 0) || isBrowserNetworkError(error) || error.cause !== void 0 && depth <= 10 && isTransientError(error.cause, depth + 1), "isTransientError"); +var isServerError = /* @__PURE__ */ __name((error) => { + if (error.$metadata?.httpStatusCode !== void 0) { + const statusCode = error.$metadata.httpStatusCode; + if (500 <= statusCode && statusCode <= 599 && !isTransientError(error)) { + return true; + } + return false; + } + return false; +}, "isServerError"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + isRetryableByTrait, + isClockSkewError, + isClockSkewCorrectedError, + isBrowserNetworkError, + isThrottlingError, + isTransientError, + isServerError +}); + diff --git a/node_modules/@smithy/service-error-classification/dist-es/constants.js b/node_modules/@smithy/service-error-classification/dist-es/constants.js new file mode 100644 index 00000000..267443b4 --- /dev/null +++ b/node_modules/@smithy/service-error-classification/dist-es/constants.js @@ -0,0 +1,27 @@ +export const CLOCK_SKEW_ERROR_CODES = [ + "AuthFailure", + "InvalidSignatureException", + "RequestExpired", + "RequestInTheFuture", + "RequestTimeTooSkewed", + "SignatureDoesNotMatch", +]; +export const THROTTLING_ERROR_CODES = [ + "BandwidthLimitExceeded", + "EC2ThrottledException", + "LimitExceededException", + "PriorRequestNotComplete", + "ProvisionedThroughputExceededException", + "RequestLimitExceeded", + "RequestThrottled", + "RequestThrottledException", + "SlowDown", + "ThrottledException", + "Throttling", + "ThrottlingException", + "TooManyRequestsException", + "TransactionInProgressException", +]; +export const TRANSIENT_ERROR_CODES = ["TimeoutError", "RequestTimeout", "RequestTimeoutException"]; +export const TRANSIENT_ERROR_STATUS_CODES = [500, 502, 503, 504]; +export const NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "ECONNREFUSED", "EPIPE", "ETIMEDOUT"]; diff --git a/node_modules/@smithy/service-error-classification/dist-es/index.js b/node_modules/@smithy/service-error-classification/dist-es/index.js new file mode 100644 index 00000000..1da4aa9f --- /dev/null +++ 
b/node_modules/@smithy/service-error-classification/dist-es/index.js @@ -0,0 +1,37 @@ +import { CLOCK_SKEW_ERROR_CODES, NODEJS_TIMEOUT_ERROR_CODES, THROTTLING_ERROR_CODES, TRANSIENT_ERROR_CODES, TRANSIENT_ERROR_STATUS_CODES, } from "./constants"; +export const isRetryableByTrait = (error) => error.$retryable !== undefined; +export const isClockSkewError = (error) => CLOCK_SKEW_ERROR_CODES.includes(error.name); +export const isClockSkewCorrectedError = (error) => error.$metadata?.clockSkewCorrected; +export const isBrowserNetworkError = (error) => { + const errorMessages = new Set([ + "Failed to fetch", + "NetworkError when attempting to fetch resource", + "The Internet connection appears to be offline", + "Load failed", + "Network request failed", + ]); + const isValid = error && error instanceof TypeError; + if (!isValid) { + return false; + } + return errorMessages.has(error.message); +}; +export const isThrottlingError = (error) => error.$metadata?.httpStatusCode === 429 || + THROTTLING_ERROR_CODES.includes(error.name) || + error.$retryable?.throttling == true; +export const isTransientError = (error, depth = 0) => isClockSkewCorrectedError(error) || + TRANSIENT_ERROR_CODES.includes(error.name) || + NODEJS_TIMEOUT_ERROR_CODES.includes(error?.code || "") || + TRANSIENT_ERROR_STATUS_CODES.includes(error.$metadata?.httpStatusCode || 0) || + isBrowserNetworkError(error) || + (error.cause !== undefined && depth <= 10 && isTransientError(error.cause, depth + 1)); +export const isServerError = (error) => { + if (error.$metadata?.httpStatusCode !== undefined) { + const statusCode = error.$metadata.httpStatusCode; + if (500 <= statusCode && statusCode <= 599 && !isTransientError(error)) { + return true; + } + return false; + } + return false; +}; diff --git a/node_modules/@smithy/service-error-classification/dist-types/constants.d.ts b/node_modules/@smithy/service-error-classification/dist-types/constants.d.ts new file mode 100644 index 00000000..f07663b1 --- /dev/null +++ b/node_modules/@smithy/service-error-classification/dist-types/constants.d.ts @@ -0,0 +1,26 @@ +/** + * Errors encountered when the client clock and server clock cannot agree on the + * current time. + * + * These errors are retryable, assuming the SDK has enabled clock skew + * correction. + */ +export declare const CLOCK_SKEW_ERROR_CODES: string[]; +/** + * Errors that indicate the SDK is being throttled. + * + * These errors are always retryable. + */ +export declare const THROTTLING_ERROR_CODES: string[]; +/** + * Error codes that indicate transient issues + */ +export declare const TRANSIENT_ERROR_CODES: string[]; +/** + * Error codes that indicate transient issues + */ +export declare const TRANSIENT_ERROR_STATUS_CODES: number[]; +/** + * Node.js system error codes that indicate timeout. + */ +export declare const NODEJS_TIMEOUT_ERROR_CODES: string[]; diff --git a/node_modules/@smithy/service-error-classification/dist-types/index.d.ts b/node_modules/@smithy/service-error-classification/dist-types/index.d.ts new file mode 100644 index 00000000..6aad1024 --- /dev/null +++ b/node_modules/@smithy/service-error-classification/dist-types/index.d.ts @@ -0,0 +1,24 @@ +import { SdkError } from "@smithy/types"; +export declare const isRetryableByTrait: (error: SdkError) => boolean; +/** + * @deprecated use isClockSkewCorrectedError. This is only used in deprecated code. 
+ */ +export declare const isClockSkewError: (error: SdkError) => boolean; +/** + * @returns whether the error resulted in a systemClockOffset aka clock skew correction. + */ +export declare const isClockSkewCorrectedError: (error: SdkError) => true | undefined; +/** + * + * @internal + */ +export declare const isBrowserNetworkError: (error: SdkError) => boolean; +export declare const isThrottlingError: (error: SdkError) => boolean; +/** + * Though NODEJS_TIMEOUT_ERROR_CODES are platform specific, they are + * included here because there is an error scenario with unknown root + * cause where the NodeHttpHandler does not decorate the Error with + * the name "TimeoutError" to be checked by the TRANSIENT_ERROR_CODES condition. + */ +export declare const isTransientError: (error: SdkError, depth?: number) => boolean; +export declare const isServerError: (error: SdkError) => boolean; diff --git a/node_modules/@smithy/service-error-classification/dist-types/ts3.4/constants.d.ts b/node_modules/@smithy/service-error-classification/dist-types/ts3.4/constants.d.ts new file mode 100644 index 00000000..74c4858a --- /dev/null +++ b/node_modules/@smithy/service-error-classification/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,26 @@ +/** + * Errors encountered when the client clock and server clock cannot agree on the + * current time. + * + * These errors are retryable, assuming the SDK has enabled clock skew + * correction. + */ +export declare const CLOCK_SKEW_ERROR_CODES: string[]; +/** + * Errors that indicate the SDK is being throttled. + * + * These errors are always retryable. + */ +export declare const THROTTLING_ERROR_CODES: string[]; +/** + * Error codes that indicate transient issues + */ +export declare const TRANSIENT_ERROR_CODES: string[]; +/** + * Error codes that indicate transient issues + */ +export declare const TRANSIENT_ERROR_STATUS_CODES: number[]; +/** + * Node.js system error codes that indicate timeout. + */ +export declare const NODEJS_TIMEOUT_ERROR_CODES: string[]; diff --git a/node_modules/@smithy/service-error-classification/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/service-error-classification/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..c7909ae1 --- /dev/null +++ b/node_modules/@smithy/service-error-classification/dist-types/ts3.4/index.d.ts @@ -0,0 +1,24 @@ +import { SdkError } from "@smithy/types"; +export declare const isRetryableByTrait: (error: SdkError) => boolean; +/** + * @deprecated use isClockSkewCorrectedError. This is only used in deprecated code. + */ +export declare const isClockSkewError: (error: SdkError) => boolean; +/** + * @returns whether the error resulted in a systemClockOffset aka clock skew correction. + */ +export declare const isClockSkewCorrectedError: (error: SdkError) => true | undefined; +/** + * + * @internal + */ +export declare const isBrowserNetworkError: (error: SdkError) => boolean; +export declare const isThrottlingError: (error: SdkError) => boolean; +/** + * Though NODEJS_TIMEOUT_ERROR_CODES are platform specific, they are + * included here because there is an error scenario with unknown root + * cause where the NodeHttpHandler does not decorate the Error with + * the name "TimeoutError" to be checked by the TRANSIENT_ERROR_CODES condition. 
+ */ +export declare const isTransientError: (error: SdkError, depth?: number) => boolean; +export declare const isServerError: (error: SdkError) => boolean; diff --git a/node_modules/@smithy/service-error-classification/package.json b/node_modules/@smithy/service-error-classification/package.json new file mode 100644 index 00000000..a568aee9 --- /dev/null +++ b/node_modules/@smithy/service-error-classification/package.json @@ -0,0 +1,59 @@ +{ + "name": "@smithy/service-error-classification", + "version": "4.0.3", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline service-error-classification", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/service-error-classification", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/service-error-classification" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "dependencies": { + "@smithy/types": "^4.2.0" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/shared-ini-file-loader/LICENSE b/node_modules/@smithy/shared-ini-file-loader/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/node_modules/@smithy/shared-ini-file-loader/README.md b/node_modules/@smithy/shared-ini-file-loader/README.md new file mode 100644 index 00000000..45a4b2e6 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/README.md @@ -0,0 +1,105 @@ +# @smithy/shared-ini-file-loader + +[![NPM version](https://img.shields.io/npm/v/@smithy/shared-ini-file-loader/latest.svg)](https://www.npmjs.com/package/@smithy/shared-ini-file-loader) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/shared-ini-file-loader.svg)](https://www.npmjs.com/package/@smithy/shared-ini-file-loader) + +## AWS Shared Configuration File Loader + +This module provides a function that reads from AWS SDK configuration files and +returns a promise that will resolve with a hash of the parsed contents of the +AWS credentials file and of the AWS config file. Given the [sample +files](#sample-files) below, the promise returned by `loadSharedConfigFiles` +would resolve with: + +```javascript +{ + configFile: { + 'default': { + aws_access_key_id: 'foo', + aws_secret_access_key: 'bar', + }, + dev: { + aws_access_key_id: 'foo2', + aws_secret_access_key: 'bar2', + }, + prod: { + aws_access_key_id: 'foo3', + aws_secret_access_key: 'bar3', + }, + 'testing host': { + aws_access_key_id: 'foo4', + aws_secret_access_key: 'bar4', + } + }, + credentialsFile: { + 'default': { + aws_access_key_id: 'foo', + aws_secret_access_key: 'bar', + }, + dev: { + aws_access_key_id: 'foo2', + aws_secret_access_key: 'bar2', + }, + prod: { + aws_access_key_id: 'foo3', + aws_secret_access_key: 'bar3', + } + }, +} +``` + +If a file is not found, its key (`configFile` or `credentialsFile`) will instead +have a value of an empty object. + +## Supported configuration + +You may customize how the files are loaded by providing an options hash to the +`loadSharedConfigFiles` function. The following options are supported: + +- `filepath` - The path to the shared credentials file. If not specified, the + provider will use the value in the `AWS_SHARED_CREDENTIALS_FILE` environment + variable or a default of `~/.aws/credentials`. +- `configFilepath` - The path to the shared config file. If not specified, the + provider will use the value in the `AWS_CONFIG_FILE` environment variable or a + default of `~/.aws/config`. +- `ignoreCache` - The provider will normally cache the contents of the files it + loads. This option will force the provider to reload the files from disk. + Defaults to `false`.
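Put together, the options above compose as in this short sketch; the file paths are invented for the example, while the call itself is the documented `loadSharedConfigFiles` API.

```typescript
// Usage sketch for the options described above; file paths are invented.
import { loadSharedConfigFiles } from "@smithy/shared-ini-file-loader";

async function printProfiles(): Promise<void> {
  const { configFile, credentialsFile } = await loadSharedConfigFiles({
    filepath: "/tmp/example-credentials", // instead of AWS_SHARED_CREDENTIALS_FILE / ~/.aws/credentials
    configFilepath: "/tmp/example-config", // instead of AWS_CONFIG_FILE / ~/.aws/config
    ignoreCache: true, // re-read both files from disk, bypassing the promise cache
  });
  console.log(Object.keys(configFile)); // profile names parsed from the config file
  console.log(credentialsFile.default); // e.g. { aws_access_key_id: 'foo', ... }
}

printProfiles().catch(console.error);
```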
+ +## Sample files + +### `~/.aws/credentials` + +```ini +[default] +aws_access_key_id=foo +aws_secret_access_key=bar + +[dev] +aws_access_key_id=foo2 +aws_secret_access_key=bar2 + +[prod] +aws_access_key_id=foo3 +aws_secret_access_key=bar3 +``` + +### `~/.aws/config` + +```ini +[default] +aws_access_key_id=foo +aws_secret_access_key=bar + +[profile dev] +aws_access_key_id=foo2 +aws_secret_access_key=bar2 + +[profile prod] +aws_access_key_id=foo3 +aws_secret_access_key=bar3 + +[profile "testing host"] +aws_access_key_id=foo4 +aws_secret_access_key=bar4 +``` diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigData.js b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigData.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigData.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigFilepath.js b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigFilepath.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigFilepath.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getCredentialsFilepath.js b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getCredentialsFilepath.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getCredentialsFilepath.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getHomeDir.js b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getHomeDir.js new file mode 100644 index 00000000..2a4f7375 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getHomeDir.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getHomeDir = void 0; +const os_1 = require("os"); +const path_1 = require("path"); +const homeDirCache = {}; +const getHomeDirCacheKey = () => { + if (process && process.geteuid) { + return `${process.geteuid()}`; + } + return "DEFAULT"; +}; +const getHomeDir = () => { + const { HOME, USERPROFILE, HOMEPATH, HOMEDRIVE = `C:${path_1.sep}` } = process.env; + if (HOME) + return HOME; + if (USERPROFILE) + return USERPROFILE; + if (HOMEPATH) + return `${HOMEDRIVE}${HOMEPATH}`; + const homeDirCacheKey = getHomeDirCacheKey(); + if (!homeDirCache[homeDirCacheKey]) + homeDirCache[homeDirCacheKey] = (0, os_1.homedir)(); + return homeDirCache[homeDirCacheKey]; +}; +exports.getHomeDir = getHomeDir; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getProfileName.js b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getProfileName.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getProfileName.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFilepath.js b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFilepath.js new file mode 100644 index 00000000..30d97b3d --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFilepath.js @@ -0,0 +1,12 @@ +"use strict"; 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.getSSOTokenFilepath = void 0; +const crypto_1 = require("crypto"); +const path_1 = require("path"); +const getHomeDir_1 = require("./getHomeDir"); +const getSSOTokenFilepath = (id) => { + const hasher = (0, crypto_1.createHash)("sha1"); + const cacheName = hasher.update(id).digest("hex"); + return (0, path_1.join)((0, getHomeDir_1.getHomeDir)(), ".aws", "sso", "cache", `${cacheName}.json`); +}; +exports.getSSOTokenFilepath = getSSOTokenFilepath; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFromFile.js b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFromFile.js new file mode 100644 index 00000000..688accb7 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFromFile.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getSSOTokenFromFile = void 0; +const fs_1 = require("fs"); +const getSSOTokenFilepath_1 = require("./getSSOTokenFilepath"); +const { readFile } = fs_1.promises; +const getSSOTokenFromFile = async (id) => { + const ssoTokenFilepath = (0, getSSOTokenFilepath_1.getSSOTokenFilepath)(id); + const ssoTokenText = await readFile(ssoTokenFilepath, "utf8"); + return JSON.parse(ssoTokenText); +}; +exports.getSSOTokenFromFile = getSSOTokenFromFile; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSsoSessionData.js b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSsoSessionData.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSsoSessionData.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-cjs/index.js b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/index.js new file mode 100644 index 00000000..de59bfa8 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/index.js @@ -0,0 +1,206 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + CONFIG_PREFIX_SEPARATOR: () => CONFIG_PREFIX_SEPARATOR, + DEFAULT_PROFILE: () => DEFAULT_PROFILE, + ENV_PROFILE: () => ENV_PROFILE, + getProfileName: () => getProfileName, + loadSharedConfigFiles: () => loadSharedConfigFiles, + loadSsoSessionData: () => loadSsoSessionData, + parseKnownFiles: () => parseKnownFiles +}); +module.exports = __toCommonJS(src_exports); +__reExport(src_exports, require("././getHomeDir"), 
module.exports); + +// src/getProfileName.ts +var ENV_PROFILE = "AWS_PROFILE"; +var DEFAULT_PROFILE = "default"; +var getProfileName = /* @__PURE__ */ __name((init) => init.profile || process.env[ENV_PROFILE] || DEFAULT_PROFILE, "getProfileName"); + +// src/index.ts +__reExport(src_exports, require("././getSSOTokenFilepath"), module.exports); +__reExport(src_exports, require("././getSSOTokenFromFile"), module.exports); + +// src/loadSharedConfigFiles.ts + + +// src/getConfigData.ts +var import_types = require("@smithy/types"); +var getConfigData = /* @__PURE__ */ __name((data) => Object.entries(data).filter(([key]) => { + const indexOfSeparator = key.indexOf(CONFIG_PREFIX_SEPARATOR); + if (indexOfSeparator === -1) { + return false; + } + return Object.values(import_types.IniSectionType).includes(key.substring(0, indexOfSeparator)); +}).reduce( + (acc, [key, value]) => { + const indexOfSeparator = key.indexOf(CONFIG_PREFIX_SEPARATOR); + const updatedKey = key.substring(0, indexOfSeparator) === import_types.IniSectionType.PROFILE ? key.substring(indexOfSeparator + 1) : key; + acc[updatedKey] = value; + return acc; + }, + { + // Populate default profile, if present. + ...data.default && { default: data.default } + } +), "getConfigData"); + +// src/getConfigFilepath.ts +var import_path = require("path"); +var import_getHomeDir = require("././getHomeDir"); +var ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; +var getConfigFilepath = /* @__PURE__ */ __name(() => process.env[ENV_CONFIG_PATH] || (0, import_path.join)((0, import_getHomeDir.getHomeDir)(), ".aws", "config"), "getConfigFilepath"); + +// src/getCredentialsFilepath.ts + +var import_getHomeDir2 = require("././getHomeDir"); +var ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; +var getCredentialsFilepath = /* @__PURE__ */ __name(() => process.env[ENV_CREDENTIALS_PATH] || (0, import_path.join)((0, import_getHomeDir2.getHomeDir)(), ".aws", "credentials"), "getCredentialsFilepath"); + +// src/loadSharedConfigFiles.ts +var import_getHomeDir3 = require("././getHomeDir"); + +// src/parseIni.ts + +var prefixKeyRegex = /^([\w-]+)\s(["'])?([\w-@\+\.%:/]+)\2$/; +var profileNameBlockList = ["__proto__", "profile __proto__"]; +var parseIni = /* @__PURE__ */ __name((iniData) => { + const map = {}; + let currentSection; + let currentSubSection; + for (const iniLine of iniData.split(/\r?\n/)) { + const trimmedLine = iniLine.split(/(^|\s)[;#]/)[0].trim(); + const isSection = trimmedLine[0] === "[" && trimmedLine[trimmedLine.length - 1] === "]"; + if (isSection) { + currentSection = void 0; + currentSubSection = void 0; + const sectionName = trimmedLine.substring(1, trimmedLine.length - 1); + const matches = prefixKeyRegex.exec(sectionName); + if (matches) { + const [, prefix, , name] = matches; + if (Object.values(import_types.IniSectionType).includes(prefix)) { + currentSection = [prefix, name].join(CONFIG_PREFIX_SEPARATOR); + } + } else { + currentSection = sectionName; + } + if (profileNameBlockList.includes(sectionName)) { + throw new Error(`Found invalid profile name "${sectionName}"`); + } + } else if (currentSection) { + const indexOfEqualsSign = trimmedLine.indexOf("="); + if (![0, -1].includes(indexOfEqualsSign)) { + const [name, value] = [ + trimmedLine.substring(0, indexOfEqualsSign).trim(), + trimmedLine.substring(indexOfEqualsSign + 1).trim() + ]; + if (value === "") { + currentSubSection = name; + } else { + if (currentSubSection && iniLine.trimStart() === iniLine) { + currentSubSection = void 0; + } + map[currentSection] = map[currentSection] || 
{}; + const key = currentSubSection ? [currentSubSection, name].join(CONFIG_PREFIX_SEPARATOR) : name; + map[currentSection][key] = value; + } + } + } + } + return map; +}, "parseIni"); + +// src/loadSharedConfigFiles.ts +var import_slurpFile = require("././slurpFile"); +var swallowError = /* @__PURE__ */ __name(() => ({}), "swallowError"); +var CONFIG_PREFIX_SEPARATOR = "."; +var loadSharedConfigFiles = /* @__PURE__ */ __name(async (init = {}) => { + const { filepath = getCredentialsFilepath(), configFilepath = getConfigFilepath() } = init; + const homeDir = (0, import_getHomeDir3.getHomeDir)(); + const relativeHomeDirPrefix = "~/"; + let resolvedFilepath = filepath; + if (filepath.startsWith(relativeHomeDirPrefix)) { + resolvedFilepath = (0, import_path.join)(homeDir, filepath.slice(2)); + } + let resolvedConfigFilepath = configFilepath; + if (configFilepath.startsWith(relativeHomeDirPrefix)) { + resolvedConfigFilepath = (0, import_path.join)(homeDir, configFilepath.slice(2)); + } + const parsedFiles = await Promise.all([ + (0, import_slurpFile.slurpFile)(resolvedConfigFilepath, { + ignoreCache: init.ignoreCache + }).then(parseIni).then(getConfigData).catch(swallowError), + (0, import_slurpFile.slurpFile)(resolvedFilepath, { + ignoreCache: init.ignoreCache + }).then(parseIni).catch(swallowError) + ]); + return { + configFile: parsedFiles[0], + credentialsFile: parsedFiles[1] + }; +}, "loadSharedConfigFiles"); + +// src/getSsoSessionData.ts + +var getSsoSessionData = /* @__PURE__ */ __name((data) => Object.entries(data).filter(([key]) => key.startsWith(import_types.IniSectionType.SSO_SESSION + CONFIG_PREFIX_SEPARATOR)).reduce((acc, [key, value]) => ({ ...acc, [key.substring(key.indexOf(CONFIG_PREFIX_SEPARATOR) + 1)]: value }), {}), "getSsoSessionData"); + +// src/loadSsoSessionData.ts +var import_slurpFile2 = require("././slurpFile"); +var swallowError2 = /* @__PURE__ */ __name(() => ({}), "swallowError"); +var loadSsoSessionData = /* @__PURE__ */ __name(async (init = {}) => (0, import_slurpFile2.slurpFile)(init.configFilepath ?? 
getConfigFilepath()).then(parseIni).then(getSsoSessionData).catch(swallowError2), "loadSsoSessionData"); + +// src/mergeConfigFiles.ts +var mergeConfigFiles = /* @__PURE__ */ __name((...files) => { + const merged = {}; + for (const file of files) { + for (const [key, values] of Object.entries(file)) { + if (merged[key] !== void 0) { + Object.assign(merged[key], values); + } else { + merged[key] = values; + } + } + } + return merged; +}, "mergeConfigFiles"); + +// src/parseKnownFiles.ts +var parseKnownFiles = /* @__PURE__ */ __name(async (init) => { + const parsedFiles = await loadSharedConfigFiles(init); + return mergeConfigFiles(parsedFiles.configFile, parsedFiles.credentialsFile); +}, "parseKnownFiles"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getHomeDir, + ENV_PROFILE, + DEFAULT_PROFILE, + getProfileName, + getSSOTokenFilepath, + getSSOTokenFromFile, + CONFIG_PREFIX_SEPARATOR, + loadSharedConfigFiles, + loadSsoSessionData, + parseKnownFiles +}); + diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSharedConfigFiles.js b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSharedConfigFiles.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSharedConfigFiles.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSsoSessionData.js b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSsoSessionData.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSsoSessionData.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-cjs/mergeConfigFiles.js b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/mergeConfigFiles.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/mergeConfigFiles.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseIni.js b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseIni.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseIni.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseKnownFiles.js b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseKnownFiles.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseKnownFiles.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-cjs/slurpFile.js b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/slurpFile.js new file mode 100644 index 00000000..82d7d655 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/slurpFile.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.slurpFile = void 0; +const fs_1 = require("fs"); +const { readFile } = fs_1.promises; +const filePromisesHash = {}; +const slurpFile = (path, options) => { + if (!filePromisesHash[path] || (options === null || options === void 0 ? 
void 0 : options.ignoreCache)) { + filePromisesHash[path] = readFile(path, "utf8"); + } + return filePromisesHash[path]; +}; +exports.slurpFile = slurpFile; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-cjs/types.js b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/types.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigData.js b/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigData.js new file mode 100644 index 00000000..45792866 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigData.js @@ -0,0 +1,18 @@ +import { IniSectionType } from "@smithy/types"; +import { CONFIG_PREFIX_SEPARATOR } from "./loadSharedConfigFiles"; +export const getConfigData = (data) => Object.entries(data) + .filter(([key]) => { + const indexOfSeparator = key.indexOf(CONFIG_PREFIX_SEPARATOR); + if (indexOfSeparator === -1) { + return false; + } + return Object.values(IniSectionType).includes(key.substring(0, indexOfSeparator)); +}) + .reduce((acc, [key, value]) => { + const indexOfSeparator = key.indexOf(CONFIG_PREFIX_SEPARATOR); + const updatedKey = key.substring(0, indexOfSeparator) === IniSectionType.PROFILE ? key.substring(indexOfSeparator + 1) : key; + acc[updatedKey] = value; + return acc; +}, { + ...(data.default && { default: data.default }), +}); diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigFilepath.js b/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigFilepath.js new file mode 100644 index 00000000..ca07c2dd --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigFilepath.js @@ -0,0 +1,4 @@ +import { join } from "path"; +import { getHomeDir } from "./getHomeDir"; +export const ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; +export const getConfigFilepath = () => process.env[ENV_CONFIG_PATH] || join(getHomeDir(), ".aws", "config"); diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-es/getCredentialsFilepath.js b/node_modules/@smithy/shared-ini-file-loader/dist-es/getCredentialsFilepath.js new file mode 100644 index 00000000..393c0ae5 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-es/getCredentialsFilepath.js @@ -0,0 +1,4 @@ +import { join } from "path"; +import { getHomeDir } from "./getHomeDir"; +export const ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; +export const getCredentialsFilepath = () => process.env[ENV_CREDENTIALS_PATH] || join(getHomeDir(), ".aws", "credentials"); diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-es/getHomeDir.js b/node_modules/@smithy/shared-ini-file-loader/dist-es/getHomeDir.js new file mode 100644 index 00000000..58772af3 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-es/getHomeDir.js @@ -0,0 +1,22 @@ +import { homedir } from "os"; +import { sep } from "path"; +const homeDirCache = {}; +const getHomeDirCacheKey = () => { + if (process && process.geteuid) { + return `${process.geteuid()}`; + } + return "DEFAULT"; +}; +export const getHomeDir = () => { + const { HOME, USERPROFILE, HOMEPATH, HOMEDRIVE = `C:${sep}` } = process.env; + if (HOME) + return HOME; + if (USERPROFILE) + return USERPROFILE; + if (HOMEPATH) + return `${HOMEDRIVE}${HOMEPATH}`; + const homeDirCacheKey = getHomeDirCacheKey(); + if (!homeDirCache[homeDirCacheKey]) + 
homeDirCache[homeDirCacheKey] = homedir(); + return homeDirCache[homeDirCacheKey]; +}; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-es/getProfileName.js b/node_modules/@smithy/shared-ini-file-loader/dist-es/getProfileName.js new file mode 100644 index 00000000..acc29f07 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-es/getProfileName.js @@ -0,0 +1,3 @@ +export const ENV_PROFILE = "AWS_PROFILE"; +export const DEFAULT_PROFILE = "default"; +export const getProfileName = (init) => init.profile || process.env[ENV_PROFILE] || DEFAULT_PROFILE; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFilepath.js b/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFilepath.js new file mode 100644 index 00000000..a44b4ad7 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFilepath.js @@ -0,0 +1,8 @@ +import { createHash } from "crypto"; +import { join } from "path"; +import { getHomeDir } from "./getHomeDir"; +export const getSSOTokenFilepath = (id) => { + const hasher = createHash("sha1"); + const cacheName = hasher.update(id).digest("hex"); + return join(getHomeDir(), ".aws", "sso", "cache", `${cacheName}.json`); +}; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFromFile.js b/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFromFile.js new file mode 100644 index 00000000..42659dbd --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFromFile.js @@ -0,0 +1,8 @@ +import { promises as fsPromises } from "fs"; +import { getSSOTokenFilepath } from "./getSSOTokenFilepath"; +const { readFile } = fsPromises; +export const getSSOTokenFromFile = async (id) => { + const ssoTokenFilepath = getSSOTokenFilepath(id); + const ssoTokenText = await readFile(ssoTokenFilepath, "utf8"); + return JSON.parse(ssoTokenText); +}; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-es/getSsoSessionData.js b/node_modules/@smithy/shared-ini-file-loader/dist-es/getSsoSessionData.js new file mode 100644 index 00000000..f2df194c --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-es/getSsoSessionData.js @@ -0,0 +1,5 @@ +import { IniSectionType } from "@smithy/types"; +import { CONFIG_PREFIX_SEPARATOR } from "./loadSharedConfigFiles"; +export const getSsoSessionData = (data) => Object.entries(data) + .filter(([key]) => key.startsWith(IniSectionType.SSO_SESSION + CONFIG_PREFIX_SEPARATOR)) + .reduce((acc, [key, value]) => ({ ...acc, [key.substring(key.indexOf(CONFIG_PREFIX_SEPARATOR) + 1)]: value }), {}); diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-es/index.js b/node_modules/@smithy/shared-ini-file-loader/dist-es/index.js new file mode 100644 index 00000000..3e8b2c74 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-es/index.js @@ -0,0 +1,8 @@ +export * from "./getHomeDir"; +export * from "./getProfileName"; +export * from "./getSSOTokenFilepath"; +export * from "./getSSOTokenFromFile"; +export * from "./loadSharedConfigFiles"; +export * from "./loadSsoSessionData"; +export * from "./parseKnownFiles"; +export * from "./types"; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSharedConfigFiles.js b/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSharedConfigFiles.js new file mode 100644 index 00000000..77ee32cc --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSharedConfigFiles.js @@ -0,0 +1,39 @@ +import { join } from "path"; +import { 
getConfigData } from "./getConfigData"; +import { getConfigFilepath } from "./getConfigFilepath"; +import { getCredentialsFilepath } from "./getCredentialsFilepath"; +import { getHomeDir } from "./getHomeDir"; +import { parseIni } from "./parseIni"; +import { slurpFile } from "./slurpFile"; +const swallowError = () => ({}); +export const CONFIG_PREFIX_SEPARATOR = "."; +export const loadSharedConfigFiles = async (init = {}) => { + const { filepath = getCredentialsFilepath(), configFilepath = getConfigFilepath() } = init; + const homeDir = getHomeDir(); + const relativeHomeDirPrefix = "~/"; + let resolvedFilepath = filepath; + if (filepath.startsWith(relativeHomeDirPrefix)) { + resolvedFilepath = join(homeDir, filepath.slice(2)); + } + let resolvedConfigFilepath = configFilepath; + if (configFilepath.startsWith(relativeHomeDirPrefix)) { + resolvedConfigFilepath = join(homeDir, configFilepath.slice(2)); + } + const parsedFiles = await Promise.all([ + slurpFile(resolvedConfigFilepath, { + ignoreCache: init.ignoreCache, + }) + .then(parseIni) + .then(getConfigData) + .catch(swallowError), + slurpFile(resolvedFilepath, { + ignoreCache: init.ignoreCache, + }) + .then(parseIni) + .catch(swallowError), + ]); + return { + configFile: parsedFiles[0], + credentialsFile: parsedFiles[1], + }; +}; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSsoSessionData.js b/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSsoSessionData.js new file mode 100644 index 00000000..3bd730b1 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSsoSessionData.js @@ -0,0 +1,9 @@ +import { getConfigFilepath } from "./getConfigFilepath"; +import { getSsoSessionData } from "./getSsoSessionData"; +import { parseIni } from "./parseIni"; +import { slurpFile } from "./slurpFile"; +const swallowError = () => ({}); +export const loadSsoSessionData = async (init = {}) => slurpFile(init.configFilepath ?? 
getConfigFilepath()) + .then(parseIni) + .then(getSsoSessionData) + .catch(swallowError); diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-es/mergeConfigFiles.js b/node_modules/@smithy/shared-ini-file-loader/dist-es/mergeConfigFiles.js new file mode 100644 index 00000000..58576f76 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-es/mergeConfigFiles.js @@ -0,0 +1,14 @@ +export const mergeConfigFiles = (...files) => { + const merged = {}; + for (const file of files) { + for (const [key, values] of Object.entries(file)) { + if (merged[key] !== undefined) { + Object.assign(merged[key], values); + } + else { + merged[key] = values; + } + } + } + return merged; +}; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-es/parseIni.js b/node_modules/@smithy/shared-ini-file-loader/dist-es/parseIni.js new file mode 100644 index 00000000..7af4a6a8 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-es/parseIni.js @@ -0,0 +1,52 @@ +import { IniSectionType } from "@smithy/types"; +import { CONFIG_PREFIX_SEPARATOR } from "./loadSharedConfigFiles"; +const prefixKeyRegex = /^([\w-]+)\s(["'])?([\w-@\+\.%:/]+)\2$/; +const profileNameBlockList = ["__proto__", "profile __proto__"]; +export const parseIni = (iniData) => { + const map = {}; + let currentSection; + let currentSubSection; + for (const iniLine of iniData.split(/\r?\n/)) { + const trimmedLine = iniLine.split(/(^|\s)[;#]/)[0].trim(); + const isSection = trimmedLine[0] === "[" && trimmedLine[trimmedLine.length - 1] === "]"; + if (isSection) { + currentSection = undefined; + currentSubSection = undefined; + const sectionName = trimmedLine.substring(1, trimmedLine.length - 1); + const matches = prefixKeyRegex.exec(sectionName); + if (matches) { + const [, prefix, , name] = matches; + if (Object.values(IniSectionType).includes(prefix)) { + currentSection = [prefix, name].join(CONFIG_PREFIX_SEPARATOR); + } + } + else { + currentSection = sectionName; + } + if (profileNameBlockList.includes(sectionName)) { + throw new Error(`Found invalid profile name "${sectionName}"`); + } + } + else if (currentSection) { + const indexOfEqualsSign = trimmedLine.indexOf("="); + if (![0, -1].includes(indexOfEqualsSign)) { + const [name, value] = [ + trimmedLine.substring(0, indexOfEqualsSign).trim(), + trimmedLine.substring(indexOfEqualsSign + 1).trim(), + ]; + if (value === "") { + currentSubSection = name; + } + else { + if (currentSubSection && iniLine.trimStart() === iniLine) { + currentSubSection = undefined; + } + map[currentSection] = map[currentSection] || {}; + const key = currentSubSection ? 
[currentSubSection, name].join(CONFIG_PREFIX_SEPARATOR) : name; + map[currentSection][key] = value; + } + } + } + } + return map; +}; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-es/parseKnownFiles.js b/node_modules/@smithy/shared-ini-file-loader/dist-es/parseKnownFiles.js new file mode 100644 index 00000000..4920e280 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-es/parseKnownFiles.js @@ -0,0 +1,6 @@ +import { loadSharedConfigFiles } from "./loadSharedConfigFiles"; +import { mergeConfigFiles } from "./mergeConfigFiles"; +export const parseKnownFiles = async (init) => { + const parsedFiles = await loadSharedConfigFiles(init); + return mergeConfigFiles(parsedFiles.configFile, parsedFiles.credentialsFile); +}; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-es/slurpFile.js b/node_modules/@smithy/shared-ini-file-loader/dist-es/slurpFile.js new file mode 100644 index 00000000..7b360ccf --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-es/slurpFile.js @@ -0,0 +1,9 @@ +import { promises as fsPromises } from "fs"; +const { readFile } = fsPromises; +const filePromisesHash = {}; +export const slurpFile = (path, options) => { + if (!filePromisesHash[path] || options?.ignoreCache) { + filePromisesHash[path] = readFile(path, "utf8"); + } + return filePromisesHash[path]; +}; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-es/types.js b/node_modules/@smithy/shared-ini-file-loader/dist-es/types.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigData.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigData.d.ts new file mode 100644 index 00000000..42598312 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigData.d.ts @@ -0,0 +1,8 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Returns the config data from parsed ini data. + * * Returns data for `default` + * * Returns profile name without prefix. + * * Returns non-profiles as is. + */ +export declare const getConfigData: (data: ParsedIniData) => ParsedIniData; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigFilepath.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigFilepath.d.ts new file mode 100644 index 00000000..1d123bed --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigFilepath.d.ts @@ -0,0 +1,2 @@ +export declare const ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; +export declare const getConfigFilepath: () => string; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/getCredentialsFilepath.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/getCredentialsFilepath.d.ts new file mode 100644 index 00000000..26fda4a6 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/getCredentialsFilepath.d.ts @@ -0,0 +1,2 @@ +export declare const ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; +export declare const getCredentialsFilepath: () => string; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/getHomeDir.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/getHomeDir.d.ts new file mode 100644 index 00000000..5d15bf1a --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/getHomeDir.d.ts @@ -0,0 +1,6 @@ +/** + * Get the HOME directory for the current runtime. 
+ * + * @internal + */ +export declare const getHomeDir: () => string; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/getProfileName.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/getProfileName.d.ts new file mode 100644 index 00000000..5a608b28 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/getProfileName.d.ts @@ -0,0 +1,15 @@ +/** + * @internal + */ +export declare const ENV_PROFILE = "AWS_PROFILE"; +/** + * @internal + */ +export declare const DEFAULT_PROFILE = "default"; +/** + * Returns profile with priority order code - ENV - default. + * @internal + */ +export declare const getProfileName: (init: { + profile?: string; +}) => string; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFilepath.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFilepath.d.ts new file mode 100644 index 00000000..44a40309 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFilepath.d.ts @@ -0,0 +1,5 @@ +/** + * Returns the filepath of the file where SSO token is stored. + * @internal + */ +export declare const getSSOTokenFilepath: (id: string) => string; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFromFile.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFromFile.d.ts new file mode 100644 index 00000000..18199ac1 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFromFile.d.ts @@ -0,0 +1,46 @@ +/** + * Cached SSO token retrieved from SSO login flow. + * @public + */ +export interface SSOToken { + /** + * A base64 encoded string returned by the sso-oidc service. + */ + accessToken: string; + /** + * The expiration time of the accessToken as an RFC 3339 formatted timestamp. + */ + expiresAt: string; + /** + * The token used to obtain an access token in the event that the accessToken is invalid or expired. + */ + refreshToken?: string; + /** + * The unique identifier string for each client. The client ID generated when performing the registration + * portion of the OIDC authorization flow. This is used to refresh the accessToken. + */ + clientId?: string; + /** + * A secret string generated when performing the registration portion of the OIDC authorization flow. + * This is used to refresh the accessToken. + */ + clientSecret?: string; + /** + * The expiration time of the client registration (clientId and clientSecret) as an RFC 3339 formatted timestamp. + */ + registrationExpiresAt?: string; + /** + * The configured sso_region for the profile that credentials are being resolved for. + */ + region?: string; + /** + * The configured sso_start_url for the profile that credentials are being resolved for. + */ + startUrl?: string; +} +/** + * @internal + * @param id - can be either a start URL or the SSO session name. + * Returns the SSO token from the file system. 
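Concretely, the token cache path is the SHA-1 hex digest of the start URL or session name under `~/.aws/sso/cache/`, as the `getSSOTokenFilepath` implementations earlier in this diff show. A quick way to confirm where a token would be read from; the id is made up, and `homedir()` stands in for the package's `getHomeDir`, which first consults `HOME`, `USERPROFILE`, and `HOMEPATH`:

```typescript
// Re-derives the cache path computed by getSSOTokenFilepath, for illustration.
import { createHash } from "crypto";
import { homedir } from "os";
import { join } from "path";

const id = "https://my-sso-portal.awsapps.com/start"; // invented start URL
const cacheName = createHash("sha1").update(id).digest("hex");
console.log(join(homedir(), ".aws", "sso", "cache", `${cacheName}.json`));
// e.g. /home/user/.aws/sso/cache/<40-hex-chars>.json
```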
+ */ +export declare const getSSOTokenFromFile: (id: string) => Promise<SSOToken>; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/getSsoSessionData.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/getSsoSessionData.d.ts new file mode 100644 index 00000000..9be020f5 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/getSsoSessionData.d.ts @@ -0,0 +1,6 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Returns the sso-session data from parsed ini data by reading + * ssoSessionName after sso-session prefix including/excluding quotes + */ +export declare const getSsoSessionData: (data: ParsedIniData) => ParsedIniData; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/index.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/index.d.ts new file mode 100644 index 00000000..3e8b2c74 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/index.d.ts @@ -0,0 +1,8 @@ +export * from "./getHomeDir"; +export * from "./getProfileName"; +export * from "./getSSOTokenFilepath"; +export * from "./getSSOTokenFromFile"; +export * from "./loadSharedConfigFiles"; +export * from "./loadSsoSessionData"; +export * from "./parseKnownFiles"; +export * from "./types"; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSharedConfigFiles.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSharedConfigFiles.d.ts new file mode 100644 index 00000000..3897ac33 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSharedConfigFiles.d.ts @@ -0,0 +1,36 @@ +import { Logger, SharedConfigFiles } from "@smithy/types"; +/** + * @public + */ +export interface SharedConfigInit { + /** + * The path at which to locate the ini credentials file. Defaults to the + * value of the `AWS_SHARED_CREDENTIALS_FILE` environment variable (if + * defined) or `~/.aws/credentials` otherwise. + */ + filepath?: string; + /** + * The path at which to locate the ini config file. Defaults to the value of + * the `AWS_CONFIG_FILE` environment variable (if defined) or + * `~/.aws/config` otherwise. + */ + configFilepath?: string; + /** + * Configuration files are normally cached after the first time they are loaded. When this + * property is set, the provider will always reload any configuration files loaded before. + */ + ignoreCache?: boolean; + /** + * For credential resolution trace logging. + */ + logger?: Logger; +} +/** + * @internal + */ +export declare const CONFIG_PREFIX_SEPARATOR = "."; +/** + * Loads the config and credentials files. + * @internal + */ +export declare const loadSharedConfigFiles: (init?: SharedConfigInit) => Promise<SharedConfigFiles>; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSsoSessionData.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSsoSessionData.d.ts new file mode 100644 index 00000000..ed6c3673 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSsoSessionData.d.ts @@ -0,0 +1,17 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Subset of {@link SharedConfigInit}. + * @internal + */ +export interface SsoSessionInit { + /** + * The path at which to locate the ini config file. Defaults to the value of + * the `AWS_CONFIG_FILE` environment variable (if defined) or + * `~/.aws/config` otherwise.
+ */ + configFilepath?: string; +} +/** + * @internal + */ +export declare const loadSsoSessionData: (init?: SsoSessionInit) => Promise<ParsedIniData>; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/mergeConfigFiles.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/mergeConfigFiles.d.ts new file mode 100644 index 00000000..46b8965d --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/mergeConfigFiles.d.ts @@ -0,0 +1,7 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Merge multiple profile config files such that settings in each file are kept together + * + * @internal + */ +export declare const mergeConfigFiles: (...files: ParsedIniData[]) => ParsedIniData; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/parseIni.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/parseIni.d.ts new file mode 100644 index 00000000..0ae58510 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/parseIni.d.ts @@ -0,0 +1,2 @@ +import { ParsedIniData } from "@smithy/types"; +export declare const parseIni: (iniData: string) => ParsedIniData; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/parseKnownFiles.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/parseKnownFiles.d.ts new file mode 100644 index 00000000..d5fcafaa --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/parseKnownFiles.d.ts @@ -0,0 +1,18 @@ +import { ParsedIniData } from "@smithy/types"; +import { SharedConfigInit } from "./loadSharedConfigFiles"; +/** + * @public + */ +export interface SourceProfileInit extends SharedConfigInit { + /** + * The configuration profile to use. + */ + profile?: string; +} +/** + * Load profiles from credentials and config INI files and normalize them into a + * single profile list. + * + * @internal + */ +export declare const parseKnownFiles: (init: SourceProfileInit) => Promise<ParsedIniData>; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/slurpFile.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/slurpFile.d.ts new file mode 100644 index 00000000..a3bc84c1 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/slurpFile.d.ts @@ -0,0 +1,5 @@ +interface SlurpFileOptions { + ignoreCache?: boolean; +} +export declare const slurpFile: (path: string, options?: SlurpFileOptions) => Promise<string>; +export {}; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigData.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigData.d.ts new file mode 100644 index 00000000..c6b7588a --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigData.d.ts @@ -0,0 +1,8 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Returns the config data from parsed ini data. + * * Returns data for `default` + * * Returns profile name without prefix. + * * Returns non-profiles as is.
+ */ +export declare const getConfigData: (data: ParsedIniData) => ParsedIniData; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigFilepath.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigFilepath.d.ts new file mode 100644 index 00000000..dc3699b0 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigFilepath.d.ts @@ -0,0 +1,2 @@ +export declare const ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; +export declare const getConfigFilepath: () => string; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getCredentialsFilepath.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getCredentialsFilepath.d.ts new file mode 100644 index 00000000..f2c95b4c --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getCredentialsFilepath.d.ts @@ -0,0 +1,2 @@ +export declare const ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; +export declare const getCredentialsFilepath: () => string; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getHomeDir.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getHomeDir.d.ts new file mode 100644 index 00000000..4c1bd7af --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getHomeDir.d.ts @@ -0,0 +1,6 @@ +/** + * Get the HOME directory for the current runtime. + * + * @internal + */ +export declare const getHomeDir: () => string; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getProfileName.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getProfileName.d.ts new file mode 100644 index 00000000..91cb16b1 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getProfileName.d.ts @@ -0,0 +1,15 @@ +/** + * @internal + */ +export declare const ENV_PROFILE = "AWS_PROFILE"; +/** + * @internal + */ +export declare const DEFAULT_PROFILE = "default"; +/** + * Returns profile with priority order code - ENV - default. + * @internal + */ +export declare const getProfileName: (init: { + profile?: string; +}) => string; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFilepath.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFilepath.d.ts new file mode 100644 index 00000000..e549daaa --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFilepath.d.ts @@ -0,0 +1,5 @@ +/** + * Returns the filepath of the file where SSO token is stored. + * @internal + */ +export declare const getSSOTokenFilepath: (id: string) => string; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFromFile.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFromFile.d.ts new file mode 100644 index 00000000..140979c8 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFromFile.d.ts @@ -0,0 +1,46 @@ +/** + * Cached SSO token retrieved from SSO login flow. + * @public + */ +export interface SSOToken { + /** + * A base64 encoded string returned by the sso-oidc service. + */ + accessToken: string; + /** + * The expiration time of the accessToken as an RFC 3339 formatted timestamp. + */ + expiresAt: string; + /** + * The token used to obtain an access token in the event that the accessToken is invalid or expired. + */ + refreshToken?: string; + /** + * The unique identifier string for each client. 
The client ID generated when performing the registration + * portion of the OIDC authorization flow. This is used to refresh the accessToken. + */ + clientId?: string; + /** + * A secret string generated when performing the registration portion of the OIDC authorization flow. + * This is used to refresh the accessToken. + */ + clientSecret?: string; + /** + * The expiration time of the client registration (clientId and clientSecret) as an RFC 3339 formatted timestamp. + */ + registrationExpiresAt?: string; + /** + * The configured sso_region for the profile that credentials are being resolved for. + */ + region?: string; + /** + * The configured sso_start_url for the profile that credentials are being resolved for. + */ + startUrl?: string; +} +/** + * @internal + * @param id - can be either a start URL or the SSO session name. + * Returns the SSO token from the file system. + */ +export declare const getSSOTokenFromFile: (id: string) => Promise<SSOToken>; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSsoSessionData.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSsoSessionData.d.ts new file mode 100644 index 00000000..04a1a997 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSsoSessionData.d.ts @@ -0,0 +1,6 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Returns the sso-session data from parsed ini data by reading + * ssoSessionName after sso-session prefix including/excluding quotes + */ +export declare const getSsoSessionData: (data: ParsedIniData) => ParsedIniData; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..12ed3bb7 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +export * from "./getHomeDir"; +export * from "./getProfileName"; +export * from "./getSSOTokenFilepath"; +export * from "./getSSOTokenFromFile"; +export * from "./loadSharedConfigFiles"; +export * from "./loadSsoSessionData"; +export * from "./parseKnownFiles"; +export * from "./types"; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSharedConfigFiles.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSharedConfigFiles.d.ts new file mode 100644 index 00000000..969254eb --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSharedConfigFiles.d.ts @@ -0,0 +1,36 @@ +import { Logger, SharedConfigFiles } from "@smithy/types"; +/** + * @public + */ +export interface SharedConfigInit { + /** + * The path at which to locate the ini credentials file. Defaults to the + * value of the `AWS_SHARED_CREDENTIALS_FILE` environment variable (if + * defined) or `~/.aws/credentials` otherwise. + */ + filepath?: string; + /** + * The path at which to locate the ini config file. Defaults to the value of + * the `AWS_CONFIG_FILE` environment variable (if defined) or + * `~/.aws/config` otherwise. + */ + configFilepath?: string; + /** + * Configuration files are normally cached after the first time they are loaded. When this + * property is set, the provider will always reload any configuration files loaded before. + */ + ignoreCache?: boolean; + /** + * For credential resolution trace logging. + */ + logger?: Logger; +} +/** + * @internal + */ +export declare const CONFIG_PREFIX_SEPARATOR = "."; +/** + * Loads the config and credentials files.
+ * @internal + */ +export declare const loadSharedConfigFiles: (init?: SharedConfigInit) => Promise<SharedConfigFiles>; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSsoSessionData.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSsoSessionData.d.ts new file mode 100644 index 00000000..08e265e4 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSsoSessionData.d.ts @@ -0,0 +1,17 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Subset of {@link SharedConfigInit}. + * @internal + */ +export interface SsoSessionInit { + /** + * The path at which to locate the ini config file. Defaults to the value of + * the `AWS_CONFIG_FILE` environment variable (if defined) or + * `~/.aws/config` otherwise. + */ + configFilepath?: string; +} +/** + * @internal + */ +export declare const loadSsoSessionData: (init?: SsoSessionInit) => Promise<ParsedIniData>; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/mergeConfigFiles.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/mergeConfigFiles.d.ts new file mode 100644 index 00000000..f94e7253 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/mergeConfigFiles.d.ts @@ -0,0 +1,7 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Merge multiple profile config files such that settings in each file are kept together + * + * @internal + */ +export declare const mergeConfigFiles: (...files: ParsedIniData[]) => ParsedIniData; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseIni.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseIni.d.ts new file mode 100644 index 00000000..4e58d0e5 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseIni.d.ts @@ -0,0 +1,2 @@ +import { ParsedIniData } from "@smithy/types"; +export declare const parseIni: (iniData: string) => ParsedIniData; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseKnownFiles.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseKnownFiles.d.ts new file mode 100644 index 00000000..46ba24bc --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseKnownFiles.d.ts @@ -0,0 +1,18 @@ +import { ParsedIniData } from "@smithy/types"; +import { SharedConfigInit } from "./loadSharedConfigFiles"; +/** + * @public + */ +export interface SourceProfileInit extends SharedConfigInit { + /** + * The configuration profile to use. + */ + profile?: string; +} +/** + * Load profiles from credentials and config INI files and normalize them into a + * single profile list.
+ * + * @internal + */ +export declare const parseKnownFiles: (init: SourceProfileInit) => Promise<ParsedIniData>; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/slurpFile.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/slurpFile.d.ts new file mode 100644 index 00000000..33e7a0c6 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/slurpFile.d.ts @@ -0,0 +1,5 @@ +interface SlurpFileOptions { + ignoreCache?: boolean; +} +export declare const slurpFile: (path: string, options?: SlurpFileOptions) => Promise<string>; +export {}; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/types.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/types.d.ts new file mode 100644 index 00000000..6d6acbd5 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/types.d.ts @@ -0,0 +1,16 @@ +import { ParsedIniData as __ParsedIniData, Profile as __Profile, SharedConfigFiles as __SharedConfigFiles } from "@smithy/types"; +/** + * @internal + * @deprecated Use Profile from "\@smithy/types" instead + */ +export type Profile = __Profile; +/** + * @internal + * @deprecated Use ParsedIniData from "\@smithy/types" instead + */ +export type ParsedIniData = __ParsedIniData; +/** + * @internal + * @deprecated Use SharedConfigFiles from "\@smithy/types" instead + */ +export type SharedConfigFiles = __SharedConfigFiles; diff --git a/node_modules/@smithy/shared-ini-file-loader/dist-types/types.d.ts b/node_modules/@smithy/shared-ini-file-loader/dist-types/types.d.ts new file mode 100644 index 00000000..c7c8d925 --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/dist-types/types.d.ts @@ -0,0 +1,16 @@ +import { ParsedIniData as __ParsedIniData, Profile as __Profile, SharedConfigFiles as __SharedConfigFiles } from "@smithy/types"; +/** + * @internal + * @deprecated Use Profile from "\@smithy/types" instead + */ +export type Profile = __Profile; +/** + * @internal + * @deprecated Use ParsedIniData from "\@smithy/types" instead + */ +export type ParsedIniData = __ParsedIniData; +/** + * @internal + * @deprecated Use SharedConfigFiles from "\@smithy/types" instead + */ +export type SharedConfigFiles = __SharedConfigFiles; diff --git a/node_modules/@smithy/shared-ini-file-loader/package.json b/node_modules/@smithy/shared-ini-file-loader/package.json new file mode 100644 index 00000000..e6dfbfef --- /dev/null +++ b/node_modules/@smithy/shared-ini-file-loader/package.json @@ -0,0 +1,72 @@ +{ + "name": "@smithy/shared-ini-file-loader", + "version": "4.0.2", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline shared-ini-file-loader", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path
../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "browser": { + "./dist-es/getSSOTokenFromFile": false, + "./dist-es/slurpFile": false + }, + "react-native": { + "./dist-cjs/getSSOTokenFromFile": false, + "./dist-cjs/slurpFile": false, + "./dist-es/getSSOTokenFromFile": false, + "./dist-es/slurpFile": false + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/shared-ini-file-loader", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/shared-ini-file-loader" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/signature-v4/LICENSE b/node_modules/@smithy/signature-v4/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/signature-v4/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@smithy/signature-v4/README.md b/node_modules/@smithy/signature-v4/README.md new file mode 100644 index 00000000..3bc9a17a --- /dev/null +++ b/node_modules/@smithy/signature-v4/README.md @@ -0,0 +1,11 @@ +# @smithy/signature-v4 + +[![NPM version](https://img.shields.io/npm/v/@smithy/signature-v4/latest.svg)](https://www.npmjs.com/package/@smithy/signature-v4) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/signature-v4.svg)](https://www.npmjs.com/package/@smithy/signature-v4) + +This package contains an implementation of the [AWS Signature Version 4](https://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-authenticating-requests.html) +authentication scheme. + +It is internal to Smithy-TypeScript generated clients, and not generally intended for standalone usage outside this context. + +For custom usage, inspect the interface of the SignatureV4 class. 
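As a brief illustration of that interface (this sketch is an editorial addition, not part of the vendored package), the following shows how a request could be signed directly. The region, credentials, and hostname are placeholders, and `@aws-crypto/sha256-js` is assumed as one suitable `sha256` implementation; it is not among the dependencies added in this diff.

```ts
import { SignatureV4 } from "@smithy/signature-v4";
import { HttpRequest } from "@smithy/protocol-http";
import { Sha256 } from "@aws-crypto/sha256-js"; // assumed hash implementation

// Placeholder credentials and region, for illustration only.
const signer = new SignatureV4({
  service: "s3",
  region: "us-east-1",
  credentials: {
    accessKeyId: "AKIDEXAMPLE",
    secretAccessKey: "EXAMPLE-SECRET-KEY",
  },
  sha256: Sha256,
});

const request = new HttpRequest({
  method: "GET",
  protocol: "https:",
  hostname: "examplebucket.s3.amazonaws.com",
  path: "/test.txt",
  headers: { host: "examplebucket.s3.amazonaws.com" },
});

// sign() resolves to a copy of the request with the `authorization`,
// `x-amz-date`, and (when applyChecksum is enabled) `x-amz-content-sha256`
// headers populated; presign() would instead hoist the signature and related
// values into query parameters for a time-limited URL.
const signed = await signer.sign(request);
```

For an HttpRequest input, `sign()` dispatches to `signRequest` in the dist-cjs/dist-es sources that follow, which derives the signing key per date/region/service via `getSigningKey` and caches it.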
diff --git a/node_modules/@smithy/signature-v4/dist-cjs/HeaderFormatter.js b/node_modules/@smithy/signature-v4/dist-cjs/HeaderFormatter.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-cjs/HeaderFormatter.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4.js b/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4Base.js b/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4Base.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4Base.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/signature-v4/dist-cjs/constants.js b/node_modules/@smithy/signature-v4/dist-cjs/constants.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/signature-v4/dist-cjs/credentialDerivation.js b/node_modules/@smithy/signature-v4/dist-cjs/credentialDerivation.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-cjs/credentialDerivation.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalHeaders.js b/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalHeaders.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalHeaders.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalQuery.js b/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalQuery.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalQuery.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/signature-v4/dist-cjs/getPayloadHash.js b/node_modules/@smithy/signature-v4/dist-cjs/getPayloadHash.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-cjs/getPayloadHash.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/signature-v4/dist-cjs/headerUtil.js b/node_modules/@smithy/signature-v4/dist-cjs/headerUtil.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-cjs/headerUtil.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/signature-v4/dist-cjs/index.js b/node_modules/@smithy/signature-v4/dist-cjs/index.js new file mode 100644 index 00000000..41f93bb8 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-cjs/index.js @@ -0,0 +1,682 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var 
__hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + ALGORITHM_IDENTIFIER: () => ALGORITHM_IDENTIFIER, + ALGORITHM_IDENTIFIER_V4A: () => ALGORITHM_IDENTIFIER_V4A, + ALGORITHM_QUERY_PARAM: () => ALGORITHM_QUERY_PARAM, + ALWAYS_UNSIGNABLE_HEADERS: () => ALWAYS_UNSIGNABLE_HEADERS, + AMZ_DATE_HEADER: () => AMZ_DATE_HEADER, + AMZ_DATE_QUERY_PARAM: () => AMZ_DATE_QUERY_PARAM, + AUTH_HEADER: () => AUTH_HEADER, + CREDENTIAL_QUERY_PARAM: () => CREDENTIAL_QUERY_PARAM, + DATE_HEADER: () => DATE_HEADER, + EVENT_ALGORITHM_IDENTIFIER: () => EVENT_ALGORITHM_IDENTIFIER, + EXPIRES_QUERY_PARAM: () => EXPIRES_QUERY_PARAM, + GENERATED_HEADERS: () => GENERATED_HEADERS, + HOST_HEADER: () => HOST_HEADER, + KEY_TYPE_IDENTIFIER: () => KEY_TYPE_IDENTIFIER, + MAX_CACHE_SIZE: () => MAX_CACHE_SIZE, + MAX_PRESIGNED_TTL: () => MAX_PRESIGNED_TTL, + PROXY_HEADER_PATTERN: () => PROXY_HEADER_PATTERN, + REGION_SET_PARAM: () => REGION_SET_PARAM, + SEC_HEADER_PATTERN: () => SEC_HEADER_PATTERN, + SHA256_HEADER: () => SHA256_HEADER, + SIGNATURE_HEADER: () => SIGNATURE_HEADER, + SIGNATURE_QUERY_PARAM: () => SIGNATURE_QUERY_PARAM, + SIGNED_HEADERS_QUERY_PARAM: () => SIGNED_HEADERS_QUERY_PARAM, + SignatureV4: () => SignatureV4, + SignatureV4Base: () => SignatureV4Base, + TOKEN_HEADER: () => TOKEN_HEADER, + TOKEN_QUERY_PARAM: () => TOKEN_QUERY_PARAM, + UNSIGNABLE_PATTERNS: () => UNSIGNABLE_PATTERNS, + UNSIGNED_PAYLOAD: () => UNSIGNED_PAYLOAD, + clearCredentialCache: () => clearCredentialCache, + createScope: () => createScope, + getCanonicalHeaders: () => getCanonicalHeaders, + getCanonicalQuery: () => getCanonicalQuery, + getPayloadHash: () => getPayloadHash, + getSigningKey: () => getSigningKey, + hasHeader: () => hasHeader, + moveHeadersToQuery: () => moveHeadersToQuery, + prepareRequest: () => prepareRequest, + signatureV4aContainer: () => signatureV4aContainer +}); +module.exports = __toCommonJS(src_exports); + +// src/SignatureV4.ts + +var import_util_utf85 = require("@smithy/util-utf8"); + +// src/constants.ts +var ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +var CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +var AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +var SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +var EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +var SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; +var TOKEN_QUERY_PARAM = "X-Amz-Security-Token"; +var REGION_SET_PARAM = "X-Amz-Region-Set"; +var AUTH_HEADER = "authorization"; +var AMZ_DATE_HEADER = AMZ_DATE_QUERY_PARAM.toLowerCase(); +var DATE_HEADER = "date"; +var GENERATED_HEADERS = [AUTH_HEADER, AMZ_DATE_HEADER, DATE_HEADER]; +var SIGNATURE_HEADER = SIGNATURE_QUERY_PARAM.toLowerCase(); +var SHA256_HEADER = "x-amz-content-sha256"; +var TOKEN_HEADER = TOKEN_QUERY_PARAM.toLowerCase(); +var HOST_HEADER = "host"; +var ALWAYS_UNSIGNABLE_HEADERS = { + authorization: 
true, + "cache-control": true, + connection: true, + expect: true, + from: true, + "keep-alive": true, + "max-forwards": true, + pragma: true, + referer: true, + te: true, + trailer: true, + "transfer-encoding": true, + upgrade: true, + "user-agent": true, + "x-amzn-trace-id": true +}; +var PROXY_HEADER_PATTERN = /^proxy-/; +var SEC_HEADER_PATTERN = /^sec-/; +var UNSIGNABLE_PATTERNS = [/^proxy-/i, /^sec-/i]; +var ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; +var ALGORITHM_IDENTIFIER_V4A = "AWS4-ECDSA-P256-SHA256"; +var EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; +var UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +var MAX_CACHE_SIZE = 50; +var KEY_TYPE_IDENTIFIER = "aws4_request"; +var MAX_PRESIGNED_TTL = 60 * 60 * 24 * 7; + +// src/credentialDerivation.ts +var import_util_hex_encoding = require("@smithy/util-hex-encoding"); +var import_util_utf8 = require("@smithy/util-utf8"); +var signingKeyCache = {}; +var cacheQueue = []; +var createScope = /* @__PURE__ */ __name((shortDate, region, service) => `${shortDate}/${region}/${service}/${KEY_TYPE_IDENTIFIER}`, "createScope"); +var getSigningKey = /* @__PURE__ */ __name(async (sha256Constructor, credentials, shortDate, region, service) => { + const credsHash = await hmac(sha256Constructor, credentials.secretAccessKey, credentials.accessKeyId); + const cacheKey = `${shortDate}:${region}:${service}:${(0, import_util_hex_encoding.toHex)(credsHash)}:${credentials.sessionToken}`; + if (cacheKey in signingKeyCache) { + return signingKeyCache[cacheKey]; + } + cacheQueue.push(cacheKey); + while (cacheQueue.length > MAX_CACHE_SIZE) { + delete signingKeyCache[cacheQueue.shift()]; + } + let key = `AWS4${credentials.secretAccessKey}`; + for (const signable of [shortDate, region, service, KEY_TYPE_IDENTIFIER]) { + key = await hmac(sha256Constructor, key, signable); + } + return signingKeyCache[cacheKey] = key; +}, "getSigningKey"); +var clearCredentialCache = /* @__PURE__ */ __name(() => { + cacheQueue.length = 0; + Object.keys(signingKeyCache).forEach((cacheKey) => { + delete signingKeyCache[cacheKey]; + }); +}, "clearCredentialCache"); +var hmac = /* @__PURE__ */ __name((ctor, secret, data) => { + const hash = new ctor(secret); + hash.update((0, import_util_utf8.toUint8Array)(data)); + return hash.digest(); +}, "hmac"); + +// src/getCanonicalHeaders.ts +var getCanonicalHeaders = /* @__PURE__ */ __name(({ headers }, unsignableHeaders, signableHeaders) => { + const canonical = {}; + for (const headerName of Object.keys(headers).sort()) { + if (headers[headerName] == void 0) { + continue; + } + const canonicalHeaderName = headerName.toLowerCase(); + if (canonicalHeaderName in ALWAYS_UNSIGNABLE_HEADERS || unsignableHeaders?.has(canonicalHeaderName) || PROXY_HEADER_PATTERN.test(canonicalHeaderName) || SEC_HEADER_PATTERN.test(canonicalHeaderName)) { + if (!signableHeaders || signableHeaders && !signableHeaders.has(canonicalHeaderName)) { + continue; + } + } + canonical[canonicalHeaderName] = headers[headerName].trim().replace(/\s+/g, " "); + } + return canonical; +}, "getCanonicalHeaders"); + +// src/getPayloadHash.ts +var import_is_array_buffer = require("@smithy/is-array-buffer"); + +var import_util_utf82 = require("@smithy/util-utf8"); +var getPayloadHash = /* @__PURE__ */ __name(async ({ headers, body }, hashConstructor) => { + for (const headerName of Object.keys(headers)) { + if (headerName.toLowerCase() === SHA256_HEADER) { + return headers[headerName]; + } + } + if (body == void 0) { + return 
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"; + } else if (typeof body === "string" || ArrayBuffer.isView(body) || (0, import_is_array_buffer.isArrayBuffer)(body)) { + const hashCtor = new hashConstructor(); + hashCtor.update((0, import_util_utf82.toUint8Array)(body)); + return (0, import_util_hex_encoding.toHex)(await hashCtor.digest()); + } + return UNSIGNED_PAYLOAD; +}, "getPayloadHash"); + +// src/HeaderFormatter.ts + +var import_util_utf83 = require("@smithy/util-utf8"); +var HeaderFormatter = class { + static { + __name(this, "HeaderFormatter"); + } + format(headers) { + const chunks = []; + for (const headerName of Object.keys(headers)) { + const bytes = (0, import_util_utf83.fromUtf8)(headerName); + chunks.push(Uint8Array.from([bytes.byteLength]), bytes, this.formatHeaderValue(headers[headerName])); + } + const out = new Uint8Array(chunks.reduce((carry, bytes) => carry + bytes.byteLength, 0)); + let position = 0; + for (const chunk of chunks) { + out.set(chunk, position); + position += chunk.byteLength; + } + return out; + } + formatHeaderValue(header) { + switch (header.type) { + case "boolean": + return Uint8Array.from([header.value ? 0 /* boolTrue */ : 1 /* boolFalse */]); + case "byte": + return Uint8Array.from([2 /* byte */, header.value]); + case "short": + const shortView = new DataView(new ArrayBuffer(3)); + shortView.setUint8(0, 3 /* short */); + shortView.setInt16(1, header.value, false); + return new Uint8Array(shortView.buffer); + case "integer": + const intView = new DataView(new ArrayBuffer(5)); + intView.setUint8(0, 4 /* integer */); + intView.setInt32(1, header.value, false); + return new Uint8Array(intView.buffer); + case "long": + const longBytes = new Uint8Array(9); + longBytes[0] = 5 /* long */; + longBytes.set(header.value.bytes, 1); + return longBytes; + case "binary": + const binView = new DataView(new ArrayBuffer(3 + header.value.byteLength)); + binView.setUint8(0, 6 /* byteArray */); + binView.setUint16(1, header.value.byteLength, false); + const binBytes = new Uint8Array(binView.buffer); + binBytes.set(header.value, 3); + return binBytes; + case "string": + const utf8Bytes = (0, import_util_utf83.fromUtf8)(header.value); + const strView = new DataView(new ArrayBuffer(3 + utf8Bytes.byteLength)); + strView.setUint8(0, 7 /* string */); + strView.setUint16(1, utf8Bytes.byteLength, false); + const strBytes = new Uint8Array(strView.buffer); + strBytes.set(utf8Bytes, 3); + return strBytes; + case "timestamp": + const tsBytes = new Uint8Array(9); + tsBytes[0] = 8 /* timestamp */; + tsBytes.set(Int64.fromNumber(header.value.valueOf()).bytes, 1); + return tsBytes; + case "uuid": + if (!UUID_PATTERN.test(header.value)) { + throw new Error(`Invalid UUID received: ${header.value}`); + } + const uuidBytes = new Uint8Array(17); + uuidBytes[0] = 9 /* uuid */; + uuidBytes.set((0, import_util_hex_encoding.fromHex)(header.value.replace(/\-/g, "")), 1); + return uuidBytes; + } + } +}; +var UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/; +var Int64 = class _Int64 { + constructor(bytes) { + this.bytes = bytes; + if (bytes.byteLength !== 8) { + throw new Error("Int64 buffers must be exactly 8 bytes"); + } + } + static { + __name(this, "Int64"); + } + static fromNumber(number) { + if (number > 9223372036854776e3 || number < -9223372036854776e3) { + throw new Error(`${number} is too large (or, if negative, too small) to represent as an Int64`); + } + const bytes = new Uint8Array(8); + for (let i = 7, remaining = 
Math.abs(Math.round(number)); i > -1 && remaining > 0; i--, remaining /= 256) { + bytes[i] = remaining; + } + if (number < 0) { + negate(bytes); + } + return new _Int64(bytes); + } + /** + * Called implicitly by infix arithmetic operators. + */ + valueOf() { + const bytes = this.bytes.slice(0); + const negative = bytes[0] & 128; + if (negative) { + negate(bytes); + } + return parseInt((0, import_util_hex_encoding.toHex)(bytes), 16) * (negative ? -1 : 1); + } + toString() { + return String(this.valueOf()); + } +}; +function negate(bytes) { + for (let i = 0; i < 8; i++) { + bytes[i] ^= 255; + } + for (let i = 7; i > -1; i--) { + bytes[i]++; + if (bytes[i] !== 0) + break; + } +} +__name(negate, "negate"); + +// src/headerUtil.ts +var hasHeader = /* @__PURE__ */ __name((soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return true; + } + } + return false; +}, "hasHeader"); + +// src/moveHeadersToQuery.ts +var import_protocol_http = require("@smithy/protocol-http"); +var moveHeadersToQuery = /* @__PURE__ */ __name((request, options = {}) => { + const { headers, query = {} } = import_protocol_http.HttpRequest.clone(request); + for (const name of Object.keys(headers)) { + const lname = name.toLowerCase(); + if (lname.slice(0, 6) === "x-amz-" && !options.unhoistableHeaders?.has(lname) || options.hoistableHeaders?.has(lname)) { + query[name] = headers[name]; + delete headers[name]; + } + } + return { + ...request, + headers, + query + }; +}, "moveHeadersToQuery"); + +// src/prepareRequest.ts + +var prepareRequest = /* @__PURE__ */ __name((request) => { + request = import_protocol_http.HttpRequest.clone(request); + for (const headerName of Object.keys(request.headers)) { + if (GENERATED_HEADERS.indexOf(headerName.toLowerCase()) > -1) { + delete request.headers[headerName]; + } + } + return request; +}, "prepareRequest"); + +// src/SignatureV4Base.ts + +var import_util_middleware = require("@smithy/util-middleware"); + +var import_util_utf84 = require("@smithy/util-utf8"); + +// src/getCanonicalQuery.ts +var import_util_uri_escape = require("@smithy/util-uri-escape"); +var getCanonicalQuery = /* @__PURE__ */ __name(({ query = {} }) => { + const keys = []; + const serialized = {}; + for (const key of Object.keys(query)) { + if (key.toLowerCase() === SIGNATURE_HEADER) { + continue; + } + const encodedKey = (0, import_util_uri_escape.escapeUri)(key); + keys.push(encodedKey); + const value = query[key]; + if (typeof value === "string") { + serialized[encodedKey] = `${encodedKey}=${(0, import_util_uri_escape.escapeUri)(value)}`; + } else if (Array.isArray(value)) { + serialized[encodedKey] = value.slice(0).reduce((encoded, value2) => encoded.concat([`${encodedKey}=${(0, import_util_uri_escape.escapeUri)(value2)}`]), []).sort().join("&"); + } + } + return keys.sort().map((key) => serialized[key]).filter((serialized2) => serialized2).join("&"); +}, "getCanonicalQuery"); + +// src/utilDate.ts +var iso8601 = /* @__PURE__ */ __name((time) => toDate(time).toISOString().replace(/\.\d{3}Z$/, "Z"), "iso8601"); +var toDate = /* @__PURE__ */ __name((time) => { + if (typeof time === "number") { + return new Date(time * 1e3); + } + if (typeof time === "string") { + if (Number(time)) { + return new Date(Number(time) * 1e3); + } + return new Date(time); + } + return time; +}, "toDate"); + +// src/SignatureV4Base.ts +var SignatureV4Base = class { + static { + __name(this, "SignatureV4Base"); + } 
+ constructor({ + applyChecksum, + credentials, + region, + service, + sha256, + uriEscapePath = true + }) { + this.service = service; + this.sha256 = sha256; + this.uriEscapePath = uriEscapePath; + this.applyChecksum = typeof applyChecksum === "boolean" ? applyChecksum : true; + this.regionProvider = (0, import_util_middleware.normalizeProvider)(region); + this.credentialProvider = (0, import_util_middleware.normalizeProvider)(credentials); + } + createCanonicalRequest(request, canonicalHeaders, payloadHash) { + const sortedHeaders = Object.keys(canonicalHeaders).sort(); + return `${request.method} +${this.getCanonicalPath(request)} +${getCanonicalQuery(request)} +${sortedHeaders.map((name) => `${name}:${canonicalHeaders[name]}`).join("\n")} + +${sortedHeaders.join(";")} +${payloadHash}`; + } + async createStringToSign(longDate, credentialScope, canonicalRequest, algorithmIdentifier) { + const hash = new this.sha256(); + hash.update((0, import_util_utf84.toUint8Array)(canonicalRequest)); + const hashedRequest = await hash.digest(); + return `${algorithmIdentifier} +${longDate} +${credentialScope} +${(0, import_util_hex_encoding.toHex)(hashedRequest)}`; + } + getCanonicalPath({ path }) { + if (this.uriEscapePath) { + const normalizedPathSegments = []; + for (const pathSegment of path.split("/")) { + if (pathSegment?.length === 0) + continue; + if (pathSegment === ".") + continue; + if (pathSegment === "..") { + normalizedPathSegments.pop(); + } else { + normalizedPathSegments.push(pathSegment); + } + } + const normalizedPath = `${path?.startsWith("/") ? "/" : ""}${normalizedPathSegments.join("/")}${normalizedPathSegments.length > 0 && path?.endsWith("/") ? "/" : ""}`; + const doubleEncoded = (0, import_util_uri_escape.escapeUri)(normalizedPath); + return doubleEncoded.replace(/%2F/g, "/"); + } + return path; + } + validateResolvedCredentials(credentials) { + if (typeof credentials !== "object" || // @ts-expect-error: Property 'accessKeyId' does not exist on type 'object'.ts(2339) + typeof credentials.accessKeyId !== "string" || // @ts-expect-error: Property 'secretAccessKey' does not exist on type 'object'.ts(2339) + typeof credentials.secretAccessKey !== "string") { + throw new Error("Resolved credential object is not valid"); + } + } + formatDate(now) { + const longDate = iso8601(now).replace(/[\-:]/g, ""); + return { + longDate, + shortDate: longDate.slice(0, 8) + }; + } + getCanonicalHeaderList(headers) { + return Object.keys(headers).sort().join(";"); + } +}; + +// src/SignatureV4.ts +var SignatureV4 = class extends SignatureV4Base { + constructor({ + applyChecksum, + credentials, + region, + service, + sha256, + uriEscapePath = true + }) { + super({ + applyChecksum, + credentials, + region, + service, + sha256, + uriEscapePath + }); + this.headerFormatter = new HeaderFormatter(); + } + static { + __name(this, "SignatureV4"); + } + async presign(originalRequest, options = {}) { + const { + signingDate = /* @__PURE__ */ new Date(), + expiresIn = 3600, + unsignableHeaders, + unhoistableHeaders, + signableHeaders, + hoistableHeaders, + signingRegion, + signingService + } = options; + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? 
await this.regionProvider(); + const { longDate, shortDate } = this.formatDate(signingDate); + if (expiresIn > MAX_PRESIGNED_TTL) { + return Promise.reject( + "Signature version 4 presigned URLs must have an expiration date less than one week in the future" + ); + } + const scope = createScope(shortDate, region, signingService ?? this.service); + const request = moveHeadersToQuery(prepareRequest(originalRequest), { unhoistableHeaders, hoistableHeaders }); + if (credentials.sessionToken) { + request.query[TOKEN_QUERY_PARAM] = credentials.sessionToken; + } + request.query[ALGORITHM_QUERY_PARAM] = ALGORITHM_IDENTIFIER; + request.query[CREDENTIAL_QUERY_PARAM] = `${credentials.accessKeyId}/${scope}`; + request.query[AMZ_DATE_QUERY_PARAM] = longDate; + request.query[EXPIRES_QUERY_PARAM] = expiresIn.toString(10); + const canonicalHeaders = getCanonicalHeaders(request, unsignableHeaders, signableHeaders); + request.query[SIGNED_HEADERS_QUERY_PARAM] = this.getCanonicalHeaderList(canonicalHeaders); + request.query[SIGNATURE_QUERY_PARAM] = await this.getSignature( + longDate, + scope, + this.getSigningKey(credentials, region, shortDate, signingService), + this.createCanonicalRequest(request, canonicalHeaders, await getPayloadHash(originalRequest, this.sha256)) + ); + return request; + } + async sign(toSign, options) { + if (typeof toSign === "string") { + return this.signString(toSign, options); + } else if (toSign.headers && toSign.payload) { + return this.signEvent(toSign, options); + } else if (toSign.message) { + return this.signMessage(toSign, options); + } else { + return this.signRequest(toSign, options); + } + } + async signEvent({ headers, payload }, { signingDate = /* @__PURE__ */ new Date(), priorSignature, signingRegion, signingService }) { + const region = signingRegion ?? await this.regionProvider(); + const { shortDate, longDate } = this.formatDate(signingDate); + const scope = createScope(shortDate, region, signingService ?? this.service); + const hashedPayload = await getPayloadHash({ headers: {}, body: payload }, this.sha256); + const hash = new this.sha256(); + hash.update(headers); + const hashedHeaders = (0, import_util_hex_encoding.toHex)(await hash.digest()); + const stringToSign = [ + EVENT_ALGORITHM_IDENTIFIER, + longDate, + scope, + priorSignature, + hashedHeaders, + hashedPayload + ].join("\n"); + return this.signString(stringToSign, { signingDate, signingRegion: region, signingService }); + } + async signMessage(signableMessage, { signingDate = /* @__PURE__ */ new Date(), signingRegion, signingService }) { + const promise = this.signEvent( + { + headers: this.headerFormatter.format(signableMessage.message.headers), + payload: signableMessage.message.body + }, + { + signingDate, + signingRegion, + signingService, + priorSignature: signableMessage.priorSignature + } + ); + return promise.then((signature) => { + return { message: signableMessage.message, signature }; + }); + } + async signString(stringToSign, { signingDate = /* @__PURE__ */ new Date(), signingRegion, signingService } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? 
await this.regionProvider(); + const { shortDate } = this.formatDate(signingDate); + const hash = new this.sha256(await this.getSigningKey(credentials, region, shortDate, signingService)); + hash.update((0, import_util_utf85.toUint8Array)(stringToSign)); + return (0, import_util_hex_encoding.toHex)(await hash.digest()); + } + async signRequest(requestToSign, { + signingDate = /* @__PURE__ */ new Date(), + signableHeaders, + unsignableHeaders, + signingRegion, + signingService + } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? await this.regionProvider(); + const request = prepareRequest(requestToSign); + const { longDate, shortDate } = this.formatDate(signingDate); + const scope = createScope(shortDate, region, signingService ?? this.service); + request.headers[AMZ_DATE_HEADER] = longDate; + if (credentials.sessionToken) { + request.headers[TOKEN_HEADER] = credentials.sessionToken; + } + const payloadHash = await getPayloadHash(request, this.sha256); + if (!hasHeader(SHA256_HEADER, request.headers) && this.applyChecksum) { + request.headers[SHA256_HEADER] = payloadHash; + } + const canonicalHeaders = getCanonicalHeaders(request, unsignableHeaders, signableHeaders); + const signature = await this.getSignature( + longDate, + scope, + this.getSigningKey(credentials, region, shortDate, signingService), + this.createCanonicalRequest(request, canonicalHeaders, payloadHash) + ); + request.headers[AUTH_HEADER] = `${ALGORITHM_IDENTIFIER} Credential=${credentials.accessKeyId}/${scope}, SignedHeaders=${this.getCanonicalHeaderList(canonicalHeaders)}, Signature=${signature}`; + return request; + } + async getSignature(longDate, credentialScope, keyPromise, canonicalRequest) { + const stringToSign = await this.createStringToSign( + longDate, + credentialScope, + canonicalRequest, + ALGORITHM_IDENTIFIER + ); + const hash = new this.sha256(await keyPromise); + hash.update((0, import_util_utf85.toUint8Array)(stringToSign)); + return (0, import_util_hex_encoding.toHex)(await hash.digest()); + } + getSigningKey(credentials, region, shortDate, service) { + return getSigningKey(this.sha256, credentials, shortDate, region, service || this.service); + } +}; + +// src/signature-v4a-container.ts +var signatureV4aContainer = { + SignatureV4a: null +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getCanonicalHeaders, + getCanonicalQuery, + getPayloadHash, + moveHeadersToQuery, + prepareRequest, + SignatureV4Base, + hasHeader, + SignatureV4, + ALGORITHM_QUERY_PARAM, + CREDENTIAL_QUERY_PARAM, + AMZ_DATE_QUERY_PARAM, + SIGNED_HEADERS_QUERY_PARAM, + EXPIRES_QUERY_PARAM, + SIGNATURE_QUERY_PARAM, + TOKEN_QUERY_PARAM, + REGION_SET_PARAM, + AUTH_HEADER, + AMZ_DATE_HEADER, + DATE_HEADER, + GENERATED_HEADERS, + SIGNATURE_HEADER, + SHA256_HEADER, + TOKEN_HEADER, + HOST_HEADER, + ALWAYS_UNSIGNABLE_HEADERS, + PROXY_HEADER_PATTERN, + SEC_HEADER_PATTERN, + UNSIGNABLE_PATTERNS, + ALGORITHM_IDENTIFIER, + ALGORITHM_IDENTIFIER_V4A, + EVENT_ALGORITHM_IDENTIFIER, + UNSIGNED_PAYLOAD, + MAX_CACHE_SIZE, + KEY_TYPE_IDENTIFIER, + MAX_PRESIGNED_TTL, + createScope, + getSigningKey, + clearCredentialCache, + signatureV4aContainer +}); + diff --git a/node_modules/@smithy/signature-v4/dist-cjs/moveHeadersToQuery.js b/node_modules/@smithy/signature-v4/dist-cjs/moveHeadersToQuery.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ 
b/node_modules/@smithy/signature-v4/dist-cjs/moveHeadersToQuery.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/signature-v4/dist-cjs/prepareRequest.js b/node_modules/@smithy/signature-v4/dist-cjs/prepareRequest.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-cjs/prepareRequest.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/signature-v4/dist-cjs/signature-v4a-container.js b/node_modules/@smithy/signature-v4/dist-cjs/signature-v4a-container.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-cjs/signature-v4a-container.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/signature-v4/dist-cjs/suite.fixture.js b/node_modules/@smithy/signature-v4/dist-cjs/suite.fixture.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-cjs/suite.fixture.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/signature-v4/dist-cjs/utilDate.js b/node_modules/@smithy/signature-v4/dist-cjs/utilDate.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-cjs/utilDate.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/signature-v4/dist-es/HeaderFormatter.js b/node_modules/@smithy/signature-v4/dist-es/HeaderFormatter.js new file mode 100644 index 00000000..4edc4b49 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-es/HeaderFormatter.js @@ -0,0 +1,125 @@ +import { fromHex, toHex } from "@smithy/util-hex-encoding"; +import { fromUtf8 } from "@smithy/util-utf8"; +export class HeaderFormatter { + format(headers) { + const chunks = []; + for (const headerName of Object.keys(headers)) { + const bytes = fromUtf8(headerName); + chunks.push(Uint8Array.from([bytes.byteLength]), bytes, this.formatHeaderValue(headers[headerName])); + } + const out = new Uint8Array(chunks.reduce((carry, bytes) => carry + bytes.byteLength, 0)); + let position = 0; + for (const chunk of chunks) { + out.set(chunk, position); + position += chunk.byteLength; + } + return out; + } + formatHeaderValue(header) { + switch (header.type) { + case "boolean": + return Uint8Array.from([header.value ? 
0 : 1]); + case "byte": + return Uint8Array.from([2, header.value]); + case "short": + const shortView = new DataView(new ArrayBuffer(3)); + shortView.setUint8(0, 3); + shortView.setInt16(1, header.value, false); + return new Uint8Array(shortView.buffer); + case "integer": + const intView = new DataView(new ArrayBuffer(5)); + intView.setUint8(0, 4); + intView.setInt32(1, header.value, false); + return new Uint8Array(intView.buffer); + case "long": + const longBytes = new Uint8Array(9); + longBytes[0] = 5; + longBytes.set(header.value.bytes, 1); + return longBytes; + case "binary": + const binView = new DataView(new ArrayBuffer(3 + header.value.byteLength)); + binView.setUint8(0, 6); + binView.setUint16(1, header.value.byteLength, false); + const binBytes = new Uint8Array(binView.buffer); + binBytes.set(header.value, 3); + return binBytes; + case "string": + const utf8Bytes = fromUtf8(header.value); + const strView = new DataView(new ArrayBuffer(3 + utf8Bytes.byteLength)); + strView.setUint8(0, 7); + strView.setUint16(1, utf8Bytes.byteLength, false); + const strBytes = new Uint8Array(strView.buffer); + strBytes.set(utf8Bytes, 3); + return strBytes; + case "timestamp": + const tsBytes = new Uint8Array(9); + tsBytes[0] = 8; + tsBytes.set(Int64.fromNumber(header.value.valueOf()).bytes, 1); + return tsBytes; + case "uuid": + if (!UUID_PATTERN.test(header.value)) { + throw new Error(`Invalid UUID received: ${header.value}`); + } + const uuidBytes = new Uint8Array(17); + uuidBytes[0] = 9; + uuidBytes.set(fromHex(header.value.replace(/\-/g, "")), 1); + return uuidBytes; + } + } +} +var HEADER_VALUE_TYPE; +(function (HEADER_VALUE_TYPE) { + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["boolTrue"] = 0] = "boolTrue"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["boolFalse"] = 1] = "boolFalse"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["byte"] = 2] = "byte"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["short"] = 3] = "short"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["integer"] = 4] = "integer"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["long"] = 5] = "long"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["byteArray"] = 6] = "byteArray"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["string"] = 7] = "string"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["timestamp"] = 8] = "timestamp"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["uuid"] = 9] = "uuid"; +})(HEADER_VALUE_TYPE || (HEADER_VALUE_TYPE = {})); +const UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/; +export class Int64 { + constructor(bytes) { + this.bytes = bytes; + if (bytes.byteLength !== 8) { + throw new Error("Int64 buffers must be exactly 8 bytes"); + } + } + static fromNumber(number) { + if (number > 9223372036854776000 || number < -9223372036854776000) { + throw new Error(`${number} is too large (or, if negative, too small) to represent as an Int64`); + } + const bytes = new Uint8Array(8); + for (let i = 7, remaining = Math.abs(Math.round(number)); i > -1 && remaining > 0; i--, remaining /= 256) { + bytes[i] = remaining; + } + if (number < 0) { + negate(bytes); + } + return new Int64(bytes); + } + valueOf() { + const bytes = this.bytes.slice(0); + const negative = bytes[0] & 0b10000000; + if (negative) { + negate(bytes); + } + return parseInt(toHex(bytes), 16) * (negative ? 
-1 : 1); + } + toString() { + return String(this.valueOf()); + } +} +function negate(bytes) { + for (let i = 0; i < 8; i++) { + bytes[i] ^= 0xff; + } + for (let i = 7; i > -1; i--) { + bytes[i]++; + if (bytes[i] !== 0) + break; + } +} diff --git a/node_modules/@smithy/signature-v4/dist-es/SignatureV4.js b/node_modules/@smithy/signature-v4/dist-es/SignatureV4.js new file mode 100644 index 00000000..d1491326 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-es/SignatureV4.js @@ -0,0 +1,135 @@ +import { toHex } from "@smithy/util-hex-encoding"; +import { toUint8Array } from "@smithy/util-utf8"; +import { ALGORITHM_IDENTIFIER, ALGORITHM_QUERY_PARAM, AMZ_DATE_HEADER, AMZ_DATE_QUERY_PARAM, AUTH_HEADER, CREDENTIAL_QUERY_PARAM, EVENT_ALGORITHM_IDENTIFIER, EXPIRES_QUERY_PARAM, MAX_PRESIGNED_TTL, SHA256_HEADER, SIGNATURE_QUERY_PARAM, SIGNED_HEADERS_QUERY_PARAM, TOKEN_HEADER, TOKEN_QUERY_PARAM, } from "./constants"; +import { createScope, getSigningKey } from "./credentialDerivation"; +import { getCanonicalHeaders } from "./getCanonicalHeaders"; +import { getPayloadHash } from "./getPayloadHash"; +import { HeaderFormatter } from "./HeaderFormatter"; +import { hasHeader } from "./headerUtil"; +import { moveHeadersToQuery } from "./moveHeadersToQuery"; +import { prepareRequest } from "./prepareRequest"; +import { SignatureV4Base } from "./SignatureV4Base"; +export class SignatureV4 extends SignatureV4Base { + constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath = true, }) { + super({ + applyChecksum, + credentials, + region, + service, + sha256, + uriEscapePath, + }); + this.headerFormatter = new HeaderFormatter(); + } + async presign(originalRequest, options = {}) { + const { signingDate = new Date(), expiresIn = 3600, unsignableHeaders, unhoistableHeaders, signableHeaders, hoistableHeaders, signingRegion, signingService, } = options; + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? (await this.regionProvider()); + const { longDate, shortDate } = this.formatDate(signingDate); + if (expiresIn > MAX_PRESIGNED_TTL) { + return Promise.reject("Signature version 4 presigned URLs" + " must have an expiration date less than one week in" + " the future"); + } + const scope = createScope(shortDate, region, signingService ?? 
this.service); + const request = moveHeadersToQuery(prepareRequest(originalRequest), { unhoistableHeaders, hoistableHeaders }); + if (credentials.sessionToken) { + request.query[TOKEN_QUERY_PARAM] = credentials.sessionToken; + } + request.query[ALGORITHM_QUERY_PARAM] = ALGORITHM_IDENTIFIER; + request.query[CREDENTIAL_QUERY_PARAM] = `${credentials.accessKeyId}/${scope}`; + request.query[AMZ_DATE_QUERY_PARAM] = longDate; + request.query[EXPIRES_QUERY_PARAM] = expiresIn.toString(10); + const canonicalHeaders = getCanonicalHeaders(request, unsignableHeaders, signableHeaders); + request.query[SIGNED_HEADERS_QUERY_PARAM] = this.getCanonicalHeaderList(canonicalHeaders); + request.query[SIGNATURE_QUERY_PARAM] = await this.getSignature(longDate, scope, this.getSigningKey(credentials, region, shortDate, signingService), this.createCanonicalRequest(request, canonicalHeaders, await getPayloadHash(originalRequest, this.sha256))); + return request; + } + async sign(toSign, options) { + if (typeof toSign === "string") { + return this.signString(toSign, options); + } + else if (toSign.headers && toSign.payload) { + return this.signEvent(toSign, options); + } + else if (toSign.message) { + return this.signMessage(toSign, options); + } + else { + return this.signRequest(toSign, options); + } + } + async signEvent({ headers, payload }, { signingDate = new Date(), priorSignature, signingRegion, signingService }) { + const region = signingRegion ?? (await this.regionProvider()); + const { shortDate, longDate } = this.formatDate(signingDate); + const scope = createScope(shortDate, region, signingService ?? this.service); + const hashedPayload = await getPayloadHash({ headers: {}, body: payload }, this.sha256); + const hash = new this.sha256(); + hash.update(headers); + const hashedHeaders = toHex(await hash.digest()); + const stringToSign = [ + EVENT_ALGORITHM_IDENTIFIER, + longDate, + scope, + priorSignature, + hashedHeaders, + hashedPayload, + ].join("\n"); + return this.signString(stringToSign, { signingDate, signingRegion: region, signingService }); + } + async signMessage(signableMessage, { signingDate = new Date(), signingRegion, signingService }) { + const promise = this.signEvent({ + headers: this.headerFormatter.format(signableMessage.message.headers), + payload: signableMessage.message.body, + }, { + signingDate, + signingRegion, + signingService, + priorSignature: signableMessage.priorSignature, + }); + return promise.then((signature) => { + return { message: signableMessage.message, signature }; + }); + } + async signString(stringToSign, { signingDate = new Date(), signingRegion, signingService } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? (await this.regionProvider()); + const { shortDate } = this.formatDate(signingDate); + const hash = new this.sha256(await this.getSigningKey(credentials, region, shortDate, signingService)); + hash.update(toUint8Array(stringToSign)); + return toHex(await hash.digest()); + } + async signRequest(requestToSign, { signingDate = new Date(), signableHeaders, unsignableHeaders, signingRegion, signingService, } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? (await this.regionProvider()); + const request = prepareRequest(requestToSign); + const { longDate, shortDate } = this.formatDate(signingDate); + const scope = createScope(shortDate, region, signingService ?? 
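+ // Illustrative use of this signer (a sketch, not part of the original file;
+ // the hostname and credentials are placeholders):
+ //   import { SignatureV4 } from "@smithy/signature-v4";
+ //   import { Sha256 } from "@aws-crypto/sha256-js";
+ //   import { HttpRequest } from "@smithy/protocol-http";
+ //   const signer = new SignatureV4({ service: "s3", region: "us-east-1",
+ //     credentials: { accessKeyId: "AKID", secretAccessKey: "SECRET" }, sha256: Sha256 });
+ //   const signed = await signer.sign(new HttpRequest({ method: "GET", protocol: "https:",
+ //     hostname: "example.amazonaws.com", path: "/",
+ //     headers: { host: "example.amazonaws.com" } }));
+ //   // signed.headers.authorization then carries the AWS4-HMAC-SHA256 credential,
+ //   // signed-headers list, and signature assembled below.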
this.service); + request.headers[AMZ_DATE_HEADER] = longDate; + if (credentials.sessionToken) { + request.headers[TOKEN_HEADER] = credentials.sessionToken; + } + const payloadHash = await getPayloadHash(request, this.sha256); + if (!hasHeader(SHA256_HEADER, request.headers) && this.applyChecksum) { + request.headers[SHA256_HEADER] = payloadHash; + } + const canonicalHeaders = getCanonicalHeaders(request, unsignableHeaders, signableHeaders); + const signature = await this.getSignature(longDate, scope, this.getSigningKey(credentials, region, shortDate, signingService), this.createCanonicalRequest(request, canonicalHeaders, payloadHash)); + request.headers[AUTH_HEADER] = + `${ALGORITHM_IDENTIFIER} ` + + `Credential=${credentials.accessKeyId}/${scope}, ` + + `SignedHeaders=${this.getCanonicalHeaderList(canonicalHeaders)}, ` + + `Signature=${signature}`; + return request; + } + async getSignature(longDate, credentialScope, keyPromise, canonicalRequest) { + const stringToSign = await this.createStringToSign(longDate, credentialScope, canonicalRequest, ALGORITHM_IDENTIFIER); + const hash = new this.sha256(await keyPromise); + hash.update(toUint8Array(stringToSign)); + return toHex(await hash.digest()); + } + getSigningKey(credentials, region, shortDate, service) { + return getSigningKey(this.sha256, credentials, shortDate, region, service || this.service); + } +} diff --git a/node_modules/@smithy/signature-v4/dist-es/SignatureV4Base.js b/node_modules/@smithy/signature-v4/dist-es/SignatureV4Base.js new file mode 100644 index 00000000..857ff0ce --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-es/SignatureV4Base.js @@ -0,0 +1,73 @@ +import { toHex } from "@smithy/util-hex-encoding"; +import { normalizeProvider } from "@smithy/util-middleware"; +import { escapeUri } from "@smithy/util-uri-escape"; +import { toUint8Array } from "@smithy/util-utf8"; +import { getCanonicalQuery } from "./getCanonicalQuery"; +import { iso8601 } from "./utilDate"; +export class SignatureV4Base { + constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath = true, }) { + this.service = service; + this.sha256 = sha256; + this.uriEscapePath = uriEscapePath; + this.applyChecksum = typeof applyChecksum === "boolean" ? applyChecksum : true; + this.regionProvider = normalizeProvider(region); + this.credentialProvider = normalizeProvider(credentials); + } + createCanonicalRequest(request, canonicalHeaders, payloadHash) { + const sortedHeaders = Object.keys(canonicalHeaders).sort(); + return `${request.method} +${this.getCanonicalPath(request)} +${getCanonicalQuery(request)} +${sortedHeaders.map((name) => `${name}:${canonicalHeaders[name]}`).join("\n")} + +${sortedHeaders.join(";")} +${payloadHash}`; + } + async createStringToSign(longDate, credentialScope, canonicalRequest, algorithmIdentifier) { + const hash = new this.sha256(); + hash.update(toUint8Array(canonicalRequest)); + const hashedRequest = await hash.digest(); + return `${algorithmIdentifier} +${longDate} +${credentialScope} +${toHex(hashedRequest)}`; + } + getCanonicalPath({ path }) { + if (this.uriEscapePath) { + const normalizedPathSegments = []; + for (const pathSegment of path.split("/")) { + if (pathSegment?.length === 0) + continue; + if (pathSegment === ".") + continue; + if (pathSegment === "..") { + normalizedPathSegments.pop(); + } + else { + normalizedPathSegments.push(pathSegment); + } + } + const normalizedPath = `${path?.startsWith("/") ? 
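+ // Canonical paths are double-encoded for non-S3 services: escapeUri below runs
+ // over a path that may already contain percent-escapes, so for example a literal
+ // "%20" in a segment becomes "%2520", while "/" separators are restored afterwards.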
"/" : ""}${normalizedPathSegments.join("/")}${normalizedPathSegments.length > 0 && path?.endsWith("/") ? "/" : ""}`; + const doubleEncoded = escapeUri(normalizedPath); + return doubleEncoded.replace(/%2F/g, "/"); + } + return path; + } + validateResolvedCredentials(credentials) { + if (typeof credentials !== "object" || + typeof credentials.accessKeyId !== "string" || + typeof credentials.secretAccessKey !== "string") { + throw new Error("Resolved credential object is not valid"); + } + } + formatDate(now) { + const longDate = iso8601(now).replace(/[\-:]/g, ""); + return { + longDate, + shortDate: longDate.slice(0, 8), + }; + } + getCanonicalHeaderList(headers) { + return Object.keys(headers).sort().join(";"); + } +} diff --git a/node_modules/@smithy/signature-v4/dist-es/constants.js b/node_modules/@smithy/signature-v4/dist-es/constants.js new file mode 100644 index 00000000..602728ad --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-es/constants.js @@ -0,0 +1,43 @@ +export const ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +export const CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +export const AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +export const SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +export const EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +export const SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; +export const TOKEN_QUERY_PARAM = "X-Amz-Security-Token"; +export const REGION_SET_PARAM = "X-Amz-Region-Set"; +export const AUTH_HEADER = "authorization"; +export const AMZ_DATE_HEADER = AMZ_DATE_QUERY_PARAM.toLowerCase(); +export const DATE_HEADER = "date"; +export const GENERATED_HEADERS = [AUTH_HEADER, AMZ_DATE_HEADER, DATE_HEADER]; +export const SIGNATURE_HEADER = SIGNATURE_QUERY_PARAM.toLowerCase(); +export const SHA256_HEADER = "x-amz-content-sha256"; +export const TOKEN_HEADER = TOKEN_QUERY_PARAM.toLowerCase(); +export const HOST_HEADER = "host"; +export const ALWAYS_UNSIGNABLE_HEADERS = { + authorization: true, + "cache-control": true, + connection: true, + expect: true, + from: true, + "keep-alive": true, + "max-forwards": true, + pragma: true, + referer: true, + te: true, + trailer: true, + "transfer-encoding": true, + upgrade: true, + "user-agent": true, + "x-amzn-trace-id": true, +}; +export const PROXY_HEADER_PATTERN = /^proxy-/; +export const SEC_HEADER_PATTERN = /^sec-/; +export const UNSIGNABLE_PATTERNS = [/^proxy-/i, /^sec-/i]; +export const ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; +export const ALGORITHM_IDENTIFIER_V4A = "AWS4-ECDSA-P256-SHA256"; +export const EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; +export const UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +export const MAX_CACHE_SIZE = 50; +export const KEY_TYPE_IDENTIFIER = "aws4_request"; +export const MAX_PRESIGNED_TTL = 60 * 60 * 24 * 7; diff --git a/node_modules/@smithy/signature-v4/dist-es/credentialDerivation.js b/node_modules/@smithy/signature-v4/dist-es/credentialDerivation.js new file mode 100644 index 00000000..b16ab8c7 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-es/credentialDerivation.js @@ -0,0 +1,33 @@ +import { toHex } from "@smithy/util-hex-encoding"; +import { toUint8Array } from "@smithy/util-utf8"; +import { KEY_TYPE_IDENTIFIER, MAX_CACHE_SIZE } from "./constants"; +const signingKeyCache = {}; +const cacheQueue = []; +export const createScope = (shortDate, region, service) => `${shortDate}/${region}/${service}/${KEY_TYPE_IDENTIFIER}`; +export const getSigningKey = async (sha256Constructor, credentials, shortDate, region, service) => { + const credsHash = await 
hmac(sha256Constructor, credentials.secretAccessKey, credentials.accessKeyId); + const cacheKey = `${shortDate}:${region}:${service}:${toHex(credsHash)}:${credentials.sessionToken}`; + if (cacheKey in signingKeyCache) { + return signingKeyCache[cacheKey]; + } + cacheQueue.push(cacheKey); + while (cacheQueue.length > MAX_CACHE_SIZE) { + delete signingKeyCache[cacheQueue.shift()]; + } + let key = `AWS4${credentials.secretAccessKey}`; + for (const signable of [shortDate, region, service, KEY_TYPE_IDENTIFIER]) { + key = await hmac(sha256Constructor, key, signable); + } + return (signingKeyCache[cacheKey] = key); +}; +export const clearCredentialCache = () => { + cacheQueue.length = 0; + Object.keys(signingKeyCache).forEach((cacheKey) => { + delete signingKeyCache[cacheKey]; + }); +}; +const hmac = (ctor, secret, data) => { + const hash = new ctor(secret); + hash.update(toUint8Array(data)); + return hash.digest(); +}; diff --git a/node_modules/@smithy/signature-v4/dist-es/getCanonicalHeaders.js b/node_modules/@smithy/signature-v4/dist-es/getCanonicalHeaders.js new file mode 100644 index 00000000..33211255 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-es/getCanonicalHeaders.js @@ -0,0 +1,20 @@ +import { ALWAYS_UNSIGNABLE_HEADERS, PROXY_HEADER_PATTERN, SEC_HEADER_PATTERN } from "./constants"; +export const getCanonicalHeaders = ({ headers }, unsignableHeaders, signableHeaders) => { + const canonical = {}; + for (const headerName of Object.keys(headers).sort()) { + if (headers[headerName] == undefined) { + continue; + } + const canonicalHeaderName = headerName.toLowerCase(); + if (canonicalHeaderName in ALWAYS_UNSIGNABLE_HEADERS || + unsignableHeaders?.has(canonicalHeaderName) || + PROXY_HEADER_PATTERN.test(canonicalHeaderName) || + SEC_HEADER_PATTERN.test(canonicalHeaderName)) { + if (!signableHeaders || (signableHeaders && !signableHeaders.has(canonicalHeaderName))) { + continue; + } + } + canonical[canonicalHeaderName] = headers[headerName].trim().replace(/\s+/g, " "); + } + return canonical; +}; diff --git a/node_modules/@smithy/signature-v4/dist-es/getCanonicalQuery.js b/node_modules/@smithy/signature-v4/dist-es/getCanonicalQuery.js new file mode 100644 index 00000000..0623f1aa --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-es/getCanonicalQuery.js @@ -0,0 +1,29 @@ +import { escapeUri } from "@smithy/util-uri-escape"; +import { SIGNATURE_HEADER } from "./constants"; +export const getCanonicalQuery = ({ query = {} }) => { + const keys = []; + const serialized = {}; + for (const key of Object.keys(query)) { + if (key.toLowerCase() === SIGNATURE_HEADER) { + continue; + } + const encodedKey = escapeUri(key); + keys.push(encodedKey); + const value = query[key]; + if (typeof value === "string") { + serialized[encodedKey] = `${encodedKey}=${escapeUri(value)}`; + } + else if (Array.isArray(value)) { + serialized[encodedKey] = value + .slice(0) + .reduce((encoded, value) => encoded.concat([`${encodedKey}=${escapeUri(value)}`]), []) + .sort() + .join("&"); + } + } + return keys + .sort() + .map((key) => serialized[key]) + .filter((serialized) => serialized) + .join("&"); +}; diff --git a/node_modules/@smithy/signature-v4/dist-es/getPayloadHash.js b/node_modules/@smithy/signature-v4/dist-es/getPayloadHash.js new file mode 100644 index 00000000..cba165c3 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-es/getPayloadHash.js @@ -0,0 +1,20 @@ +import { isArrayBuffer } from "@smithy/is-array-buffer"; +import { toHex } from "@smithy/util-hex-encoding"; +import { toUint8Array 
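+ // Note for the reader: the hex constant returned below for an undefined body is
+ // the SHA-256 digest of the empty string, so absent and empty payloads hash alike.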
} from "@smithy/util-utf8"; +import { SHA256_HEADER, UNSIGNED_PAYLOAD } from "./constants"; +export const getPayloadHash = async ({ headers, body }, hashConstructor) => { + for (const headerName of Object.keys(headers)) { + if (headerName.toLowerCase() === SHA256_HEADER) { + return headers[headerName]; + } + } + if (body == undefined) { + return "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"; + } + else if (typeof body === "string" || ArrayBuffer.isView(body) || isArrayBuffer(body)) { + const hashCtor = new hashConstructor(); + hashCtor.update(toUint8Array(body)); + return toHex(await hashCtor.digest()); + } + return UNSIGNED_PAYLOAD; +}; diff --git a/node_modules/@smithy/signature-v4/dist-es/headerUtil.js b/node_modules/@smithy/signature-v4/dist-es/headerUtil.js new file mode 100644 index 00000000..e502cbbc --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-es/headerUtil.js @@ -0,0 +1,26 @@ +export const hasHeader = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return true; + } + } + return false; +}; +export const getHeaderValue = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return headers[headerName]; + } + } + return undefined; +}; +export const deleteHeader = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + delete headers[headerName]; + } + } +}; diff --git a/node_modules/@smithy/signature-v4/dist-es/index.js b/node_modules/@smithy/signature-v4/dist-es/index.js new file mode 100644 index 00000000..062752d6 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-es/index.js @@ -0,0 +1,11 @@ +export * from "./SignatureV4"; +export * from "./constants"; +export { getCanonicalHeaders } from "./getCanonicalHeaders"; +export { getCanonicalQuery } from "./getCanonicalQuery"; +export { getPayloadHash } from "./getPayloadHash"; +export { moveHeadersToQuery } from "./moveHeadersToQuery"; +export { prepareRequest } from "./prepareRequest"; +export * from "./credentialDerivation"; +export { SignatureV4Base } from "./SignatureV4Base"; +export { hasHeader } from "./headerUtil"; +export * from "./signature-v4a-container"; diff --git a/node_modules/@smithy/signature-v4/dist-es/moveHeadersToQuery.js b/node_modules/@smithy/signature-v4/dist-es/moveHeadersToQuery.js new file mode 100644 index 00000000..806703ac --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-es/moveHeadersToQuery.js @@ -0,0 +1,17 @@ +import { HttpRequest } from "@smithy/protocol-http"; +export const moveHeadersToQuery = (request, options = {}) => { + const { headers, query = {} } = HttpRequest.clone(request); + for (const name of Object.keys(headers)) { + const lname = name.toLowerCase(); + if ((lname.slice(0, 6) === "x-amz-" && !options.unhoistableHeaders?.has(lname)) || + options.hoistableHeaders?.has(lname)) { + query[name] = headers[name]; + delete headers[name]; + } + } + return { + ...request, + headers, + query, + }; +}; diff --git a/node_modules/@smithy/signature-v4/dist-es/prepareRequest.js b/node_modules/@smithy/signature-v4/dist-es/prepareRequest.js new file mode 100644 index 00000000..7fe5136c --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-es/prepareRequest.js @@ -0,0 +1,11 @@ +import 
{ HttpRequest } from "@smithy/protocol-http"; +import { GENERATED_HEADERS } from "./constants"; +export const prepareRequest = (request) => { + request = HttpRequest.clone(request); + for (const headerName of Object.keys(request.headers)) { + if (GENERATED_HEADERS.indexOf(headerName.toLowerCase()) > -1) { + delete request.headers[headerName]; + } + } + return request; +}; diff --git a/node_modules/@smithy/signature-v4/dist-es/signature-v4a-container.js b/node_modules/@smithy/signature-v4/dist-es/signature-v4a-container.js new file mode 100644 index 00000000..a309b0a0 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-es/signature-v4a-container.js @@ -0,0 +1,3 @@ +export const signatureV4aContainer = { + SignatureV4a: null, +}; diff --git a/node_modules/@smithy/signature-v4/dist-es/suite.fixture.js b/node_modules/@smithy/signature-v4/dist-es/suite.fixture.js new file mode 100644 index 00000000..bb704a99 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-es/suite.fixture.js @@ -0,0 +1,399 @@ +export const region = "us-east-1"; +export const service = "service"; +export const credentials = { + accessKeyId: "AKIDEXAMPLE", + secretAccessKey: "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY", +}; +export const signingDate = new Date("2015-08-30T12:36:00Z"); +export const requests = [ + { + name: "get-header-key-duplicate", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "value2,value2,value1", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;x-amz-date, Signature=c9d5ea9f3f72853aea855b47ea873832890dbdd183b4468f858259531a5138ea", + }, + { + name: "get-header-value-multiline", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "value1,value2,value3", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;x-amz-date, Signature=ba17b383a53190154eb5fa66a1b836cc297cc0a3d70a5d00705980573d8ff790", + }, + { + name: "get-header-value-order", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "value4,value1,value3,value2", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;x-amz-date, Signature=08c7e5a9acfcfeb3ab6b2185e75ce8b1deb5e634ec47601a50643f830c755c01", + }, + { + name: "get-header-value-trim", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "value1", + "my-header2": '"a b c"', + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;my-header2;x-amz-date, Signature=acc3ed3afb60bb290fc8d2dd0098b9911fcaa05412b367055dee359757a9c736", + }, + { + name: "get-unreserved", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", 
+ "x-amz-date": "20150830T123600Z", + }, + path: "/-._~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=07ef7494c76fa4850883e2b006601f940f8a34d404d0cfa977f52a65bbf5f24f", + }, + { + name: "get-utf8", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/ሴ", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=8318018e0b0f223aa2bbf98705b62bb787dc9c0e678f255a891fd03141be5d85", + }, + { + name: "get-vanilla", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5fa00fa31553b73ebf1942676e86291e8372ff2a2260956d9b8aae1d763fbf31", + }, + { + name: "get-vanilla-empty-query-key", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: { + Param1: "value1", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=a67d582fa61cc504c4bae71f336f98b97f1ea3c7a6bfe1b6e45aec72011b9aeb", + }, + { + name: "get-vanilla-query", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5fa00fa31553b73ebf1942676e86291e8372ff2a2260956d9b8aae1d763fbf31", + }, + { + name: "get-vanilla-query-order-key-case", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: { + Param2: "value2", + Param1: "value1", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=b97d918cfa904a5beff61c982a1b6f458b799221646efd99d3219ec94cdf2500", + }, + { + name: "get-vanilla-query-unreserved", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: { + "-._~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz": "-._~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=9c3e54bfcdf0b19771a7f523ee5669cdf59bc7cc0884027167c21bb143a40197", + }, + { + name: "get-vanilla-utf8-query", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: { + ሴ: "bar", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + 
authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=2cdec8eed098649ff3a119c94853b13c643bcf08f8b0a1d91e12c9027818dd04", + }, + { + name: "post-header-key-case", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5da7c1a2acd57cee7505fc6676e4e544621c30862966e37dddb68e92efbe5d6b", + }, + { + name: "post-header-key-sort", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "value1", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;x-amz-date, Signature=c5410059b04c1ee005303aed430f6e6645f61f4dc9e1461ec8f8916fdf18852c", + }, + { + name: "post-header-value-case", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "VALUE1", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;x-amz-date, Signature=cdbc9802e29d2942e5e10b5bccfdd67c5f22c7c4e8ae67b53629efa58b974b7d", + }, + { + name: "post-sts-header-after", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5da7c1a2acd57cee7505fc6676e4e544621c30862966e37dddb68e92efbe5d6b", + }, + { + name: "post-sts-header-before", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + "x-amz-security-token": "AQoDYXdzEPT//////////wEXAMPLEtc764bNrC9SAPBSM22wDOk4x4HIZ8j4FZTwdQWLWsKWHGBuFqwAeMicRXmxfpSPfIeoIYRqTflfKD8YUuwthAx7mSEI/qkPpKPi/kMcGdQrmGdeehM4IC1NtBmUpp2wUE8phUZampKsburEDy0KPkyQDYwT7WZ0wq5VSXDvp75YU9HFvlRd8Tx6q6fE8YQcHNVXAkiY9q6d+xo0rKwT38xVqr7ZD0u0iPPkUL64lIZbqBAz+scqKmlzm8FDrypNC9Yjc8fPOLn9FX9KSYvKTr4rvx3iSIlTJabIQwj2ICCR/oLxBA==", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date;x-amz-security-token, Signature=85d96828115b5dc0cfc3bd16ad9e210dd772bbebba041836c64533a82be05ead", + }, + { + name: "post-vanilla", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5da7c1a2acd57cee7505fc6676e4e544621c30862966e37dddb68e92efbe5d6b", + }, + { + name: "post-vanilla-empty-query-value", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: { + 
Param1: "value1", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=28038455d6de14eafc1f9222cf5aa6f1a96197d7deb8263271d420d138af7f11", + }, + { + name: "post-vanilla-query", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: { + Param1: "value1", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=28038455d6de14eafc1f9222cf5aa6f1a96197d7deb8263271d420d138af7f11", + }, + { + name: "post-vanilla-query-nonunreserved", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: { + "@#$%^": "", + "+": '/,?><`";:\\|][{}', + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=66c82657c86e26fb25238d0e69f011edc4c6df5ae71119d7cb98ed9b87393c1e", + }, + { + name: "post-vanilla-query-space", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: { + p: "", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=e71688addb58a26418614085fb730ba3faa623b461c17f48f2fbdb9361b94a9b", + }, + { + name: "post-x-www-form-urlencoded", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + "content-type": "application/x-www-form-urlencoded", + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + body: "Param1=value1", + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=content-type;host;x-amz-date, Signature=ff11897932ad3f4e8b18135d722051e5ac45fc38421b1da7b9d196a0fe09473a", + }, + { + name: "post-x-www-form-urlencoded-parameters", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + "content-type": "application/x-www-form-urlencoded; charset=utf8", + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + body: "Param1=value1", + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=content-type;host;x-amz-date, Signature=1a72ec8f64bd914b0e42e42607c7fbce7fb2c7465f63e3092b3b0d39fa77a6fe", + }, +]; diff --git a/node_modules/@smithy/signature-v4/dist-es/utilDate.js b/node_modules/@smithy/signature-v4/dist-es/utilDate.js new file mode 100644 index 00000000..4aad623e --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-es/utilDate.js @@ -0,0 +1,15 @@ +export const iso8601 = (time) => toDate(time) + .toISOString() + .replace(/\.\d{3}Z$/, "Z"); +export const toDate = (time) => { + if (typeof time === "number") { + return new Date(time * 1000); + } + if (typeof time === "string") { + if (Number(time)) { + return new Date(Number(time) * 1000); + } + return new Date(time); + } + return time; +}; diff 
--git a/node_modules/@smithy/signature-v4/dist-types/HeaderFormatter.d.ts b/node_modules/@smithy/signature-v4/dist-types/HeaderFormatter.d.ts new file mode 100644 index 00000000..92056a69 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/HeaderFormatter.d.ts @@ -0,0 +1,24 @@ +import type { Int64 as IInt64, MessageHeaders } from "@smithy/types"; +/** + * @internal + * TODO: duplicated from @smithy/eventstream-codec to break large dependency. + * TODO: This should be moved to its own deduped submodule in @smithy/core when submodules are implemented. + */ +export declare class HeaderFormatter { + format(headers: MessageHeaders): Uint8Array; + private formatHeaderValue; +} +/** + * TODO: duplicated from @smithy/eventstream-codec to break large dependency. + * TODO: This should be moved to its own deduped submodule in @smithy/core when submodules are implemented. + */ +export declare class Int64 implements IInt64 { + readonly bytes: Uint8Array; + constructor(bytes: Uint8Array); + static fromNumber(number: number): Int64; + /** + * Called implicitly by infix arithmetic operators. + */ + valueOf(): number; + toString(): string; +} diff --git a/node_modules/@smithy/signature-v4/dist-types/SignatureV4.d.ts b/node_modules/@smithy/signature-v4/dist-types/SignatureV4.d.ts new file mode 100644 index 00000000..99499d4e --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/SignatureV4.d.ts @@ -0,0 +1,20 @@ +import { EventSigner, EventSigningArguments, FormattedEvent, HttpRequest, MessageSigner, RequestPresigner, RequestPresigningArguments, RequestSigner, RequestSigningArguments, SignableMessage, SignedMessage, SigningArguments, StringSigner } from "@smithy/types"; +import { SignatureV4Base, SignatureV4CryptoInit, SignatureV4Init } from "./SignatureV4Base"; +/** + * @public + */ +export declare class SignatureV4 extends SignatureV4Base implements RequestPresigner, RequestSigner, StringSigner, EventSigner, MessageSigner { + private readonly headerFormatter; + constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath, }: SignatureV4Init & SignatureV4CryptoInit); + presign(originalRequest: HttpRequest, options?: RequestPresigningArguments): Promise; + sign(stringToSign: string, options?: SigningArguments): Promise; + sign(event: FormattedEvent, options: EventSigningArguments): Promise; + sign(event: SignableMessage, options: SigningArguments): Promise; + sign(requestToSign: HttpRequest, options?: RequestSigningArguments): Promise; + private signEvent; + signMessage(signableMessage: SignableMessage, { signingDate, signingRegion, signingService }: SigningArguments): Promise; + private signString; + private signRequest; + private getSignature; + private getSigningKey; +} diff --git a/node_modules/@smithy/signature-v4/dist-types/SignatureV4Base.d.ts b/node_modules/@smithy/signature-v4/dist-types/SignatureV4Base.d.ts new file mode 100644 index 00000000..9a0e6ada --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/SignatureV4Base.d.ts @@ -0,0 +1,69 @@ +import { AwsCredentialIdentity, ChecksumConstructor, DateInput, HashConstructor, HeaderBag, HttpRequest, Provider } from "@smithy/types"; +/** + * @public + */ +export interface SignatureV4Init { + /** + * The service signing name. + */ + service: string; + /** + * The region name or a function that returns a promise that will be + * resolved with the region name. 
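+ * For example (illustrative): a static string such as region: "us-east-1",
+ * or an async provider such as region: async () => "us-east-1".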
+ */ + region: string | Provider; + /** + * The credentials with which the request should be signed or a function + * that returns a promise that will be resolved with credentials. + */ + credentials: AwsCredentialIdentity | Provider; + /** + * A constructor function for a hash object that will calculate SHA-256 HMAC + * checksums. + */ + sha256?: ChecksumConstructor | HashConstructor; + /** + * Whether to uri-escape the request URI path as part of computing the + * canonical request string. This is required for every AWS service, except + * Amazon S3, as of late 2017. + * + * @default [true] + */ + uriEscapePath?: boolean; + /** + * Whether to calculate a checksum of the request body and include it as + * either a request header (when signing) or as a query string parameter + * (when presigning). This is required for AWS Glacier and Amazon S3 and optional for + * every other AWS service as of late 2017. + * + * @default [true] + */ + applyChecksum?: boolean; +} +/** + * @public + */ +export interface SignatureV4CryptoInit { + sha256: ChecksumConstructor | HashConstructor; +} +/** + * @internal + */ +export declare abstract class SignatureV4Base { + protected readonly service: string; + protected readonly regionProvider: Provider; + protected readonly credentialProvider: Provider; + protected readonly sha256: ChecksumConstructor | HashConstructor; + private readonly uriEscapePath; + protected readonly applyChecksum: boolean; + protected constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath, }: SignatureV4Init & SignatureV4CryptoInit); + protected createCanonicalRequest(request: HttpRequest, canonicalHeaders: HeaderBag, payloadHash: string): string; + protected createStringToSign(longDate: string, credentialScope: string, canonicalRequest: string, algorithmIdentifier: string): Promise; + private getCanonicalPath; + protected validateResolvedCredentials(credentials: unknown): void; + protected formatDate(now: DateInput): { + longDate: string; + shortDate: string; + }; + protected getCanonicalHeaderList(headers: object): string; +} diff --git a/node_modules/@smithy/signature-v4/dist-types/constants.d.ts b/node_modules/@smithy/signature-v4/dist-types/constants.d.ts new file mode 100644 index 00000000..ea1cfb5d --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/constants.d.ts @@ -0,0 +1,43 @@ +export declare const ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +export declare const CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +export declare const AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +export declare const SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +export declare const EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +export declare const SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; +export declare const TOKEN_QUERY_PARAM = "X-Amz-Security-Token"; +export declare const REGION_SET_PARAM = "X-Amz-Region-Set"; +export declare const AUTH_HEADER = "authorization"; +export declare const AMZ_DATE_HEADER: string; +export declare const DATE_HEADER = "date"; +export declare const GENERATED_HEADERS: string[]; +export declare const SIGNATURE_HEADER: string; +export declare const SHA256_HEADER = "x-amz-content-sha256"; +export declare const TOKEN_HEADER: string; +export declare const HOST_HEADER = "host"; +export declare const ALWAYS_UNSIGNABLE_HEADERS: { + authorization: boolean; + "cache-control": boolean; + connection: boolean; + expect: boolean; + from: boolean; + "keep-alive": boolean; + "max-forwards": boolean; + pragma: boolean; + referer: boolean; + te: boolean; + 
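+ // Explanatory note (not in the original file): these hop-by-hop, user-agent,
+ // and trace headers are excluded from signing by default because proxies and
+ // user agents may legitimately alter them in transit.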
trailer: boolean; + "transfer-encoding": boolean; + upgrade: boolean; + "user-agent": boolean; + "x-amzn-trace-id": boolean; +}; +export declare const PROXY_HEADER_PATTERN: RegExp; +export declare const SEC_HEADER_PATTERN: RegExp; +export declare const UNSIGNABLE_PATTERNS: RegExp[]; +export declare const ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; +export declare const ALGORITHM_IDENTIFIER_V4A = "AWS4-ECDSA-P256-SHA256"; +export declare const EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; +export declare const UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +export declare const MAX_CACHE_SIZE = 50; +export declare const KEY_TYPE_IDENTIFIER = "aws4_request"; +export declare const MAX_PRESIGNED_TTL: number; diff --git a/node_modules/@smithy/signature-v4/dist-types/credentialDerivation.d.ts b/node_modules/@smithy/signature-v4/dist-types/credentialDerivation.d.ts new file mode 100644 index 00000000..a560c2c2 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/credentialDerivation.d.ts @@ -0,0 +1,30 @@ +import { AwsCredentialIdentity, ChecksumConstructor, HashConstructor } from "@smithy/types"; +/** + * Create a string describing the scope of credentials used to sign a request. + * + * @internal + * + * @param shortDate - the current calendar date in the form YYYYMMDD. + * @param region - the AWS region in which the service resides. + * @param service - the service to which the signed request is being sent. + */ +export declare const createScope: (shortDate: string, region: string, service: string) => string; +/** + * Derive a signing key from its composite parts. + * + * @internal + * + * @param sha256Constructor - a constructor function that can instantiate SHA-256 + * hash objects. + * @param credentials - the credentials with which the request will be + * signed. + * @param shortDate - the current calendar date in the form YYYYMMDD. + * @param region - the AWS region in which the service resides. + * @param service - the service to which the signed request is being + * sent. 
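+ *
+ * For example, with shortDate "20150830", region "us-east-1", and service
+ * "service", the derived key is
+ * HMAC(HMAC(HMAC(HMAC("AWS4" + secretAccessKey, "20150830"), "us-east-1"), "service"), "aws4_request").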
+ */ +export declare const getSigningKey: (sha256Constructor: ChecksumConstructor | HashConstructor, credentials: AwsCredentialIdentity, shortDate: string, region: string, service: string) => Promise; +/** + * @internal + */ +export declare const clearCredentialCache: () => void; diff --git a/node_modules/@smithy/signature-v4/dist-types/getCanonicalHeaders.d.ts b/node_modules/@smithy/signature-v4/dist-types/getCanonicalHeaders.d.ts new file mode 100644 index 00000000..efc417c8 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/getCanonicalHeaders.d.ts @@ -0,0 +1,5 @@ +import { HeaderBag, HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getCanonicalHeaders: ({ headers }: HttpRequest, unsignableHeaders?: Set, signableHeaders?: Set) => HeaderBag; diff --git a/node_modules/@smithy/signature-v4/dist-types/getCanonicalQuery.d.ts b/node_modules/@smithy/signature-v4/dist-types/getCanonicalQuery.d.ts new file mode 100644 index 00000000..a8e18005 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/getCanonicalQuery.d.ts @@ -0,0 +1,5 @@ +import { HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getCanonicalQuery: ({ query }: HttpRequest) => string; diff --git a/node_modules/@smithy/signature-v4/dist-types/getPayloadHash.d.ts b/node_modules/@smithy/signature-v4/dist-types/getPayloadHash.d.ts new file mode 100644 index 00000000..2de0858a --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/getPayloadHash.d.ts @@ -0,0 +1,5 @@ +import { ChecksumConstructor, HashConstructor, HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getPayloadHash: ({ headers, body }: HttpRequest, hashConstructor: ChecksumConstructor | HashConstructor) => Promise; diff --git a/node_modules/@smithy/signature-v4/dist-types/headerUtil.d.ts b/node_modules/@smithy/signature-v4/dist-types/headerUtil.d.ts new file mode 100644 index 00000000..c0b66eb6 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/headerUtil.d.ts @@ -0,0 +1,4 @@ +import { HeaderBag } from "@smithy/types"; +export declare const hasHeader: (soughtHeader: string, headers: HeaderBag) => boolean; +export declare const getHeaderValue: (soughtHeader: string, headers: HeaderBag) => string | undefined; +export declare const deleteHeader: (soughtHeader: string, headers: HeaderBag) => void; diff --git a/node_modules/@smithy/signature-v4/dist-types/index.d.ts b/node_modules/@smithy/signature-v4/dist-types/index.d.ts new file mode 100644 index 00000000..9305cf31 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/index.d.ts @@ -0,0 +1,11 @@ +export * from "./SignatureV4"; +export * from "./constants"; +export { getCanonicalHeaders } from "./getCanonicalHeaders"; +export { getCanonicalQuery } from "./getCanonicalQuery"; +export { getPayloadHash } from "./getPayloadHash"; +export { moveHeadersToQuery } from "./moveHeadersToQuery"; +export { prepareRequest } from "./prepareRequest"; +export * from "./credentialDerivation"; +export { SignatureV4Init, SignatureV4CryptoInit, SignatureV4Base } from "./SignatureV4Base"; +export { hasHeader } from "./headerUtil"; +export * from "./signature-v4a-container"; diff --git a/node_modules/@smithy/signature-v4/dist-types/moveHeadersToQuery.d.ts b/node_modules/@smithy/signature-v4/dist-types/moveHeadersToQuery.d.ts new file mode 100644 index 00000000..e2c31e0a --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/moveHeadersToQuery.d.ts @@ -0,0 +1,10 @@ +import type { 
HttpRequest as IHttpRequest, QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare const moveHeadersToQuery: (request: IHttpRequest, options?: { + unhoistableHeaders?: Set; + hoistableHeaders?: Set; +}) => IHttpRequest & { + query: QueryParameterBag; +}; diff --git a/node_modules/@smithy/signature-v4/dist-types/prepareRequest.d.ts b/node_modules/@smithy/signature-v4/dist-types/prepareRequest.d.ts new file mode 100644 index 00000000..b20e0e35 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/prepareRequest.d.ts @@ -0,0 +1,5 @@ +import type { HttpRequest as IHttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const prepareRequest: (request: IHttpRequest) => IHttpRequest; diff --git a/node_modules/@smithy/signature-v4/dist-types/signature-v4a-container.d.ts b/node_modules/@smithy/signature-v4/dist-types/signature-v4a-container.d.ts new file mode 100644 index 00000000..89010361 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/signature-v4a-container.d.ts @@ -0,0 +1,24 @@ +import type { RequestSigner } from "@smithy/types"; +/** + * @public + */ +export type OptionalSigV4aSigner = { + /** + * This constructor is not typed so as not to require a type import + * from the signature-v4a package. + * + * The true type is SignatureV4a from @smithy/signature-v4a. + */ + new (options: any): RequestSigner; +}; +/** + * @public + * + * \@smithy/signature-v4a will install the constructor in this + * container if it's installed. + * + * This avoids a runtime-require being interpreted statically by bundlers. + */ +export declare const signatureV4aContainer: { + SignatureV4a: null | OptionalSigV4aSigner; +}; diff --git a/node_modules/@smithy/signature-v4/dist-types/suite.fixture.d.ts b/node_modules/@smithy/signature-v4/dist-types/suite.fixture.d.ts new file mode 100644 index 00000000..383bc35f --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/suite.fixture.d.ts @@ -0,0 +1,14 @@ +import { HttpRequest } from "@smithy/types"; +export interface TestCase { + name: string; + request: HttpRequest; + authorization: string; +} +export declare const region = "us-east-1"; +export declare const service = "service"; +export declare const credentials: { + accessKeyId: string; + secretAccessKey: string; +}; +export declare const signingDate: Date; +export declare const requests: Array; diff --git a/node_modules/@smithy/signature-v4/dist-types/ts3.4/HeaderFormatter.d.ts b/node_modules/@smithy/signature-v4/dist-types/ts3.4/HeaderFormatter.d.ts new file mode 100644 index 00000000..6c294c30 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/ts3.4/HeaderFormatter.d.ts @@ -0,0 +1,24 @@ +import { Int64 as IInt64, MessageHeaders } from "@smithy/types"; +/** + * @internal + * TODO: duplicated from @smithy/eventstream-codec to break large dependency. + * TODO: This should be moved to its own deduped submodule in @smithy/core when submodules are implemented. + */ +export declare class HeaderFormatter { + format(headers: MessageHeaders): Uint8Array; + private formatHeaderValue; +} +/** + * TODO: duplicated from @smithy/eventstream-codec to break large dependency. + * TODO: This should be moved to its own deduped submodule in @smithy/core when submodules are implemented. + */ +export declare class Int64 implements IInt64 { + readonly bytes: Uint8Array; + constructor(bytes: Uint8Array); + static fromNumber(number: number): Int64; + /** + * Called implicitly by infix arithmetic operators. 
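+ * For example, Int64.fromNumber(41) + 1 evaluates to 42 because the addition
+ * coerces the instance through this method.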
+ */ + valueOf(): number; + toString(): string; +} diff --git a/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4.d.ts b/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4.d.ts new file mode 100644 index 00000000..c6137536 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4.d.ts @@ -0,0 +1,20 @@ +import { EventSigner, EventSigningArguments, FormattedEvent, HttpRequest, MessageSigner, RequestPresigner, RequestPresigningArguments, RequestSigner, RequestSigningArguments, SignableMessage, SignedMessage, SigningArguments, StringSigner } from "@smithy/types"; +import { SignatureV4Base, SignatureV4CryptoInit, SignatureV4Init } from "./SignatureV4Base"; +/** + * @public + */ +export declare class SignatureV4 extends SignatureV4Base implements RequestPresigner, RequestSigner, StringSigner, EventSigner, MessageSigner { + private readonly headerFormatter; + constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath, }: SignatureV4Init & SignatureV4CryptoInit); + presign(originalRequest: HttpRequest, options?: RequestPresigningArguments): Promise; + sign(stringToSign: string, options?: SigningArguments): Promise; + sign(event: FormattedEvent, options: EventSigningArguments): Promise; + sign(event: SignableMessage, options: SigningArguments): Promise; + sign(requestToSign: HttpRequest, options?: RequestSigningArguments): Promise; + private signEvent; + signMessage(signableMessage: SignableMessage, { signingDate, signingRegion, signingService }: SigningArguments): Promise; + private signString; + private signRequest; + private getSignature; + private getSigningKey; +} diff --git a/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4Base.d.ts b/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4Base.d.ts new file mode 100644 index 00000000..be1da1f6 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4Base.d.ts @@ -0,0 +1,69 @@ +import { AwsCredentialIdentity, ChecksumConstructor, DateInput, HashConstructor, HeaderBag, HttpRequest, Provider } from "@smithy/types"; +/** + * @public + */ +export interface SignatureV4Init { + /** + * The service signing name. + */ + service: string; + /** + * The region name or a function that returns a promise that will be + * resolved with the region name. + */ + region: string | Provider; + /** + * The credentials with which the request should be signed or a function + * that returns a promise that will be resolved with credentials. + */ + credentials: AwsCredentialIdentity | Provider; + /** + * A constructor function for a hash object that will calculate SHA-256 HMAC + * checksums. + */ + sha256?: ChecksumConstructor | HashConstructor; + /** + * Whether to uri-escape the request URI path as part of computing the + * canonical request string. This is required for every AWS service, except + * Amazon S3, as of late 2017. + * + * @default [true] + */ + uriEscapePath?: boolean; + /** + * Whether to calculate a checksum of the request body and include it as + * either a request header (when signing) or as a query string parameter + * (when presigning). This is required for AWS Glacier and Amazon S3 and optional for + * every other AWS service as of late 2017. 
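+ * When enabled, signing sets the "x-amz-content-sha256" header to the hex
+ * SHA-256 of the request body, or to "UNSIGNED-PAYLOAD" for bodies that
+ * cannot be hashed up front (e.g. streams).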
+ * + * @default [true] + */ + applyChecksum?: boolean; +} +/** + * @public + */ +export interface SignatureV4CryptoInit { + sha256: ChecksumConstructor | HashConstructor; +} +/** + * @internal + */ +export declare abstract class SignatureV4Base { + protected readonly service: string; + protected readonly regionProvider: Provider; + protected readonly credentialProvider: Provider; + protected readonly sha256: ChecksumConstructor | HashConstructor; + private readonly uriEscapePath; + protected readonly applyChecksum: boolean; + protected constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath, }: SignatureV4Init & SignatureV4CryptoInit); + protected createCanonicalRequest(request: HttpRequest, canonicalHeaders: HeaderBag, payloadHash: string): string; + protected createStringToSign(longDate: string, credentialScope: string, canonicalRequest: string, algorithmIdentifier: string): Promise; + private getCanonicalPath; + protected validateResolvedCredentials(credentials: unknown): void; + protected formatDate(now: DateInput): { + longDate: string; + shortDate: string; + }; + protected getCanonicalHeaderList(headers: object): string; +} diff --git a/node_modules/@smithy/signature-v4/dist-types/ts3.4/constants.d.ts b/node_modules/@smithy/signature-v4/dist-types/ts3.4/constants.d.ts new file mode 100644 index 00000000..ff54b673 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,43 @@ +export declare const ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +export declare const CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +export declare const AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +export declare const SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +export declare const EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +export declare const SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; +export declare const TOKEN_QUERY_PARAM = "X-Amz-Security-Token"; +export declare const REGION_SET_PARAM = "X-Amz-Region-Set"; +export declare const AUTH_HEADER = "authorization"; +export declare const AMZ_DATE_HEADER: string; +export declare const DATE_HEADER = "date"; +export declare const GENERATED_HEADERS: string[]; +export declare const SIGNATURE_HEADER: string; +export declare const SHA256_HEADER = "x-amz-content-sha256"; +export declare const TOKEN_HEADER: string; +export declare const HOST_HEADER = "host"; +export declare const ALWAYS_UNSIGNABLE_HEADERS: { + authorization: boolean; + "cache-control": boolean; + connection: boolean; + expect: boolean; + from: boolean; + "keep-alive": boolean; + "max-forwards": boolean; + pragma: boolean; + referer: boolean; + te: boolean; + trailer: boolean; + "transfer-encoding": boolean; + upgrade: boolean; + "user-agent": boolean; + "x-amzn-trace-id": boolean; +}; +export declare const PROXY_HEADER_PATTERN: RegExp; +export declare const SEC_HEADER_PATTERN: RegExp; +export declare const UNSIGNABLE_PATTERNS: RegExp[]; +export declare const ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; +export declare const ALGORITHM_IDENTIFIER_V4A = "AWS4-ECDSA-P256-SHA256"; +export declare const EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; +export declare const UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +export declare const MAX_CACHE_SIZE = 50; +export declare const KEY_TYPE_IDENTIFIER = "aws4_request"; +export declare const MAX_PRESIGNED_TTL: number; diff --git a/node_modules/@smithy/signature-v4/dist-types/ts3.4/credentialDerivation.d.ts b/node_modules/@smithy/signature-v4/dist-types/ts3.4/credentialDerivation.d.ts new file mode 
100644 index 00000000..6cba9b6f --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/ts3.4/credentialDerivation.d.ts @@ -0,0 +1,30 @@ +import { AwsCredentialIdentity, ChecksumConstructor, HashConstructor } from "@smithy/types"; +/** + * Create a string describing the scope of credentials used to sign a request. + * + * @internal + * + * @param shortDate - the current calendar date in the form YYYYMMDD. + * @param region - the AWS region in which the service resides. + * @param service - the service to which the signed request is being sent. + */ +export declare const createScope: (shortDate: string, region: string, service: string) => string; +/** + * Derive a signing key from its composite parts. + * + * @internal + * + * @param sha256Constructor - a constructor function that can instantiate SHA-256 + * hash objects. + * @param credentials - the credentials with which the request will be + * signed. + * @param shortDate - the current calendar date in the form YYYYMMDD. + * @param region - the AWS region in which the service resides. + * @param service - the service to which the signed request is being + * sent. + */ +export declare const getSigningKey: (sha256Constructor: ChecksumConstructor | HashConstructor, credentials: AwsCredentialIdentity, shortDate: string, region: string, service: string) => Promise; +/** + * @internal + */ +export declare const clearCredentialCache: () => void; diff --git a/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalHeaders.d.ts b/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalHeaders.d.ts new file mode 100644 index 00000000..e8f2e986 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalHeaders.d.ts @@ -0,0 +1,5 @@ +import { HeaderBag, HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getCanonicalHeaders: ({ headers }: HttpRequest, unsignableHeaders?: Set, signableHeaders?: Set) => HeaderBag; diff --git a/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalQuery.d.ts b/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalQuery.d.ts new file mode 100644 index 00000000..6a2d4fac --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalQuery.d.ts @@ -0,0 +1,5 @@ +import { HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getCanonicalQuery: ({ query }: HttpRequest) => string; diff --git a/node_modules/@smithy/signature-v4/dist-types/ts3.4/getPayloadHash.d.ts b/node_modules/@smithy/signature-v4/dist-types/ts3.4/getPayloadHash.d.ts new file mode 100644 index 00000000..c14a46d7 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/ts3.4/getPayloadHash.d.ts @@ -0,0 +1,5 @@ +import { ChecksumConstructor, HashConstructor, HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getPayloadHash: ({ headers, body }: HttpRequest, hashConstructor: ChecksumConstructor | HashConstructor) => Promise; diff --git a/node_modules/@smithy/signature-v4/dist-types/ts3.4/headerUtil.d.ts b/node_modules/@smithy/signature-v4/dist-types/ts3.4/headerUtil.d.ts new file mode 100644 index 00000000..41ca217d --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/ts3.4/headerUtil.d.ts @@ -0,0 +1,4 @@ +import { HeaderBag } from "@smithy/types"; +export declare const hasHeader: (soughtHeader: string, headers: HeaderBag) => boolean; +export declare const getHeaderValue: (soughtHeader: string, headers: HeaderBag) => string | undefined; +export declare const deleteHeader: 
(soughtHeader: string, headers: HeaderBag) => void; diff --git a/node_modules/@smithy/signature-v4/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/signature-v4/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..c9fa5f63 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/ts3.4/index.d.ts @@ -0,0 +1,11 @@ +export * from "./SignatureV4"; +export * from "./constants"; +export { getCanonicalHeaders } from "./getCanonicalHeaders"; +export { getCanonicalQuery } from "./getCanonicalQuery"; +export { getPayloadHash } from "./getPayloadHash"; +export { moveHeadersToQuery } from "./moveHeadersToQuery"; +export { prepareRequest } from "./prepareRequest"; +export * from "./credentialDerivation"; +export { SignatureV4Init, SignatureV4CryptoInit, SignatureV4Base } from "./SignatureV4Base"; +export { hasHeader } from "./headerUtil"; +export * from "./signature-v4a-container"; diff --git a/node_modules/@smithy/signature-v4/dist-types/ts3.4/moveHeadersToQuery.d.ts b/node_modules/@smithy/signature-v4/dist-types/ts3.4/moveHeadersToQuery.d.ts new file mode 100644 index 00000000..2017f3bd --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/ts3.4/moveHeadersToQuery.d.ts @@ -0,0 +1,10 @@ +import { HttpRequest as IHttpRequest, QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare const moveHeadersToQuery: (request: IHttpRequest, options?: { + unhoistableHeaders?: Set<string>; + hoistableHeaders?: Set<string>; +}) => IHttpRequest & { + query: QueryParameterBag; +}; diff --git a/node_modules/@smithy/signature-v4/dist-types/ts3.4/prepareRequest.d.ts b/node_modules/@smithy/signature-v4/dist-types/ts3.4/prepareRequest.d.ts new file mode 100644 index 00000000..57cf7821 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/ts3.4/prepareRequest.d.ts @@ -0,0 +1,5 @@ +import { HttpRequest as IHttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const prepareRequest: (request: IHttpRequest) => IHttpRequest; diff --git a/node_modules/@smithy/signature-v4/dist-types/ts3.4/signature-v4a-container.d.ts b/node_modules/@smithy/signature-v4/dist-types/ts3.4/signature-v4a-container.d.ts new file mode 100644 index 00000000..f1adc976 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/ts3.4/signature-v4a-container.d.ts @@ -0,0 +1,24 @@ +import { RequestSigner } from "@smithy/types"; +/** + * @public + */ +export type OptionalSigV4aSigner = { + /** + * This constructor is not typed so as not to require a type import + * from the signature-v4a package. + * + * The true type is SignatureV4a from @smithy/signature-v4a. + */ + new (options: any): RequestSigner; +}; +/** + * @public + * + * \@smithy/signature-v4a will install the constructor in this + * container if it's installed. + * + * This avoids a runtime-require being interpreted statically by bundlers.
+ */ +export declare const signatureV4aContainer: { + SignatureV4a: null | OptionalSigV4aSigner; +}; diff --git a/node_modules/@smithy/signature-v4/dist-types/ts3.4/suite.fixture.d.ts b/node_modules/@smithy/signature-v4/dist-types/ts3.4/suite.fixture.d.ts new file mode 100644 index 00000000..9363eeb1 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/ts3.4/suite.fixture.d.ts @@ -0,0 +1,14 @@ +import { HttpRequest } from "@smithy/types"; +export interface TestCase { + name: string; + request: HttpRequest; + authorization: string; +} +export declare const region = "us-east-1"; +export declare const service = "service"; +export declare const credentials: { + accessKeyId: string; + secretAccessKey: string; +}; +export declare const signingDate: Date; +export declare const requests: Array<TestCase>; diff --git a/node_modules/@smithy/signature-v4/dist-types/ts3.4/utilDate.d.ts b/node_modules/@smithy/signature-v4/dist-types/ts3.4/utilDate.d.ts new file mode 100644 index 00000000..9a6f3832 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/ts3.4/utilDate.d.ts @@ -0,0 +1,2 @@ +export declare const iso8601: (time: number | string | Date) => string; +export declare const toDate: (time: number | string | Date) => Date; diff --git a/node_modules/@smithy/signature-v4/dist-types/utilDate.d.ts b/node_modules/@smithy/signature-v4/dist-types/utilDate.d.ts new file mode 100644 index 00000000..e8c6a684 --- /dev/null +++ b/node_modules/@smithy/signature-v4/dist-types/utilDate.d.ts @@ -0,0 +1,2 @@ +export declare const iso8601: (time: number | string | Date) => string; +export declare const toDate: (time: number | string | Date) => Date; diff --git a/node_modules/@smithy/signature-v4/package.json b/node_modules/@smithy/signature-v4/package.json new file mode 100644 index 00000000..56e028c6 --- /dev/null +++ b/node_modules/@smithy/signature-v4/package.json @@ -0,0 +1,69 @@ +{ + "name": "@smithy/signature-v4", + "version": "5.1.0", + "description": "A standalone implementation of the AWS Signature V4 request signing algorithm", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline signature-v4", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-uri-escape": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@aws-crypto/sha256-js": "5.2.0", +
"concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/signature-v4", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/signature-v4" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/LICENSE b/node_modules/@smithy/smithy-client/LICENSE new file mode 100644 index 00000000..e907b586 --- /dev/null +++ b/node_modules/@smithy/smithy-client/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@smithy/smithy-client/README.md b/node_modules/@smithy/smithy-client/README.md new file mode 100644 index 00000000..365cd629 --- /dev/null +++ b/node_modules/@smithy/smithy-client/README.md @@ -0,0 +1,10 @@ +# @smithy/smithy-client + +[![NPM version](https://img.shields.io/npm/v/@smithy/smithy-client/latest.svg)](https://www.npmjs.com/package/@smithy/smithy-client) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/smithy-client.svg)](https://www.npmjs.com/package/@smithy/smithy-client) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
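A note on the @smithy/signature-v4 files vendored above: createScope and getSigningKey (see credentialDerivation.d.ts) are the SigV4 key-derivation step that the higher-level SignatureV4 signer builds on. The sketch below is only an illustration of how those two exports compose; it assumes @aws-crypto/sha256-js (listed in the package's devDependencies) as the hash implementation, reuses the "us-east-1"/"service" constants from suite.fixture.d.ts, and uses placeholder credentials and a placeholder date that appear nowhere in this changeset.

import { createScope, getSigningKey } from "@smithy/signature-v4";
import { toHex } from "@smithy/util-hex-encoding";
import { Sha256 } from "@aws-crypto/sha256-js";

// Placeholder credentials -- illustration only, not taken from this repo.
const credentials = { accessKeyId: "AKIDEXAMPLE", secretAccessKey: "exampleSecretKey" };
const shortDate = "20150830"; // YYYYMMDD, per the createScope doc comment
const region = "us-east-1";
const service = "service";

// Credential scope string: "20150830/us-east-1/service/aws4_request".
const scope = createScope(shortDate, region, service);

// getSigningKey chains HMAC-SHA256 over the secret key, date, region,
// service, and the literal "aws4_request", resolving to the raw key bytes.
getSigningKey(Sha256, credentials, shortDate, region, service).then((key) => {
  console.log(`scope=${scope} signingKey=${toHex(key)}`);
});

In normal use none of this is invoked directly: the generated SDK clients resolve a signer internally on every request, which is why the @smithy/smithy-client README above warns "You probably shouldn't" use these packages directly.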
diff --git a/node_modules/@smithy/smithy-client/dist-cjs/NoOpLogger.js b/node_modules/@smithy/smithy-client/dist-cjs/NoOpLogger.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/NoOpLogger.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/client.js b/node_modules/@smithy/smithy-client/dist-cjs/client.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/client.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/collect-stream-body.js b/node_modules/@smithy/smithy-client/dist-cjs/collect-stream-body.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/collect-stream-body.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/command.js b/node_modules/@smithy/smithy-client/dist-cjs/command.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/command.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/constants.js b/node_modules/@smithy/smithy-client/dist-cjs/constants.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/create-aggregated-client.js b/node_modules/@smithy/smithy-client/dist-cjs/create-aggregated-client.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/create-aggregated-client.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/date-utils.js b/node_modules/@smithy/smithy-client/dist-cjs/date-utils.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/date-utils.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/default-error-handler.js b/node_modules/@smithy/smithy-client/dist-cjs/default-error-handler.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/default-error-handler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/defaults-mode.js b/node_modules/@smithy/smithy-client/dist-cjs/defaults-mode.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/defaults-mode.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/emitWarningIfUnsupportedVersion.js b/node_modules/@smithy/smithy-client/dist-cjs/emitWarningIfUnsupportedVersion.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/emitWarningIfUnsupportedVersion.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/node_modules/@smithy/smithy-client/dist-cjs/exceptions.js b/node_modules/@smithy/smithy-client/dist-cjs/exceptions.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/exceptions.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/extended-encode-uri-component.js b/node_modules/@smithy/smithy-client/dist-cjs/extended-encode-uri-component.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/extended-encode-uri-component.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/extensions/checksum.js b/node_modules/@smithy/smithy-client/dist-cjs/extensions/checksum.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/extensions/checksum.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/extensions/defaultExtensionConfiguration.js b/node_modules/@smithy/smithy-client/dist-cjs/extensions/defaultExtensionConfiguration.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/extensions/defaultExtensionConfiguration.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/extensions/index.js b/node_modules/@smithy/smithy-client/dist-cjs/extensions/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/extensions/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/extensions/retry.js b/node_modules/@smithy/smithy-client/dist-cjs/extensions/retry.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/extensions/retry.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/get-array-if-single-item.js b/node_modules/@smithy/smithy-client/dist-cjs/get-array-if-single-item.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/get-array-if-single-item.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/get-value-from-text-node.js b/node_modules/@smithy/smithy-client/dist-cjs/get-value-from-text-node.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/get-value-from-text-node.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/index.js b/node_modules/@smithy/smithy-client/dist-cjs/index.js new file mode 100644 index 00000000..c410d8df --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/index.js @@ -0,0 +1,1321 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var 
__export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Client: () => Client, + Command: () => Command, + LazyJsonString: () => LazyJsonString, + NoOpLogger: () => NoOpLogger, + SENSITIVE_STRING: () => SENSITIVE_STRING, + ServiceException: () => ServiceException, + _json: () => _json, + collectBody: () => import_protocols.collectBody, + convertMap: () => convertMap, + createAggregatedClient: () => createAggregatedClient, + dateToUtcString: () => dateToUtcString, + decorateServiceException: () => decorateServiceException, + emitWarningIfUnsupportedVersion: () => emitWarningIfUnsupportedVersion, + expectBoolean: () => expectBoolean, + expectByte: () => expectByte, + expectFloat32: () => expectFloat32, + expectInt: () => expectInt, + expectInt32: () => expectInt32, + expectLong: () => expectLong, + expectNonNull: () => expectNonNull, + expectNumber: () => expectNumber, + expectObject: () => expectObject, + expectShort: () => expectShort, + expectString: () => expectString, + expectUnion: () => expectUnion, + extendedEncodeURIComponent: () => import_protocols.extendedEncodeURIComponent, + getArrayIfSingleItem: () => getArrayIfSingleItem, + getDefaultClientConfiguration: () => getDefaultClientConfiguration, + getDefaultExtensionConfiguration: () => getDefaultExtensionConfiguration, + getValueFromTextNode: () => getValueFromTextNode, + handleFloat: () => handleFloat, + isSerializableHeaderValue: () => isSerializableHeaderValue, + limitedParseDouble: () => limitedParseDouble, + limitedParseFloat: () => limitedParseFloat, + limitedParseFloat32: () => limitedParseFloat32, + loadConfigsForDefaultMode: () => loadConfigsForDefaultMode, + logger: () => logger, + map: () => map, + parseBoolean: () => parseBoolean, + parseEpochTimestamp: () => parseEpochTimestamp, + parseRfc3339DateTime: () => parseRfc3339DateTime, + parseRfc3339DateTimeWithOffset: () => parseRfc3339DateTimeWithOffset, + parseRfc7231DateTime: () => parseRfc7231DateTime, + quoteHeader: () => quoteHeader, + resolveDefaultRuntimeConfig: () => resolveDefaultRuntimeConfig, + resolvedPath: () => import_protocols.resolvedPath, + serializeDateTime: () => serializeDateTime, + serializeFloat: () => serializeFloat, + splitEvery: () => splitEvery, + splitHeader: () => splitHeader, + strictParseByte: () => strictParseByte, + strictParseDouble: () => strictParseDouble, + strictParseFloat: () => strictParseFloat, + strictParseFloat32: () => strictParseFloat32, + strictParseInt: () => strictParseInt, + strictParseInt32: () => strictParseInt32, + strictParseLong: () => strictParseLong, + strictParseShort: () => strictParseShort, + take: () => take, + throwDefaultError: () => throwDefaultError, + withBaseException: () => withBaseException +}); +module.exports = __toCommonJS(src_exports); + +// src/client.ts +var import_middleware_stack = require("@smithy/middleware-stack"); +var Client = class { + constructor(config) { + this.config = config; + this.middlewareStack = (0, 
import_middleware_stack.constructStack)(); + } + static { + __name(this, "Client"); + } + send(command, optionsOrCb, cb) { + const options = typeof optionsOrCb !== "function" ? optionsOrCb : void 0; + const callback = typeof optionsOrCb === "function" ? optionsOrCb : cb; + const useHandlerCache = options === void 0 && this.config.cacheMiddleware === true; + let handler; + if (useHandlerCache) { + if (!this.handlers) { + this.handlers = /* @__PURE__ */ new WeakMap(); + } + const handlers = this.handlers; + if (handlers.has(command.constructor)) { + handler = handlers.get(command.constructor); + } else { + handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + handlers.set(command.constructor, handler); + } + } else { + delete this.handlers; + handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + } + if (callback) { + handler(command).then( + (result) => callback(null, result.output), + (err) => callback(err) + ).catch( + // prevent any errors thrown in the callback from triggering an + // unhandled promise rejection + () => { + } + ); + } else { + return handler(command).then((result) => result.output); + } + } + destroy() { + this.config?.requestHandler?.destroy?.(); + delete this.handlers; + } +}; + +// src/collect-stream-body.ts +var import_protocols = require("@smithy/core/protocols"); + +// src/command.ts + +var import_types = require("@smithy/types"); +var Command = class { + constructor() { + this.middlewareStack = (0, import_middleware_stack.constructStack)(); + } + static { + __name(this, "Command"); + } + /** + * Factory for Command ClassBuilder. + * @internal + */ + static classBuilder() { + return new ClassBuilder(); + } + /** + * @internal + */ + resolveMiddlewareWithContext(clientStack, configuration, options, { + middlewareFn, + clientName, + commandName, + inputFilterSensitiveLog, + outputFilterSensitiveLog, + smithyContext, + additionalContext, + CommandCtor + }) { + for (const mw of middlewareFn.bind(this)(CommandCtor, clientStack, configuration, options)) { + this.middlewareStack.use(mw); + } + const stack = clientStack.concat(this.middlewareStack); + const { logger: logger2 } = configuration; + const handlerExecutionContext = { + logger: logger2, + clientName, + commandName, + inputFilterSensitiveLog, + outputFilterSensitiveLog, + [import_types.SMITHY_CONTEXT_KEY]: { + commandInstance: this, + ...smithyContext + }, + ...additionalContext + }; + const { requestHandler } = configuration; + return stack.resolve( + (request) => requestHandler.handle(request.request, options || {}), + handlerExecutionContext + ); + } +}; +var ClassBuilder = class { + constructor() { + this._init = () => { + }; + this._ep = {}; + this._middlewareFn = () => []; + this._commandName = ""; + this._clientName = ""; + this._additionalContext = {}; + this._smithyContext = {}; + this._inputFilterSensitiveLog = (_) => _; + this._outputFilterSensitiveLog = (_) => _; + this._serializer = null; + this._deserializer = null; + } + static { + __name(this, "ClassBuilder"); + } + /** + * Optional init callback. + */ + init(cb) { + this._init = cb; + } + /** + * Set the endpoint parameter instructions. + */ + ep(endpointParameterInstructions) { + this._ep = endpointParameterInstructions; + return this; + } + /** + * Add any number of middleware. + */ + m(middlewareSupplier) { + this._middlewareFn = middlewareSupplier; + return this; + } + /** + * Set the initial handler execution context Smithy field. 
+ */ + s(service, operation, smithyContext = {}) { + this._smithyContext = { + service, + operation, + ...smithyContext + }; + return this; + } + /** + * Set the initial handler execution context. + */ + c(additionalContext = {}) { + this._additionalContext = additionalContext; + return this; + } + /** + * Set constant string identifiers for the operation. + */ + n(clientName, commandName) { + this._clientName = clientName; + this._commandName = commandName; + return this; + } + /** + * Set the input and output sensistive log filters. + */ + f(inputFilter = (_) => _, outputFilter = (_) => _) { + this._inputFilterSensitiveLog = inputFilter; + this._outputFilterSensitiveLog = outputFilter; + return this; + } + /** + * Sets the serializer. + */ + ser(serializer) { + this._serializer = serializer; + return this; + } + /** + * Sets the deserializer. + */ + de(deserializer) { + this._deserializer = deserializer; + return this; + } + /** + * @returns a Command class with the classBuilder properties. + */ + build() { + const closure = this; + let CommandRef; + return CommandRef = class extends Command { + /** + * @public + */ + constructor(...[input]) { + super(); + /** + * @internal + */ + // @ts-ignore used in middlewareFn closure. + this.serialize = closure._serializer; + /** + * @internal + */ + // @ts-ignore used in middlewareFn closure. + this.deserialize = closure._deserializer; + this.input = input ?? {}; + closure._init(this); + } + static { + __name(this, "CommandRef"); + } + /** + * @public + */ + static getEndpointParameterInstructions() { + return closure._ep; + } + /** + * @internal + */ + resolveMiddleware(stack, configuration, options) { + return this.resolveMiddlewareWithContext(stack, configuration, options, { + CommandCtor: CommandRef, + middlewareFn: closure._middlewareFn, + clientName: closure._clientName, + commandName: closure._commandName, + inputFilterSensitiveLog: closure._inputFilterSensitiveLog, + outputFilterSensitiveLog: closure._outputFilterSensitiveLog, + smithyContext: closure._smithyContext, + additionalContext: closure._additionalContext + }); + } + }; + } +}; + +// src/constants.ts +var SENSITIVE_STRING = "***SensitiveInformation***"; + +// src/create-aggregated-client.ts +var createAggregatedClient = /* @__PURE__ */ __name((commands, Client2) => { + for (const command of Object.keys(commands)) { + const CommandCtor = commands[command]; + const methodImpl = /* @__PURE__ */ __name(async function(args, optionsOrCb, cb) { + const command2 = new CommandCtor(args); + if (typeof optionsOrCb === "function") { + this.send(command2, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") + throw new Error(`Expected http options but got ${typeof optionsOrCb}`); + this.send(command2, optionsOrCb || {}, cb); + } else { + return this.send(command2, optionsOrCb); + } + }, "methodImpl"); + const methodName = (command[0].toLowerCase() + command.slice(1)).replace(/Command$/, ""); + Client2.prototype[methodName] = methodImpl; + } +}, "createAggregatedClient"); + +// src/parse-utils.ts +var parseBoolean = /* @__PURE__ */ __name((value) => { + switch (value) { + case "true": + return true; + case "false": + return false; + default: + throw new Error(`Unable to parse boolean value "${value}"`); + } +}, "parseBoolean"); +var expectBoolean = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "number") { + if (value === 0 || value === 1) { + 
logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (value === 0) { + return false; + } + if (value === 1) { + return true; + } + } + if (typeof value === "string") { + const lower = value.toLowerCase(); + if (lower === "false" || lower === "true") { + logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (lower === "false") { + return false; + } + if (lower === "true") { + return true; + } + } + if (typeof value === "boolean") { + return value; + } + throw new TypeError(`Expected boolean, got ${typeof value}: ${value}`); +}, "expectBoolean"); +var expectNumber = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "string") { + const parsed = parseFloat(value); + if (!Number.isNaN(parsed)) { + if (String(parsed) !== String(value)) { + logger.warn(stackTraceWarning(`Expected number but observed string: ${value}`)); + } + return parsed; + } + } + if (typeof value === "number") { + return value; + } + throw new TypeError(`Expected number, got ${typeof value}: ${value}`); +}, "expectNumber"); +var MAX_FLOAT = Math.ceil(2 ** 127 * (2 - 2 ** -23)); +var expectFloat32 = /* @__PURE__ */ __name((value) => { + const expected = expectNumber(value); + if (expected !== void 0 && !Number.isNaN(expected) && expected !== Infinity && expected !== -Infinity) { + if (Math.abs(expected) > MAX_FLOAT) { + throw new TypeError(`Expected 32-bit float, got ${value}`); + } + } + return expected; +}, "expectFloat32"); +var expectLong = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (Number.isInteger(value) && !Number.isNaN(value)) { + return value; + } + throw new TypeError(`Expected integer, got ${typeof value}: ${value}`); +}, "expectLong"); +var expectInt = expectLong; +var expectInt32 = /* @__PURE__ */ __name((value) => expectSizedInt(value, 32), "expectInt32"); +var expectShort = /* @__PURE__ */ __name((value) => expectSizedInt(value, 16), "expectShort"); +var expectByte = /* @__PURE__ */ __name((value) => expectSizedInt(value, 8), "expectByte"); +var expectSizedInt = /* @__PURE__ */ __name((value, size) => { + const expected = expectLong(value); + if (expected !== void 0 && castInt(expected, size) !== expected) { + throw new TypeError(`Expected ${size}-bit integer, got ${value}`); + } + return expected; +}, "expectSizedInt"); +var castInt = /* @__PURE__ */ __name((value, size) => { + switch (size) { + case 32: + return Int32Array.of(value)[0]; + case 16: + return Int16Array.of(value)[0]; + case 8: + return Int8Array.of(value)[0]; + } +}, "castInt"); +var expectNonNull = /* @__PURE__ */ __name((value, location) => { + if (value === null || value === void 0) { + if (location) { + throw new TypeError(`Expected a non-null value for ${location}`); + } + throw new TypeError("Expected a non-null value"); + } + return value; +}, "expectNonNull"); +var expectObject = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "object" && !Array.isArray(value)) { + return value; + } + const receivedType = Array.isArray(value) ? 
"array" : typeof value; + throw new TypeError(`Expected object, got ${receivedType}: ${value}`); +}, "expectObject"); +var expectString = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "string") { + return value; + } + if (["boolean", "number", "bigint"].includes(typeof value)) { + logger.warn(stackTraceWarning(`Expected string, got ${typeof value}: ${value}`)); + return String(value); + } + throw new TypeError(`Expected string, got ${typeof value}: ${value}`); +}, "expectString"); +var expectUnion = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + const asObject = expectObject(value); + const setKeys = Object.entries(asObject).filter(([, v]) => v != null).map(([k]) => k); + if (setKeys.length === 0) { + throw new TypeError(`Unions must have exactly one non-null member. None were found.`); + } + if (setKeys.length > 1) { + throw new TypeError(`Unions must have exactly one non-null member. Keys ${setKeys} were not null.`); + } + return asObject; +}, "expectUnion"); +var strictParseDouble = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return expectNumber(parseNumber(value)); + } + return expectNumber(value); +}, "strictParseDouble"); +var strictParseFloat = strictParseDouble; +var strictParseFloat32 = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return expectFloat32(parseNumber(value)); + } + return expectFloat32(value); +}, "strictParseFloat32"); +var NUMBER_REGEX = /(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)|(-?Infinity)|(NaN)/g; +var parseNumber = /* @__PURE__ */ __name((value) => { + const matches = value.match(NUMBER_REGEX); + if (matches === null || matches[0].length !== value.length) { + throw new TypeError(`Expected real number, got implicit NaN`); + } + return parseFloat(value); +}, "parseNumber"); +var limitedParseDouble = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return expectNumber(value); +}, "limitedParseDouble"); +var handleFloat = limitedParseDouble; +var limitedParseFloat = limitedParseDouble; +var limitedParseFloat32 = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return expectFloat32(value); +}, "limitedParseFloat32"); +var parseFloatString = /* @__PURE__ */ __name((value) => { + switch (value) { + case "NaN": + return NaN; + case "Infinity": + return Infinity; + case "-Infinity": + return -Infinity; + default: + throw new Error(`Unable to parse float value: ${value}`); + } +}, "parseFloatString"); +var strictParseLong = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectLong(parseNumber(value)); + } + return expectLong(value); +}, "strictParseLong"); +var strictParseInt = strictParseLong; +var strictParseInt32 = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectInt32(parseNumber(value)); + } + return expectInt32(value); +}, "strictParseInt32"); +var strictParseShort = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectShort(parseNumber(value)); + } + return expectShort(value); +}, "strictParseShort"); +var strictParseByte = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectByte(parseNumber(value)); + } + return expectByte(value); +}, "strictParseByte"); +var stackTraceWarning = /* @__PURE__ */ __name((message) => { + 
return String(new TypeError(message).stack || message).split("\n").slice(0, 5).filter((s) => !s.includes("stackTraceWarning")).join("\n"); +}, "stackTraceWarning"); +var logger = { + warn: console.warn +}; + +// src/date-utils.ts +var DAYS = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]; +var MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]; +function dateToUtcString(date) { + const year = date.getUTCFullYear(); + const month = date.getUTCMonth(); + const dayOfWeek = date.getUTCDay(); + const dayOfMonthInt = date.getUTCDate(); + const hoursInt = date.getUTCHours(); + const minutesInt = date.getUTCMinutes(); + const secondsInt = date.getUTCSeconds(); + const dayOfMonthString = dayOfMonthInt < 10 ? `0${dayOfMonthInt}` : `${dayOfMonthInt}`; + const hoursString = hoursInt < 10 ? `0${hoursInt}` : `${hoursInt}`; + const minutesString = minutesInt < 10 ? `0${minutesInt}` : `${minutesInt}`; + const secondsString = secondsInt < 10 ? `0${secondsInt}` : `${secondsInt}`; + return `${DAYS[dayOfWeek]}, ${dayOfMonthString} ${MONTHS[month]} ${year} ${hoursString}:${minutesString}:${secondsString} GMT`; +} +__name(dateToUtcString, "dateToUtcString"); +var RFC3339 = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?[zZ]$/); +var parseRfc3339DateTime = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds] = match; + const year = strictParseShort(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + return buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); +}, "parseRfc3339DateTime"); +var RFC3339_WITH_OFFSET = new RegExp( + /^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?(([-+]\d{2}\:\d{2})|[zZ])$/ +); +var parseRfc3339DateTimeWithOffset = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339_WITH_OFFSET.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, offsetStr] = match; + const year = strictParseShort(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + const date = buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); + if (offsetStr.toUpperCase() != "Z") { + date.setTime(date.getTime() - parseOffsetToMilliseconds(offsetStr)); + } + return date; +}, "parseRfc3339DateTimeWithOffset"); +var IMF_FIXDATE = new RegExp( + /^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), (\d{2}) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) (\d{4}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/ +); +var RFC_850_DATE = new RegExp( + /^(?:Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday), (\d{2})-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-(\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? 
GMT$/ +); +var ASC_TIME = new RegExp( + /^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) ( [1-9]|\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? (\d{4})$/ +); +var parseRfc7231DateTime = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value !== "string") { + throw new TypeError("RFC-7231 date-times must be expressed as strings"); + } + let match = IMF_FIXDATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return buildDate( + strictParseShort(stripLeadingZeroes(yearStr)), + parseMonthByShortName(monthStr), + parseDateValue(dayStr, "day", 1, 31), + { hours, minutes, seconds, fractionalMilliseconds } + ); + } + match = RFC_850_DATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return adjustRfc850Year( + buildDate(parseTwoDigitYear(yearStr), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { + hours, + minutes, + seconds, + fractionalMilliseconds + }) + ); + } + match = ASC_TIME.exec(value); + if (match) { + const [_, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, yearStr] = match; + return buildDate( + strictParseShort(stripLeadingZeroes(yearStr)), + parseMonthByShortName(monthStr), + parseDateValue(dayStr.trimLeft(), "day", 1, 31), + { hours, minutes, seconds, fractionalMilliseconds } + ); + } + throw new TypeError("Invalid RFC-7231 date-time value"); +}, "parseRfc7231DateTime"); +var parseEpochTimestamp = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + let valueAsDouble; + if (typeof value === "number") { + valueAsDouble = value; + } else if (typeof value === "string") { + valueAsDouble = strictParseDouble(value); + } else if (typeof value === "object" && value.tag === 1) { + valueAsDouble = value.value; + } else { + throw new TypeError("Epoch timestamps must be expressed as floating point numbers or their string representation"); + } + if (Number.isNaN(valueAsDouble) || valueAsDouble === Infinity || valueAsDouble === -Infinity) { + throw new TypeError("Epoch timestamps must be valid, non-Infinite, non-NaN numerics"); + } + return new Date(Math.round(valueAsDouble * 1e3)); +}, "parseEpochTimestamp"); +var buildDate = /* @__PURE__ */ __name((year, month, day, time) => { + const adjustedMonth = month - 1; + validateDayOfMonth(year, adjustedMonth, day); + return new Date( + Date.UTC( + year, + adjustedMonth, + day, + parseDateValue(time.hours, "hour", 0, 23), + parseDateValue(time.minutes, "minute", 0, 59), + // seconds can go up to 60 for leap seconds + parseDateValue(time.seconds, "seconds", 0, 60), + parseMilliseconds(time.fractionalMilliseconds) + ) + ); +}, "buildDate"); +var parseTwoDigitYear = /* @__PURE__ */ __name((value) => { + const thisYear = (/* @__PURE__ */ new Date()).getUTCFullYear(); + const valueInThisCentury = Math.floor(thisYear / 100) * 100 + strictParseShort(stripLeadingZeroes(value)); + if (valueInThisCentury < thisYear) { + return valueInThisCentury + 100; + } + return valueInThisCentury; +}, "parseTwoDigitYear"); +var FIFTY_YEARS_IN_MILLIS = 50 * 365 * 24 * 60 * 60 * 1e3; +var adjustRfc850Year = /* @__PURE__ */ __name((input) => { + if (input.getTime() - (/* @__PURE__ */ new Date()).getTime() > FIFTY_YEARS_IN_MILLIS) { + return new Date( + Date.UTC( + input.getUTCFullYear() - 100, + input.getUTCMonth(), + 
input.getUTCDate(), + input.getUTCHours(), + input.getUTCMinutes(), + input.getUTCSeconds(), + input.getUTCMilliseconds() + ) + ); + } + return input; +}, "adjustRfc850Year"); +var parseMonthByShortName = /* @__PURE__ */ __name((value) => { + const monthIdx = MONTHS.indexOf(value); + if (monthIdx < 0) { + throw new TypeError(`Invalid month: ${value}`); + } + return monthIdx + 1; +}, "parseMonthByShortName"); +var DAYS_IN_MONTH = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]; +var validateDayOfMonth = /* @__PURE__ */ __name((year, month, day) => { + let maxDays = DAYS_IN_MONTH[month]; + if (month === 1 && isLeapYear(year)) { + maxDays = 29; + } + if (day > maxDays) { + throw new TypeError(`Invalid day for ${MONTHS[month]} in ${year}: ${day}`); + } +}, "validateDayOfMonth"); +var isLeapYear = /* @__PURE__ */ __name((year) => { + return year % 4 === 0 && (year % 100 !== 0 || year % 400 === 0); +}, "isLeapYear"); +var parseDateValue = /* @__PURE__ */ __name((value, type, lower, upper) => { + const dateVal = strictParseByte(stripLeadingZeroes(value)); + if (dateVal < lower || dateVal > upper) { + throw new TypeError(`${type} must be between ${lower} and ${upper}, inclusive`); + } + return dateVal; +}, "parseDateValue"); +var parseMilliseconds = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return 0; + } + return strictParseFloat32("0." + value) * 1e3; +}, "parseMilliseconds"); +var parseOffsetToMilliseconds = /* @__PURE__ */ __name((value) => { + const directionStr = value[0]; + let direction = 1; + if (directionStr == "+") { + direction = 1; + } else if (directionStr == "-") { + direction = -1; + } else { + throw new TypeError(`Offset direction, ${directionStr}, must be "+" or "-"`); + } + const hour = Number(value.substring(1, 3)); + const minute = Number(value.substring(4, 6)); + return direction * (hour * 60 + minute) * 60 * 1e3; +}, "parseOffsetToMilliseconds"); +var stripLeadingZeroes = /* @__PURE__ */ __name((value) => { + let idx = 0; + while (idx < value.length - 1 && value.charAt(idx) === "0") { + idx++; + } + if (idx === 0) { + return value; + } + return value.slice(idx); +}, "stripLeadingZeroes"); + +// src/exceptions.ts +var ServiceException = class _ServiceException extends Error { + static { + __name(this, "ServiceException"); + } + constructor(options) { + super(options.message); + Object.setPrototypeOf(this, Object.getPrototypeOf(this).constructor.prototype); + this.name = options.name; + this.$fault = options.$fault; + this.$metadata = options.$metadata; + } + /** + * Checks if a value is an instance of ServiceException (duck typed) + */ + static isInstance(value) { + if (!value) + return false; + const candidate = value; + return _ServiceException.prototype.isPrototypeOf(candidate) || Boolean(candidate.$fault) && Boolean(candidate.$metadata) && (candidate.$fault === "client" || candidate.$fault === "server"); + } + /** + * Custom instanceof check to support the operator for ServiceException base class + */ + static [Symbol.hasInstance](instance) { + if (!instance) + return false; + const candidate = instance; + if (this === _ServiceException) { + return _ServiceException.isInstance(instance); + } + if (_ServiceException.isInstance(instance)) { + if (candidate.name && this.name) { + return this.prototype.isPrototypeOf(instance) || candidate.name === this.name; + } + return this.prototype.isPrototypeOf(instance); + } + return false; + } +}; +var decorateServiceException = /* @__PURE__ */ __name((exception, additions = {}) => { + 
Object.entries(additions).filter(([, v]) => v !== void 0).forEach(([k, v]) => { + if (exception[k] == void 0 || exception[k] === "") { + exception[k] = v; + } + }); + const message = exception.message || exception.Message || "UnknownError"; + exception.message = message; + delete exception.Message; + return exception; +}, "decorateServiceException"); + +// src/default-error-handler.ts +var throwDefaultError = /* @__PURE__ */ __name(({ output, parsedBody, exceptionCtor, errorCode }) => { + const $metadata = deserializeMetadata(output); + const statusCode = $metadata.httpStatusCode ? $metadata.httpStatusCode + "" : void 0; + const response = new exceptionCtor({ + name: parsedBody?.code || parsedBody?.Code || errorCode || statusCode || "UnknownError", + $fault: "client", + $metadata + }); + throw decorateServiceException(response, parsedBody); +}, "throwDefaultError"); +var withBaseException = /* @__PURE__ */ __name((ExceptionCtor) => { + return ({ output, parsedBody, errorCode }) => { + throwDefaultError({ output, parsedBody, exceptionCtor: ExceptionCtor, errorCode }); + }; +}, "withBaseException"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); + +// src/defaults-mode.ts +var loadConfigsForDefaultMode = /* @__PURE__ */ __name((mode) => { + switch (mode) { + case "standard": + return { + retryMode: "standard", + connectionTimeout: 3100 + }; + case "in-region": + return { + retryMode: "standard", + connectionTimeout: 1100 + }; + case "cross-region": + return { + retryMode: "standard", + connectionTimeout: 3100 + }; + case "mobile": + return { + retryMode: "standard", + connectionTimeout: 3e4 + }; + default: + return {}; + } +}, "loadConfigsForDefaultMode"); + +// src/emitWarningIfUnsupportedVersion.ts +var warningEmitted = false; +var emitWarningIfUnsupportedVersion = /* @__PURE__ */ __name((version) => { + if (version && !warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 16) { + warningEmitted = true; + } +}, "emitWarningIfUnsupportedVersion"); + +// src/extended-encode-uri-component.ts + + +// src/extensions/checksum.ts + +var getChecksumConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const checksumAlgorithms = []; + for (const id in import_types.AlgorithmId) { + const algorithmId = import_types.AlgorithmId[id]; + if (runtimeConfig[algorithmId] === void 0) { + continue; + } + checksumAlgorithms.push({ + algorithmId: () => algorithmId, + checksumConstructor: () => runtimeConfig[algorithmId] + }); + } + return { + addChecksumAlgorithm(algo) { + checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return checksumAlgorithms; + } + }; +}, "getChecksumConfiguration"); +var resolveChecksumRuntimeConfig = /* @__PURE__ */ __name((clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}, "resolveChecksumRuntimeConfig"); + +// src/extensions/retry.ts +var getRetryConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return { + setRetryStrategy(retryStrategy) { + runtimeConfig.retryStrategy = retryStrategy; + }, + retryStrategy() { + return runtimeConfig.retryStrategy; + } + 
}; +}, "getRetryConfiguration"); +var resolveRetryRuntimeConfig = /* @__PURE__ */ __name((retryStrategyConfiguration) => { + const runtimeConfig = {}; + runtimeConfig.retryStrategy = retryStrategyConfiguration.retryStrategy(); + return runtimeConfig; +}, "resolveRetryRuntimeConfig"); + +// src/extensions/defaultExtensionConfiguration.ts +var getDefaultExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return Object.assign(getChecksumConfiguration(runtimeConfig), getRetryConfiguration(runtimeConfig)); +}, "getDefaultExtensionConfiguration"); +var getDefaultClientConfiguration = getDefaultExtensionConfiguration; +var resolveDefaultRuntimeConfig = /* @__PURE__ */ __name((config) => { + return Object.assign(resolveChecksumRuntimeConfig(config), resolveRetryRuntimeConfig(config)); +}, "resolveDefaultRuntimeConfig"); + +// src/get-array-if-single-item.ts +var getArrayIfSingleItem = /* @__PURE__ */ __name((mayBeArray) => Array.isArray(mayBeArray) ? mayBeArray : [mayBeArray], "getArrayIfSingleItem"); + +// src/get-value-from-text-node.ts +var getValueFromTextNode = /* @__PURE__ */ __name((obj) => { + const textNodeName = "#text"; + for (const key in obj) { + if (obj.hasOwnProperty(key) && obj[key][textNodeName] !== void 0) { + obj[key] = obj[key][textNodeName]; + } else if (typeof obj[key] === "object" && obj[key] !== null) { + obj[key] = getValueFromTextNode(obj[key]); + } + } + return obj; +}, "getValueFromTextNode"); + +// src/is-serializable-header-value.ts +var isSerializableHeaderValue = /* @__PURE__ */ __name((value) => { + return value != null; +}, "isSerializableHeaderValue"); + +// src/lazy-json.ts +var LazyJsonString = /* @__PURE__ */ __name(function LazyJsonString2(val) { + const str = Object.assign(new String(val), { + deserializeJSON() { + return JSON.parse(String(val)); + }, + toString() { + return String(val); + }, + toJSON() { + return String(val); + } + }); + return str; +}, "LazyJsonString"); +LazyJsonString.from = (object) => { + if (object && typeof object === "object" && (object instanceof LazyJsonString || "deserializeJSON" in object)) { + return object; + } else if (typeof object === "string" || Object.getPrototypeOf(object) === String.prototype) { + return LazyJsonString(String(object)); + } + return LazyJsonString(JSON.stringify(object)); +}; +LazyJsonString.fromObject = LazyJsonString.from; + +// src/NoOpLogger.ts +var NoOpLogger = class { + static { + __name(this, "NoOpLogger"); + } + trace() { + } + debug() { + } + info() { + } + warn() { + } + error() { + } +}; + +// src/object-mapping.ts +function map(arg0, arg1, arg2) { + let target; + let filter; + let instructions; + if (typeof arg1 === "undefined" && typeof arg2 === "undefined") { + target = {}; + instructions = arg0; + } else { + target = arg0; + if (typeof arg1 === "function") { + filter = arg1; + instructions = arg2; + return mapWithFilter(target, filter, instructions); + } else { + instructions = arg1; + } + } + for (const key of Object.keys(instructions)) { + if (!Array.isArray(instructions[key])) { + target[key] = instructions[key]; + continue; + } + applyInstruction(target, null, instructions, key); + } + return target; +} +__name(map, "map"); +var convertMap = /* @__PURE__ */ __name((target) => { + const output = {}; + for (const [k, v] of Object.entries(target || {})) { + output[k] = [, v]; + } + return output; +}, "convertMap"); +var take = /* @__PURE__ */ __name((source, instructions) => { + const out = {}; + for (const key in instructions) { + applyInstruction(out, source, 
instructions, key); + } + return out; +}, "take"); +var mapWithFilter = /* @__PURE__ */ __name((target, filter, instructions) => { + return map( + target, + Object.entries(instructions).reduce( + (_instructions, [key, value]) => { + if (Array.isArray(value)) { + _instructions[key] = value; + } else { + if (typeof value === "function") { + _instructions[key] = [filter, value()]; + } else { + _instructions[key] = [filter, value]; + } + } + return _instructions; + }, + {} + ) + ); +}, "mapWithFilter"); +var applyInstruction = /* @__PURE__ */ __name((target, source, instructions, targetKey) => { + if (source !== null) { + let instruction = instructions[targetKey]; + if (typeof instruction === "function") { + instruction = [, instruction]; + } + const [filter2 = nonNullish, valueFn = pass, sourceKey = targetKey] = instruction; + if (typeof filter2 === "function" && filter2(source[sourceKey]) || typeof filter2 !== "function" && !!filter2) { + target[targetKey] = valueFn(source[sourceKey]); + } + return; + } + let [filter, value] = instructions[targetKey]; + if (typeof value === "function") { + let _value; + const defaultFilterPassed = filter === void 0 && (_value = value()) != null; + const customFilterPassed = typeof filter === "function" && !!filter(void 0) || typeof filter !== "function" && !!filter; + if (defaultFilterPassed) { + target[targetKey] = _value; + } else if (customFilterPassed) { + target[targetKey] = value(); + } + } else { + const defaultFilterPassed = filter === void 0 && value != null; + const customFilterPassed = typeof filter === "function" && !!filter(value) || typeof filter !== "function" && !!filter; + if (defaultFilterPassed || customFilterPassed) { + target[targetKey] = value; + } + } +}, "applyInstruction"); +var nonNullish = /* @__PURE__ */ __name((_) => _ != null, "nonNullish"); +var pass = /* @__PURE__ */ __name((_) => _, "pass"); + +// src/quote-header.ts +function quoteHeader(part) { + if (part.includes(",") || part.includes('"')) { + part = `"${part.replace(/"/g, '\\"')}"`; + } + return part; +} +__name(quoteHeader, "quoteHeader"); + +// src/resolve-path.ts + + +// src/ser-utils.ts +var serializeFloat = /* @__PURE__ */ __name((value) => { + if (value !== value) { + return "NaN"; + } + switch (value) { + case Infinity: + return "Infinity"; + case -Infinity: + return "-Infinity"; + default: + return value; + } +}, "serializeFloat"); +var serializeDateTime = /* @__PURE__ */ __name((date) => date.toISOString().replace(".000Z", "Z"), "serializeDateTime"); + +// src/serde-json.ts +var _json = /* @__PURE__ */ __name((obj) => { + if (obj == null) { + return {}; + } + if (Array.isArray(obj)) { + return obj.filter((_) => _ != null).map(_json); + } + if (typeof obj === "object") { + const target = {}; + for (const key of Object.keys(obj)) { + if (obj[key] == null) { + continue; + } + target[key] = _json(obj[key]); + } + return target; + } + return obj; +}, "_json"); + +// src/split-every.ts +function splitEvery(value, delimiter, numDelimiters) { + if (numDelimiters <= 0 || !Number.isInteger(numDelimiters)) { + throw new Error("Invalid number of delimiters (" + numDelimiters + ") for splitEvery."); + } + const segments = value.split(delimiter); + if (numDelimiters === 1) { + return segments; + } + const compoundSegments = []; + let currentSegment = ""; + for (let i = 0; i < segments.length; i++) { + if (currentSegment === "") { + currentSegment = segments[i]; + } else { + currentSegment += delimiter + segments[i]; + } + if ((i + 1) % numDelimiters === 0) { + 
compoundSegments.push(currentSegment); + currentSegment = ""; + } + } + if (currentSegment !== "") { + compoundSegments.push(currentSegment); + } + return compoundSegments; +} +__name(splitEvery, "splitEvery"); + +// src/split-header.ts +var splitHeader = /* @__PURE__ */ __name((value) => { + const z = value.length; + const values = []; + let withinQuotes = false; + let prevChar = void 0; + let anchor = 0; + for (let i = 0; i < z; ++i) { + const char = value[i]; + switch (char) { + case `"`: + if (prevChar !== "\\") { + withinQuotes = !withinQuotes; + } + break; + case ",": + if (!withinQuotes) { + values.push(value.slice(anchor, i)); + anchor = i + 1; + } + break; + default: + } + prevChar = char; + } + values.push(value.slice(anchor)); + return values.map((v) => { + v = v.trim(); + const z2 = v.length; + if (z2 < 2) { + return v; + } + if (v[0] === `"` && v[z2 - 1] === `"`) { + v = v.slice(1, z2 - 1); + } + return v.replace(/\\"/g, '"'); + }); +}, "splitHeader"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + Client, + collectBody, + Command, + SENSITIVE_STRING, + createAggregatedClient, + dateToUtcString, + parseRfc3339DateTime, + parseRfc3339DateTimeWithOffset, + parseRfc7231DateTime, + parseEpochTimestamp, + throwDefaultError, + withBaseException, + loadConfigsForDefaultMode, + emitWarningIfUnsupportedVersion, + ServiceException, + decorateServiceException, + extendedEncodeURIComponent, + getDefaultExtensionConfiguration, + getDefaultClientConfiguration, + resolveDefaultRuntimeConfig, + getArrayIfSingleItem, + getValueFromTextNode, + isSerializableHeaderValue, + LazyJsonString, + NoOpLogger, + map, + convertMap, + take, + parseBoolean, + expectBoolean, + expectNumber, + expectFloat32, + expectLong, + expectInt, + expectInt32, + expectShort, + expectByte, + expectNonNull, + expectObject, + expectString, + expectUnion, + strictParseDouble, + strictParseFloat, + strictParseFloat32, + limitedParseDouble, + handleFloat, + limitedParseFloat, + limitedParseFloat32, + strictParseLong, + strictParseInt, + strictParseInt32, + strictParseShort, + strictParseByte, + logger, + quoteHeader, + resolvedPath, + serializeFloat, + serializeDateTime, + _json, + splitEvery, + splitHeader +}); + diff --git a/node_modules/@smithy/smithy-client/dist-cjs/is-serializable-header-value.js b/node_modules/@smithy/smithy-client/dist-cjs/is-serializable-header-value.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/is-serializable-header-value.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/lazy-json.js b/node_modules/@smithy/smithy-client/dist-cjs/lazy-json.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/lazy-json.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/object-mapping.js b/node_modules/@smithy/smithy-client/dist-cjs/object-mapping.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/object-mapping.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/parse-utils.js b/node_modules/@smithy/smithy-client/dist-cjs/parse-utils.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ 
b/node_modules/@smithy/smithy-client/dist-cjs/parse-utils.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/quote-header.js b/node_modules/@smithy/smithy-client/dist-cjs/quote-header.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/quote-header.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/resolve-path.js b/node_modules/@smithy/smithy-client/dist-cjs/resolve-path.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/resolve-path.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/ser-utils.js b/node_modules/@smithy/smithy-client/dist-cjs/ser-utils.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/ser-utils.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/serde-json.js b/node_modules/@smithy/smithy-client/dist-cjs/serde-json.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/serde-json.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/split-every.js b/node_modules/@smithy/smithy-client/dist-cjs/split-every.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/split-every.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-cjs/split-header.js b/node_modules/@smithy/smithy-client/dist-cjs/split-header.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-cjs/split-header.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/smithy-client/dist-es/NoOpLogger.js b/node_modules/@smithy/smithy-client/dist-es/NoOpLogger.js new file mode 100644 index 00000000..73cd0764 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/NoOpLogger.js @@ -0,0 +1,7 @@ +export class NoOpLogger { + trace() { } + debug() { } + info() { } + warn() { } + error() { } +} diff --git a/node_modules/@smithy/smithy-client/dist-es/client.js b/node_modules/@smithy/smithy-client/dist-es/client.js new file mode 100644 index 00000000..56cbc2ec --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/client.js @@ -0,0 +1,42 @@ +import { constructStack } from "@smithy/middleware-stack"; +export class Client { + constructor(config) { + this.config = config; + this.middlewareStack = constructStack(); + } + send(command, optionsOrCb, cb) { + const options = typeof optionsOrCb !== "function" ? optionsOrCb : undefined; + const callback = typeof optionsOrCb === "function" ? 
optionsOrCb : cb; + const useHandlerCache = options === undefined && this.config.cacheMiddleware === true; + let handler; + if (useHandlerCache) { + if (!this.handlers) { + this.handlers = new WeakMap(); + } + const handlers = this.handlers; + if (handlers.has(command.constructor)) { + handler = handlers.get(command.constructor); + } + else { + handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + handlers.set(command.constructor, handler); + } + } + else { + delete this.handlers; + handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + } + if (callback) { + handler(command) + .then((result) => callback(null, result.output), (err) => callback(err)) + .catch(() => { }); + } + else { + return handler(command).then((result) => result.output); + } + } + destroy() { + this.config?.requestHandler?.destroy?.(); + delete this.handlers; + } +} diff --git a/node_modules/@smithy/smithy-client/dist-es/collect-stream-body.js b/node_modules/@smithy/smithy-client/dist-es/collect-stream-body.js new file mode 100644 index 00000000..2b76f0a0 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/collect-stream-body.js @@ -0,0 +1 @@ +export { collectBody } from "@smithy/core/protocols"; diff --git a/node_modules/@smithy/smithy-client/dist-es/command.js b/node_modules/@smithy/smithy-client/dist-es/command.js new file mode 100644 index 00000000..078eecbb --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/command.js @@ -0,0 +1,115 @@ +import { constructStack } from "@smithy/middleware-stack"; +import { SMITHY_CONTEXT_KEY } from "@smithy/types"; +export class Command { + constructor() { + this.middlewareStack = constructStack(); + } + static classBuilder() { + return new ClassBuilder(); + } + resolveMiddlewareWithContext(clientStack, configuration, options, { middlewareFn, clientName, commandName, inputFilterSensitiveLog, outputFilterSensitiveLog, smithyContext, additionalContext, CommandCtor, }) { + for (const mw of middlewareFn.bind(this)(CommandCtor, clientStack, configuration, options)) { + this.middlewareStack.use(mw); + } + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog, + outputFilterSensitiveLog, + [SMITHY_CONTEXT_KEY]: { + commandInstance: this, + ...smithyContext, + }, + ...additionalContext, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } +} +class ClassBuilder { + constructor() { + this._init = () => { }; + this._ep = {}; + this._middlewareFn = () => []; + this._commandName = ""; + this._clientName = ""; + this._additionalContext = {}; + this._smithyContext = {}; + this._inputFilterSensitiveLog = (_) => _; + this._outputFilterSensitiveLog = (_) => _; + this._serializer = null; + this._deserializer = null; + } + init(cb) { + this._init = cb; + } + ep(endpointParameterInstructions) { + this._ep = endpointParameterInstructions; + return this; + } + m(middlewareSupplier) { + this._middlewareFn = middlewareSupplier; + return this; + } + s(service, operation, smithyContext = {}) { + this._smithyContext = { + service, + operation, + ...smithyContext, + }; + return this; + } + c(additionalContext = {}) { + this._additionalContext = additionalContext; + return this; + } + n(clientName, commandName) { + this._clientName = clientName; + this._commandName = commandName; + 
return this; + } + f(inputFilter = (_) => _, outputFilter = (_) => _) { + this._inputFilterSensitiveLog = inputFilter; + this._outputFilterSensitiveLog = outputFilter; + return this; + } + ser(serializer) { + this._serializer = serializer; + return this; + } + de(deserializer) { + this._deserializer = deserializer; + return this; + } + build() { + const closure = this; + let CommandRef; + return (CommandRef = class extends Command { + static getEndpointParameterInstructions() { + return closure._ep; + } + constructor(...[input]) { + super(); + this.serialize = closure._serializer; + this.deserialize = closure._deserializer; + this.input = input ?? {}; + closure._init(this); + } + resolveMiddleware(stack, configuration, options) { + return this.resolveMiddlewareWithContext(stack, configuration, options, { + CommandCtor: CommandRef, + middlewareFn: closure._middlewareFn, + clientName: closure._clientName, + commandName: closure._commandName, + inputFilterSensitiveLog: closure._inputFilterSensitiveLog, + outputFilterSensitiveLog: closure._outputFilterSensitiveLog, + smithyContext: closure._smithyContext, + additionalContext: closure._additionalContext, + }); + } + }); + } +} diff --git a/node_modules/@smithy/smithy-client/dist-es/constants.js b/node_modules/@smithy/smithy-client/dist-es/constants.js new file mode 100644 index 00000000..9b193d78 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/constants.js @@ -0,0 +1 @@ +export const SENSITIVE_STRING = "***SensitiveInformation***"; diff --git a/node_modules/@smithy/smithy-client/dist-es/create-aggregated-client.js b/node_modules/@smithy/smithy-client/dist-es/create-aggregated-client.js new file mode 100644 index 00000000..44cf4dc2 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/create-aggregated-client.js @@ -0,0 +1,21 @@ +export const createAggregatedClient = (commands, Client) => { + for (const command of Object.keys(commands)) { + const CommandCtor = commands[command]; + const methodImpl = async function (args, optionsOrCb, cb) { + const command = new CommandCtor(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") + throw new Error(`Expected http options but got ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + }; + const methodName = (command[0].toLowerCase() + command.slice(1)).replace(/Command$/, ""); + Client.prototype[methodName] = methodImpl; + } +}; diff --git a/node_modules/@smithy/smithy-client/dist-es/date-utils.js b/node_modules/@smithy/smithy-client/dist-es/date-utils.js new file mode 100644 index 00000000..0d0abf25 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/date-utils.js @@ -0,0 +1,190 @@ +import { strictParseByte, strictParseDouble, strictParseFloat32, strictParseShort } from "./parse-utils"; +const DAYS = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]; +const MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]; +export function dateToUtcString(date) { + const year = date.getUTCFullYear(); + const month = date.getUTCMonth(); + const dayOfWeek = date.getUTCDay(); + const dayOfMonthInt = date.getUTCDate(); + const hoursInt = date.getUTCHours(); + const minutesInt = date.getUTCMinutes(); + const secondsInt = date.getUTCSeconds(); + const dayOfMonthString = dayOfMonthInt < 10 ? 
`0${dayOfMonthInt}` : `${dayOfMonthInt}`; + const hoursString = hoursInt < 10 ? `0${hoursInt}` : `${hoursInt}`; + const minutesString = minutesInt < 10 ? `0${minutesInt}` : `${minutesInt}`; + const secondsString = secondsInt < 10 ? `0${secondsInt}` : `${secondsInt}`; + return `${DAYS[dayOfWeek]}, ${dayOfMonthString} ${MONTHS[month]} ${year} ${hoursString}:${minutesString}:${secondsString} GMT`; +} +const RFC3339 = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?[zZ]$/); +export const parseRfc3339DateTime = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds] = match; + const year = strictParseShort(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + return buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); +}; +const RFC3339_WITH_OFFSET = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?(([-+]\d{2}\:\d{2})|[zZ])$/); +export const parseRfc3339DateTimeWithOffset = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339_WITH_OFFSET.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, offsetStr] = match; + const year = strictParseShort(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + const date = buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); + if (offsetStr.toUpperCase() != "Z") { + date.setTime(date.getTime() - parseOffsetToMilliseconds(offsetStr)); + } + return date; +}; +const IMF_FIXDATE = new RegExp(/^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), (\d{2}) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) (\d{4}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/); +const RFC_850_DATE = new RegExp(/^(?:Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday), (\d{2})-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-(\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/); +const ASC_TIME = new RegExp(/^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) ( [1-9]|\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? 
(\d{4})$/); +export const parseRfc7231DateTime = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-7231 date-times must be expressed as strings"); + } + let match = IMF_FIXDATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return buildDate(strictParseShort(stripLeadingZeroes(yearStr)), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { hours, minutes, seconds, fractionalMilliseconds }); + } + match = RFC_850_DATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return adjustRfc850Year(buildDate(parseTwoDigitYear(yearStr), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { + hours, + minutes, + seconds, + fractionalMilliseconds, + })); + } + match = ASC_TIME.exec(value); + if (match) { + const [_, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, yearStr] = match; + return buildDate(strictParseShort(stripLeadingZeroes(yearStr)), parseMonthByShortName(monthStr), parseDateValue(dayStr.trimLeft(), "day", 1, 31), { hours, minutes, seconds, fractionalMilliseconds }); + } + throw new TypeError("Invalid RFC-7231 date-time value"); +}; +export const parseEpochTimestamp = (value) => { + if (value === null || value === undefined) { + return undefined; + } + let valueAsDouble; + if (typeof value === "number") { + valueAsDouble = value; + } + else if (typeof value === "string") { + valueAsDouble = strictParseDouble(value); + } + else if (typeof value === "object" && value.tag === 1) { + valueAsDouble = value.value; + } + else { + throw new TypeError("Epoch timestamps must be expressed as floating point numbers or their string representation"); + } + if (Number.isNaN(valueAsDouble) || valueAsDouble === Infinity || valueAsDouble === -Infinity) { + throw new TypeError("Epoch timestamps must be valid, non-Infinite, non-NaN numerics"); + } + return new Date(Math.round(valueAsDouble * 1000)); +}; +const buildDate = (year, month, day, time) => { + const adjustedMonth = month - 1; + validateDayOfMonth(year, adjustedMonth, day); + return new Date(Date.UTC(year, adjustedMonth, day, parseDateValue(time.hours, "hour", 0, 23), parseDateValue(time.minutes, "minute", 0, 59), parseDateValue(time.seconds, "seconds", 0, 60), parseMilliseconds(time.fractionalMilliseconds))); +}; +const parseTwoDigitYear = (value) => { + const thisYear = new Date().getUTCFullYear(); + const valueInThisCentury = Math.floor(thisYear / 100) * 100 + strictParseShort(stripLeadingZeroes(value)); + if (valueInThisCentury < thisYear) { + return valueInThisCentury + 100; + } + return valueInThisCentury; +}; +const FIFTY_YEARS_IN_MILLIS = 50 * 365 * 24 * 60 * 60 * 1000; +const adjustRfc850Year = (input) => { + if (input.getTime() - new Date().getTime() > FIFTY_YEARS_IN_MILLIS) { + return new Date(Date.UTC(input.getUTCFullYear() - 100, input.getUTCMonth(), input.getUTCDate(), input.getUTCHours(), input.getUTCMinutes(), input.getUTCSeconds(), input.getUTCMilliseconds())); + } + return input; +}; +const parseMonthByShortName = (value) => { + const monthIdx = MONTHS.indexOf(value); + if (monthIdx < 0) { + throw new TypeError(`Invalid month: ${value}`); + } + return monthIdx + 1; +}; +const DAYS_IN_MONTH = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]; +const validateDayOfMonth = (year, month, day) => { + let maxDays = DAYS_IN_MONTH[month]; 
+ if (month === 1 && isLeapYear(year)) { + maxDays = 29; + } + if (day > maxDays) { + throw new TypeError(`Invalid day for ${MONTHS[month]} in ${year}: ${day}`); + } +}; +const isLeapYear = (year) => { + return year % 4 === 0 && (year % 100 !== 0 || year % 400 === 0); +}; +const parseDateValue = (value, type, lower, upper) => { + const dateVal = strictParseByte(stripLeadingZeroes(value)); + if (dateVal < lower || dateVal > upper) { + throw new TypeError(`${type} must be between ${lower} and ${upper}, inclusive`); + } + return dateVal; +}; +const parseMilliseconds = (value) => { + if (value === null || value === undefined) { + return 0; + } + return strictParseFloat32("0." + value) * 1000; +}; +const parseOffsetToMilliseconds = (value) => { + const directionStr = value[0]; + let direction = 1; + if (directionStr == "+") { + direction = 1; + } + else if (directionStr == "-") { + direction = -1; + } + else { + throw new TypeError(`Offset direction, ${directionStr}, must be "+" or "-"`); + } + const hour = Number(value.substring(1, 3)); + const minute = Number(value.substring(4, 6)); + return direction * (hour * 60 + minute) * 60 * 1000; +}; +const stripLeadingZeroes = (value) => { + let idx = 0; + while (idx < value.length - 1 && value.charAt(idx) === "0") { + idx++; + } + if (idx === 0) { + return value; + } + return value.slice(idx); +}; diff --git a/node_modules/@smithy/smithy-client/dist-es/default-error-handler.js b/node_modules/@smithy/smithy-client/dist-es/default-error-handler.js new file mode 100644 index 00000000..7da1091a --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/default-error-handler.js @@ -0,0 +1,22 @@ +import { decorateServiceException } from "./exceptions"; +export const throwDefaultError = ({ output, parsedBody, exceptionCtor, errorCode }) => { + const $metadata = deserializeMetadata(output); + const statusCode = $metadata.httpStatusCode ? $metadata.httpStatusCode + "" : undefined; + const response = new exceptionCtor({ + name: parsedBody?.code || parsedBody?.Code || errorCode || statusCode || "UnknownError", + $fault: "client", + $metadata, + }); + throw decorateServiceException(response, parsedBody); +}; +export const withBaseException = (ExceptionCtor) => { + return ({ output, parsedBody, errorCode }) => { + throwDefaultError({ output, parsedBody, exceptionCtor: ExceptionCtor, errorCode }); + }; +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); diff --git a/node_modules/@smithy/smithy-client/dist-es/defaults-mode.js b/node_modules/@smithy/smithy-client/dist-es/defaults-mode.js new file mode 100644 index 00000000..f19079c0 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/defaults-mode.js @@ -0,0 +1,26 @@ +export const loadConfigsForDefaultMode = (mode) => { + switch (mode) { + case "standard": + return { + retryMode: "standard", + connectionTimeout: 3100, + }; + case "in-region": + return { + retryMode: "standard", + connectionTimeout: 1100, + }; + case "cross-region": + return { + retryMode: "standard", + connectionTimeout: 3100, + }; + case "mobile": + return { + retryMode: "standard", + connectionTimeout: 30000, + }; + default: + return {}; + } +}; diff --git a/node_modules/@smithy/smithy-client/dist-es/emitWarningIfUnsupportedVersion.js b/node_modules/@smithy/smithy-client/dist-es/emitWarningIfUnsupportedVersion.js new file mode 100644 index 00000000..7b308939 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/emitWarningIfUnsupportedVersion.js @@ -0,0 +1,6 @@ +let warningEmitted = false; +export const emitWarningIfUnsupportedVersion = (version) => { + if (version && !warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 16) { + warningEmitted = true; + } +}; diff --git a/node_modules/@smithy/smithy-client/dist-es/exceptions.js b/node_modules/@smithy/smithy-client/dist-es/exceptions.js new file mode 100644 index 00000000..db6a8010 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/exceptions.js @@ -0,0 +1,46 @@ +export class ServiceException extends Error { + constructor(options) { + super(options.message); + Object.setPrototypeOf(this, Object.getPrototypeOf(this).constructor.prototype); + this.name = options.name; + this.$fault = options.$fault; + this.$metadata = options.$metadata; + } + static isInstance(value) { + if (!value) + return false; + const candidate = value; + return (ServiceException.prototype.isPrototypeOf(candidate) || + (Boolean(candidate.$fault) && + Boolean(candidate.$metadata) && + (candidate.$fault === "client" || candidate.$fault === "server"))); + } + static [Symbol.hasInstance](instance) { + if (!instance) + return false; + const candidate = instance; + if (this === ServiceException) { + return ServiceException.isInstance(instance); + } + if (ServiceException.isInstance(instance)) { + if (candidate.name && this.name) { + return this.prototype.isPrototypeOf(instance) || candidate.name === this.name; + } + return this.prototype.isPrototypeOf(instance); + } + return false; + } +} +export const decorateServiceException = (exception, additions = {}) => { + Object.entries(additions) + .filter(([, v]) => v !== undefined) + .forEach(([k, v]) => { + if (exception[k] == undefined || exception[k] === "") { + exception[k] = v; + } + }); + const message = exception.message || exception.Message || "UnknownError"; + exception.message = message; + delete exception.Message; + return exception; +}; diff --git a/node_modules/@smithy/smithy-client/dist-es/extended-encode-uri-component.js b/node_modules/@smithy/smithy-client/dist-es/extended-encode-uri-component.js new file mode 100644 index 00000000..cb4f9916 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/extended-encode-uri-component.js @@ -0,0 +1 @@ +export { extendedEncodeURIComponent } from "@smithy/core/protocols"; diff --git 
a/node_modules/@smithy/smithy-client/dist-es/extensions/checksum.js b/node_modules/@smithy/smithy-client/dist-es/extensions/checksum.js new file mode 100644 index 00000000..f3831ee1 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/extensions/checksum.js @@ -0,0 +1,30 @@ +import { AlgorithmId } from "@smithy/types"; +export { AlgorithmId }; +export const getChecksumConfiguration = (runtimeConfig) => { + const checksumAlgorithms = []; + for (const id in AlgorithmId) { + const algorithmId = AlgorithmId[id]; + if (runtimeConfig[algorithmId] === undefined) { + continue; + } + checksumAlgorithms.push({ + algorithmId: () => algorithmId, + checksumConstructor: () => runtimeConfig[algorithmId], + }); + } + return { + addChecksumAlgorithm(algo) { + checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return checksumAlgorithms; + }, + }; +}; +export const resolveChecksumRuntimeConfig = (clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}; diff --git a/node_modules/@smithy/smithy-client/dist-es/extensions/defaultExtensionConfiguration.js b/node_modules/@smithy/smithy-client/dist-es/extensions/defaultExtensionConfiguration.js new file mode 100644 index 00000000..272cd3ae --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/extensions/defaultExtensionConfiguration.js @@ -0,0 +1,9 @@ +import { getChecksumConfiguration, resolveChecksumRuntimeConfig } from "./checksum"; +import { getRetryConfiguration, resolveRetryRuntimeConfig } from "./retry"; +export const getDefaultExtensionConfiguration = (runtimeConfig) => { + return Object.assign(getChecksumConfiguration(runtimeConfig), getRetryConfiguration(runtimeConfig)); +}; +export const getDefaultClientConfiguration = getDefaultExtensionConfiguration; +export const resolveDefaultRuntimeConfig = (config) => { + return Object.assign(resolveChecksumRuntimeConfig(config), resolveRetryRuntimeConfig(config)); +}; diff --git a/node_modules/@smithy/smithy-client/dist-es/extensions/index.js b/node_modules/@smithy/smithy-client/dist-es/extensions/index.js new file mode 100644 index 00000000..f1b80749 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/extensions/index.js @@ -0,0 +1 @@ +export * from "./defaultExtensionConfiguration"; diff --git a/node_modules/@smithy/smithy-client/dist-es/extensions/retry.js b/node_modules/@smithy/smithy-client/dist-es/extensions/retry.js new file mode 100644 index 00000000..2c18b0a1 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/extensions/retry.js @@ -0,0 +1,15 @@ +export const getRetryConfiguration = (runtimeConfig) => { + return { + setRetryStrategy(retryStrategy) { + runtimeConfig.retryStrategy = retryStrategy; + }, + retryStrategy() { + return runtimeConfig.retryStrategy; + }, + }; +}; +export const resolveRetryRuntimeConfig = (retryStrategyConfiguration) => { + const runtimeConfig = {}; + runtimeConfig.retryStrategy = retryStrategyConfiguration.retryStrategy(); + return runtimeConfig; +}; diff --git a/node_modules/@smithy/smithy-client/dist-es/get-array-if-single-item.js b/node_modules/@smithy/smithy-client/dist-es/get-array-if-single-item.js new file mode 100644 index 00000000..25d94327 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/get-array-if-single-item.js @@ -0,0 +1 @@ +export const getArrayIfSingleItem = (mayBeArray) => Array.isArray(mayBeArray) ? 
mayBeArray : [mayBeArray]; diff --git a/node_modules/@smithy/smithy-client/dist-es/get-value-from-text-node.js b/node_modules/@smithy/smithy-client/dist-es/get-value-from-text-node.js new file mode 100644 index 00000000..aa0f8271 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/get-value-from-text-node.js @@ -0,0 +1,12 @@ +export const getValueFromTextNode = (obj) => { + const textNodeName = "#text"; + for (const key in obj) { + if (obj.hasOwnProperty(key) && obj[key][textNodeName] !== undefined) { + obj[key] = obj[key][textNodeName]; + } + else if (typeof obj[key] === "object" && obj[key] !== null) { + obj[key] = getValueFromTextNode(obj[key]); + } + } + return obj; +}; diff --git a/node_modules/@smithy/smithy-client/dist-es/index.js b/node_modules/@smithy/smithy-client/dist-es/index.js new file mode 100644 index 00000000..b05ab01d --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/index.js @@ -0,0 +1,25 @@ +export * from "./client"; +export * from "./collect-stream-body"; +export * from "./command"; +export * from "./constants"; +export * from "./create-aggregated-client"; +export * from "./date-utils"; +export * from "./default-error-handler"; +export * from "./defaults-mode"; +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./exceptions"; +export * from "./extended-encode-uri-component"; +export * from "./extensions"; +export * from "./get-array-if-single-item"; +export * from "./get-value-from-text-node"; +export * from "./is-serializable-header-value"; +export * from "./lazy-json"; +export * from "./NoOpLogger"; +export * from "./object-mapping"; +export * from "./parse-utils"; +export * from "./quote-header"; +export * from "./resolve-path"; +export * from "./ser-utils"; +export * from "./serde-json"; +export * from "./split-every"; +export * from "./split-header"; diff --git a/node_modules/@smithy/smithy-client/dist-es/is-serializable-header-value.js b/node_modules/@smithy/smithy-client/dist-es/is-serializable-header-value.js new file mode 100644 index 00000000..cb117caf --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/is-serializable-header-value.js @@ -0,0 +1,3 @@ +export const isSerializableHeaderValue = (value) => { + return value != null; +}; diff --git a/node_modules/@smithy/smithy-client/dist-es/lazy-json.js b/node_modules/@smithy/smithy-client/dist-es/lazy-json.js new file mode 100644 index 00000000..9bddfce3 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/lazy-json.js @@ -0,0 +1,24 @@ +export const LazyJsonString = function LazyJsonString(val) { + const str = Object.assign(new String(val), { + deserializeJSON() { + return JSON.parse(String(val)); + }, + toString() { + return String(val); + }, + toJSON() { + return String(val); + }, + }); + return str; +}; +LazyJsonString.from = (object) => { + if (object && typeof object === "object" && (object instanceof LazyJsonString || "deserializeJSON" in object)) { + return object; + } + else if (typeof object === "string" || Object.getPrototypeOf(object) === String.prototype) { + return LazyJsonString(String(object)); + } + return LazyJsonString(JSON.stringify(object)); +}; +LazyJsonString.fromObject = LazyJsonString.from; diff --git a/node_modules/@smithy/smithy-client/dist-es/object-mapping.js b/node_modules/@smithy/smithy-client/dist-es/object-mapping.js new file mode 100644 index 00000000..84a1f263 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/object-mapping.js @@ -0,0 +1,92 @@ +export function map(arg0, arg1, arg2) { + let target; + let 
filter; + let instructions; + if (typeof arg1 === "undefined" && typeof arg2 === "undefined") { + target = {}; + instructions = arg0; + } + else { + target = arg0; + if (typeof arg1 === "function") { + filter = arg1; + instructions = arg2; + return mapWithFilter(target, filter, instructions); + } + else { + instructions = arg1; + } + } + for (const key of Object.keys(instructions)) { + if (!Array.isArray(instructions[key])) { + target[key] = instructions[key]; + continue; + } + applyInstruction(target, null, instructions, key); + } + return target; +} +export const convertMap = (target) => { + const output = {}; + for (const [k, v] of Object.entries(target || {})) { + output[k] = [, v]; + } + return output; +}; +export const take = (source, instructions) => { + const out = {}; + for (const key in instructions) { + applyInstruction(out, source, instructions, key); + } + return out; +}; +const mapWithFilter = (target, filter, instructions) => { + return map(target, Object.entries(instructions).reduce((_instructions, [key, value]) => { + if (Array.isArray(value)) { + _instructions[key] = value; + } + else { + if (typeof value === "function") { + _instructions[key] = [filter, value()]; + } + else { + _instructions[key] = [filter, value]; + } + } + return _instructions; + }, {})); +}; +const applyInstruction = (target, source, instructions, targetKey) => { + if (source !== null) { + let instruction = instructions[targetKey]; + if (typeof instruction === "function") { + instruction = [, instruction]; + } + const [filter = nonNullish, valueFn = pass, sourceKey = targetKey] = instruction; + if ((typeof filter === "function" && filter(source[sourceKey])) || (typeof filter !== "function" && !!filter)) { + target[targetKey] = valueFn(source[sourceKey]); + } + return; + } + let [filter, value] = instructions[targetKey]; + if (typeof value === "function") { + let _value; + const defaultFilterPassed = filter === undefined && (_value = value()) != null; + const customFilterPassed = (typeof filter === "function" && !!filter(void 0)) || (typeof filter !== "function" && !!filter); + if (defaultFilterPassed) { + target[targetKey] = _value; + } + else if (customFilterPassed) { + target[targetKey] = value(); + } + } + else { + const defaultFilterPassed = filter === undefined && value != null; + const customFilterPassed = (typeof filter === "function" && !!filter(value)) || (typeof filter !== "function" && !!filter); + if (defaultFilterPassed || customFilterPassed) { + target[targetKey] = value; + } + } +}; +const nonNullish = (_) => _ != null; +const pass = (_) => _; diff --git a/node_modules/@smithy/smithy-client/dist-es/parse-utils.js b/node_modules/@smithy/smithy-client/dist-es/parse-utils.js new file mode 100644 index 00000000..209db79a --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/parse-utils.js @@ -0,0 +1,230 @@ +export const parseBoolean = (value) => { + switch (value) { + case "true": + return true; + case "false": + return false; + default: + throw new Error(`Unable to parse boolean value "${value}"`); + } +}; +export const expectBoolean = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "number") { + if (value === 0 || value === 1) { + logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (value === 0) { + return false; + } + if (value === 1) { + return true; + } + } + if (typeof value === "string") { + const lower = value.toLowerCase(); + if (lower === "false" || lower === "true") { + 
logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (lower === "false") { + return false; + } + if (lower === "true") { + return true; + } + } + if (typeof value === "boolean") { + return value; + } + throw new TypeError(`Expected boolean, got ${typeof value}: ${value}`); +}; +export const expectNumber = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "string") { + const parsed = parseFloat(value); + if (!Number.isNaN(parsed)) { + if (String(parsed) !== String(value)) { + logger.warn(stackTraceWarning(`Expected number but observed string: ${value}`)); + } + return parsed; + } + } + if (typeof value === "number") { + return value; + } + throw new TypeError(`Expected number, got ${typeof value}: ${value}`); +}; +const MAX_FLOAT = Math.ceil(2 ** 127 * (2 - 2 ** -23)); +export const expectFloat32 = (value) => { + const expected = expectNumber(value); + if (expected !== undefined && !Number.isNaN(expected) && expected !== Infinity && expected !== -Infinity) { + if (Math.abs(expected) > MAX_FLOAT) { + throw new TypeError(`Expected 32-bit float, got ${value}`); + } + } + return expected; +}; +export const expectLong = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (Number.isInteger(value) && !Number.isNaN(value)) { + return value; + } + throw new TypeError(`Expected integer, got ${typeof value}: ${value}`); +}; +export const expectInt = expectLong; +export const expectInt32 = (value) => expectSizedInt(value, 32); +export const expectShort = (value) => expectSizedInt(value, 16); +export const expectByte = (value) => expectSizedInt(value, 8); +const expectSizedInt = (value, size) => { + const expected = expectLong(value); + if (expected !== undefined && castInt(expected, size) !== expected) { + throw new TypeError(`Expected ${size}-bit integer, got ${value}`); + } + return expected; +}; +const castInt = (value, size) => { + switch (size) { + case 32: + return Int32Array.of(value)[0]; + case 16: + return Int16Array.of(value)[0]; + case 8: + return Int8Array.of(value)[0]; + } +}; +export const expectNonNull = (value, location) => { + if (value === null || value === undefined) { + if (location) { + throw new TypeError(`Expected a non-null value for ${location}`); + } + throw new TypeError("Expected a non-null value"); + } + return value; +}; +export const expectObject = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "object" && !Array.isArray(value)) { + return value; + } + const receivedType = Array.isArray(value) ? "array" : typeof value; + throw new TypeError(`Expected object, got ${receivedType}: ${value}`); +}; +export const expectString = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "string") { + return value; + } + if (["boolean", "number", "bigint"].includes(typeof value)) { + logger.warn(stackTraceWarning(`Expected string, got ${typeof value}: ${value}`)); + return String(value); + } + throw new TypeError(`Expected string, got ${typeof value}: ${value}`); +}; +export const expectUnion = (value) => { + if (value === null || value === undefined) { + return undefined; + } + const asObject = expectObject(value); + const setKeys = Object.entries(asObject) + .filter(([, v]) => v != null) + .map(([k]) => k); + if (setKeys.length === 0) { + throw new TypeError(`Unions must have exactly one non-null member. 
None were found.`); + } + if (setKeys.length > 1) { + throw new TypeError(`Unions must have exactly one non-null member. Keys ${setKeys} were not null.`); + } + return asObject; +}; +export const strictParseDouble = (value) => { + if (typeof value == "string") { + return expectNumber(parseNumber(value)); + } + return expectNumber(value); +}; +export const strictParseFloat = strictParseDouble; +export const strictParseFloat32 = (value) => { + if (typeof value == "string") { + return expectFloat32(parseNumber(value)); + } + return expectFloat32(value); +}; +const NUMBER_REGEX = /(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)|(-?Infinity)|(NaN)/g; +const parseNumber = (value) => { + const matches = value.match(NUMBER_REGEX); + if (matches === null || matches[0].length !== value.length) { + throw new TypeError(`Expected real number, got implicit NaN`); + } + return parseFloat(value); +}; +export const limitedParseDouble = (value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return expectNumber(value); +}; +export const handleFloat = limitedParseDouble; +export const limitedParseFloat = limitedParseDouble; +export const limitedParseFloat32 = (value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return expectFloat32(value); +}; +const parseFloatString = (value) => { + switch (value) { + case "NaN": + return NaN; + case "Infinity": + return Infinity; + case "-Infinity": + return -Infinity; + default: + throw new Error(`Unable to parse float value: ${value}`); + } +}; +export const strictParseLong = (value) => { + if (typeof value === "string") { + return expectLong(parseNumber(value)); + } + return expectLong(value); +}; +export const strictParseInt = strictParseLong; +export const strictParseInt32 = (value) => { + if (typeof value === "string") { + return expectInt32(parseNumber(value)); + } + return expectInt32(value); +}; +export const strictParseShort = (value) => { + if (typeof value === "string") { + return expectShort(parseNumber(value)); + } + return expectShort(value); +}; +export const strictParseByte = (value) => { + if (typeof value === "string") { + return expectByte(parseNumber(value)); + } + return expectByte(value); +}; +const stackTraceWarning = (message) => { + return String(new TypeError(message).stack || message) + .split("\n") + .slice(0, 5) + .filter((s) => !s.includes("stackTraceWarning")) + .join("\n"); +}; +export const logger = { + warn: console.warn, +}; diff --git a/node_modules/@smithy/smithy-client/dist-es/quote-header.js b/node_modules/@smithy/smithy-client/dist-es/quote-header.js new file mode 100644 index 00000000..d0ddf67f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/quote-header.js @@ -0,0 +1,6 @@ +export function quoteHeader(part) { + if (part.includes(",") || part.includes('"')) { + part = `"${part.replace(/"/g, '\\"')}"`; + } + return part; +} diff --git a/node_modules/@smithy/smithy-client/dist-es/resolve-path.js b/node_modules/@smithy/smithy-client/dist-es/resolve-path.js new file mode 100644 index 00000000..6c70cb3b --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/resolve-path.js @@ -0,0 +1 @@ +export { resolvedPath } from "@smithy/core/protocols"; diff --git a/node_modules/@smithy/smithy-client/dist-es/ser-utils.js b/node_modules/@smithy/smithy-client/dist-es/ser-utils.js new file mode 100644 index 00000000..207437fe --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/ser-utils.js @@ -0,0 +1,14 @@ +export const serializeFloat = (value) => { + if 
(value !== value) { + return "NaN"; + } + switch (value) { + case Infinity: + return "Infinity"; + case -Infinity: + return "-Infinity"; + default: + return value; + } +}; +export const serializeDateTime = (date) => date.toISOString().replace(".000Z", "Z"); diff --git a/node_modules/@smithy/smithy-client/dist-es/serde-json.js b/node_modules/@smithy/smithy-client/dist-es/serde-json.js new file mode 100644 index 00000000..babb7c17 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/serde-json.js @@ -0,0 +1,19 @@ +export const _json = (obj) => { + if (obj == null) { + return {}; + } + if (Array.isArray(obj)) { + return obj.filter((_) => _ != null).map(_json); + } + if (typeof obj === "object") { + const target = {}; + for (const key of Object.keys(obj)) { + if (obj[key] == null) { + continue; + } + target[key] = _json(obj[key]); + } + return target; + } + return obj; +}; diff --git a/node_modules/@smithy/smithy-client/dist-es/split-every.js b/node_modules/@smithy/smithy-client/dist-es/split-every.js new file mode 100644 index 00000000..1d78dcae --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/split-every.js @@ -0,0 +1,27 @@ +export function splitEvery(value, delimiter, numDelimiters) { + if (numDelimiters <= 0 || !Number.isInteger(numDelimiters)) { + throw new Error("Invalid number of delimiters (" + numDelimiters + ") for splitEvery."); + } + const segments = value.split(delimiter); + if (numDelimiters === 1) { + return segments; + } + const compoundSegments = []; + let currentSegment = ""; + for (let i = 0; i < segments.length; i++) { + if (currentSegment === "") { + currentSegment = segments[i]; + } + else { + currentSegment += delimiter + segments[i]; + } + if ((i + 1) % numDelimiters === 0) { + compoundSegments.push(currentSegment); + currentSegment = ""; + } + } + if (currentSegment !== "") { + compoundSegments.push(currentSegment); + } + return compoundSegments; +} diff --git a/node_modules/@smithy/smithy-client/dist-es/split-header.js b/node_modules/@smithy/smithy-client/dist-es/split-header.js new file mode 100644 index 00000000..518e77f2 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-es/split-header.js @@ -0,0 +1,37 @@ +export const splitHeader = (value) => { + const z = value.length; + const values = []; + let withinQuotes = false; + let prevChar = undefined; + let anchor = 0; + for (let i = 0; i < z; ++i) { + const char = value[i]; + switch (char) { + case `"`: + if (prevChar !== "\\") { + withinQuotes = !withinQuotes; + } + break; + case ",": + if (!withinQuotes) { + values.push(value.slice(anchor, i)); + anchor = i + 1; + } + break; + default: + } + prevChar = char; + } + values.push(value.slice(anchor)); + return values.map((v) => { + v = v.trim(); + const z = v.length; + if (z < 2) { + return v; + } + if (v[0] === `"` && v[z - 1] === `"`) { + v = v.slice(1, z - 1); + } + return v.replace(/\\"/g, '"'); + }); +}; diff --git a/node_modules/@smithy/smithy-client/dist-types/NoOpLogger.d.ts b/node_modules/@smithy/smithy-client/dist-types/NoOpLogger.d.ts new file mode 100644 index 00000000..93ebff49 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/NoOpLogger.d.ts @@ -0,0 +1,11 @@ +import { Logger } from "@smithy/types"; +/** + * @internal + */ +export declare class NoOpLogger implements Logger { + trace(): void; + debug(): void; + info(): void; + warn(): void; + error(): void; +} diff --git a/node_modules/@smithy/smithy-client/dist-types/client.d.ts b/node_modules/@smithy/smithy-client/dist-types/client.d.ts new file mode 100644 
index 00000000..6f155e5d --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/client.d.ts @@ -0,0 +1,61 @@ +import { Client as IClient, Command, FetchHttpHandlerOptions, MetadataBearer, MiddlewareStack, NodeHttpHandlerOptions, RequestHandler } from "@smithy/types"; +/** + * @public + */ +export interface SmithyConfiguration { + requestHandler: RequestHandler | NodeHttpHandlerOptions | FetchHttpHandlerOptions | Record; + /** + * The API version set internally by the SDK, and is + * not planned to be used by customer code. + * @internal + */ + readonly apiVersion: string; + /** + * @public + * + * Default false. + * + * When true, the client will only resolve the middleware stack once per + * Command class. This means modifying the middlewareStack of the + * command or client after requests have been made will not be + * recognized. + * + * Calling client.destroy() also clears this cache. + * + * Enable this only if needing the additional time saved (0-1ms per request) + * and not needing middleware modifications between requests. + */ + cacheMiddleware?: boolean; +} +/** + * @internal + */ +export type SmithyResolvedConfiguration = { + requestHandler: RequestHandler; + readonly apiVersion: string; + cacheMiddleware?: boolean; +}; +/** + * @public + */ +export declare class Client> implements IClient { + readonly config: ResolvedClientConfiguration; + middlewareStack: MiddlewareStack; + /** + * Holds an object reference to the initial configuration object. + * Used to check that the config resolver stack does not create + * dangling instances of an intermediate form of the configuration object. + * + * @internal + */ + initConfig?: object; + /** + * May be used to cache the resolved handler function for a Command class. + */ + private handlers?; + constructor(config: ResolvedClientConfiguration); + send(command: Command>, options?: HandlerOptions): Promise; + send(command: Command>, cb: (err: any, data?: OutputType) => void): void; + send(command: Command>, options: HandlerOptions, cb: (err: any, data?: OutputType) => void): void; + destroy(): void; +} diff --git a/node_modules/@smithy/smithy-client/dist-types/collect-stream-body.d.ts b/node_modules/@smithy/smithy-client/dist-types/collect-stream-body.d.ts new file mode 100644 index 00000000..33378b80 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/collect-stream-body.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { collectBody } from "@smithy/core/protocols"; diff --git a/node_modules/@smithy/smithy-client/dist-types/command.d.ts b/node_modules/@smithy/smithy-client/dist-types/command.d.ts new file mode 100644 index 00000000..3625b888 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/command.d.ts @@ -0,0 +1,113 @@ +import type { EndpointParameterInstructions } from "@smithy/middleware-endpoint"; +import type { Command as ICommand, Handler, HandlerExecutionContext, HttpRequest as IHttpRequest, HttpResponse as IHttpResponse, Logger, MetadataBearer, MiddlewareStack as IMiddlewareStack, OptionalParameter, Pluggable, RequestHandler, SerdeContext } from "@smithy/types"; +/** + * @public + */ +export declare abstract class Command implements ICommand { + abstract input: Input; + readonly middlewareStack: IMiddlewareStack; + /** + * Factory for Command ClassBuilder. 
+ * @internal + */ + static classBuilder; + }, SI extends object = any, SO extends MetadataBearer = any>(): ClassBuilder; + abstract resolveMiddleware(stack: IMiddlewareStack, configuration: ResolvedClientConfiguration, options: any): Handler; + /** + * @internal + */ + resolveMiddlewareWithContext(clientStack: IMiddlewareStack, configuration: { + logger: Logger; + requestHandler: RequestHandler; + }, options: any, { middlewareFn, clientName, commandName, inputFilterSensitiveLog, outputFilterSensitiveLog, smithyContext, additionalContext, CommandCtor, }: ResolveMiddlewareContextArgs): import("@smithy/types").InitializeHandler; +} +/** + * @internal + */ +type ResolveMiddlewareContextArgs = { + middlewareFn: (CommandCtor: any, clientStack: any, config: any, options: any) => Pluggable[]; + clientName: string; + commandName: string; + smithyContext: Record; + additionalContext: HandlerExecutionContext; + inputFilterSensitiveLog: (_: any) => any; + outputFilterSensitiveLog: (_: any) => any; + CommandCtor: any; +}; +/** + * @internal + */ +declare class ClassBuilder; +}, SI extends object = any, SO extends MetadataBearer = any> { + private _init; + private _ep; + private _middlewareFn; + private _commandName; + private _clientName; + private _additionalContext; + private _smithyContext; + private _inputFilterSensitiveLog; + private _outputFilterSensitiveLog; + private _serializer; + private _deserializer; + /** + * Optional init callback. + */ + init(cb: (_: Command) => void): void; + /** + * Set the endpoint parameter instructions. + */ + ep(endpointParameterInstructions: EndpointParameterInstructions): ClassBuilder; + /** + * Add any number of middleware. + */ + m(middlewareSupplier: (CommandCtor: any, clientStack: any, config: any, options: any) => Pluggable[]): ClassBuilder; + /** + * Set the initial handler execution context Smithy field. + */ + s(service: string, operation: string, smithyContext?: Record): ClassBuilder; + /** + * Set the initial handler execution context. + */ + c(additionalContext?: HandlerExecutionContext): ClassBuilder; + /** + * Set constant string identifiers for the operation. + */ + n(clientName: string, commandName: string): ClassBuilder; + /** + * Set the input and output sensistive log filters. + */ + f(inputFilter?: (_: any) => any, outputFilter?: (_: any) => any): ClassBuilder; + /** + * Sets the serializer. + */ + ser(serializer: (input: I, context?: SerdeContext | any) => Promise): ClassBuilder; + /** + * Sets the deserializer. + */ + de(deserializer: (output: IHttpResponse, context?: SerdeContext | any) => Promise): ClassBuilder; + /** + * @returns a Command class with the classBuilder properties. + */ + build(): { + new (input: I): CommandImpl; + new (...[input]: OptionalParameter): CommandImpl; + getEndpointParameterInstructions(): EndpointParameterInstructions; + }; +} +/** + * A concrete implementation of ICommand with no abstract members. 
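[Editor's note: a hypothetical, abridged sketch of the generated-command pattern the ClassBuilder above supports; all Widget* names and the stub (de)serializers are invented for illustration, and codegen normally supplies the real ones.]

import { Command } from "@smithy/smithy-client";

interface GetWidgetInput { id: string }
interface GetWidgetOutput { $metadata: Record<string, unknown>; name?: string }

class GetWidgetCommand extends Command.classBuilder<GetWidgetInput, GetWidgetOutput, any>()
  .s("WidgetService", "GetWidget")       // Smithy context: service and operation
  .n("WidgetClient", "GetWidgetCommand") // constant identifiers used in logs
  .f()                                   // no sensitive-field log filters
  .ser(async (input) => ({ input }) as any)           // stub serializer, illustration only
  .de(async (response) => ({ $metadata: {} }) as any) // stub deserializer, illustration only
  .build() {}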
+ * @public + */ +export interface CommandImpl; +}, SI extends object = any, SO extends MetadataBearer = any> extends Command { + readonly input: I; + resolveMiddleware(stack: IMiddlewareStack, configuration: C, options: any): Handler; +} +export {}; diff --git a/node_modules/@smithy/smithy-client/dist-types/constants.d.ts b/node_modules/@smithy/smithy-client/dist-types/constants.d.ts new file mode 100644 index 00000000..c17e1c8a --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/constants.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const SENSITIVE_STRING = "***SensitiveInformation***"; diff --git a/node_modules/@smithy/smithy-client/dist-types/create-aggregated-client.d.ts b/node_modules/@smithy/smithy-client/dist-types/create-aggregated-client.d.ts new file mode 100644 index 00000000..00e23d8f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/create-aggregated-client.d.ts @@ -0,0 +1,9 @@ +import { Client } from "./client"; +/** + * @internal + * + * @param commands - command lookup container. + * @param client - client instance on which to add aggregated methods. + * @returns an aggregated client with dynamically created methods. + */ +export declare const createAggregatedClient: (commands: Record, Client: new (...args: any) => Client) => void; diff --git a/node_modules/@smithy/smithy-client/dist-types/date-utils.d.ts b/node_modules/@smithy/smithy-client/dist-types/date-utils.d.ts new file mode 100644 index 00000000..99c55f45 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/date-utils.d.ts @@ -0,0 +1,73 @@ +/** + * @internal + * + * Builds a proper UTC HttpDate timestamp from a Date object + * since not all environments will have this as the expected + * format. + * + * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toUTCString} + * - Prior to ECMAScript 2018, the format of the return value + * - varied according to the platform. The most common return + * - value was an RFC-1123 formatted date stamp, which is a + * - slightly updated version of RFC-822 date stamps. + */ +export declare function dateToUtcString(date: Date): string; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 3339 date. + * + * Input strings must conform to RFC3339 section 5.6, and cannot have a UTC + * offset. Fractional precision is supported. + * + * @see {@link https://xml2rfc.tools.ietf.org/public/rfc/html/rfc3339.html#anchor14} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc3339DateTime: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 3339 date. + * + * Input strings must conform to RFC3339 section 5.6, and can have a UTC + * offset. Fractional precision is supported. + * + * @see {@link https://xml2rfc.tools.ietf.org/public/rfc/html/rfc3339.html#anchor14} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc3339DateTimeWithOffset: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. 
Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 7231 IMF-fixdate or obs-date. + * + * Input strings must conform to RFC7231 section 7.1.1.1. Fractional seconds are supported. + * + * @see {@link https://datatracker.ietf.org/doc/html/rfc7231.html#section-7.1.1.1} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc7231DateTime: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a number or a parseable string. + * + * Input strings must be an integer or floating point number. Fractional seconds are supported. + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseEpochTimestamp: (value: unknown) => Date | undefined; diff --git a/node_modules/@smithy/smithy-client/dist-types/default-error-handler.d.ts b/node_modules/@smithy/smithy-client/dist-types/default-error-handler.d.ts new file mode 100644 index 00000000..fd4b52da --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/default-error-handler.d.ts @@ -0,0 +1,13 @@ +/** + * Always throws an error with the given `exceptionCtor` and other arguments. + * This is only called from an error handling code path. + * + * @internal + */ +export declare const throwDefaultError: ({ output, parsedBody, exceptionCtor, errorCode }: any) => never; +/** + * @internal + * + * Creates {@link throwDefaultError} with bound ExceptionCtor. + */ +export declare const withBaseException: (ExceptionCtor: new (...args: any) => any) => any; diff --git a/node_modules/@smithy/smithy-client/dist-types/defaults-mode.d.ts b/node_modules/@smithy/smithy-client/dist-types/defaults-mode.d.ts new file mode 100644 index 00000000..1ddb6f0f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/defaults-mode.d.ts @@ -0,0 +1,28 @@ +/** + * @internal + */ +export declare const loadConfigsForDefaultMode: (mode: ResolvedDefaultsMode) => DefaultsModeConfigs; +/** + * Option determining how certain default configuration options are resolved in the SDK. It can be one of the value listed below: + * * `"standard"`:

The STANDARD mode provides the latest recommended default values that should be safe to run in most scenarios

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"in-region"`:

The IN_REGION mode builds on the standard mode and includes optimization tailored for applications which call AWS services from within the same AWS region

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"cross-region"`:

The CROSS_REGION mode builds on the standard mode and includes optimization tailored for applications which call AWS services in a different region

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"mobile"`:

The MOBILE mode builds on the standard mode and includes optimization tailored for mobile applications

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"auto"`:

The AUTO mode is an experimental mode that builds on the standard mode. The SDK will attempt to discover the execution environment to determine the appropriate settings automatically.

Note that the auto detection is heuristics-based and does not guarantee 100% accuracy. STANDARD mode will be used if the execution environment cannot be determined. The auto detection might query EC2 Instance Metadata service, which might introduce latency. Therefore we recommend choosing an explicit defaults_mode instead if startup latency is critical to your application

+ * * `"legacy"`:

The LEGACY mode provides default settings that vary per SDK and were used prior to establishment of defaults_mode

+ * + * @defaultValue "legacy" + */ +export type DefaultsMode = "standard" | "in-region" | "cross-region" | "mobile" | "auto" | "legacy"; +/** + * @internal + */ +export type ResolvedDefaultsMode = Exclude; +/** + * @internal + */ +export interface DefaultsModeConfigs { + retryMode?: string; + connectionTimeout?: number; + requestTimeout?: number; +} diff --git a/node_modules/@smithy/smithy-client/dist-types/emitWarningIfUnsupportedVersion.d.ts b/node_modules/@smithy/smithy-client/dist-types/emitWarningIfUnsupportedVersion.d.ts new file mode 100644 index 00000000..8fc02ce2 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/emitWarningIfUnsupportedVersion.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * + * Emits warning if the provided Node.js version string is pending deprecation. + * + * @param version - The Node.js version string. + */ +export declare const emitWarningIfUnsupportedVersion: (version: string) => void; diff --git a/node_modules/@smithy/smithy-client/dist-types/exceptions.d.ts b/node_modules/@smithy/smithy-client/dist-types/exceptions.d.ts new file mode 100644 index 00000000..0a362c6f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/exceptions.d.ts @@ -0,0 +1,42 @@ +import { HttpResponse, MetadataBearer, ResponseMetadata, RetryableTrait, SmithyException } from "@smithy/types"; +/** + * The type of the exception class constructor parameter. The returned type contains the properties + * in the `ExceptionType` but not in the `BaseExceptionType`. If the `BaseExceptionType` contains + * `$metadata` and `message` properties, it's also included in the returned type. + * @internal + */ +export type ExceptionOptionType = Omit>; +/** + * @public + */ +export interface ServiceExceptionOptions extends SmithyException, MetadataBearer { + message?: string; +} +/** + * @public + * + * Base exception class for the exceptions from the server-side. + */ +export declare class ServiceException extends Error implements SmithyException, MetadataBearer { + readonly $fault: "client" | "server"; + $response?: HttpResponse; + $retryable?: RetryableTrait; + $metadata: ResponseMetadata; + constructor(options: ServiceExceptionOptions); + /** + * Checks if a value is an instance of ServiceException (duck typed) + */ + static isInstance(value: unknown): value is ServiceException; + /** + * Custom instanceof check to support the operator for ServiceException base class + */ + static [Symbol.hasInstance](instance: unknown): boolean; +} +/** + * This method inject unmodeled member to a deserialized SDK exception, + * and load the error message from different possible keys('message', + * 'Message'). + * + * @internal + */ +export declare const decorateServiceException: (exception: E, additions?: Record) => E; diff --git a/node_modules/@smithy/smithy-client/dist-types/extended-encode-uri-component.d.ts b/node_modules/@smithy/smithy-client/dist-types/extended-encode-uri-component.d.ts new file mode 100644 index 00000000..ced666a6 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/extended-encode-uri-component.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. 
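[Editor's note: a small sketch of the duck-typed exception helpers declared above; WidgetError is an invented subclass for illustration.]

import { ServiceException, decorateServiceException } from "@smithy/smithy-client";

class WidgetError extends ServiceException {}

const err = new WidgetError({ name: "WidgetError", $fault: "client", $metadata: { httpStatusCode: 400 } });

ServiceException.isInstance(err); // true: checks the $fault/$metadata shape, so it should still
                                  // match across duplicated copies of this package where a plain
                                  // prototype-based instanceof would fail
err instanceof ServiceException;  // true, via the Symbol.hasInstance override

// Injects unmodeled members and resolves the message from the 'message'/'Message' keys:
decorateServiceException(err, { message: "invalid widget id" }).message; // "invalid widget id"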
+ */ +export { extendedEncodeURIComponent } from "@smithy/core/protocols"; diff --git a/node_modules/@smithy/smithy-client/dist-types/extensions/checksum.d.ts b/node_modules/@smithy/smithy-client/dist-types/extensions/checksum.d.ts new file mode 100644 index 00000000..8b5dd7b8 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/extensions/checksum.d.ts @@ -0,0 +1,24 @@ +import type { ChecksumAlgorithm, ChecksumConfiguration, ChecksumConstructor, HashConstructor } from "@smithy/types"; +import { AlgorithmId } from "@smithy/types"; +export { AlgorithmId, ChecksumAlgorithm, ChecksumConfiguration }; +/** + * @internal + */ +export type PartialChecksumRuntimeConfigType = Partial<{ + sha256: ChecksumConstructor | HashConstructor; + md5: ChecksumConstructor | HashConstructor; + crc32: ChecksumConstructor | HashConstructor; + crc32c: ChecksumConstructor | HashConstructor; + sha1: ChecksumConstructor | HashConstructor; +}>; +/** + * @internal + */ +export declare const getChecksumConfiguration: (runtimeConfig: PartialChecksumRuntimeConfigType) => { + addChecksumAlgorithm(algo: ChecksumAlgorithm): void; + checksumAlgorithms(): ChecksumAlgorithm[]; +}; +/** + * @internal + */ +export declare const resolveChecksumRuntimeConfig: (clientConfig: ChecksumConfiguration) => PartialChecksumRuntimeConfigType; diff --git a/node_modules/@smithy/smithy-client/dist-types/extensions/defaultExtensionConfiguration.d.ts b/node_modules/@smithy/smithy-client/dist-types/extensions/defaultExtensionConfiguration.d.ts new file mode 100644 index 00000000..42de409b --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/extensions/defaultExtensionConfiguration.d.ts @@ -0,0 +1,38 @@ +import type { DefaultExtensionConfiguration } from "@smithy/types"; +import { PartialChecksumRuntimeConfigType } from "./checksum"; +import { PartialRetryRuntimeConfigType } from "./retry"; +/** + * @internal + */ +export type DefaultExtensionRuntimeConfigType = PartialRetryRuntimeConfigType & PartialChecksumRuntimeConfigType; +/** + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getDefaultExtensionConfiguration: (runtimeConfig: DefaultExtensionRuntimeConfigType) => { + addChecksumAlgorithm(algo: import("@smithy/types").ChecksumAlgorithm): void; + checksumAlgorithms(): import("@smithy/types").ChecksumAlgorithm[]; +} & { + setRetryStrategy(retryStrategy: import("@smithy/types").Provider): void; + retryStrategy(): import("@smithy/types").Provider; +}; +/** + * @deprecated use getDefaultExtensionConfiguration + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getDefaultClientConfiguration: (runtimeConfig: DefaultExtensionRuntimeConfigType) => { + addChecksumAlgorithm(algo: import("@smithy/types").ChecksumAlgorithm): void; + checksumAlgorithms(): import("@smithy/types").ChecksumAlgorithm[]; +} & { + setRetryStrategy(retryStrategy: import("@smithy/types").Provider): void; + retryStrategy(): import("@smithy/types").Provider; +}; +/** + * @internal + * + * Helper function to resolve runtime config from default extension configuration + */ +export declare const resolveDefaultRuntimeConfig: (config: DefaultExtensionConfiguration) => DefaultExtensionRuntimeConfigType; diff --git a/node_modules/@smithy/smithy-client/dist-types/extensions/index.d.ts b/node_modules/@smithy/smithy-client/dist-types/extensions/index.d.ts new file mode 100644 index 00000000..f1b80749 --- /dev/null +++ 
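[Editor's note: a sketch of the extension-configuration round trip declared above; myRetryStrategy is a placeholder value.]

import { getDefaultExtensionConfiguration, resolveDefaultRuntimeConfig } from "@smithy/smithy-client";
import type { RetryStrategyV2 } from "@smithy/types";

declare const myRetryStrategy: RetryStrategyV2; // placeholder retry strategy

// Lift a partial runtime config into the extension-configuration interface...
const extensionConfig = getDefaultExtensionConfiguration({
  retryStrategy: async () => myRetryStrategy, // Provider<RetryStrategy | RetryStrategyV2>
});
extensionConfig.checksumAlgorithms(); // checksum algorithms registered so far

// ...and resolve it back to the runtime-config shape:
const runtimeConfig = resolveDefaultRuntimeConfig(extensionConfig);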
b/node_modules/@smithy/smithy-client/dist-types/extensions/index.d.ts @@ -0,0 +1 @@ +export * from "./defaultExtensionConfiguration"; diff --git a/node_modules/@smithy/smithy-client/dist-types/extensions/retry.d.ts b/node_modules/@smithy/smithy-client/dist-types/extensions/retry.d.ts new file mode 100644 index 00000000..6e288279 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/extensions/retry.d.ts @@ -0,0 +1,18 @@ +import { Provider, RetryStrategy, RetryStrategyConfiguration, RetryStrategyV2 } from "@smithy/types"; +/** + * @internal + */ +export type PartialRetryRuntimeConfigType = Partial<{ + retryStrategy: Provider; +}>; +/** + * @internal + */ +export declare const getRetryConfiguration: (runtimeConfig: PartialRetryRuntimeConfigType) => { + setRetryStrategy(retryStrategy: Provider): void; + retryStrategy(): Provider; +}; +/** + * @internal + */ +export declare const resolveRetryRuntimeConfig: (retryStrategyConfiguration: RetryStrategyConfiguration) => PartialRetryRuntimeConfigType; diff --git a/node_modules/@smithy/smithy-client/dist-types/get-array-if-single-item.d.ts b/node_modules/@smithy/smithy-client/dist-types/get-array-if-single-item.d.ts new file mode 100644 index 00000000..6468b914 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/get-array-if-single-item.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * The XML parser will set one K:V for a member that could + * return multiple entries but only has one. + */ +export declare const getArrayIfSingleItem: (mayBeArray: T) => T | T[]; diff --git a/node_modules/@smithy/smithy-client/dist-types/get-value-from-text-node.d.ts b/node_modules/@smithy/smithy-client/dist-types/get-value-from-text-node.d.ts new file mode 100644 index 00000000..7163e5af --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/get-value-from-text-node.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * Recursively parses object and populates value is node from + * "#text" key if it's available + */ +export declare const getValueFromTextNode: (obj: any) => any; diff --git a/node_modules/@smithy/smithy-client/dist-types/index.d.ts b/node_modules/@smithy/smithy-client/dist-types/index.d.ts new file mode 100644 index 00000000..4a4ac197 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/index.d.ts @@ -0,0 +1,26 @@ +export type { DocumentType, SdkError, SmithyException } from "@smithy/types"; +export * from "./client"; +export * from "./collect-stream-body"; +export * from "./command"; +export * from "./constants"; +export * from "./create-aggregated-client"; +export * from "./date-utils"; +export * from "./default-error-handler"; +export * from "./defaults-mode"; +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./exceptions"; +export * from "./extended-encode-uri-component"; +export * from "./extensions"; +export * from "./get-array-if-single-item"; +export * from "./get-value-from-text-node"; +export * from "./is-serializable-header-value"; +export * from "./lazy-json"; +export * from "./NoOpLogger"; +export * from "./object-mapping"; +export * from "./parse-utils"; +export * from "./quote-header"; +export * from "./resolve-path"; +export * from "./ser-utils"; +export * from "./serde-json"; +export * from "./split-every"; +export * from "./split-header"; diff --git a/node_modules/@smithy/smithy-client/dist-types/is-serializable-header-value.d.ts b/node_modules/@smithy/smithy-client/dist-types/is-serializable-header-value.d.ts new file mode 100644 index 00000000..a35a23ac --- /dev/null +++ 
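[Editor's note: a usage sketch of the XML-deserialization helpers and NoOpLogger exported above, following their doc comments.]

import { NoOpLogger, getArrayIfSingleItem, getValueFromTextNode } from "@smithy/smithy-client";

getArrayIfSingleItem("only"); // ["only"]: a single entry is wrapped in an array
getArrayIfSingleItem([1, 2]); // [1, 2]: arrays pass through unchanged

getValueFromTextNode({ a: { "#text": "x" }, b: "y" }); // { a: "x", b: "y" }

new NoOpLogger().warn("dropped"); // every level is a no-op; useful as a default logger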
b/node_modules/@smithy/smithy-client/dist-types/is-serializable-header-value.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * @returns whether the header value is serializable. + */ +export declare const isSerializableHeaderValue: (value: any) => boolean; diff --git a/node_modules/@smithy/smithy-client/dist-types/lazy-json.d.ts b/node_modules/@smithy/smithy-client/dist-types/lazy-json.d.ts new file mode 100644 index 00000000..df7eb516 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/lazy-json.d.ts @@ -0,0 +1,46 @@ +/** + * @public + * + * A model field with this type means that you may provide a JavaScript + * object in lieu of a JSON string, and it will be serialized to JSON + * automatically before being sent in a request. + * + * For responses, you will receive a "LazyJsonString", which is a boxed String object + * with additional mixin methods. + * To get the string value, call `.toString()`, or to get the JSON object value, + * call `.deserializeJSON()` or parse it yourself. + */ +export type AutomaticJsonStringConversion = Parameters[0] | LazyJsonString; +/** + * @internal + * + */ +export interface LazyJsonString extends String { + /** + * @returns the JSON parsing of the string value. + */ + deserializeJSON(): any; + /** + * @returns the original string value rather than a JSON.stringified value. + */ + toJSON(): string; +} +/** + * @internal + * + * Extension of the native String class in the previous implementation + * has negative global performance impact on method dispatch for strings, + * and is generally discouraged. + * + * This current implementation may look strange, but is necessary to preserve the interface and + * behavior of extending the String class. + */ +export declare const LazyJsonString: { + (s: string): LazyJsonString; + new (s: string): LazyJsonString; + from(s: any): LazyJsonString; + /** + * @deprecated use #from. + */ + fromObject(s: any): LazyJsonString; +}; diff --git a/node_modules/@smithy/smithy-client/dist-types/object-mapping.d.ts b/node_modules/@smithy/smithy-client/dist-types/object-mapping.d.ts new file mode 100644 index 00000000..97e28e59 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/object-mapping.d.ts @@ -0,0 +1,162 @@ +/** + * @internal + * + * A set of instructions for multiple keys. + * The aim is to provide a concise yet readable way to map and filter values + * onto a target object. 
+ * + * @example + * ```javascript + * const example: ObjectMappingInstructions = { + * lazyValue1: [, () => 1], + * lazyValue2: [, () => 2], + * lazyValue3: [, () => 3], + * lazyConditionalValue1: [() => true, () => 4], + * lazyConditionalValue2: [() => true, () => 5], + * lazyConditionalValue3: [true, () => 6], + * lazyConditionalValue4: [false, () => 44], + * lazyConditionalValue5: [() => false, () => 55], + * lazyConditionalValue6: ["", () => 66], + * simpleValue1: [, 7], + * simpleValue2: [, 8], + * simpleValue3: [, 9], + * conditionalValue1: [() => true, 10], + * conditionalValue2: [() => true, 11], + * conditionalValue3: [{}, 12], + * conditionalValue4: [false, 110], + * conditionalValue5: [() => false, 121], + * conditionalValue6: ["", 132], + * }; + * + * const exampleResult: Record = { + * lazyValue1: 1, + * lazyValue2: 2, + * lazyValue3: 3, + * lazyConditionalValue1: 4, + * lazyConditionalValue2: 5, + * lazyConditionalValue3: 6, + * simpleValue1: 7, + * simpleValue2: 8, + * simpleValue3: 9, + * conditionalValue1: 10, + * conditionalValue2: 11, + * conditionalValue3: 12, + * }; + * ``` + */ +export type ObjectMappingInstructions = Record; +/** + * @internal + * + * A variant of the object mapping instruction for the `take` function. + * In this case, the source value is provided to the value function, turning it + * from a supplier into a mapper. + */ +export type SourceMappingInstructions = Record; +/** + * @internal + * + * An instruction set for assigning a value to a target object. + */ +export type ObjectMappingInstruction = LazyValueInstruction | ConditionalLazyValueInstruction | SimpleValueInstruction | ConditionalValueInstruction | UnfilteredValue; +/** + * @internal + * + * non-array + */ +export type UnfilteredValue = any; +/** + * @internal + */ +export type LazyValueInstruction = [FilterStatus, ValueSupplier]; +/** + * @internal + */ +export type ConditionalLazyValueInstruction = [FilterStatusSupplier, ValueSupplier]; +/** + * @internal + */ +export type SimpleValueInstruction = [FilterStatus, Value]; +/** + * @internal + */ +export type ConditionalValueInstruction = [ValueFilteringFunction, Value]; +/** + * @internal + */ +export type SourceMappingInstruction = [(ValueFilteringFunction | FilterStatus)?, ValueMapper?, string?]; +/** + * @internal + * + * Filter is considered passed if + * 1. It is a boolean true. + * 2. It is not undefined and is itself truthy. + * 3. It is undefined and the corresponding _value_ is neither null nor undefined. + */ +export type FilterStatus = boolean | unknown | void; +/** + * @internal + * + * Supplies the filter check but not against any value as input. + */ +export type FilterStatusSupplier = () => boolean; +/** + * @internal + * + * Filter check with the given value. + */ +export type ValueFilteringFunction = (value: any) => boolean; +/** + * @internal + * + * Supplies the value for lazy evaluation. + */ +export type ValueSupplier = () => any; +/** + * @internal + * + * A function that maps the source value to the target value. + * Defaults to pass-through with nullish check. + */ +export type ValueMapper = (value: any) => any; +/** + * @internal + * + * A non-function value. + */ +export type Value = any; +/** + * @internal + * Internal/Private, for codegen use only. + * + * Transfer a set of keys from [instructions] to [target]. + * + * For each instruction in the record, the target key will be the instruction key. + * The target assignment will be conditional on the instruction's filter. 
+ * The target assigned value will be supplied by the instructions as an evaluable function or non-function value. + * + * @see ObjectMappingInstructions for an example. + */ +export declare function map(target: any, filter: (value: any) => boolean, instructions: Record): typeof target; +/** + * @internal + */ +export declare function map(instructions: ObjectMappingInstructions): any; +/** + * @internal + */ +export declare function map(target: any, instructions: ObjectMappingInstructions): typeof target; +/** + * Convert a regular object `{ k: v }` to `{ k: [, v] }` mapping instruction set with default + * filter. + * + * @internal + */ +export declare const convertMap: (target: any) => Record; +/** + * @param source - original object with data. + * @param instructions - how to map the data. + * @returns new object mapped from the source object. + * @internal + */ +export declare const take: (source: any, instructions: SourceMappingInstructions) => any; diff --git a/node_modules/@smithy/smithy-client/dist-types/parse-utils.d.ts b/node_modules/@smithy/smithy-client/dist-types/parse-utils.d.ts new file mode 100644 index 00000000..b5ded6f6 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/parse-utils.d.ts @@ -0,0 +1,270 @@ +/** + * @internal + * + * Give an input string, strictly parses a boolean value. + * + * @param value - The boolean string to parse. + * @returns true for "true", false for "false", otherwise an error is thrown. + */ +export declare const parseBoolean: (value: string) => boolean; +/** + * @internal + * + * Asserts a value is a boolean and returns it. + * Casts strings and numbers with a warning if there is evidence that they were + * intended to be booleans. + * + * @param value - A value that is expected to be a boolean. + * @returns The value if it's a boolean, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectBoolean: (value: any) => boolean | undefined; +/** + * @internal + * + * Asserts a value is a number and returns it. + * Casts strings with a warning if the string is a parseable number. + * This is to unblock slight API definition/implementation inconsistencies. + * + * @param value - A value that is expected to be a number. + * @returns The value if it's a number, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectNumber: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit float and returns it. + * + * @param value - A value that is expected to be a 32-bit float. + * @returns The value if it's a float, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectFloat32: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is an integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectLong: (value: any) => number | undefined; +/** + * @internal + * + * @deprecated Use expectLong + */ +export declare const expectInt: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. 
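[Editor's note: a condensed version of the documented map() example above, plus a take() sketch following the SourceMappingInstructions description.]

import { convertMap, map, take } from "@smithy/smithy-client";

map({
  simpleValue: [, 7],      // no filter: assigned because 7 is non-nullish
  lazyValue: [, () => 1],  // supplier evaluated lazily
  kept: [() => true, 10],  // filter passes, value assigned
  dropped: [false, 110],   // filter fails, key omitted
});
// => { simpleValue: 7, lazyValue: 1, kept: 10 }

// take() pulls values from a source object; the default filter drops nullish members,
// and a value function receives the source value (mapper) instead of acting as a supplier:
take({ Name: "abc", Size: 10, Empty: null }, {
  Name: [],               // pass through
  Size: [, (v) => v * 2], // map the source value
});
// => { Name: "abc", Size: 20 }

convertMap({ k: "v" }); // => { k: [, "v"] }, the instruction form with the default filter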
+ */ +export declare const expectInt32: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 16-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectShort: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is an 8-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectByte: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is not null or undefined and returns it, or throws an error. + * + * @param value - A value that is expected to be defined + * @param location - The location where we're expecting to find a defined object (optional) + * @returns The value if it's not undefined, otherwise throws an error + */ +export declare const expectNonNull: (value: T | null | undefined, location?: string) => T; +/** + * @internal + * + * Asserts a value is an JSON-like object and returns it. This is expected to be used + * with values parsed from JSON (arrays, objects, numbers, strings, booleans). + * + * @param value - A value that is expected to be an object + * @returns The value if it's an object, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectObject: (value: any) => Record | undefined; +/** + * @internal + * + * Asserts a value is a string and returns it. + * Numbers and boolean will be cast to strings with a warning. + * + * @param value - A value that is expected to be a string. + * @returns The value if it's a string, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectString: (value: any) => string | undefined; +/** + * @internal + * + * Asserts a value is a JSON-like object with only one non-null/non-undefined key and + * returns it. + * + * @param value - A value that is expected to be an object with exactly one non-null, + * non-undefined key. + * @returns the value if it's a union, undefined if it's null/undefined, otherwise + * an error is thrown. + */ +export declare const expectUnion: (value: unknown) => Record | undefined; +/** + * @internal + * + * Parses a value into a double. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by the standard + * parseFloat with one exception: NaN may only be explicitly set as the string + * "NaN", any implicit Nan values will result in an error being thrown. If any + * other type is provided, an exception will be thrown. + * + * @param value - A number or string representation of a double. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseDouble: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use strictParseDouble + */ +export declare const strictParseFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a float. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by the standard + * parseFloat with one exception: NaN may only be explicitly set as the string + * "NaN", any implicit Nan values will result in an error being thrown. 
If any + * other type is provided, an exception will be thrown. + * + * @param value - A number or string representation of a float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseFloat32: (value: string | number) => number | undefined; +/** + * @internal + * + * Asserts a value is a number and returns it. If the value is a string + * representation of a non-numeric number type (NaN, Infinity, -Infinity), + * the value will be parsed. Any other string value will result in an exception + * being thrown. Null or undefined will be returned as undefined. Any other + * type will result in an exception being thrown. + * + * @param value - A number or string representation of a non-numeric float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const limitedParseDouble: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use limitedParseDouble + */ +export declare const handleFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use limitedParseDouble + */ +export declare const limitedParseFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit float and returns it. If the value is a string + * representation of a non-numeric number type (NaN, Infinity, -Infinity), + * the value will be parsed. Any other string value will result in an exception + * being thrown. Null or undefined will be returned as undefined. Any other + * type will result in an exception being thrown. + * + * @param value - A number or string representation of a non-numeric float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const limitedParseFloat32: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into an integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of an integer. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseLong: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use strictParseLong + */ +export declare const strictParseInt: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a 32-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of a 32-bit integer. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseInt32: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a 16-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. 
If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of a 16-bit integer. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseShort: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into an 8-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of an 8-bit integer. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseByte: (value: string | number) => number | undefined; +/** + * @internal + */ +export declare const logger: { + warn: { + (...data: any[]): void; + (message?: any, ...optionalParams: any[]): void; + }; +}; diff --git a/node_modules/@smithy/smithy-client/dist-types/quote-header.d.ts b/node_modules/@smithy/smithy-client/dist-types/quote-header.d.ts new file mode 100644 index 00000000..73d6c16c --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/quote-header.d.ts @@ -0,0 +1,6 @@ +/** + * @public + * @param part - header list element + * @returns quoted string if part contains delimiter. + */ +export declare function quoteHeader(part: string): string; diff --git a/node_modules/@smithy/smithy-client/dist-types/resolve-path.d.ts b/node_modules/@smithy/smithy-client/dist-types/resolve-path.d.ts new file mode 100644 index 00000000..2a3204fc --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/resolve-path.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { resolvedPath } from "@smithy/core/protocols"; diff --git a/node_modules/@smithy/smithy-client/dist-types/ser-utils.d.ts b/node_modules/@smithy/smithy-client/dist-types/ser-utils.d.ts new file mode 100644 index 00000000..ae03c61e --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ser-utils.d.ts @@ -0,0 +1,15 @@ +/** + * @internal + * + * Serializes a number, turning non-numeric values into strings. + * + * @param value - The number to serialize. + * @returns A number, or a string if the given number was non-numeric. + */ +export declare const serializeFloat: (value: number) => string | number; +/** + * @internal + * @param date - to be serialized. + * @returns https://smithy.io/2.0/spec/protocol-traits.html#timestampformat-trait date-time format. + */ +export declare const serializeDateTime: (date: Date) => string; diff --git a/node_modules/@smithy/smithy-client/dist-types/serde-json.d.ts b/node_modules/@smithy/smithy-client/dist-types/serde-json.d.ts new file mode 100644 index 00000000..96ac476a --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/serde-json.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + * + * Maps an object through the default JSON serde behavior. + * This means removing nullish fields and un-sparsifying lists. + * + * This is also used by Smithy RPCv2 CBOR as the default serde behavior. + * + * @param obj - to be checked. + * @returns same object with default serde behavior applied. 
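[Editor's note: a usage sketch of the parsing and header helpers declared above, which normalize loosely typed wire values during (de)serialization; expected results follow the doc comments.]

import {
  expectInt32,
  limitedParseDouble,
  parseBoolean,
  parseEpochTimestamp,
  parseRfc3339DateTime,
  quoteHeader,
  strictParseLong,
} from "@smithy/smithy-client";

parseBoolean("true");           // true: only the exact strings "true"/"false" are accepted
expectInt32(7);                 // 7
expectInt32(null);              // undefined: nullish input returns undefined
strictParseLong("42");          // 42: integral strings parse, "4.2" would throw
limitedParseDouble("Infinity"); // Infinity: only the non-numeric tokens parse from strings
parseEpochTimestamp(1700000000);              // Date for 2023-11-14T22:13:20.000Z
parseRfc3339DateTime("2024-01-01T00:00:00Z"); // Date; a UTC offset would be rejected
quoteHeader("a,b");             // '"a,b"': quoted because it contains the list delimiter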
+ */ +export declare const _json: (obj: any) => any; diff --git a/node_modules/@smithy/smithy-client/dist-types/split-every.d.ts b/node_modules/@smithy/smithy-client/dist-types/split-every.d.ts new file mode 100644 index 00000000..45a02291 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/split-every.d.ts @@ -0,0 +1,11 @@ +/** + * @internal + * + * Given an input string, splits based on the delimiter after a given + * number of delimiters has been encountered. + * + * @param value - The input string to split. + * @param delimiter - The delimiter to split on. + * @param numDelimiters - The number of delimiters to have encountered to split. + */ +export declare function splitEvery(value: string, delimiter: string, numDelimiters: number): Array; diff --git a/node_modules/@smithy/smithy-client/dist-types/split-header.d.ts b/node_modules/@smithy/smithy-client/dist-types/split-header.d.ts new file mode 100644 index 00000000..0f51651e --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/split-header.d.ts @@ -0,0 +1,5 @@ +/** + * @param value - header string value. + * @returns value split by commas that aren't in quotes. + */ +export declare const splitHeader: (value: string) => string[]; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/NoOpLogger.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/NoOpLogger.d.ts new file mode 100644 index 00000000..a9a10629 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/NoOpLogger.d.ts @@ -0,0 +1,11 @@ +import { Logger } from "@smithy/types"; +/** + * @internal + */ +export declare class NoOpLogger implements Logger { + trace(): void; + debug(): void; + info(): void; + warn(): void; + error(): void; +} diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/client.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/client.d.ts new file mode 100644 index 00000000..578541ec --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/client.d.ts @@ -0,0 +1,61 @@ +import { Client as IClient, Command, FetchHttpHandlerOptions, MetadataBearer, MiddlewareStack, NodeHttpHandlerOptions, RequestHandler } from "@smithy/types"; +/** + * @public + */ +export interface SmithyConfiguration { + requestHandler: RequestHandler | NodeHttpHandlerOptions | FetchHttpHandlerOptions | Record; + /** + * The API version set internally by the SDK, and is + * not planned to be used by customer code. + * @internal + */ + readonly apiVersion: string; + /** + * @public + * + * Default false. + * + * When true, the client will only resolve the middleware stack once per + * Command class. This means modifying the middlewareStack of the + * command or client after requests have been made will not be + * recognized. + * + * Calling client.destroy() also clears this cache. + * + * Enable this only if needing the additional time saved (0-1ms per request) + * and not needing middleware modifications between requests. + */ + cacheMiddleware?: boolean; +} +/** + * @internal + */ +export type SmithyResolvedConfiguration = { + requestHandler: RequestHandler; + readonly apiVersion: string; + cacheMiddleware?: boolean; +}; +/** + * @public + */ +export declare class Client> implements IClient { + readonly config: ResolvedClientConfiguration; + middlewareStack: MiddlewareStack; + /** + * Holds an object reference to the initial configuration object. + * Used to check that the config resolver stack does not create + * dangling instances of an intermediate form of the configuration object. 
+ * + * @internal + */ + initConfig?: object; + /** + * May be used to cache the resolved handler function for a Command class. + */ + private handlers?; + constructor(config: ResolvedClientConfiguration); + send(command: Command>, options?: HandlerOptions): Promise; + send(command: Command>, cb: (err: any, data?: OutputType) => void): void; + send(command: Command>, options: HandlerOptions, cb: (err: any, data?: OutputType) => void): void; + destroy(): void; +} diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/collect-stream-body.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/collect-stream-body.d.ts new file mode 100644 index 00000000..c53a1e3c --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/collect-stream-body.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { collectBody } from "@smithy/core/protocols"; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/command.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/command.d.ts new file mode 100644 index 00000000..8b42ff60 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/command.d.ts @@ -0,0 +1,113 @@ +import { EndpointParameterInstructions } from "@smithy/middleware-endpoint"; +import { Command as ICommand, Handler, HandlerExecutionContext, HttpRequest as IHttpRequest, HttpResponse as IHttpResponse, Logger, MetadataBearer, MiddlewareStack as IMiddlewareStack, OptionalParameter, Pluggable, RequestHandler, SerdeContext } from "@smithy/types"; +/** + * @public + */ +export declare abstract class Command implements ICommand { + abstract input: Input; + readonly middlewareStack: IMiddlewareStack; + /** + * Factory for Command ClassBuilder. + * @internal + */ + static classBuilder; + }, SI extends object = any, SO extends MetadataBearer = any>(): ClassBuilder; + abstract resolveMiddleware(stack: IMiddlewareStack, configuration: ResolvedClientConfiguration, options: any): Handler; + /** + * @internal + */ + resolveMiddlewareWithContext(clientStack: IMiddlewareStack, configuration: { + logger: Logger; + requestHandler: RequestHandler; + }, options: any, { middlewareFn, clientName, commandName, inputFilterSensitiveLog, outputFilterSensitiveLog, smithyContext, additionalContext, CommandCtor, }: ResolveMiddlewareContextArgs): import("@smithy/types").InitializeHandler; +} +/** + * @internal + */ +type ResolveMiddlewareContextArgs = { + middlewareFn: (CommandCtor: any, clientStack: any, config: any, options: any) => Pluggable[]; + clientName: string; + commandName: string; + smithyContext: Record; + additionalContext: HandlerExecutionContext; + inputFilterSensitiveLog: (_: any) => any; + outputFilterSensitiveLog: (_: any) => any; + CommandCtor: any; +}; +/** + * @internal + */ +declare class ClassBuilder; +}, SI extends object = any, SO extends MetadataBearer = any> { + private _init; + private _ep; + private _middlewareFn; + private _commandName; + private _clientName; + private _additionalContext; + private _smithyContext; + private _inputFilterSensitiveLog; + private _outputFilterSensitiveLog; + private _serializer; + private _deserializer; + /** + * Optional init callback. + */ + init(cb: (_: Command) => void): void; + /** + * Set the endpoint parameter instructions. + */ + ep(endpointParameterInstructions: EndpointParameterInstructions): ClassBuilder; + /** + * Add any number of middleware. 
+ */ + m(middlewareSupplier: (CommandCtor: any, clientStack: any, config: any, options: any) => Pluggable[]): ClassBuilder; + /** + * Set the initial handler execution context Smithy field. + */ + s(service: string, operation: string, smithyContext?: Record): ClassBuilder; + /** + * Set the initial handler execution context. + */ + c(additionalContext?: HandlerExecutionContext): ClassBuilder; + /** + * Set constant string identifiers for the operation. + */ + n(clientName: string, commandName: string): ClassBuilder; + /** + * Set the input and output sensistive log filters. + */ + f(inputFilter?: (_: any) => any, outputFilter?: (_: any) => any): ClassBuilder; + /** + * Sets the serializer. + */ + ser(serializer: (input: I, context?: SerdeContext | any) => Promise): ClassBuilder; + /** + * Sets the deserializer. + */ + de(deserializer: (output: IHttpResponse, context?: SerdeContext | any) => Promise): ClassBuilder; + /** + * @returns a Command class with the classBuilder properties. + */ + build(): { + new (input: I): CommandImpl; + new (...[input]: OptionalParameter): CommandImpl; + getEndpointParameterInstructions(): EndpointParameterInstructions; + }; +} +/** + * A concrete implementation of ICommand with no abstract members. + * @public + */ +export interface CommandImpl; +}, SI extends object = any, SO extends MetadataBearer = any> extends Command { + readonly input: I; + resolveMiddleware(stack: IMiddlewareStack, configuration: C, options: any): Handler; +} +export {}; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/constants.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/constants.d.ts new file mode 100644 index 00000000..eab978fa --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const SENSITIVE_STRING = "***SensitiveInformation***"; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/create-aggregated-client.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/create-aggregated-client.d.ts new file mode 100644 index 00000000..ded19995 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/create-aggregated-client.d.ts @@ -0,0 +1,9 @@ +import { Client } from "./client"; +/** + * @internal + * + * @param commands - command lookup container. + * @param client - client instance on which to add aggregated methods. + * @returns an aggregated client with dynamically created methods. + */ +export declare const createAggregatedClient: (commands: Record, Client: new (...args: any) => Client) => void; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/date-utils.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/date-utils.d.ts new file mode 100644 index 00000000..41071c2a --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/date-utils.d.ts @@ -0,0 +1,73 @@ +/** + * @internal + * + * Builds a proper UTC HttpDate timestamp from a Date object + * since not all environments will have this as the expected + * format. + * + * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toUTCString} + * - Prior to ECMAScript 2018, the format of the return value + * - varied according to the platform. The most common return + * - value was an RFC-1123 formatted date stamp, which is a + * - slightly updated version of RFC-822 date stamps. + */ +export declare function dateToUtcString(date: Date): string; +/** + * @internal + * + * Parses a value into a Date. 
Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 3339 date. + * + * Input strings must conform to RFC3339 section 5.6, and cannot have a UTC + * offset. Fractional precision is supported. + * + * @see {@link https://xml2rfc.tools.ietf.org/public/rfc/html/rfc3339.html#anchor14} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc3339DateTime: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 3339 date. + * + * Input strings must conform to RFC3339 section 5.6, and can have a UTC + * offset. Fractional precision is supported. + * + * @see {@link https://xml2rfc.tools.ietf.org/public/rfc/html/rfc3339.html#anchor14} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc3339DateTimeWithOffset: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 7231 IMF-fixdate or obs-date. + * + * Input strings must conform to RFC7231 section 7.1.1.1. Fractional seconds are supported. + * + * @see {@link https://datatracker.ietf.org/doc/html/rfc7231.html#section-7.1.1.1} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc7231DateTime: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a number or a parseable string. + * + * Input strings must be an integer or floating point number. Fractional seconds are supported. + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseEpochTimestamp: (value: unknown) => Date | undefined; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/default-error-handler.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/default-error-handler.d.ts new file mode 100644 index 00000000..e9852ba3 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/default-error-handler.d.ts @@ -0,0 +1,13 @@ +/** + * Always throws an error with the given `exceptionCtor` and other arguments. + * This is only called from an error handling code path. + * + * @internal + */ +export declare const throwDefaultError: ({ output, parsedBody, exceptionCtor, errorCode }: any) => never; +/** + * @internal + * + * Creates {@link throwDefaultError} with bound ExceptionCtor. + */ +export declare const withBaseException: (ExceptionCtor: new (...args: any) => any) => any; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/defaults-mode.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/defaults-mode.d.ts new file mode 100644 index 00000000..c8a89ed8 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/defaults-mode.d.ts @@ -0,0 +1,28 @@ +/** + * @internal + */ +export declare const loadConfigsForDefaultMode: (mode: ResolvedDefaultsMode) => DefaultsModeConfigs; +/** + * Option determining how certain default configuration options are resolved in the SDK. It can be one of the value listed below: + * * `"standard"`:

The STANDARD mode provides the latest recommended default values that should be safe to run in most scenarios

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"in-region"`:

The IN_REGION mode builds on the standard mode and includes optimization tailored for applications which call AWS services from within the same AWS region

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"cross-region"`:

The CROSS_REGION mode builds on the standard mode and includes optimization tailored for applications which call AWS services in a different region

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"mobile"`:

The MOBILE mode builds on the standard mode and includes optimization tailored for mobile applications

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"auto"`:

The AUTO mode is an experimental mode that builds on the standard mode. The SDK will attempt to discover the execution environment to determine the appropriate settings automatically.

Note that the auto detection is heuristics-based and does not guarantee 100% accuracy. STANDARD mode will be used if the execution environment cannot be determined. The auto detection might query EC2 Instance Metadata service, which might introduce latency. Therefore we recommend choosing an explicit defaults_mode instead if startup latency is critical to your application

+ * * `"legacy"`:

The LEGACY mode provides default settings that vary per SDK and were used prior to establishment of defaults_mode

+ * + * @defaultValue "legacy" + */ +export type DefaultsMode = "standard" | "in-region" | "cross-region" | "mobile" | "auto" | "legacy"; +/** + * @internal + */ +export type ResolvedDefaultsMode = Exclude; +/** + * @internal + */ +export interface DefaultsModeConfigs { + retryMode?: string; + connectionTimeout?: number; + requestTimeout?: number; +} diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/emitWarningIfUnsupportedVersion.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/emitWarningIfUnsupportedVersion.d.ts new file mode 100644 index 00000000..f0284ef4 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/emitWarningIfUnsupportedVersion.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * + * Emits warning if the provided Node.js version string is pending deprecation. + * + * @param version - The Node.js version string. + */ +export declare const emitWarningIfUnsupportedVersion: (version: string) => void; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/exceptions.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/exceptions.d.ts new file mode 100644 index 00000000..675354a3 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/exceptions.d.ts @@ -0,0 +1,42 @@ +import { HttpResponse, MetadataBearer, ResponseMetadata, RetryableTrait, SmithyException } from "@smithy/types"; +/** + * The type of the exception class constructor parameter. The returned type contains the properties + * in the `ExceptionType` but not in the `BaseExceptionType`. If the `BaseExceptionType` contains + * `$metadata` and `message` properties, it's also included in the returned type. + * @internal + */ +export type ExceptionOptionType = Pick>>; +/** + * @public + */ +export interface ServiceExceptionOptions extends SmithyException, MetadataBearer { + message?: string; +} +/** + * @public + * + * Base exception class for the exceptions from the server-side. + */ +export declare class ServiceException extends Error implements SmithyException, MetadataBearer { + readonly $fault: "client" | "server"; + $response?: HttpResponse; + $retryable?: RetryableTrait; + $metadata: ResponseMetadata; + constructor(options: ServiceExceptionOptions); + /** + * Checks if a value is an instance of ServiceException (duck typed) + */ + static isInstance(value: unknown): value is ServiceException; + /** + * Custom instanceof check to support the operator for ServiceException base class + */ + static [Symbol.hasInstance](instance: unknown): boolean; +} +/** + * This method inject unmodeled member to a deserialized SDK exception, + * and load the error message from different possible keys('message', + * 'Message'). + * + * @internal + */ +export declare const decorateServiceException: (exception: E, additions?: Record) => E; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/extended-encode-uri-component.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/extended-encode-uri-component.d.ts new file mode 100644 index 00000000..4e510cfd --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/extended-encode-uri-component.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. 
+ */ +export { extendedEncodeURIComponent } from "@smithy/core/protocols"; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/checksum.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/checksum.d.ts new file mode 100644 index 00000000..c5f06b85 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/checksum.d.ts @@ -0,0 +1,24 @@ +import { ChecksumAlgorithm, ChecksumConfiguration, ChecksumConstructor, HashConstructor } from "@smithy/types"; +import { AlgorithmId } from "@smithy/types"; +export { AlgorithmId, ChecksumAlgorithm, ChecksumConfiguration }; +/** + * @internal + */ +export type PartialChecksumRuntimeConfigType = Partial<{ + sha256: ChecksumConstructor | HashConstructor; + md5: ChecksumConstructor | HashConstructor; + crc32: ChecksumConstructor | HashConstructor; + crc32c: ChecksumConstructor | HashConstructor; + sha1: ChecksumConstructor | HashConstructor; +}>; +/** + * @internal + */ +export declare const getChecksumConfiguration: (runtimeConfig: PartialChecksumRuntimeConfigType) => { + addChecksumAlgorithm(algo: ChecksumAlgorithm): void; + checksumAlgorithms(): ChecksumAlgorithm[]; +}; +/** + * @internal + */ +export declare const resolveChecksumRuntimeConfig: (clientConfig: ChecksumConfiguration) => PartialChecksumRuntimeConfigType; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts new file mode 100644 index 00000000..d8c05bb9 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts @@ -0,0 +1,38 @@ +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { PartialChecksumRuntimeConfigType } from "./checksum"; +import { PartialRetryRuntimeConfigType } from "./retry"; +/** + * @internal + */ +export type DefaultExtensionRuntimeConfigType = PartialRetryRuntimeConfigType & PartialChecksumRuntimeConfigType; +/** + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getDefaultExtensionConfiguration: (runtimeConfig: DefaultExtensionRuntimeConfigType) => { + addChecksumAlgorithm(algo: import("@smithy/types").ChecksumAlgorithm): void; + checksumAlgorithms(): import("@smithy/types").ChecksumAlgorithm[]; +} & { + setRetryStrategy(retryStrategy: import("@smithy/types").Provider<import("@smithy/types").RetryStrategyV2 | import("@smithy/types").RetryStrategy>): void; + retryStrategy(): import("@smithy/types").Provider<import("@smithy/types").RetryStrategyV2 | import("@smithy/types").RetryStrategy>; +}; +/** + * @deprecated use getDefaultExtensionConfiguration + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getDefaultClientConfiguration: (runtimeConfig: DefaultExtensionRuntimeConfigType) => { + addChecksumAlgorithm(algo: import("@smithy/types").ChecksumAlgorithm): void; + checksumAlgorithms(): import("@smithy/types").ChecksumAlgorithm[]; +} & { + setRetryStrategy(retryStrategy: import("@smithy/types").Provider<import("@smithy/types").RetryStrategyV2 | import("@smithy/types").RetryStrategy>): void; + retryStrategy(): import("@smithy/types").Provider<import("@smithy/types").RetryStrategyV2 | import("@smithy/types").RetryStrategy>; +}; +/** + * @internal + * + * Helper function to resolve runtime config from default extension configuration + */ +export declare const resolveDefaultRuntimeConfig: (config: DefaultExtensionConfiguration) => DefaultExtensionRuntimeConfigType; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/index.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/index.d.ts new file mode 100644 index
00000000..04e3c839 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/index.d.ts @@ -0,0 +1 @@ +export * from "./defaultExtensionConfiguration"; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/retry.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/retry.d.ts new file mode 100644 index 00000000..b41fa3cd --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/retry.d.ts @@ -0,0 +1,18 @@ +import { Provider, RetryStrategy, RetryStrategyConfiguration, RetryStrategyV2 } from "@smithy/types"; +/** + * @internal + */ +export type PartialRetryRuntimeConfigType = Partial<{ + retryStrategy: Provider<RetryStrategyV2 | RetryStrategy>; +}>; +/** + * @internal + */ +export declare const getRetryConfiguration: (runtimeConfig: PartialRetryRuntimeConfigType) => { + setRetryStrategy(retryStrategy: Provider<RetryStrategyV2 | RetryStrategy>): void; + retryStrategy(): Provider<RetryStrategyV2 | RetryStrategy>; +}; +/** + * @internal + */ +export declare const resolveRetryRuntimeConfig: (retryStrategyConfiguration: RetryStrategyConfiguration) => PartialRetryRuntimeConfigType; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-array-if-single-item.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-array-if-single-item.d.ts new file mode 100644 index 00000000..dbbd2809 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-array-if-single-item.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * The XML parser will set one K:V for a member that could + * return multiple entries but only has one. + */ +export declare const getArrayIfSingleItem: <T>(mayBeArray: T) => T | T[]; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-value-from-text-node.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-value-from-text-node.d.ts new file mode 100644 index 00000000..d56771e8 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-value-from-text-node.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * Recursively parses an object and populates a node's value from the + * "#text" key if it's available. + */ +export declare const getValueFromTextNode: (obj: any) => any; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..684c9772 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/index.d.ts @@ -0,0 +1,26 @@ +export { DocumentType, SdkError, SmithyException } from "@smithy/types"; +export * from "./client"; +export * from "./collect-stream-body"; +export * from "./command"; +export * from "./constants"; +export * from "./create-aggregated-client"; +export * from "./date-utils"; +export * from "./default-error-handler"; +export * from "./defaults-mode"; +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./exceptions"; +export * from "./extended-encode-uri-component"; +export * from "./extensions"; +export * from "./get-array-if-single-item"; +export * from "./get-value-from-text-node"; +export * from "./is-serializable-header-value"; +export * from "./lazy-json"; +export * from "./NoOpLogger"; +export * from "./object-mapping"; +export * from "./parse-utils"; +export * from "./quote-header"; +export * from "./resolve-path"; +export * from "./ser-utils"; +export * from "./serde-json"; +export * from "./split-every"; +export * from "./split-header"; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/is-serializable-header-value.d.ts
b/node_modules/@smithy/smithy-client/dist-types/ts3.4/is-serializable-header-value.d.ts new file mode 100644 index 00000000..4d531091 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/is-serializable-header-value.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * @returns whether the header value is serializable. + */ +export declare const isSerializableHeaderValue: (value: any) => boolean; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/lazy-json.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/lazy-json.d.ts new file mode 100644 index 00000000..3a41bf38 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/lazy-json.d.ts @@ -0,0 +1,46 @@ +/** + * @public + * + * A model field with this type means that you may provide a JavaScript + * object in lieu of a JSON string, and it will be serialized to JSON + * automatically before being sent in a request. + * + * For responses, you will receive a "LazyJsonString", which is a boxed String object + * with additional mixin methods. + * To get the string value, call `.toString()`, or to get the JSON object value, + * call `.deserializeJSON()` or parse it yourself. + */ +export type AutomaticJsonStringConversion = Parameters<typeof JSON.parse>[0] | LazyJsonString; +/** + * @internal + * + */ +export interface LazyJsonString extends String { + /** + * @returns the JSON parsing of the string value. + */ + deserializeJSON(): any; + /** + * @returns the original string value rather than a JSON.stringified value. + */ + toJSON(): string; +} +/** + * @internal + * + * Extending the native String class, as in the previous implementation, + * has a negative global performance impact on method dispatch for strings + * and is generally discouraged. + * + * The current implementation may look strange, but it is necessary to preserve the interface and + * behavior of extending the String class. + */ +export declare const LazyJsonString: { + (s: string): LazyJsonString; + new (s: string): LazyJsonString; + from(s: any): LazyJsonString; + /** + * @deprecated use #from. + */ + fromObject(s: any): LazyJsonString; +}; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/object-mapping.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/object-mapping.d.ts new file mode 100644 index 00000000..d658c169 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/object-mapping.d.ts @@ -0,0 +1,178 @@ +/** + * @internal + * + * A set of instructions for multiple keys. + * The aim is to provide a concise yet readable way to map and filter values + * onto a target object.
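The `LazyJsonString` doc comment above describes a boxed String with JSON helpers. A brief sketch of the round trip, under the assumption that `from` serializes plain objects to JSON (consistent with the type's described behavior):

```ts
import { LazyJsonString } from "@smithy/smithy-client";

// Build a lazy JSON string from a plain object (assumed to be stringified).
const lazy = LazyJsonString.from({ policy: { allow: true } });

// On the wire it behaves like a string...
const raw: string = lazy.toString(); // '{"policy":{"allow":true}}'

// ...and the JSON value can be recovered on demand.
const parsed = lazy.deserializeJSON();
console.log(parsed.policy.allow); // true
```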
+ * + * @example + * ```javascript + * const example: ObjectMappingInstructions = { + * lazyValue1: [, () => 1], + * lazyValue2: [, () => 2], + * lazyValue3: [, () => 3], + * lazyConditionalValue1: [() => true, () => 4], + * lazyConditionalValue2: [() => true, () => 5], + * lazyConditionalValue3: [true, () => 6], + * lazyConditionalValue4: [false, () => 44], + * lazyConditionalValue5: [() => false, () => 55], + * lazyConditionalValue6: ["", () => 66], + * simpleValue1: [, 7], + * simpleValue2: [, 8], + * simpleValue3: [, 9], + * conditionalValue1: [() => true, 10], + * conditionalValue2: [() => true, 11], + * conditionalValue3: [{}, 12], + * conditionalValue4: [false, 110], + * conditionalValue5: [() => false, 121], + * conditionalValue6: ["", 132], + * }; + * + * const exampleResult: Record<string, number> = { + * lazyValue1: 1, + * lazyValue2: 2, + * lazyValue3: 3, + * lazyConditionalValue1: 4, + * lazyConditionalValue2: 5, + * lazyConditionalValue3: 6, + * simpleValue1: 7, + * simpleValue2: 8, + * simpleValue3: 9, + * conditionalValue1: 10, + * conditionalValue2: 11, + * conditionalValue3: 12, + * }; + * ``` + */ +export type ObjectMappingInstructions = Record<string, ObjectMappingInstruction>; +/** + * @internal + * + * A variant of the object mapping instruction for the `take` function. + * In this case, the source value is provided to the value function, turning it + * from a supplier into a mapper. + */ +export type SourceMappingInstructions = Record<string, ValueMapper | SourceMappingInstruction>; +/** + * @internal + * + * An instruction set for assigning a value to a target object. + */ +export type ObjectMappingInstruction = LazyValueInstruction | ConditionalLazyValueInstruction | SimpleValueInstruction | ConditionalValueInstruction | UnfilteredValue; +/** + * @internal + * + * non-array + */ +export type UnfilteredValue = any; +/** + * @internal + */ +export type LazyValueInstruction = [ + FilterStatus, + ValueSupplier +]; +/** + * @internal + */ +export type ConditionalLazyValueInstruction = [ + FilterStatusSupplier, + ValueSupplier +]; +/** + * @internal + */ +export type SimpleValueInstruction = [ + FilterStatus, + Value +]; +/** + * @internal + */ +export type ConditionalValueInstruction = [ + ValueFilteringFunction, + Value +]; +/** + * @internal + */ +export type SourceMappingInstruction = [ + (ValueFilteringFunction | FilterStatus)?, + ValueMapper?, + string? +]; +/** + * @internal + * + * Filter is considered passed if + * 1. It is a boolean true. + * 2. It is not undefined and is itself truthy. + * 3. It is undefined and the corresponding _value_ is neither null nor undefined. + */ +export type FilterStatus = boolean | unknown | void; +/** + * @internal + * + * Supplies the filter check but not against any value as input. + */ +export type FilterStatusSupplier = () => boolean; +/** + * @internal + * + * Filter check with the given value. + */ +export type ValueFilteringFunction = (value: any) => boolean; +/** + * @internal + * + * Supplies the value for lazy evaluation. + */ +export type ValueSupplier = () => any; +/** + * @internal + * + * A function that maps the source value to the target value. + * Defaults to pass-through with nullish check. + */ +export type ValueMapper = (value: any) => any; +/** + * @internal + * + * A non-function value. + */ +export type Value = any; +/** + * @internal + * Internal/Private, for codegen use only. + * + * Transfer a set of keys from [instructions] to [target]. + * + * For each instruction in the record, the target key will be the instruction key. + * The target assignment will be conditional on the instruction's filter.
+ * The target assigned value will be supplied by the instructions as an evaluable function or non-function value. + * + * @see ObjectMappingInstructions for an example. + */ +export declare function map(target: any, filter: (value: any) => boolean, instructions: Record<string, ValueSupplier | Value>): typeof target; +/** + * @internal + */ +export declare function map(instructions: ObjectMappingInstructions): any; +/** + * @internal + */ +export declare function map(target: any, instructions: ObjectMappingInstructions): typeof target; +/** + * Convert a regular object `{ k: v }` to `{ k: [, v] }` mapping instruction set with default + * filter. + * + * @internal + */ +export declare const convertMap: (target: any) => Record<string, any>; +/** + * @param source - original object with data. + * @param instructions - how to map the data. + * @returns new object mapped from the source object. + * @internal + */ +export declare const take: (source: any, instructions: SourceMappingInstructions) => any; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/parse-utils.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/parse-utils.d.ts new file mode 100644 index 00000000..e4c8aef3 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/parse-utils.d.ts @@ -0,0 +1,270 @@ +/** + * @internal + * + * Given an input string, strictly parses a boolean value. + * + * @param value - The boolean string to parse. + * @returns true for "true", false for "false", otherwise an error is thrown. + */ +export declare const parseBoolean: (value: string) => boolean; +/** + * @internal + * + * Asserts a value is a boolean and returns it. + * Casts strings and numbers with a warning if there is evidence that they were + * intended to be booleans. + * + * @param value - A value that is expected to be a boolean. + * @returns The value if it's a boolean, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectBoolean: (value: any) => boolean | undefined; +/** + * @internal + * + * Asserts a value is a number and returns it. + * Casts strings with a warning if the string is a parseable number. + * This is to unblock slight API definition/implementation inconsistencies. + * + * @param value - A value that is expected to be a number. + * @returns The value if it's a number, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectNumber: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit float and returns it. + * + * @param value - A value that is expected to be a 32-bit float. + * @returns The value if it's a float, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectFloat32: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is an integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectLong: (value: any) => number | undefined; +/** + * @internal + * + * @deprecated Use expectLong + */ +export declare const expectInt: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown.
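The `map`/`take` overloads above drive generated (de)serializers; the instruction tuples control filtering, mapping, and lazy evaluation. A small sketch under the documented semantics (the field names are invented for illustration):

```ts
import { map, take } from "@smithy/smithy-client";

const source = { Name: "alpha", Size: "42", Missing: undefined };

// take: copy selected keys from a source, with optional mappers.
const output = take(source, {
  Name: [],                                 // pass-through with nullish check
  Size: [, (v: string) => parseInt(v, 10)], // map string -> number
  Missing: [],                              // filtered out: value is undefined
});
// output => { Name: "alpha", Size: 42 }

// map: assemble an object from (conditional) value instructions.
const params = map({
  Prefix: [, "logs/"],          // simple value, default filter passes
  MaxKeys: [() => false, 1000], // conditional filter rejects the entry
});
// params => { Prefix: "logs/" }
```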
+ */ +export declare const expectInt32: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 16-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectShort: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is an 8-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectByte: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is not null or undefined and returns it, or throws an error. + * + * @param value - A value that is expected to be defined + * @param location - The location where we're expecting to find a defined object (optional) + * @returns The value if it's not undefined, otherwise throws an error + */ +export declare const expectNonNull: <T>(value: T | null | undefined, location?: string) => T; +/** + * @internal + * + * Asserts a value is a JSON-like object and returns it. This is expected to be used + * with values parsed from JSON (arrays, objects, numbers, strings, booleans). + * + * @param value - A value that is expected to be an object + * @returns The value if it's an object, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectObject: (value: any) => Record<string, any> | undefined; +/** + * @internal + * + * Asserts a value is a string and returns it. + * Numbers and boolean will be cast to strings with a warning. + * + * @param value - A value that is expected to be a string. + * @returns The value if it's a string, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectString: (value: any) => string | undefined; +/** + * @internal + * + * Asserts a value is a JSON-like object with only one non-null/non-undefined key and + * returns it. + * + * @param value - A value that is expected to be an object with exactly one non-null, + * non-undefined key. + * @returns the value if it's a union, undefined if it's null/undefined, otherwise + * an error is thrown. + */ +export declare const expectUnion: (value: unknown) => Record<string, any> | undefined; +/** + * @internal + * + * Parses a value into a double. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by the standard + * parseFloat with one exception: NaN may only be explicitly set as the string + * "NaN"; any implicit NaN values will result in an error being thrown. If any + * other type is provided, an exception will be thrown. + * + * @param value - A number or string representation of a double. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseDouble: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use strictParseDouble + */ +export declare const strictParseFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a float. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by the standard + * parseFloat with one exception: NaN may only be explicitly set as the string + * "NaN"; any implicit NaN values will result in an error being thrown.
If any + * other type is provided, an exception will be thrown. + * + * @param value - A number or string representation of a float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseFloat32: (value: string | number) => number | undefined; +/** + * @internal + * + * Asserts a value is a number and returns it. If the value is a string + * representation of a non-numeric number type (NaN, Infinity, -Infinity), + * the value will be parsed. Any other string value will result in an exception + * being thrown. Null or undefined will be returned as undefined. Any other + * type will result in an exception being thrown. + * + * @param value - A number or string representation of a non-numeric float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const limitedParseDouble: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use limitedParseDouble + */ +export declare const handleFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use limitedParseDouble + */ +export declare const limitedParseFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit float and returns it. If the value is a string + * representation of a non-numeric number type (NaN, Infinity, -Infinity), + * the value will be parsed. Any other string value will result in an exception + * being thrown. Null or undefined will be returned as undefined. Any other + * type will result in an exception being thrown. + * + * @param value - A number or string representation of a non-numeric float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const limitedParseFloat32: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into an integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of an integer. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseLong: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use strictParseLong + */ +export declare const strictParseInt: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a 32-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of a 32-bit integer. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseInt32: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a 16-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. 
If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of a 16-bit integer. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseShort: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into an 8-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of an 8-bit integer. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseByte: (value: string | number) => number | undefined; +/** + * @internal + */ +export declare const logger: { + warn: { + (...data: any[]): void; + (message?: any, ...optionalParams: any[]): void; + }; +}; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/quote-header.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/quote-header.d.ts new file mode 100644 index 00000000..c2f12e91 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/quote-header.d.ts @@ -0,0 +1,6 @@ +/** + * @public + * @param part - header list element + * @returns quoted string if part contains delimiter. + */ +export declare function quoteHeader(part: string): string; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/resolve-path.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/resolve-path.d.ts new file mode 100644 index 00000000..5432be7f --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/resolve-path.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { resolvedPath } from "@smithy/core/protocols"; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/ser-utils.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/ser-utils.d.ts new file mode 100644 index 00000000..355f829d --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/ser-utils.d.ts @@ -0,0 +1,15 @@ +/** + * @internal + * + * Serializes a number, turning non-numeric values into strings. + * + * @param value - The number to serialize. + * @returns A number, or a string if the given number was non-numeric. + */ +export declare const serializeFloat: (value: number) => string | number; +/** + * @internal + * @param date - to be serialized. + * @returns https://smithy.io/2.0/spec/protocol-traits.html#timestampformat-trait date-time format. + */ +export declare const serializeDateTime: (date: Date) => string; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/serde-json.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/serde-json.d.ts new file mode 100644 index 00000000..499409f5 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/serde-json.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + * + * Maps an object through the default JSON serde behavior. + * This means removing nullish fields and un-sparsifying lists. + * + * This is also used by Smithy RPCv2 CBOR as the default serde behavior. + * + * @param obj - to be checked. + * @returns same object with default serde behavior applied. 
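The parse-utils declarations above split into strict parsers, which reject implicit coercions, and limited parsers, which additionally accept the explicit non-numeric strings. A compact sketch of the documented behavior:

```ts
import {
  limitedParseDouble,
  parseBoolean,
  strictParseInt32,
} from "@smithy/smithy-client";

parseBoolean("true");            // true
// parseBoolean("yes");          // throws: only "true" / "false" parse

strictParseInt32("42");          // 42
// strictParseInt32("4.2");      // throws: parsed value is not an integer

limitedParseDouble("NaN");       // NaN (explicit non-numeric string)
limitedParseDouble("Infinity");  // Infinity
// limitedParseDouble("abc");    // throws: not a parseable number
```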
+ */ +export declare const _json: (obj: any) => any; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-every.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-every.d.ts new file mode 100644 index 00000000..2280f3e4 --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-every.d.ts @@ -0,0 +1,11 @@ +/** + * @internal + * + * Given an input string, splits based on the delimiter after a given + * number of delimiters has been encountered. + * + * @param value - The input string to split. + * @param delimiter - The delimiter to split on. + * @param numDelimiters - The number of delimiters to encounter before splitting. + */ +export declare function splitEvery(value: string, delimiter: string, numDelimiters: number): Array<string>; diff --git a/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-header.d.ts b/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-header.d.ts new file mode 100644 index 00000000..7cf54c6a --- /dev/null +++ b/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-header.d.ts @@ -0,0 +1,5 @@ +/** + * @param value - header string value. + * @returns value split by commas that aren't in quotes. + */ +export declare const splitHeader: (value: string) => string[]; diff --git a/node_modules/@smithy/smithy-client/package.json b/node_modules/@smithy/smithy-client/package.json new file mode 100644 index 00000000..ef22b322 --- /dev/null +++ b/node_modules/@smithy/smithy-client/package.json @@ -0,0 +1,67 @@ +{ + "name": "@smithy/smithy-client", + "version": "4.2.1", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline smithy-client", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.0", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/smithy-client", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/smithy-client" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint":
"src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/types/LICENSE b/node_modules/@smithy/types/LICENSE new file mode 100644 index 00000000..e907b586 --- /dev/null +++ b/node_modules/@smithy/types/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@smithy/types/README.md b/node_modules/@smithy/types/README.md new file mode 100644 index 00000000..7ab3ccd4 --- /dev/null +++ b/node_modules/@smithy/types/README.md @@ -0,0 +1,115 @@ +# @smithy/types + +[![NPM version](https://img.shields.io/npm/v/@smithy/types/latest.svg)](https://www.npmjs.com/package/@smithy/types) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/types.svg)](https://www.npmjs.com/package/@smithy/types) + +## Usage + +This package is mostly used internally by generated clients. +Some public components have independent applications. + +--- + +### Scenario: Removing `| undefined` from input and output structures + +Generated shapes' members are unioned with `undefined` for +input shapes, and are `?` (optional) for output shapes. + +- for inputs, this defers the validation to the service. +- for outputs, this strongly suggests that you should runtime-check the output data. + +If you would like to skip these steps, use the `AssertiveClient` or +`UncheckedClient` type helpers. + +Using AWS S3 as an example: + +```ts +import { S3 } from "@aws-sdk/client-s3"; +import type { AssertiveClient, UncheckedClient } from "@smithy/types"; + +const s3a = new S3({}) as AssertiveClient<S3>; +const s3b = new S3({}) as UncheckedClient<S3>; + +// AssertiveClient enforces required inputs are not undefined +// and required outputs are not undefined. +const get = await s3a.getObject({ + Bucket: "", + // @ts-expect-error (undefined not assignable to string) + Key: undefined, +}); + +// UncheckedClient makes output fields non-nullable. +// You should still perform type checks as you deem +// necessary, but the SDK will no longer prompt you +// with nullability errors. +const body = await ( + await s3b.getObject({ + Bucket: "", + Key: "", + }) +).Body.transformToString(); +``` + +When using the transform on a non-aggregated client with the `Command` syntax, +the input cannot be validated, because it goes through another class. + +```ts +import { S3Client, ListBucketsCommand, GetObjectCommand, GetObjectCommandInput } from "@aws-sdk/client-s3"; +import type { AssertiveClient, UncheckedClient, NoUndefined } from "@smithy/types"; + +const s3 = new S3Client({}) as UncheckedClient<S3Client>; + +const list = await s3.send( + new ListBucketsCommand({ + // command inputs are not validated by the type transform, + // because this is a separate class. + }) +); + +/** + * Although less ergonomic, you can use the NoUndefined + * transform on the input type. + */ +const getObjectInput: NoUndefined<GetObjectCommandInput> = { + Bucket: "undefined", + // @ts-expect-error (undefined not assignable to string) + Key: undefined, + // optional params can still be undefined.
+ SSECustomerAlgorithm: undefined, +}; + +const get = s3.send(new GetObjectCommand(getObjectInput)); + +// outputs are still transformed. +await (await get).Body.transformToString(); +``` + +### Scenario: Narrowing a smithy-typescript generated client's output payload blob types + +This is mostly relevant to operations with streaming bodies, such as within +the S3Client in the AWS SDK for JavaScript v3. + +Because blob payload types are platform dependent, you may wish to indicate in your application that a client is running in a specific +environment. This narrows the blob payload types. + +```typescript +import { GetObjectCommand, S3Client } from "@aws-sdk/client-s3"; +import type { NodeJsClient, SdkStream, StreamingBlobPayloadOutputTypes } from "@smithy/types"; +import type { IncomingMessage } from "node:http"; + +// default client init. +const s3Default = new S3Client({}); + +// client init with type narrowing. +const s3NarrowType = new S3Client({}) as NodeJsClient<S3Client>; + +// The default type of blob payloads is a wide union type including multiple possible +// request handlers. +const body1: StreamingBlobPayloadOutputTypes = (await s3Default.send(new GetObjectCommand({ Key: "", Bucket: "" }))) .Body!; + +// This is of the narrower type SdkStream<IncomingMessage>, representing +// blob payload responses using specifically the node:http request handler. +const body2: SdkStream<IncomingMessage> = (await s3NarrowType.send(new GetObjectCommand({ Key: "", Bucket: "" }))) .Body!; +``` diff --git a/node_modules/@smithy/types/dist-cjs/abort-handler.js b/node_modules/@smithy/types/dist-cjs/abort-handler.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/abort-handler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/abort.js b/node_modules/@smithy/types/dist-cjs/abort.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/abort.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/auth/HttpApiKeyAuth.js b/node_modules/@smithy/types/dist-cjs/auth/HttpApiKeyAuth.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/auth/HttpApiKeyAuth.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/auth/HttpAuthScheme.js b/node_modules/@smithy/types/dist-cjs/auth/HttpAuthScheme.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/auth/HttpAuthScheme.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/auth/HttpAuthSchemeProvider.js b/node_modules/@smithy/types/dist-cjs/auth/HttpAuthSchemeProvider.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/auth/HttpAuthSchemeProvider.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/auth/HttpSigner.js b/node_modules/@smithy/types/dist-cjs/auth/HttpSigner.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/auth/HttpSigner.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/auth/IdentityProviderConfig.js
b/node_modules/@smithy/types/dist-cjs/auth/IdentityProviderConfig.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/auth/IdentityProviderConfig.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/auth/auth.js b/node_modules/@smithy/types/dist-cjs/auth/auth.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/auth/auth.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/auth/index.js b/node_modules/@smithy/types/dist-cjs/auth/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/auth/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/blob/blob-payload-input-types.js b/node_modules/@smithy/types/dist-cjs/blob/blob-payload-input-types.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/blob/blob-payload-input-types.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/checksum.js b/node_modules/@smithy/types/dist-cjs/checksum.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/checksum.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/client.js b/node_modules/@smithy/types/dist-cjs/client.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/client.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/command.js b/node_modules/@smithy/types/dist-cjs/command.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/command.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/connection/config.js b/node_modules/@smithy/types/dist-cjs/connection/config.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/connection/config.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/connection/index.js b/node_modules/@smithy/types/dist-cjs/connection/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/connection/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/connection/manager.js b/node_modules/@smithy/types/dist-cjs/connection/manager.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/connection/manager.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/connection/pool.js b/node_modules/@smithy/types/dist-cjs/connection/pool.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/connection/pool.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/node_modules/@smithy/types/dist-cjs/crypto.js b/node_modules/@smithy/types/dist-cjs/crypto.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/crypto.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/downlevel-ts3.4/transform/type-transform.js b/node_modules/@smithy/types/dist-cjs/downlevel-ts3.4/transform/type-transform.js new file mode 100644 index 00000000..88174128 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/downlevel-ts3.4/transform/type-transform.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/encode.js b/node_modules/@smithy/types/dist-cjs/encode.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/encode.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/endpoint.js b/node_modules/@smithy/types/dist-cjs/endpoint.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/endpoint.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/endpoints/EndpointRuleObject.js b/node_modules/@smithy/types/dist-cjs/endpoints/EndpointRuleObject.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/endpoints/EndpointRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/endpoints/ErrorRuleObject.js b/node_modules/@smithy/types/dist-cjs/endpoints/ErrorRuleObject.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/endpoints/ErrorRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/endpoints/RuleSetObject.js b/node_modules/@smithy/types/dist-cjs/endpoints/RuleSetObject.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/endpoints/RuleSetObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/endpoints/TreeRuleObject.js b/node_modules/@smithy/types/dist-cjs/endpoints/TreeRuleObject.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/endpoints/TreeRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/endpoints/index.js b/node_modules/@smithy/types/dist-cjs/endpoints/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/endpoints/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/endpoints/shared.js b/node_modules/@smithy/types/dist-cjs/endpoints/shared.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/endpoints/shared.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/eventStream.js b/node_modules/@smithy/types/dist-cjs/eventStream.js new file mode 100644 
index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/eventStream.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/extensions/checksum.js b/node_modules/@smithy/types/dist-cjs/extensions/checksum.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/extensions/checksum.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/extensions/defaultClientConfiguration.js b/node_modules/@smithy/types/dist-cjs/extensions/defaultClientConfiguration.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/extensions/defaultClientConfiguration.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/extensions/defaultExtensionConfiguration.js b/node_modules/@smithy/types/dist-cjs/extensions/defaultExtensionConfiguration.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/extensions/defaultExtensionConfiguration.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/extensions/index.js b/node_modules/@smithy/types/dist-cjs/extensions/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/extensions/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/extensions/retry.js b/node_modules/@smithy/types/dist-cjs/extensions/retry.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/extensions/retry.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/externals-check/browser-externals-check.js b/node_modules/@smithy/types/dist-cjs/externals-check/browser-externals-check.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/externals-check/browser-externals-check.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/feature-ids.js b/node_modules/@smithy/types/dist-cjs/feature-ids.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/feature-ids.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/http.js b/node_modules/@smithy/types/dist-cjs/http.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/http.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/http/httpHandlerInitialization.js b/node_modules/@smithy/types/dist-cjs/http/httpHandlerInitialization.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/http/httpHandlerInitialization.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/identity/apiKeyIdentity.js b/node_modules/@smithy/types/dist-cjs/identity/apiKeyIdentity.js new file mode 100644 index 
00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/identity/apiKeyIdentity.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/identity/awsCredentialIdentity.js b/node_modules/@smithy/types/dist-cjs/identity/awsCredentialIdentity.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/identity/awsCredentialIdentity.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/identity/identity.js b/node_modules/@smithy/types/dist-cjs/identity/identity.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/identity/identity.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/identity/index.js b/node_modules/@smithy/types/dist-cjs/identity/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/identity/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/identity/tokenIdentity.js b/node_modules/@smithy/types/dist-cjs/identity/tokenIdentity.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/identity/tokenIdentity.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/index.js b/node_modules/@smithy/types/dist-cjs/index.js new file mode 100644 index 00000000..0849f2b4 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/index.js @@ -0,0 +1,144 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AlgorithmId: () => AlgorithmId, + EndpointURLScheme: () => EndpointURLScheme, + FieldPosition: () => FieldPosition, + HttpApiKeyAuthLocation: () => HttpApiKeyAuthLocation, + HttpAuthLocation: () => HttpAuthLocation, + IniSectionType: () => IniSectionType, + RequestHandlerProtocol: () => RequestHandlerProtocol, + SMITHY_CONTEXT_KEY: () => SMITHY_CONTEXT_KEY, + getDefaultClientConfiguration: () => getDefaultClientConfiguration, + resolveDefaultRuntimeConfig: () => resolveDefaultRuntimeConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/auth/auth.ts +var HttpAuthLocation = /* @__PURE__ */ ((HttpAuthLocation2) => { + HttpAuthLocation2["HEADER"] = "header"; + HttpAuthLocation2["QUERY"] = "query"; + return HttpAuthLocation2; +})(HttpAuthLocation || {}); + +// src/auth/HttpApiKeyAuth.ts 
+var HttpApiKeyAuthLocation = /* @__PURE__ */ ((HttpApiKeyAuthLocation2) => { + HttpApiKeyAuthLocation2["HEADER"] = "header"; + HttpApiKeyAuthLocation2["QUERY"] = "query"; + return HttpApiKeyAuthLocation2; +})(HttpApiKeyAuthLocation || {}); + +// src/endpoint.ts +var EndpointURLScheme = /* @__PURE__ */ ((EndpointURLScheme2) => { + EndpointURLScheme2["HTTP"] = "http"; + EndpointURLScheme2["HTTPS"] = "https"; + return EndpointURLScheme2; +})(EndpointURLScheme || {}); + +// src/extensions/checksum.ts +var AlgorithmId = /* @__PURE__ */ ((AlgorithmId2) => { + AlgorithmId2["MD5"] = "md5"; + AlgorithmId2["CRC32"] = "crc32"; + AlgorithmId2["CRC32C"] = "crc32c"; + AlgorithmId2["SHA1"] = "sha1"; + AlgorithmId2["SHA256"] = "sha256"; + return AlgorithmId2; +})(AlgorithmId || {}); +var getChecksumConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const checksumAlgorithms = []; + if (runtimeConfig.sha256 !== void 0) { + checksumAlgorithms.push({ + algorithmId: () => "sha256" /* SHA256 */, + checksumConstructor: () => runtimeConfig.sha256 + }); + } + if (runtimeConfig.md5 != void 0) { + checksumAlgorithms.push({ + algorithmId: () => "md5" /* MD5 */, + checksumConstructor: () => runtimeConfig.md5 + }); + } + return { + addChecksumAlgorithm(algo) { + checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return checksumAlgorithms; + } + }; +}, "getChecksumConfiguration"); +var resolveChecksumRuntimeConfig = /* @__PURE__ */ __name((clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}, "resolveChecksumRuntimeConfig"); + +// src/extensions/defaultClientConfiguration.ts +var getDefaultClientConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return getChecksumConfiguration(runtimeConfig); +}, "getDefaultClientConfiguration"); +var resolveDefaultRuntimeConfig = /* @__PURE__ */ __name((config) => { + return resolveChecksumRuntimeConfig(config); +}, "resolveDefaultRuntimeConfig"); + +// src/http.ts +var FieldPosition = /* @__PURE__ */ ((FieldPosition2) => { + FieldPosition2[FieldPosition2["HEADER"] = 0] = "HEADER"; + FieldPosition2[FieldPosition2["TRAILER"] = 1] = "TRAILER"; + return FieldPosition2; +})(FieldPosition || {}); + +// src/middleware.ts +var SMITHY_CONTEXT_KEY = "__smithy_context"; + +// src/profile.ts +var IniSectionType = /* @__PURE__ */ ((IniSectionType2) => { + IniSectionType2["PROFILE"] = "profile"; + IniSectionType2["SSO_SESSION"] = "sso-session"; + IniSectionType2["SERVICES"] = "services"; + return IniSectionType2; +})(IniSectionType || {}); + +// src/transfer.ts +var RequestHandlerProtocol = /* @__PURE__ */ ((RequestHandlerProtocol2) => { + RequestHandlerProtocol2["HTTP_0_9"] = "http/0.9"; + RequestHandlerProtocol2["HTTP_1_0"] = "http/1.0"; + RequestHandlerProtocol2["TDS_8_0"] = "tds/8.0"; + return RequestHandlerProtocol2; +})(RequestHandlerProtocol || {}); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + HttpAuthLocation, + HttpApiKeyAuthLocation, + EndpointURLScheme, + AlgorithmId, + getDefaultClientConfiguration, + resolveDefaultRuntimeConfig, + FieldPosition, + SMITHY_CONTEXT_KEY, + IniSectionType, + RequestHandlerProtocol +}); + diff --git a/node_modules/@smithy/types/dist-cjs/logger.js b/node_modules/@smithy/types/dist-cjs/logger.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ 
b/node_modules/@smithy/types/dist-cjs/logger.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/middleware.js b/node_modules/@smithy/types/dist-cjs/middleware.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/middleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/pagination.js b/node_modules/@smithy/types/dist-cjs/pagination.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/pagination.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/profile.js b/node_modules/@smithy/types/dist-cjs/profile.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/profile.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/response.js b/node_modules/@smithy/types/dist-cjs/response.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/response.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/retry.js b/node_modules/@smithy/types/dist-cjs/retry.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/retry.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/serde.js b/node_modules/@smithy/types/dist-cjs/serde.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/serde.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/shapes.js b/node_modules/@smithy/types/dist-cjs/shapes.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/shapes.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/signature.js b/node_modules/@smithy/types/dist-cjs/signature.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/signature.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/stream.js b/node_modules/@smithy/types/dist-cjs/stream.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/stream.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-common-types.js b/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-common-types.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-common-types.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-input-types.js b/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-input-types.js new file mode 100644 index 00000000..04405773 --- 
/dev/null +++ b/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-input-types.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-output-types.js b/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-output-types.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-output-types.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/transfer.js b/node_modules/@smithy/types/dist-cjs/transfer.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/transfer.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/transform/client-method-transforms.js b/node_modules/@smithy/types/dist-cjs/transform/client-method-transforms.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/transform/client-method-transforms.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/transform/client-payload-blob-type-narrow.js b/node_modules/@smithy/types/dist-cjs/transform/client-payload-blob-type-narrow.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/transform/client-payload-blob-type-narrow.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/transform/exact.js b/node_modules/@smithy/types/dist-cjs/transform/exact.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/transform/exact.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/transform/no-undefined.js b/node_modules/@smithy/types/dist-cjs/transform/no-undefined.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/transform/no-undefined.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/transform/type-transform.js b/node_modules/@smithy/types/dist-cjs/transform/type-transform.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/transform/type-transform.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/uri.js b/node_modules/@smithy/types/dist-cjs/uri.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/uri.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/util.js b/node_modules/@smithy/types/dist-cjs/util.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/types/dist-cjs/util.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-cjs/waiter.js b/node_modules/@smithy/types/dist-cjs/waiter.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ 
b/node_modules/@smithy/types/dist-cjs/waiter.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/types/dist-es/abort-handler.js b/node_modules/@smithy/types/dist-es/abort-handler.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/abort-handler.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/abort.js b/node_modules/@smithy/types/dist-es/abort.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/abort.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/auth/HttpApiKeyAuth.js b/node_modules/@smithy/types/dist-es/auth/HttpApiKeyAuth.js new file mode 100644 index 00000000..4c02f242 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/auth/HttpApiKeyAuth.js @@ -0,0 +1,5 @@ +export var HttpApiKeyAuthLocation; +(function (HttpApiKeyAuthLocation) { + HttpApiKeyAuthLocation["HEADER"] = "header"; + HttpApiKeyAuthLocation["QUERY"] = "query"; +})(HttpApiKeyAuthLocation || (HttpApiKeyAuthLocation = {})); diff --git a/node_modules/@smithy/types/dist-es/auth/HttpAuthScheme.js b/node_modules/@smithy/types/dist-es/auth/HttpAuthScheme.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/auth/HttpAuthScheme.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/auth/HttpAuthSchemeProvider.js b/node_modules/@smithy/types/dist-es/auth/HttpAuthSchemeProvider.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/auth/HttpAuthSchemeProvider.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/auth/HttpSigner.js b/node_modules/@smithy/types/dist-es/auth/HttpSigner.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/auth/HttpSigner.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/auth/IdentityProviderConfig.js b/node_modules/@smithy/types/dist-es/auth/IdentityProviderConfig.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/auth/IdentityProviderConfig.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/auth/auth.js b/node_modules/@smithy/types/dist-es/auth/auth.js new file mode 100644 index 00000000..bd3b2df8 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/auth/auth.js @@ -0,0 +1,5 @@ +export var HttpAuthLocation; +(function (HttpAuthLocation) { + HttpAuthLocation["HEADER"] = "header"; + HttpAuthLocation["QUERY"] = "query"; +})(HttpAuthLocation || (HttpAuthLocation = {})); diff --git a/node_modules/@smithy/types/dist-es/auth/index.js b/node_modules/@smithy/types/dist-es/auth/index.js new file mode 100644 index 00000000..7436030c --- /dev/null +++ b/node_modules/@smithy/types/dist-es/auth/index.js @@ -0,0 +1,6 @@ +export * from "./auth"; +export * from "./HttpApiKeyAuth"; +export * from "./HttpAuthScheme"; +export * from "./HttpAuthSchemeProvider"; +export * from "./HttpSigner"; +export * from "./IdentityProviderConfig"; diff --git a/node_modules/@smithy/types/dist-es/blob/blob-payload-input-types.js b/node_modules/@smithy/types/dist-es/blob/blob-payload-input-types.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/blob/blob-payload-input-types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/checksum.js 
b/node_modules/@smithy/types/dist-es/checksum.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/checksum.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/client.js b/node_modules/@smithy/types/dist-es/client.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/client.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/command.js b/node_modules/@smithy/types/dist-es/command.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/command.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/connection/config.js b/node_modules/@smithy/types/dist-es/connection/config.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/connection/config.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/connection/index.js b/node_modules/@smithy/types/dist-es/connection/index.js new file mode 100644 index 00000000..c6c3ea80 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/connection/index.js @@ -0,0 +1,3 @@ +export * from "./config"; +export * from "./manager"; +export * from "./pool"; diff --git a/node_modules/@smithy/types/dist-es/connection/manager.js b/node_modules/@smithy/types/dist-es/connection/manager.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/connection/manager.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/connection/pool.js b/node_modules/@smithy/types/dist-es/connection/pool.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/connection/pool.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/crypto.js b/node_modules/@smithy/types/dist-es/crypto.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/crypto.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/downlevel-ts3.4/transform/type-transform.js b/node_modules/@smithy/types/dist-es/downlevel-ts3.4/transform/type-transform.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/downlevel-ts3.4/transform/type-transform.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/encode.js b/node_modules/@smithy/types/dist-es/encode.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/encode.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/endpoint.js b/node_modules/@smithy/types/dist-es/endpoint.js new file mode 100644 index 00000000..4ae601ff --- /dev/null +++ b/node_modules/@smithy/types/dist-es/endpoint.js @@ -0,0 +1,5 @@ +export var EndpointURLScheme; +(function (EndpointURLScheme) { + EndpointURLScheme["HTTP"] = "http"; + EndpointURLScheme["HTTPS"] = "https"; +})(EndpointURLScheme || (EndpointURLScheme = {})); diff --git a/node_modules/@smithy/types/dist-es/endpoints/EndpointRuleObject.js b/node_modules/@smithy/types/dist-es/endpoints/EndpointRuleObject.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/endpoints/EndpointRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/endpoints/ErrorRuleObject.js b/node_modules/@smithy/types/dist-es/endpoints/ErrorRuleObject.js new 
file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/endpoints/ErrorRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/endpoints/RuleSetObject.js b/node_modules/@smithy/types/dist-es/endpoints/RuleSetObject.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/endpoints/RuleSetObject.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/endpoints/TreeRuleObject.js b/node_modules/@smithy/types/dist-es/endpoints/TreeRuleObject.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/endpoints/TreeRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/endpoints/index.js b/node_modules/@smithy/types/dist-es/endpoints/index.js new file mode 100644 index 00000000..64d85cf8 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/endpoints/index.js @@ -0,0 +1,5 @@ +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./shared"; +export * from "./TreeRuleObject"; diff --git a/node_modules/@smithy/types/dist-es/endpoints/shared.js b/node_modules/@smithy/types/dist-es/endpoints/shared.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/endpoints/shared.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/eventStream.js b/node_modules/@smithy/types/dist-es/eventStream.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/eventStream.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/extensions/checksum.js b/node_modules/@smithy/types/dist-es/extensions/checksum.js new file mode 100644 index 00000000..5a7939e7 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/extensions/checksum.js @@ -0,0 +1,38 @@ +export var AlgorithmId; +(function (AlgorithmId) { + AlgorithmId["MD5"] = "md5"; + AlgorithmId["CRC32"] = "crc32"; + AlgorithmId["CRC32C"] = "crc32c"; + AlgorithmId["SHA1"] = "sha1"; + AlgorithmId["SHA256"] = "sha256"; +})(AlgorithmId || (AlgorithmId = {})); +export const getChecksumConfiguration = (runtimeConfig) => { + const checksumAlgorithms = []; + if (runtimeConfig.sha256 !== undefined) { + checksumAlgorithms.push({ + algorithmId: () => AlgorithmId.SHA256, + checksumConstructor: () => runtimeConfig.sha256, + }); + } + if (runtimeConfig.md5 != undefined) { + checksumAlgorithms.push({ + algorithmId: () => AlgorithmId.MD5, + checksumConstructor: () => runtimeConfig.md5, + }); + } + return { + addChecksumAlgorithm(algo) { + checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return checksumAlgorithms; + }, + }; +}; +export const resolveChecksumRuntimeConfig = (clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}; diff --git a/node_modules/@smithy/types/dist-es/extensions/defaultClientConfiguration.js b/node_modules/@smithy/types/dist-es/extensions/defaultClientConfiguration.js new file mode 100644 index 00000000..4e3eb911 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/extensions/defaultClientConfiguration.js @@ -0,0 +1,7 @@ +import { getChecksumConfiguration, resolveChecksumRuntimeConfig } from "./checksum"; +export const 
getDefaultClientConfiguration = (runtimeConfig) => { + return getChecksumConfiguration(runtimeConfig); +}; +export const resolveDefaultRuntimeConfig = (config) => { + return resolveChecksumRuntimeConfig(config); +}; diff --git a/node_modules/@smithy/types/dist-es/extensions/defaultExtensionConfiguration.js b/node_modules/@smithy/types/dist-es/extensions/defaultExtensionConfiguration.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/extensions/defaultExtensionConfiguration.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/extensions/index.js b/node_modules/@smithy/types/dist-es/extensions/index.js new file mode 100644 index 00000000..0fa92d96 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/extensions/index.js @@ -0,0 +1,3 @@ +export * from "./defaultClientConfiguration"; +export * from "./defaultExtensionConfiguration"; +export { AlgorithmId } from "./checksum"; diff --git a/node_modules/@smithy/types/dist-es/extensions/retry.js b/node_modules/@smithy/types/dist-es/extensions/retry.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/extensions/retry.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/externals-check/browser-externals-check.js b/node_modules/@smithy/types/dist-es/externals-check/browser-externals-check.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/externals-check/browser-externals-check.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/feature-ids.js b/node_modules/@smithy/types/dist-es/feature-ids.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/feature-ids.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/http.js b/node_modules/@smithy/types/dist-es/http.js new file mode 100644 index 00000000..27b22f01 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/http.js @@ -0,0 +1,5 @@ +export var FieldPosition; +(function (FieldPosition) { + FieldPosition[FieldPosition["HEADER"] = 0] = "HEADER"; + FieldPosition[FieldPosition["TRAILER"] = 1] = "TRAILER"; +})(FieldPosition || (FieldPosition = {})); diff --git a/node_modules/@smithy/types/dist-es/http/httpHandlerInitialization.js b/node_modules/@smithy/types/dist-es/http/httpHandlerInitialization.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/http/httpHandlerInitialization.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/identity/apiKeyIdentity.js b/node_modules/@smithy/types/dist-es/identity/apiKeyIdentity.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/identity/apiKeyIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/identity/awsCredentialIdentity.js b/node_modules/@smithy/types/dist-es/identity/awsCredentialIdentity.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/identity/awsCredentialIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/identity/identity.js b/node_modules/@smithy/types/dist-es/identity/identity.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/identity/identity.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/identity/index.js 
b/node_modules/@smithy/types/dist-es/identity/index.js new file mode 100644 index 00000000..33603203 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/identity/index.js @@ -0,0 +1,4 @@ +export * from "./apiKeyIdentity"; +export * from "./awsCredentialIdentity"; +export * from "./identity"; +export * from "./tokenIdentity"; diff --git a/node_modules/@smithy/types/dist-es/identity/tokenIdentity.js b/node_modules/@smithy/types/dist-es/identity/tokenIdentity.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/identity/tokenIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/index.js b/node_modules/@smithy/types/dist-es/index.js new file mode 100644 index 00000000..c370335c --- /dev/null +++ b/node_modules/@smithy/types/dist-es/index.js @@ -0,0 +1,37 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-payload-input-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./crypto"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./endpoints"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./http"; +export * from "./http/httpHandlerInitialization"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./response"; +export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./streaming-payload/streaming-blob-common-types"; +export * from "./streaming-payload/streaming-blob-payload-input-types"; +export * from "./streaming-payload/streaming-blob-payload-output-types"; +export * from "./transfer"; +export * from "./transform/client-payload-blob-type-narrow"; +export * from "./transform/no-undefined"; +export * from "./transform/type-transform"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/node_modules/@smithy/types/dist-es/logger.js b/node_modules/@smithy/types/dist-es/logger.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/logger.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/middleware.js b/node_modules/@smithy/types/dist-es/middleware.js new file mode 100644 index 00000000..7d0d0500 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/middleware.js @@ -0,0 +1 @@ +export const SMITHY_CONTEXT_KEY = "__smithy_context"; diff --git a/node_modules/@smithy/types/dist-es/pagination.js b/node_modules/@smithy/types/dist-es/pagination.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/pagination.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/profile.js b/node_modules/@smithy/types/dist-es/profile.js new file mode 100644 index 00000000..9d56c8d6 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/profile.js @@ -0,0 +1,6 @@ +export var IniSectionType; +(function (IniSectionType) { + IniSectionType["PROFILE"] = "profile"; + IniSectionType["SSO_SESSION"] = "sso-session"; + IniSectionType["SERVICES"] = "services"; +})(IniSectionType || (IniSectionType = {})); diff --git a/node_modules/@smithy/types/dist-es/response.js b/node_modules/@smithy/types/dist-es/response.js new file mode 100644 index 00000000..cb0ff5c3 --- 
/dev/null +++ b/node_modules/@smithy/types/dist-es/response.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/retry.js b/node_modules/@smithy/types/dist-es/retry.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/retry.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/serde.js b/node_modules/@smithy/types/dist-es/serde.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/serde.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/shapes.js b/node_modules/@smithy/types/dist-es/shapes.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/shapes.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/signature.js b/node_modules/@smithy/types/dist-es/signature.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/signature.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/stream.js b/node_modules/@smithy/types/dist-es/stream.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/stream.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-common-types.js b/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-common-types.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-common-types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-input-types.js b/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-input-types.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-input-types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-output-types.js b/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-output-types.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-output-types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/transfer.js b/node_modules/@smithy/types/dist-es/transfer.js new file mode 100644 index 00000000..f7761513 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/transfer.js @@ -0,0 +1,6 @@ +export var RequestHandlerProtocol; +(function (RequestHandlerProtocol) { + RequestHandlerProtocol["HTTP_0_9"] = "http/0.9"; + RequestHandlerProtocol["HTTP_1_0"] = "http/1.0"; + RequestHandlerProtocol["TDS_8_0"] = "tds/8.0"; +})(RequestHandlerProtocol || (RequestHandlerProtocol = {})); diff --git a/node_modules/@smithy/types/dist-es/transform/client-method-transforms.js b/node_modules/@smithy/types/dist-es/transform/client-method-transforms.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/transform/client-method-transforms.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/transform/client-payload-blob-type-narrow.js b/node_modules/@smithy/types/dist-es/transform/client-payload-blob-type-narrow.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ 
b/node_modules/@smithy/types/dist-es/transform/client-payload-blob-type-narrow.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/transform/exact.js b/node_modules/@smithy/types/dist-es/transform/exact.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/transform/exact.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/transform/no-undefined.js b/node_modules/@smithy/types/dist-es/transform/no-undefined.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/transform/no-undefined.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/transform/type-transform.js b/node_modules/@smithy/types/dist-es/transform/type-transform.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/transform/type-transform.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/uri.js b/node_modules/@smithy/types/dist-es/uri.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/uri.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/util.js b/node_modules/@smithy/types/dist-es/util.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/util.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-es/waiter.js b/node_modules/@smithy/types/dist-es/waiter.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/types/dist-es/waiter.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/types/dist-types/abort-handler.d.ts b/node_modules/@smithy/types/dist-types/abort-handler.d.ts new file mode 100644 index 00000000..09a0544f --- /dev/null +++ b/node_modules/@smithy/types/dist-types/abort-handler.d.ts @@ -0,0 +1,7 @@ +import type { AbortSignal as DeprecatedAbortSignal } from "./abort"; +/** + * @public + */ +export interface AbortHandler { + (this: AbortSignal | DeprecatedAbortSignal, ev: any): any; +} diff --git a/node_modules/@smithy/types/dist-types/abort.d.ts b/node_modules/@smithy/types/dist-types/abort.d.ts new file mode 100644 index 00000000..80fc87f0 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/abort.d.ts @@ -0,0 +1,50 @@ +import type { AbortHandler } from "./abort-handler"; +/** + * @public + */ +export { AbortHandler }; +/** + * @public + * @deprecated use platform (global) type for AbortSignal. + * + * Holders of an AbortSignal object may query if the associated operation has + * been aborted and register an onabort handler. + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal + */ +export interface AbortSignal { + /** + * Whether the action represented by this signal has been cancelled. + */ + readonly aborted: boolean; + /** + * A function to be invoked when the action represented by this signal has + * been cancelled. + */ + onabort: AbortHandler | Function | null; +} +/** + * @public + * @deprecated use platform (global) type for AbortController. + * + * The AWS SDK uses a Controller/Signal model to allow for cooperative + * cancellation of asynchronous operations. When initiating such an operation, + * the caller can create an AbortController and then provide linked signal to + * subtasks. 
This allows a single source to communicate to multiple consumers + * that an action has been aborted without dictating how that cancellation + * should be handled. + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortController + */ +export interface AbortController { + /** + * An object that reports whether the action associated with this + * `AbortController` has been cancelled. + */ + readonly signal: AbortSignal; + /** + * Declares the operation associated with this AbortController to have been + * cancelled. + */ + abort(): void; +}
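These AbortSignal/AbortController declarations are deprecated aliases for the platform API, but they document the cooperative-cancellation model the SDK relies on. A minimal sketch of that model in plain TypeScript (the `slowOperation` helper is hypothetical, not part of the package):

```ts
// A signal-aware task: it settles on its own schedule unless the caller's signal fires first.
function slowOperation(signal: AbortSignal): Promise<string> {
  return new Promise((resolve, reject) => {
    const timer = setTimeout(() => resolve("finished"), 10_000);
    signal.addEventListener("abort", () => {
      clearTimeout(timer);
      reject(new Error("operation aborted"));
    });
  });
}

// The caller keeps the controller; consumers only ever see the signal, so one
// source can cancel many subtasks without knowing how each cleans up.
const controller = new AbortController();
setTimeout(() => controller.abort(), 100);
slowOperation(controller.signal).catch((err) => console.error(err.message)); // "operation aborted"
```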
diff --git a/node_modules/@smithy/types/dist-types/auth/HttpApiKeyAuth.d.ts b/node_modules/@smithy/types/dist-types/auth/HttpApiKeyAuth.d.ts new file mode 100644 index 00000000..5d74340f --- /dev/null +++ b/node_modules/@smithy/types/dist-types/auth/HttpApiKeyAuth.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum HttpApiKeyAuthLocation { + HEADER = "header", + QUERY = "query" +} diff --git a/node_modules/@smithy/types/dist-types/auth/HttpAuthScheme.d.ts b/node_modules/@smithy/types/dist-types/auth/HttpAuthScheme.d.ts new file mode 100644 index 00000000..c5be5324 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/auth/HttpAuthScheme.d.ts @@ -0,0 +1,49 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +import { HandlerExecutionContext } from "../middleware"; +import { HttpSigner } from "./HttpSigner"; +import { IdentityProviderConfig } from "./IdentityProviderConfig"; +/** + * ID for {@link HttpAuthScheme} + * @internal + */ +export type HttpAuthSchemeId = string; +/** + * Interface that defines an HttpAuthScheme + * @internal + */ +export interface HttpAuthScheme { + /** + * ID for an HttpAuthScheme, typically the absolute shape ID of a Smithy auth trait. + */ + schemeId: HttpAuthSchemeId; + /** + * Gets the IdentityProvider corresponding to an HttpAuthScheme. + */ + identityProvider(config: IdentityProviderConfig): IdentityProvider<Identity> | undefined; + /** + * HttpSigner corresponding to an HttpAuthScheme. + */ + signer: HttpSigner; +} +/** + * Interface that defines the identity and signing properties when selecting + * an HttpAuthScheme. + * @internal + */ +export interface HttpAuthOption { + schemeId: HttpAuthSchemeId; + identityProperties?: Record<string, unknown>; + signingProperties?: Record<string, unknown>; + propertiesExtractor?: <TConfig, TContext>(config: TConfig, context: TContext) => { + identityProperties?: Record<string, unknown>; + signingProperties?: Record<string, unknown>; + }; +} +/** + * @internal + */ +export interface SelectedHttpAuthScheme { + httpAuthOption: HttpAuthOption; + identity: Identity; + signer: HttpSigner; +} diff --git a/node_modules/@smithy/types/dist-types/auth/HttpAuthSchemeProvider.d.ts b/node_modules/@smithy/types/dist-types/auth/HttpAuthSchemeProvider.d.ts new file mode 100644 index 00000000..710dc8f3 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/auth/HttpAuthSchemeProvider.d.ts @@ -0,0 +1,20 @@ +import { HandlerExecutionContext } from "../middleware"; +import { HttpAuthOption } from "./HttpAuthScheme"; +/** + * @internal + */ +export interface HttpAuthSchemeParameters { + operation?: string; +} +/** + * @internal + */ +export interface HttpAuthSchemeProvider<TParameters extends HttpAuthSchemeParameters = HttpAuthSchemeParameters> { + (authParameters: TParameters): HttpAuthOption[]; +} +/** + * @internal + */ +export interface HttpAuthSchemeParametersProvider<TConfig extends object, TContext extends HandlerExecutionContext, TParameters extends HttpAuthSchemeParameters, TInput extends object> { + (config: TConfig, context: TContext, input: TInput): Promise<TParameters>; +} diff --git a/node_modules/@smithy/types/dist-types/auth/HttpSigner.d.ts b/node_modules/@smithy/types/dist-types/auth/HttpSigner.d.ts new file mode 100644 index 00000000..ea2969cb --- /dev/null +++ b/node_modules/@smithy/types/dist-types/auth/HttpSigner.d.ts @@ -0,0 +1,41 @@ +import { HttpRequest, HttpResponse } from "../http"; +import { Identity } from "../identity/identity"; +/** + * @internal + */ +export interface ErrorHandler { + <E extends Error>(signingProperties: Record<string, unknown>): (error: E) => never; +} +/** + * @internal + */ +export interface SuccessHandler { + (httpResponse: HttpResponse | unknown, signingProperties: Record<string, unknown>): void; +} +/** + * Interface to sign identity and signing properties. + * @internal + */ +export interface HttpSigner { + /** + * Signs an HttpRequest with an identity and signing properties. + * @param httpRequest request to sign + * @param identity identity to sign the request with + * @param signingProperties property bag for signing + * @returns signed request in a promise + */ + sign(httpRequest: HttpRequest, identity: Identity, signingProperties: Record<string, unknown>): Promise<HttpRequest>; + /** + * Handler that executes after the {@link HttpSigner.sign} invocation and corresponding + * middleware throws an error. + * The error handler is expected to throw the error it receives, so the return type of the error handler is `never`. + * @internal + */ + errorHandler?: ErrorHandler; + /** + * Handler that executes after the {@link HttpSigner.sign} invocation and corresponding + * middleware succeeds. + * @internal + */ + successHandler?: SuccessHandler; +}
diff --git a/node_modules/@smithy/types/dist-types/auth/IdentityProviderConfig.d.ts b/node_modules/@smithy/types/dist-types/auth/IdentityProviderConfig.d.ts new file mode 100644 index 00000000..663d2ec6 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/auth/IdentityProviderConfig.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +import { HttpAuthSchemeId } from "./HttpAuthScheme"; +/** + * Interface to get an IdentityProvider for a specified HttpAuthScheme + * @internal + */ +export interface IdentityProviderConfig { + /** + * Get the IdentityProvider for a specified HttpAuthScheme. + * @param schemeId schemeId of the HttpAuthScheme + * @returns IdentityProvider or undefined if HttpAuthScheme is not found + */ + getIdentityProvider(schemeId: HttpAuthSchemeId): IdentityProvider<Identity> | undefined; +} diff --git a/node_modules/@smithy/types/dist-types/auth/auth.d.ts b/node_modules/@smithy/types/dist-types/auth/auth.d.ts new file mode 100644 index 00000000..2aaabbcb --- /dev/null +++ b/node_modules/@smithy/types/dist-types/auth/auth.d.ts @@ -0,0 +1,57 @@ +/** + * @internal + * + * Authentication schemes represent a way that the service will authenticate the customer’s identity. + */ +export interface AuthScheme { + /** + * @example "sigv4a" or "sigv4" + */ + name: "sigv4" | "sigv4a" | string; + /** + * @example "s3" + */ + signingName: string; + /** + * @example "us-east-1" + */ + signingRegion: string; + /** + * @example ["*"] + * @example ["us-west-2", "us-east-1"] + */ + signingRegionSet?: string[]; + /** + * @deprecated this field was renamed to signingRegion. + */ + signingScope?: never; + properties: Record<string, unknown>; +} +/** + * @internal + * @deprecated + */ +export interface HttpAuthDefinition { + /** + * Defines the location of where the Auth is serialized. + */ + in: HttpAuthLocation; + /** + * Defines the name of the HTTP header or query string parameter + * that contains the Auth. + */ + name: string; + /** + * Defines the security scheme to use on the `Authorization` header value. + * This can only be set if the "in" property is set to {@link HttpAuthLocation.HEADER}. + */ + scheme?: string; +} +/** + * @internal + * @deprecated + */ +export declare enum HttpAuthLocation { + HEADER = "header", + QUERY = "query" +} diff --git a/node_modules/@smithy/types/dist-types/auth/index.d.ts b/node_modules/@smithy/types/dist-types/auth/index.d.ts new file mode 100644 index 00000000..7436030c --- /dev/null +++ b/node_modules/@smithy/types/dist-types/auth/index.d.ts @@ -0,0 +1,6 @@ +export * from "./auth"; +export * from "./HttpApiKeyAuth"; +export * from "./HttpAuthScheme"; +export * from "./HttpAuthSchemeProvider"; +export * from "./HttpSigner"; +export * from "./IdentityProviderConfig";
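To make the HttpSigner contract above concrete, here is a toy signer that stamps an Authorization header from an identity object. This is only a sketch under assumed shapes (an identity carrying an `apiKey` field, and the `HttpRequest` class from @smithy/protocol-http); the package itself ships no signer, and real implementations such as SigV4 live in other @smithy/* packages:

```ts
import type { HttpSigner, Identity } from "@smithy/types";
import { HttpRequest } from "@smithy/protocol-http";

// Hypothetical identity shape, for illustration only.
interface ApiKeyLikeIdentity extends Identity {
  apiKey: string;
}

const toySigner: HttpSigner = {
  // Returns a copy of the request with an Authorization header added;
  // signingProperties is the untyped property bag from the interface.
  async sign(request: HttpRequest, identity: Identity, signingProperties: Record<string, unknown>): Promise<HttpRequest> {
    const { apiKey } = identity as ApiKeyLikeIdentity;
    return new HttpRequest({
      ...request,
      headers: { ...request.headers, authorization: `ApiKey ${apiKey}` },
    });
  },
};
```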
diff --git a/node_modules/@smithy/types/dist-types/blob/blob-payload-input-types.d.ts b/node_modules/@smithy/types/dist-types/blob/blob-payload-input-types.d.ts new file mode 100644 index 00000000..e468bae2 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/blob/blob-payload-input-types.d.ts @@ -0,0 +1,43 @@ +/// <reference types="node" /> +/// <reference types="node" /> +/// <reference types="node" /> +import { Readable } from "stream"; +import type { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * A union of types that can be used as inputs for the service model + * "blob" type when it represents the request's entire payload or body. + * + * For example, in Lambda::invoke, the payload is modeled as a blob type + * and this union applies to it. + * In contrast, in Lambda::createFunction the Zip file option is a blob type, + * but is not the (entire) payload and this union does not apply. + * + * Note: not all types are signable by the standard SignatureV4 signer when + * used as the request body. For example, in Node.js a Readable stream + * is not signable by the default signer. + * They are included in the union because it may work in some cases, + * but the expected types are primarily string and Uint8Array. + * + * Additional details may be found in the internal + * function "getPayloadHash" in the SignatureV4 module. + */ +export type BlobPayloadInputTypes = string | ArrayBuffer | ArrayBufferView | Uint8Array | NodeJsRuntimeBlobTypes | BrowserRuntimeBlobTypes; +/** + * @public + * + * Additional blob types for the Node.js environment. + */ +export type NodeJsRuntimeBlobTypes = Readable | Buffer; +/** + * @public + * + * Additional blob types for the browser environment. + */ +export type BrowserRuntimeBlobTypes = BlobOptionalType | ReadableStreamOptionalType; +/** + * @internal + * @deprecated renamed to BlobPayloadInputTypes. + */ +export type BlobTypes = BlobPayloadInputTypes; diff --git a/node_modules/@smithy/types/dist-types/checksum.d.ts b/node_modules/@smithy/types/dist-types/checksum.d.ts new file mode 100644 index 00000000..19060090 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/checksum.d.ts @@ -0,0 +1,63 @@ +import { SourceData } from "./crypto"; +/** + * @public + * + * An object that provides a checksum of data provided in chunks to `update`. + * The checksum may be performed incrementally as chunks are received or all + * at once when the checksum is finalized, depending on the underlying + * implementation. + * + * It's recommended to compute checksum incrementally to avoid reading the + * entire payload in memory. + * + * A class that implements this interface may accept an optional secret key in its + * constructor while computing checksum value, when using HMAC. If provided, + * this secret key would be used when computing checksum. + */ +export interface Checksum { + /** + * Constant length of the digest created by the algorithm in bytes. + */ + digestLength?: number; + /** + * Creates a new checksum object that contains a deep copy of the internal + * state of the current `Checksum` object. + */ + copy?(): Checksum; + /** + * Returns the digest of all of the data passed. + */ + digest(): Promise<Uint8Array>; + /** + * Allows marking a checksum for checksums that support the ability + * to mark and reset. + * + * @param readLimit - The maximum limit of bytes that can be read + * before the mark position becomes invalid. + */ + mark?(readLimit: number): void; + /** + * Resets the checksum to its initial value. + */ + reset(): void; + /** + * Adds a chunk of data for which checksum needs to be computed. + * This can be called many times with new data as it is streamed. + * + * Implementations may override this method which passes second param + * which makes Checksum object stateless. + * + * @param chunk - The buffer to update checksum with. + */ + update(chunk: Uint8Array): void; +} +/** + * @public + * + * A constructor for a Checksum that may be used to calculate an HMAC. Implementing + * classes should not directly hold the provided key in memory beyond the + * lexical scope of the constructor. + */ +export interface ChecksumConstructor { + new (secret?: SourceData): Checksum; +}
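The Checksum interface above is abstract; as a concrete illustration, here is a minimal incremental SHA-256 checksum backed by node:crypto (my sketch, not the package's own implementation; the optional secret switches it to HMAC per the ChecksumConstructor notes):

```ts
import { createHash, createHmac, type Hash as NodeHash, type Hmac } from "node:crypto";
import type { Checksum, SourceData } from "@smithy/types";

class NodeSha256 implements Checksum {
  private hash: NodeHash | Hmac;
  constructor(private readonly secret?: SourceData) {
    this.hash = this.init();
  }
  private init(): NodeHash | Hmac {
    // SourceData may be a string, ArrayBuffer, or view; normalize for node:crypto.
    return this.secret === undefined
      ? createHash("sha256")
      : createHmac("sha256", Buffer.from(this.secret as ArrayBuffer));
  }
  // Incremental: safe for streamed payloads, no full buffering needed.
  update(chunk: Uint8Array): void {
    this.hash.update(chunk);
  }
  async digest(): Promise<Uint8Array> {
    return new Uint8Array(this.hash.digest());
  }
  reset(): void {
    this.hash = this.init(); // back to the initial state
  }
}

// Usage: feed chunks as they arrive, then finalize once.
const checksum = new NodeSha256();
checksum.update(new TextEncoder().encode("hello"));
checksum.digest().then((d) => console.log(Buffer.from(d).toString("hex")));
```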
diff --git a/node_modules/@smithy/types/dist-types/client.d.ts b/node_modules/@smithy/types/dist-types/client.d.ts new file mode 100644 index 00000000..8bd8f7eb --- /dev/null +++ b/node_modules/@smithy/types/dist-types/client.d.ts @@ -0,0 +1,57 @@ +import { Command } from "./command"; +import { MiddlewareStack } from "./middleware"; +import { MetadataBearer } from "./response"; +import { OptionalParameter } from "./util"; +/** + * @public + * + * A type which checks if the client configuration is optional. + * If all entries of the client configuration are optional, it allows client creation without passing any config. + */ +export type CheckOptionalClientConfig<T> = OptionalParameter<T>; +/** + * @public + * + * function definition for different overrides of client's 'send' function. + */ +export interface InvokeFunction<InputTypes extends object, OutputTypes extends MetadataBearer, ResolvedClientConfiguration> { + <InputType extends InputTypes, OutputType extends OutputTypes>(command: Command<InputTypes, InputType, OutputTypes, OutputType, ResolvedClientConfiguration>, options?: any): Promise<OutputType>; + <InputType extends InputTypes, OutputType extends OutputTypes>(command: Command<InputTypes, InputType, OutputTypes, OutputType, ResolvedClientConfiguration>, cb: (err: any, data?: OutputType) => void): void; + <InputType extends InputTypes, OutputType extends OutputTypes>(command: Command<InputTypes, InputType, OutputTypes, OutputType, ResolvedClientConfiguration>, options: any, cb: (err: any, data?: OutputType) => void): void; + <InputType extends InputTypes, OutputType extends OutputTypes>(command: Command<InputTypes, InputType, OutputTypes, OutputType, ResolvedClientConfiguration>, options?: any, cb?: (err: any, data?: OutputType) => void): Promise<OutputType> | void; +} +/** + * @public + * + * Signature that appears on aggregated clients' methods. + */ +export interface InvokeMethod<InputType extends object, OutputType extends MetadataBearer> { + (input: InputType, options?: any): Promise<OutputType>; + (input: InputType, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options: any, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options?: any, cb?: (err: any, data?: OutputType) => void): Promise<OutputType> | void; +} +/** + * @public + * + * Signature that appears on aggregated clients' methods when argument is optional. + */ +export interface InvokeMethodOptionalArgs<InputType extends object, OutputType extends MetadataBearer> { + (): Promise<OutputType>; + (input: InputType, options?: any): Promise<OutputType>; + (input: InputType, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options: any, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options?: any, cb?: (err: any, data?: OutputType) => void): Promise<OutputType> | void; +} +/** + * A general interface for service clients, idempotent to browser or node clients + * This type corresponds to SmithyClient(https://github.com/aws/aws-sdk-js-v3/blob/main/packages/smithy-client/src/client.ts). + * It's provided for using without importing the SmithyClient class. + * @internal + */ +export interface Client<Input extends object, Output extends MetadataBearer, ResolvedClientConfiguration> { + readonly config: ResolvedClientConfiguration; + middlewareStack: MiddlewareStack<Input, Output>; + send: InvokeFunction<Input, Output, ResolvedClientConfiguration>; + destroy: () => void; +} diff --git a/node_modules/@smithy/types/dist-types/command.d.ts b/node_modules/@smithy/types/dist-types/command.d.ts new file mode 100644 index 00000000..3a71ee79 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/command.d.ts @@ -0,0 +1,23 @@ +import { Handler, MiddlewareStack } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + */ +export interface Command<ClientInput extends object, InputType extends ClientInput, ClientOutput extends MetadataBearer, OutputType extends ClientOutput, ResolvedConfiguration> extends CommandIO<InputType, OutputType> { + readonly input: InputType; + readonly middlewareStack: MiddlewareStack<InputType, OutputType>; + resolveMiddleware(stack: MiddlewareStack<ClientInput, ClientOutput>, configuration: ResolvedConfiguration, options: any): Handler<InputType, OutputType>; +} +/** + * @internal + * + * This is a subset of the Command type used only to detect the i/o types. + */ +export interface CommandIO<InputType extends object, OutputType extends MetadataBearer> { + readonly input: InputType; + resolveMiddleware(stack: any, configuration: any, options: any): Handler<InputType, OutputType>; +} +/** + * @internal + */ +export type GetOutputType<Command> = Command extends CommandIO<any, infer O> ? O : never;
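The overload sets above encode the dual promise/callback calling convention of aggregated clients. A compact sketch of one function body satisfying InvokeMethod, with hypothetical EchoInput/EchoOutput shapes:

```ts
import type { InvokeMethod, MetadataBearer } from "@smithy/types";

interface EchoInput { message: string }
interface EchoOutput extends MetadataBearer { message: string }

// One implementation serves all four overloads: the second argument may be
// options or a callback, and the promise is only returned when no callback is given.
const echo: InvokeMethod<EchoInput, EchoOutput> = (
  input: EchoInput,
  optionsOrCb?: any,
  cb?: (err: any, data?: EchoOutput) => void
): any => {
  const callback = typeof optionsOrCb === "function" ? optionsOrCb : cb;
  const result = Promise.resolve({ message: input.message, $metadata: {} });
  if (!callback) return result; // promise style
  result.then((data) => callback(null, data), (err) => callback(err)); // callback style
};

// Both styles against the same method:
echo({ message: "hi" }).then((r) => console.log(r.message));
echo({ message: "hi" }, (err, data) => console.log(err ?? data?.message));
```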
diff --git a/node_modules/@smithy/types/dist-types/connection/config.d.ts b/node_modules/@smithy/types/dist-types/connection/config.d.ts new file mode 100644 index 00000000..f9d46322 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/connection/config.d.ts @@ -0,0 +1,10 @@ +/** + * @public + */ +export interface ConnectConfiguration { + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + */ + requestTimeout?: number; +} diff --git a/node_modules/@smithy/types/dist-types/connection/index.d.ts b/node_modules/@smithy/types/dist-types/connection/index.d.ts new file mode 100644 index 00000000..c6c3ea80 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/connection/index.d.ts @@ -0,0 +1,3 @@ +export * from "./config"; +export * from "./manager"; +export * from "./pool"; diff --git a/node_modules/@smithy/types/dist-types/connection/manager.d.ts b/node_modules/@smithy/types/dist-types/connection/manager.d.ts new file mode 100644 index 00000000..5b1a8372 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/connection/manager.d.ts @@ -0,0 +1,34 @@ +import { RequestContext } from "../transfer"; +import { ConnectConfiguration } from "./config"; +/** + * @public + */ +export interface ConnectionManagerConfiguration { + /** + * Maximum number of allowed concurrent requests per connection. + */ + maxConcurrency?: number; + /** + * Disables concurrent requests per connection. + */ + disableConcurrency?: boolean; +} +/** + * @public + */ +export interface ConnectionManager<T> { + /** + * Retrieves a connection from the connection pool if available, + * otherwise establish a new connection + */ + lease(requestContext: RequestContext, connectionConfiguration: ConnectConfiguration): T; + /** + * Releases the connection back to the pool making it potentially + * re-usable by other requests. + */ + release(requestContext: RequestContext, connection: T): void; + /** + * Destroys the connection manager. All connections will be closed. + */ + destroy(): void; +} diff --git a/node_modules/@smithy/types/dist-types/connection/pool.d.ts b/node_modules/@smithy/types/dist-types/connection/pool.d.ts new file mode 100644 index 00000000..d43530a0 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/connection/pool.d.ts @@ -0,0 +1,32 @@ +/** + * @public + */ +export interface ConnectionPool<T> { + /** + * Retrieve the first connection in the pool + */ + poll(): T | void; + /** + * Release the connection back to the pool making it potentially + * re-usable by other requests. + */ + offerLast(connection: T): void; + /** + * Removes the connection from the pool, and destroys it. + */ + destroy(connection: T): void; + /** + * Implements the iterable protocol and allows arrays to be consumed + * by most syntaxes expecting iterables, such as the spread syntax + * and for...of loops + */ + [Symbol.iterator](): Iterator<T>; +} +/** + * Unused. + * @internal + * @deprecated + */ +export interface CacheKey { + destination: string; +}
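ConnectionManager and ConnectionPool above describe a lease/release lifecycle over some connection handle T. A minimal array-backed pool as a sketch (the `destroyConnection` cleanup hook is an assumption supplied by the caller, e.g. a socket's own destroy method):

```ts
import type { ConnectionPool } from "@smithy/types";

class ArrayConnectionPool<T> implements ConnectionPool<T> {
  private readonly connections: T[] = [];
  constructor(private readonly destroyConnection: (conn: T) => void) {}
  // First pooled connection, if any; a manager would otherwise dial a new one.
  poll(): T | void {
    return this.connections.shift();
  }
  // Return a connection to the pool for reuse by later requests.
  offerLast(connection: T): void {
    this.connections.push(connection);
  }
  // Drop the connection from the pool and tear it down.
  destroy(connection: T): void {
    const i = this.connections.indexOf(connection);
    if (i >= 0) this.connections.splice(i, 1);
    this.destroyConnection(connection);
  }
  [Symbol.iterator](): Iterator<T> {
    return this.connections[Symbol.iterator]();
  }
}
```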
+ */ + update(toHash: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + /** + * Finalizes the hash and provides a promise that will be fulfilled with the + * raw bytes of the calculated hash. + */ + digest(): Promise; +} +/** + * @public + * + * A constructor for a hash that may be used to calculate an HMAC. Implementing + * classes should not directly hold the provided key in memory beyond the + * lexical scope of the constructor. + * + * @deprecated use {@link ChecksumConstructor} + */ +export interface HashConstructor { + new (secret?: SourceData): Hash; +} +/** + * @public + * + * A function that calculates the hash of a data stream. Determining the hash + * will consume the stream, so only replayable streams should be provided to an + * implementation of this interface. + */ +export interface StreamHasher { + (hashCtor: HashConstructor, stream: StreamType): Promise; +} +/** + * @public + * + * A function that returns a promise fulfilled with bytes from a + * cryptographically secure pseudorandom number generator. + */ +export interface randomValues { + (byteLength: number): Promise; +} diff --git a/node_modules/@smithy/types/dist-types/downlevel-ts3.4/transform/type-transform.d.ts b/node_modules/@smithy/types/dist-types/downlevel-ts3.4/transform/type-transform.d.ts new file mode 100644 index 00000000..312ae6e3 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/downlevel-ts3.4/transform/type-transform.d.ts @@ -0,0 +1,25 @@ +/** + * @public + * + * Transforms any members of the object T having type FromType + * to ToType. This applies only to exact type matches. + * + * This is for the case where FromType is a union and only those fields + * matching the same union should be transformed. + */ +export type Transform = RecursiveTransformExact; +/** + * @internal + * + * Returns ToType if T matches exactly with FromType. + */ +type TransformExact = [T] extends [FromType] ? ([FromType] extends [T] ? ToType : T) : T; +/** + * @internal + * + * Applies TransformExact to members of an object recursively. + */ +type RecursiveTransformExact = T extends Function ? T : T extends object ? { + [key in keyof T]: [T[key]] extends [FromType] ? [FromType] extends [T[key]] ? 
ToType : RecursiveTransformExact : RecursiveTransformExact; +} : TransformExact; +export {}; diff --git a/node_modules/@smithy/types/dist-types/encode.d.ts b/node_modules/@smithy/types/dist-types/encode.d.ts new file mode 100644 index 00000000..27d3a184 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/encode.d.ts @@ -0,0 +1,31 @@ +import { Message } from "./eventStream"; +/** + * @public + */ +export interface MessageEncoder { + encode(message: Message): Uint8Array; +} +/** + * @public + */ +export interface MessageDecoder { + decode(message: ArrayBufferView): Message; + feed(message: ArrayBufferView): void; + endOfStream(): void; + getMessage(): AvailableMessage; + getAvailableMessages(): AvailableMessages; +} +/** + * @public + */ +export interface AvailableMessage { + getMessage(): Message | undefined; + isEndOfStream(): boolean; +} +/** + * @public + */ +export interface AvailableMessages { + getMessages(): Message[]; + isEndOfStream(): boolean; +} diff --git a/node_modules/@smithy/types/dist-types/endpoint.d.ts b/node_modules/@smithy/types/dist-types/endpoint.d.ts new file mode 100644 index 00000000..4e937331 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/endpoint.d.ts @@ -0,0 +1,77 @@ +import { AuthScheme } from "./auth/auth"; +/** + * @public + */ +export interface EndpointPartition { + name: string; + dnsSuffix: string; + dualStackDnsSuffix: string; + supportsFIPS: boolean; + supportsDualStack: boolean; +} +/** + * @public + */ +export interface EndpointARN { + partition: string; + service: string; + region: string; + accountId: string; + resourceId: Array; +} +/** + * @public + */ +export declare enum EndpointURLScheme { + HTTP = "http", + HTTPS = "https" +} +/** + * @public + */ +export interface EndpointURL { + /** + * The URL scheme such as http or https. + */ + scheme: EndpointURLScheme; + /** + * The authority is the host and optional port component of the URL. + */ + authority: string; + /** + * The parsed path segment of the URL. + * This value is as-is as provided by the user. + */ + path: string; + /** + * The parsed path segment of the URL. + * This value is guaranteed to start and end with a "/". + */ + normalizedPath: string; + /** + * A boolean indicating whether the authority is an IP address.
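A type-level illustration of the `Transform` utility from `type-transform.d.ts` above. Its type parameters are elided in this listing; the sketch assumes the usual `Transform<T, FromType, ToType>` order. Only members matching the from-type exactly are rewritten:

```ts
import type { Transform } from "@smithy/types";

type ApiShape = {
  created: Date | undefined; // exact match with FromType: transformed
  name: string;              // untouched
};

// WireShape is { created: string | undefined; name: string }
type WireShape = Transform<ApiShape, Date | undefined, string | undefined>;
```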
+ */ + isIp: boolean; +} +/** + * @public + */ +export type EndpointObjectProperty = string | boolean | { + [key: string]: EndpointObjectProperty; +} | EndpointObjectProperty[]; +/** + * @public + */ +export interface EndpointV2 { + url: URL; + properties?: { + authSchemes?: AuthScheme[]; + } & Record; + headers?: Record; +} +/** + * @public + */ +export type EndpointParameters = { + [name: string]: undefined | boolean | string | string[]; +}; diff --git a/node_modules/@smithy/types/dist-types/endpoints/EndpointRuleObject.d.ts b/node_modules/@smithy/types/dist-types/endpoints/EndpointRuleObject.d.ts new file mode 100644 index 00000000..349558e7 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/endpoints/EndpointRuleObject.d.ts @@ -0,0 +1,27 @@ +import { EndpointObjectProperty } from "../endpoint"; +import { ConditionObject, Expression } from "./shared"; +/** + * @public + */ +export type EndpointObjectProperties = Record; +/** + * @public + */ +export type EndpointObjectHeaders = Record; +/** + * @public + */ +export type EndpointObject = { + url: Expression; + properties?: EndpointObjectProperties; + headers?: EndpointObjectHeaders; +}; +/** + * @public + */ +export type EndpointRuleObject = { + type: "endpoint"; + conditions?: ConditionObject[]; + endpoint: EndpointObject; + documentation?: string; +}; diff --git a/node_modules/@smithy/types/dist-types/endpoints/ErrorRuleObject.d.ts b/node_modules/@smithy/types/dist-types/endpoints/ErrorRuleObject.d.ts new file mode 100644 index 00000000..9ce0733e --- /dev/null +++ b/node_modules/@smithy/types/dist-types/endpoints/ErrorRuleObject.d.ts @@ -0,0 +1,10 @@ +import { ConditionObject, Expression } from "./shared"; +/** + * @public + */ +export type ErrorRuleObject = { + type: "error"; + conditions?: ConditionObject[]; + error: Expression; + documentation?: string; +}; diff --git a/node_modules/@smithy/types/dist-types/endpoints/RuleSetObject.d.ts b/node_modules/@smithy/types/dist-types/endpoints/RuleSetObject.d.ts new file mode 100644 index 00000000..669b591d --- /dev/null +++ b/node_modules/@smithy/types/dist-types/endpoints/RuleSetObject.d.ts @@ -0,0 +1,28 @@ +import { RuleSetRules } from "./TreeRuleObject"; +/** + * @public + */ +export type DeprecatedObject = { + message?: string; + since?: string; +}; +/** + * @public + */ +export type ParameterObject = { + type: "String" | "string" | "Boolean" | "boolean"; + default?: string | boolean; + required?: boolean; + documentation?: string; + builtIn?: string; + deprecated?: DeprecatedObject; +}; +/** + * @public + */ +export type RuleSetObject = { + version: string; + serviceId?: string; + parameters: Record; + rules: RuleSetRules; +}; diff --git a/node_modules/@smithy/types/dist-types/endpoints/TreeRuleObject.d.ts b/node_modules/@smithy/types/dist-types/endpoints/TreeRuleObject.d.ts new file mode 100644 index 00000000..180d306d --- /dev/null +++ b/node_modules/@smithy/types/dist-types/endpoints/TreeRuleObject.d.ts @@ -0,0 +1,16 @@ +import { EndpointRuleObject } from "./EndpointRuleObject"; +import { ErrorRuleObject } from "./ErrorRuleObject"; +import { ConditionObject } from "./shared"; +/** + * @public + */ +export type RuleSetRules = Array; +/** + * @public + */ +export type TreeRuleObject = { + type: "tree"; + conditions?: ConditionObject[]; + rules: RuleSetRules; + documentation?: string; +}; diff --git a/node_modules/@smithy/types/dist-types/endpoints/index.d.ts b/node_modules/@smithy/types/dist-types/endpoints/index.d.ts new file mode 100644 index 00000000..64d85cf8 --- /dev/null 
+++ b/node_modules/@smithy/types/dist-types/endpoints/index.d.ts @@ -0,0 +1,5 @@ +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./shared"; +export * from "./TreeRuleObject"; diff --git a/node_modules/@smithy/types/dist-types/endpoints/shared.d.ts b/node_modules/@smithy/types/dist-types/endpoints/shared.d.ts new file mode 100644 index 00000000..bd113934 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/endpoints/shared.d.ts @@ -0,0 +1,55 @@ +import { Logger } from "../logger"; +/** + * @public + */ +export type ReferenceObject = { + ref: string; +}; +/** + * @public + */ +export type FunctionObject = { + fn: string; + argv: FunctionArgv; +}; +/** + * @public + */ +export type FunctionArgv = Array; +/** + * @public + */ +export type FunctionReturn = string | boolean | number | { + [key: string]: FunctionReturn; +}; +/** + * @public + */ +export type ConditionObject = FunctionObject & { + assign?: string; +}; +/** + * @public + */ +export type Expression = string | ReferenceObject | FunctionObject; +/** + * @public + */ +export type EndpointParams = Record; +/** + * @public + */ +export type EndpointResolverOptions = { + endpointParams: EndpointParams; + logger?: Logger; +}; +/** + * @public + */ +export type ReferenceRecord = Record; +/** + * @public + */ +export type EvaluateOptions = EndpointResolverOptions & { + referenceRecord: ReferenceRecord; +}; diff --git a/node_modules/@smithy/types/dist-types/eventStream.d.ts b/node_modules/@smithy/types/dist-types/eventStream.d.ts new file mode 100644 index 00000000..7b9af6c6 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/eventStream.d.ts @@ -0,0 +1,137 @@ +import { HttpRequest } from "./http"; +import { FinalizeHandler, FinalizeHandlerArguments, FinalizeHandlerOutput, HandlerExecutionContext } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * An event stream message. The headers and body properties will always be + * defined, with empty headers represented as an object with no keys and an + * empty body represented as a zero-length Uint8Array. 
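A hand-assembled event stream message matching the `Message` shape documented above (and declared just below); the header names here are illustrative only, not required by the type:

```ts
import type { Message } from "@smithy/types";

// Empty headers would be {}; an empty body would be a zero-length Uint8Array.
const message: Message = {
  headers: {
    ":event-type": { type: "string", value: "chunk" },
    ":message-type": { type: "string", value: "event" },
  },
  body: new Uint8Array([0x68, 0x69]),
};
```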
+ */ +export interface Message { + headers: MessageHeaders; + body: Uint8Array; +} +/** + * @public + */ +export type MessageHeaders = Record; +/** + * @public + */ +export type HeaderValue = { + type: K; + value: V; +}; +/** + * @public + */ +export type BooleanHeaderValue = HeaderValue<"boolean", boolean>; +/** + * @public + */ +export type ByteHeaderValue = HeaderValue<"byte", number>; +/** + * @public + */ +export type ShortHeaderValue = HeaderValue<"short", number>; +/** + * @public + */ +export type IntegerHeaderValue = HeaderValue<"integer", number>; +/** + * @public + */ +export type LongHeaderValue = HeaderValue<"long", Int64>; +/** + * @public + */ +export type BinaryHeaderValue = HeaderValue<"binary", Uint8Array>; +/** + * @public + */ +export type StringHeaderValue = HeaderValue<"string", string>; +/** + * @public + */ +export type TimestampHeaderValue = HeaderValue<"timestamp", Date>; +/** + * @public + */ +export type UuidHeaderValue = HeaderValue<"uuid", string>; +/** + * @public + */ +export type MessageHeaderValue = BooleanHeaderValue | ByteHeaderValue | ShortHeaderValue | IntegerHeaderValue | LongHeaderValue | BinaryHeaderValue | StringHeaderValue | TimestampHeaderValue | UuidHeaderValue; +/** + * @public + */ +export interface Int64 { + readonly bytes: Uint8Array; + valueOf: () => number; + toString: () => string; +} +/** + * @public + * + * Util functions for serializing or deserializing event stream + */ +export interface EventStreamSerdeContext { + eventStreamMarshaller: EventStreamMarshaller; +} +/** + * @public + * + * A function which deserializes a binary event stream message into the modeled shape. + */ +export interface EventStreamMarshallerDeserFn { + (body: StreamType, deserializer: (input: Record) => Promise): AsyncIterable; +} +/** + * @public + * + * A function that serializes the modeled shape into a binary stream message. + */ +export interface EventStreamMarshallerSerFn { + (input: AsyncIterable, serializer: (event: T) => Message): StreamType; +} +/** + * @public + * + * An interface which provides functions for serializing and deserializing binary event stream + * to/from the corresponding modeled shape.
+ */ +export interface EventStreamMarshaller { + deserialize: EventStreamMarshallerDeserFn; + serialize: EventStreamMarshallerSerFn; +} +/** + * @public + */ +export interface EventStreamRequestSigner { + sign(request: HttpRequest): Promise; +} +/** + * @public + */ +export interface EventStreamPayloadHandler { + handle: (next: FinalizeHandler, args: FinalizeHandlerArguments, context?: HandlerExecutionContext) => Promise>; +} +/** + * @public + */ +export interface EventStreamPayloadHandlerProvider { + (options: any): EventStreamPayloadHandler; +} +/** + * @public + */ +export interface EventStreamSerdeProvider { + (options: any): EventStreamMarshaller; +} +/** + * @public + */ +export interface EventStreamSignerProvider { + (options: any): EventStreamRequestSigner; +} diff --git a/node_modules/@smithy/types/dist-types/extensions/checksum.d.ts b/node_modules/@smithy/types/dist-types/extensions/checksum.d.ts new file mode 100644 index 00000000..88995b93 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/extensions/checksum.d.ts @@ -0,0 +1,58 @@ +import { ChecksumConstructor } from "../checksum"; +import { HashConstructor } from "../crypto"; +/** + * @internal + */ +export declare enum AlgorithmId { + MD5 = "md5", + CRC32 = "crc32", + CRC32C = "crc32c", + SHA1 = "sha1", + SHA256 = "sha256" +} +/** + * @internal + */ +export interface ChecksumAlgorithm { + algorithmId(): AlgorithmId; + checksumConstructor(): ChecksumConstructor | HashConstructor; +} +/** + * @deprecated unused. + * @internal + */ +type ChecksumConfigurationLegacy = { + [other in string | number]: any; +}; +/** + * @internal + */ +export interface ChecksumConfiguration extends ChecksumConfigurationLegacy { + addChecksumAlgorithm(algo: ChecksumAlgorithm): void; + checksumAlgorithms(): ChecksumAlgorithm[]; +} +/** + * @deprecated will be removed for implicit type. + * @internal + */ +type GetChecksumConfigurationType = (runtimeConfig: Partial<{ + sha256: ChecksumConstructor | HashConstructor; + md5: ChecksumConstructor | HashConstructor; +}>) => ChecksumConfiguration; +/** + * @internal + * @deprecated will be moved to smithy-client. + */ +export declare const getChecksumConfiguration: GetChecksumConfigurationType; +/** + * @internal + * @deprecated will be removed for implicit type. + */ +type ResolveChecksumRuntimeConfigType = (clientConfig: ChecksumConfiguration) => any; +/** + * @internal + * + * @deprecated will be moved to smithy-client. + */ +export declare const resolveChecksumRuntimeConfig: ResolveChecksumRuntimeConfigType; +export {}; diff --git a/node_modules/@smithy/types/dist-types/extensions/defaultClientConfiguration.d.ts b/node_modules/@smithy/types/dist-types/extensions/defaultClientConfiguration.d.ts new file mode 100644 index 00000000..12eb9248 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/extensions/defaultClientConfiguration.d.ts @@ -0,0 +1,33 @@ +import { ChecksumConfiguration } from "./checksum"; +/** + * @deprecated will be replaced by DefaultExtensionConfiguration. + * @internal + * + * Default client configuration consisting various configurations for modifying a service client + */ +export interface DefaultClientConfiguration extends ChecksumConfiguration { +} +/** + * @deprecated will be removed for implicit type. + */ +type GetDefaultConfigurationType = (runtimeConfig: any) => DefaultClientConfiguration; +/** + * @deprecated moving to @smithy/smithy-client. 
+ * @internal + * + * Helper function to resolve default client configuration from runtime config + * + */ +export declare const getDefaultClientConfiguration: GetDefaultConfigurationType; +/** + * @deprecated will be removed for implicit type. + */ +type ResolveDefaultRuntimeConfigType = (clientConfig: DefaultClientConfiguration) => any; +/** + * @deprecated moving to @smithy/smithy-client. + * @internal + * + * Helper function to resolve runtime config from default client configuration + */ +export declare const resolveDefaultRuntimeConfig: ResolveDefaultRuntimeConfigType; +export {}; diff --git a/node_modules/@smithy/types/dist-types/extensions/defaultExtensionConfiguration.d.ts b/node_modules/@smithy/types/dist-types/extensions/defaultExtensionConfiguration.d.ts new file mode 100644 index 00000000..0e6fa0d3 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/extensions/defaultExtensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { ChecksumConfiguration } from "./checksum"; +import { RetryStrategyConfiguration } from "./retry"; +/** + * @internal + * + * Default extension configuration consisting various configurations for modifying a service client + */ +export interface DefaultExtensionConfiguration extends ChecksumConfiguration, RetryStrategyConfiguration { +} diff --git a/node_modules/@smithy/types/dist-types/extensions/index.d.ts b/node_modules/@smithy/types/dist-types/extensions/index.d.ts new file mode 100644 index 00000000..cce65a1c --- /dev/null +++ b/node_modules/@smithy/types/dist-types/extensions/index.d.ts @@ -0,0 +1,4 @@ +export * from "./defaultClientConfiguration"; +export * from "./defaultExtensionConfiguration"; +export { AlgorithmId, ChecksumAlgorithm, ChecksumConfiguration } from "./checksum"; +export { RetryStrategyConfiguration } from "./retry"; diff --git a/node_modules/@smithy/types/dist-types/extensions/retry.d.ts b/node_modules/@smithy/types/dist-types/extensions/retry.d.ts new file mode 100644 index 00000000..8b91f1c0 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/extensions/retry.d.ts @@ -0,0 +1,18 @@ +import { RetryStrategyV2 } from "../retry"; +import { Provider, RetryStrategy } from "../util"; +/** + * A configuration interface with methods called by runtime extension + * @internal + */ +export interface RetryStrategyConfiguration { + /** + * Set retry strategy used for all http requests + * @param retryStrategy + */ + setRetryStrategy(retryStrategy: Provider): void; + /** + * Get retry strategy used for all http requests + * @param retryStrategy + */ + retryStrategy(): Provider; +} diff --git a/node_modules/@smithy/types/dist-types/externals-check/browser-externals-check.d.ts b/node_modules/@smithy/types/dist-types/externals-check/browser-externals-check.d.ts new file mode 100644 index 00000000..0de7f8fa --- /dev/null +++ b/node_modules/@smithy/types/dist-types/externals-check/browser-externals-check.d.ts @@ -0,0 +1,35 @@ +import type { Exact } from "../transform/exact"; +/** + * @public + * + * A checked type that resolves to Blob if it is defined as more than a stub, otherwise + * resolves to 'never' so as not to widen the type of unions containing Blob + * excessively. + */ +export type BlobOptionalType = BlobDefined extends true ? Blob : Unavailable; +/** + * @public + * + * A checked type that resolves to ReadableStream if it is defined as more than a stub, otherwise + * resolves to 'never' so as not to widen the type of unions containing ReadableStream + * excessively. 
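Before moving past the extensions section above: a sketch of how a runtime extension might drive the `DefaultExtensionConfiguration` interfaces. The `Crc32` constructor is a hypothetical checksum implementation standing in for anything satisfying `ChecksumConstructor`, and the `configure` shape is illustrative:

```ts
import { AlgorithmId } from "@smithy/types";
import type { ChecksumAlgorithm, DefaultExtensionConfiguration } from "@smithy/types";

// Hypothetical checksum implementation; declared loosely here for brevity.
declare const Crc32: any;

const checksumExtension = {
  configure(extensionConfiguration: DefaultExtensionConfiguration): void {
    // Register an additional checksum algorithm with the extension config.
    const algo: ChecksumAlgorithm = {
      algorithmId: () => AlgorithmId.CRC32,
      checksumConstructor: () => Crc32,
    };
    extensionConfiguration.addChecksumAlgorithm(algo);
  },
};
```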
+ */ +export type ReadableStreamOptionalType = ReadableStreamDefined extends true ? ReadableStream : Unavailable; +/** + * @public + * + * Indicates a type is unavailable if it resolves to this. + */ +export type Unavailable = never; +/** + * @internal + * + * Whether the global types define more than a stub for ReadableStream. + */ +export type ReadableStreamDefined = Exact extends true ? false : true; +/** + * @internal + * + * Whether the global types define more than a stub for Blob. + */ +export type BlobDefined = Exact extends true ? false : true; diff --git a/node_modules/@smithy/types/dist-types/feature-ids.d.ts b/node_modules/@smithy/types/dist-types/feature-ids.d.ts new file mode 100644 index 00000000..19e4bd2d --- /dev/null +++ b/node_modules/@smithy/types/dist-types/feature-ids.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + */ +export type SmithyFeatures = Partial<{ + RESOURCE_MODEL: "A"; + WAITER: "B"; + PAGINATOR: "C"; + RETRY_MODE_LEGACY: "D"; + RETRY_MODE_STANDARD: "E"; + RETRY_MODE_ADAPTIVE: "F"; + GZIP_REQUEST_COMPRESSION: "L"; + PROTOCOL_RPC_V2_CBOR: "M"; + ENDPOINT_OVERRIDE: "N"; + SIGV4A_SIGNING: "S"; + CREDENTIALS_CODE: "e"; +}>; diff --git a/node_modules/@smithy/types/dist-types/http.d.ts b/node_modules/@smithy/types/dist-types/http.d.ts new file mode 100644 index 00000000..76c6cb29 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/http.d.ts @@ -0,0 +1,112 @@ +import { AbortSignal as DeprecatedAbortSignal } from "./abort"; +import { URI } from "./uri"; +/** + * @public + * + * @deprecated use {@link EndpointV2} from `@smithy/types`. + */ +export interface Endpoint { + protocol: string; + hostname: string; + port?: number; + path: string; + query?: QueryParameterBag; +} +/** + * @public + * + * Interface an HTTP request class. Contains + * addressing information in addition to standard message properties. + */ +export interface HttpRequest extends HttpMessage, URI { + method: string; +} +/** + * @public + * + * Represents an HTTP message as received in reply to a request. Contains a + * numeric status code in addition to standard message properties. + */ +export interface HttpResponse extends HttpMessage { + statusCode: number; + reason?: string; +} +/** + * @public + * + * Represents an HTTP message with headers and an optional static or streaming + * body. body: ArrayBuffer | ArrayBufferView | string | Uint8Array | Readable | ReadableStream; + */ +export interface HttpMessage { + headers: HeaderBag; + body?: any; +} +/** + * @public + * + * A mapping of query parameter names to strings or arrays of strings, with the + * second being used when a parameter contains a list of values. Value can be set + * to null when query is not in key-value pairs shape + */ +export type QueryParameterBag = Record | null>; +/** + * @public + */ +export type FieldOptions = { + name: string; + kind?: FieldPosition; + values?: string[]; +}; +/** + * @public + */ +export declare enum FieldPosition { + HEADER = 0, + TRAILER = 1 +} +/** + * @public + * + * A mapping of header names to string values. Multiple values for the same + * header should be represented as a single string with values separated by + * `, `. + * + * Keys should be considered case insensitive, even if this is not enforced by a + * particular implementation. 
For example, given the following HeaderBag, where + * keys differ only in case: + * + * ```json + * { + * 'x-request-date': '2000-01-01T00:00:00Z', + * 'X-Request-Date': '2001-01-01T00:00:00Z' + * } + * ``` + * + * The SDK may at any point during processing remove one of the object + * properties in favor of the other. The headers may or may not be combined, and + * the SDK will not deterministically select which header candidate to use. + */ +export type HeaderBag = Record; +/** + * @public + * + * Represents an HTTP message with headers and an optional static or streaming + * body. body: ArrayBuffer | ArrayBufferView | string | Uint8Array | Readable | ReadableStream; + */ +export interface HttpMessage { + headers: HeaderBag; + body?: any; +} +/** + * @public + * + * Represents the options that may be passed to an HTTP handler. + */ +export interface HttpHandlerOptions { + abortSignal?: AbortSignal | DeprecatedAbortSignal; + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + */ + requestTimeout?: number; +} diff --git a/node_modules/@smithy/types/dist-types/http/httpHandlerInitialization.d.ts b/node_modules/@smithy/types/dist-types/http/httpHandlerInitialization.d.ts new file mode 100644 index 00000000..bca08518 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/http/httpHandlerInitialization.d.ts @@ -0,0 +1,123 @@ +/// +/// +import type { Agent as hAgent, AgentOptions as hAgentOptions } from "http"; +import type { Agent as hsAgent, AgentOptions as hsAgentOptions } from "https"; +import { HttpRequest as IHttpRequest } from "../http"; +import { Logger } from "../logger"; +/** + * + * This type represents an alternate client constructor option for the entry + * "requestHandler". Instead of providing an instance of a requestHandler, the user + * may provide the requestHandler's constructor options for either the + * NodeHttpHandler or FetchHttpHandler. + * + * For other RequestHandlers like HTTP2 or WebSocket, + * constructor parameter passthrough is not currently available. + * + * @public + */ +export type RequestHandlerParams = NodeHttpHandlerOptions | FetchHttpHandlerOptions; +/** + * Represents the http options that can be passed to a node http client. + * @public + */ +export interface NodeHttpHandlerOptions { + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + * + * Defaults to 0, which disables the timeout. + */ + connectionTimeout?: number; + /** + * The number of milliseconds a request can take before automatically being terminated. + * Defaults to 0, which disables the timeout. + */ + requestTimeout?: number; + /** + * Delay before the NodeHttpHandler checks for socket exhaustion, + * and emits a warning if the active sockets and enqueued request count is greater than + * 2x the maxSockets count. + * + * Defaults to connectionTimeout + requestTimeout or 3000ms if those are not set. + */ + socketAcquisitionWarningTimeout?: number; + /** + * This field is deprecated, and requestTimeout should be used instead. + * The maximum time in milliseconds that a socket may remain idle before it + * is closed. + * + * @deprecated Use {@link requestTimeout} + */ + socketTimeout?: number; + /** + * You can pass http.Agent or its constructor options. + */ + httpAgent?: hAgent | hAgentOptions; + /** + * You can pass https.Agent or its constructor options.
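The `RequestHandlerParams` passthrough above means handler options can be supplied directly in a client's constructor. A sketch under that assumption, where `SomeClient` is a placeholder for any Smithy-generated client:

```ts
import { Agent } from "https";

// Placeholder constructor; any client accepting RequestHandlerParams works here.
declare const SomeClient: new (config: { requestHandler?: unknown }) => unknown;

const client = new SomeClient({
  requestHandler: {
    connectionTimeout: 5_000, // ms allowed for the connection phase
    requestTimeout: 30_000, // ms allowed for the request overall
    httpsAgent: new Agent({ keepAlive: true, maxSockets: 50 }),
  },
});
```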
+ */ + httpsAgent?: hsAgent | hsAgentOptions; + /** + * Optional logger. + */ + logger?: Logger; +} +/** + * Represents the http options that can be passed to a browser http client. + * @public + */ +export interface FetchHttpHandlerOptions { + /** + * The number of milliseconds a request can take before being automatically + * terminated. + */ + requestTimeout?: number; + /** + * Whether to allow the request to outlive the page. Default value is false. + * + * There may be limitations to the payload size, number of concurrent requests, + * request duration etc. when using keepalive in browsers. + * + * These may change over time, so look for up to date information about + * these limitations before enabling keepalive. + */ + keepAlive?: boolean; + /** + * A string indicating whether credentials will be sent with the request always, never, or + * only when sent to a same-origin URL. + * @see https://developer.mozilla.org/en-US/docs/Web/API/Request/credentials + */ + credentials?: "include" | "omit" | "same-origin" | undefined | string; + /** + * Cache settings for fetch. + * @see https://developer.mozilla.org/en-US/docs/Web/API/Request/cache + */ + cache?: "default" | "force-cache" | "no-cache" | "no-store" | "only-if-cached" | "reload"; + /** + * An optional function that produces additional RequestInit + * parameters for each httpRequest. + * + * This is applied last via merging with Object.assign() and overwrites other values + * set from other sources. + * + * @example + * ```js + * new Client({ + * requestHandler: { + * requestInit(httpRequest) { + * return { cache: "no-store" }; + * } + * } + * }); + * ``` + */ + requestInit?: (httpRequest: IHttpRequest) => RequestInit; +} +declare global { + /** + * interface merging stub. + */ + interface RequestInit { + } +} diff --git a/node_modules/@smithy/types/dist-types/identity/apiKeyIdentity.d.ts b/node_modules/@smithy/types/dist-types/identity/apiKeyIdentity.d.ts new file mode 100644 index 00000000..27750d4e --- /dev/null +++ b/node_modules/@smithy/types/dist-types/identity/apiKeyIdentity.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +/** + * @public + */ +export interface ApiKeyIdentity extends Identity { + /** + * The literal API Key + */ + readonly apiKey: string; +} +/** + * @public + */ +export type ApiKeyIdentityProvider = IdentityProvider; diff --git a/node_modules/@smithy/types/dist-types/identity/awsCredentialIdentity.d.ts b/node_modules/@smithy/types/dist-types/identity/awsCredentialIdentity.d.ts new file mode 100644 index 00000000..7aa5a4b0 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/identity/awsCredentialIdentity.d.ts @@ -0,0 +1,31 @@ +import { Identity, IdentityProvider } from "./identity"; +/** + * @public + */ +export interface AwsCredentialIdentity extends Identity { + /** + * AWS access key ID + */ + readonly accessKeyId: string; + /** + * AWS secret access key + */ + readonly secretAccessKey: string; + /** + * A security or session token to use with these credentials. Usually + * present for temporary credentials. + */ + readonly sessionToken?: string; + /** + * AWS credential scope for this set of credentials. + */ + readonly credentialScope?: string; + /** + * AWS accountId. 
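A minimal sketch of an `IdentityProvider` for the `AwsCredentialIdentity` shape above (its provider alias appears just below): static credentials read from the environment. A production provider would also handle caching and the optional `expiration` field.

```ts
import type { AwsCredentialIdentity, AwsCredentialIdentityProvider } from "@smithy/types";

// Resolves credentials once per call from process.env; illustrative only.
const fromEnv: AwsCredentialIdentityProvider = async (): Promise<AwsCredentialIdentity> => ({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID ?? "",
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY ?? "",
  sessionToken: process.env.AWS_SESSION_TOKEN,
});
```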
+ */ + readonly accountId?: string; +} +/** + * @public + */ +export type AwsCredentialIdentityProvider = IdentityProvider; diff --git a/node_modules/@smithy/types/dist-types/identity/identity.d.ts b/node_modules/@smithy/types/dist-types/identity/identity.d.ts new file mode 100644 index 00000000..c6fd0d1d --- /dev/null +++ b/node_modules/@smithy/types/dist-types/identity/identity.d.ts @@ -0,0 +1,15 @@ +/** + * @public + */ +export interface Identity { + /** + * A `Date` when the identity or credential will no longer be accepted. + */ + readonly expiration?: Date; +} +/** + * @public + */ +export interface IdentityProvider { + (identityProperties?: Record): Promise; +} diff --git a/node_modules/@smithy/types/dist-types/identity/index.d.ts b/node_modules/@smithy/types/dist-types/identity/index.d.ts new file mode 100644 index 00000000..33603203 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/identity/index.d.ts @@ -0,0 +1,4 @@ +export * from "./apiKeyIdentity"; +export * from "./awsCredentialIdentity"; +export * from "./identity"; +export * from "./tokenIdentity"; diff --git a/node_modules/@smithy/types/dist-types/identity/tokenIdentity.d.ts b/node_modules/@smithy/types/dist-types/identity/tokenIdentity.d.ts new file mode 100644 index 00000000..84a74ffa --- /dev/null +++ b/node_modules/@smithy/types/dist-types/identity/tokenIdentity.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +/** + * @internal + */ +export interface TokenIdentity extends Identity { + /** + * The literal token string + */ + readonly token: string; +} +/** + * @internal + */ +export type TokenIdentityProvider = IdentityProvider; diff --git a/node_modules/@smithy/types/dist-types/index.d.ts b/node_modules/@smithy/types/dist-types/index.d.ts new file mode 100644 index 00000000..c370335c --- /dev/null +++ b/node_modules/@smithy/types/dist-types/index.d.ts @@ -0,0 +1,37 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-payload-input-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./crypto"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./endpoints"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./http"; +export * from "./http/httpHandlerInitialization"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./response"; +export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./streaming-payload/streaming-blob-common-types"; +export * from "./streaming-payload/streaming-blob-payload-input-types"; +export * from "./streaming-payload/streaming-blob-payload-output-types"; +export * from "./transfer"; +export * from "./transform/client-payload-blob-type-narrow"; +export * from "./transform/no-undefined"; +export * from "./transform/type-transform"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/node_modules/@smithy/types/dist-types/logger.d.ts b/node_modules/@smithy/types/dist-types/logger.d.ts new file mode 100644 index 00000000..f66a664c --- /dev/null +++ b/node_modules/@smithy/types/dist-types/logger.d.ts @@ -0,0 +1,13 @@ +/** + * @public + * + * Represents a logger object that is available in HandlerExecutionContext + * 
throughout the middleware stack. + */ +export interface Logger { + trace?: (...content: any[]) => void; + debug: (...content: any[]) => void; + info: (...content: any[]) => void; + warn: (...content: any[]) => void; + error: (...content: any[]) => void; +} diff --git a/node_modules/@smithy/types/dist-types/middleware.d.ts b/node_modules/@smithy/types/dist-types/middleware.d.ts new file mode 100644 index 00000000..cc200987 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/middleware.d.ts @@ -0,0 +1,534 @@ +import type { AuthScheme, HttpAuthDefinition } from "./auth/auth"; +import type { SelectedHttpAuthScheme } from "./auth/HttpAuthScheme"; +import type { Command } from "./command"; +import type { EndpointV2 } from "./endpoint"; +import type { SmithyFeatures } from "./feature-ids"; +import type { Logger } from "./logger"; +import type { UserAgent } from "./util"; +/** + * @public + */ +export interface InitializeHandlerArguments { + /** + * User input to a command. Reflects the userland representation of the + * union of data types the command can effectively handle. + */ + input: Input; +} +/** + * @public + */ +export interface InitializeHandlerOutput extends DeserializeHandlerOutput { + output: Output; +} +/** + * @public + */ +export interface SerializeHandlerArguments extends InitializeHandlerArguments { + /** + * The user input serialized as a request object. The request object is unknown, + * so you cannot modify it directly. When work with request, you need to guard its + * type to e.g. HttpRequest with 'instanceof' operand + * + * During the build phase of the execution of a middleware stack, a built + * request may or may not be available. + */ + request?: unknown; +} +/** + * @public + */ +export interface SerializeHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface BuildHandlerArguments extends FinalizeHandlerArguments { +} +/** + * @public + */ +export interface BuildHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface FinalizeHandlerArguments extends SerializeHandlerArguments { + /** + * The user input serialized as a request. + */ + request: unknown; +} +/** + * @public + */ +export interface FinalizeHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface DeserializeHandlerArguments extends FinalizeHandlerArguments { +} +/** + * @public + */ +export interface DeserializeHandlerOutput { + /** + * The raw response object from runtime is deserialized to structured output object. + * The response object is unknown so you cannot modify it directly. When work with + * response, you need to guard its type to e.g. HttpResponse with 'instanceof' operand. + * + * During the deserialize phase of the execution of a middleware stack, a deserialized + * response may or may not be available + */ + response: unknown; + output?: Output; +} +/** + * @public + */ +export interface InitializeHandler { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. + */ + (args: InitializeHandlerArguments): Promise>; +} +/** + * @public + */ +export type Handler = InitializeHandler; +/** + * @public + */ +export interface SerializeHandler { + /** + * Asynchronously converts an input object into an output object. 
+ * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. + */ + (args: SerializeHandlerArguments): Promise>; +} +/** + * @public + */ +export interface FinalizeHandler { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. + */ + (args: FinalizeHandlerArguments): Promise>; +} +/** + * @public + */ +export interface BuildHandler { + (args: BuildHandlerArguments): Promise>; +} +/** + * @public + */ +export interface DeserializeHandler { + (args: DeserializeHandlerArguments): Promise>; +} +/** + * @public + * + * A factory function that creates functions implementing the `Handler` + * interface. + */ +export interface InitializeMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. + */ + (next: InitializeHandler, context: HandlerExecutionContext): InitializeHandler; +} +/** + * @public + * + * A factory function that creates functions implementing the `BuildHandler` + * interface. + */ +export interface SerializeMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. + */ + (next: SerializeHandler, context: HandlerExecutionContext): SerializeHandler; +} +/** + * @public + * + * A factory function that creates functions implementing the `FinalizeHandler` + * interface. + */ +export interface FinalizeRequestMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. + */ + (next: FinalizeHandler, context: HandlerExecutionContext): FinalizeHandler; +} +/** + * @public + */ +export interface BuildMiddleware { + (next: BuildHandler, context: HandlerExecutionContext): BuildHandler; +} +/** + * @public + */ +export interface DeserializeMiddleware { + (next: DeserializeHandler, context: HandlerExecutionContext): DeserializeHandler; +} +/** + * @public + */ +export type MiddlewareType = InitializeMiddleware | SerializeMiddleware | BuildMiddleware | FinalizeRequestMiddleware | DeserializeMiddleware; +/** + * @public + * + * A factory function that creates the terminal handler atop which a middleware + * stack sits. + */ +export interface Terminalware { + (context: HandlerExecutionContext): DeserializeHandler; +} +/** + * @public + */ +export type Step = "initialize" | "serialize" | "build" | "finalizeRequest" | "deserialize"; +/** + * @public + */ +export type Priority = "high" | "normal" | "low"; +/** + * @public + */ +export interface HandlerOptions { + /** + * Handlers are ordered using a "step" that describes the stage of command + * execution at which the handler will be executed. The available steps are: + * + * - initialize: The input is being prepared. Examples of typical + * initialization tasks include injecting default options computing + * derived parameters. + * - serialize: The input is complete and ready to be serialized. 
Examples + * of typical serialization tasks include input validation and building + * an HTTP request from user input. + * - build: The input has been serialized into an HTTP request, but that + * request may require further modification. Any request alterations + * will be applied to all retries. Examples of typical build tasks + * include injecting HTTP headers that describe a stable aspect of the + * request, such as `Content-Length` or a body checksum. + * - finalizeRequest: The request is being prepared to be sent over the wire. The + * request in this stage should already be semantically complete and + * should therefore only be altered to match the recipient's + * expectations. Examples of typical finalization tasks include request + * signing and injecting hop-by-hop headers. + * - deserialize: The response has arrived; the middleware here will deserialize + * the raw response object to a structured response. + * + * Unlike initialization and build handlers, which are executed once + * per operation execution, finalization and deserialize handlers will be + * executed for each HTTP request sent. + * + * @defaultValue 'initialize' + */ + step?: Step; + /** + * A list of strings that identify the general purpose or important + * characteristics of a given handler. + */ + tags?: Array; + /** + * A unique name to refer to a middleware. + */ + name?: string; + /** + * @internal + * Aliases allow for middleware to be found by multiple names besides {@link HandlerOptions.name}. + * This allows for references to replaced middleware to continue working, e.g. replacing + * multiple auth-specific middleware with a single generic auth middleware. + */ + aliases?: Array; + /** + * A flag to override the existing middleware with the same name. Without + * setting it, adding middleware with a duplicated name will throw an exception. + * @internal + */ + override?: boolean; +} +/** + * @public + */ +export interface AbsoluteLocation { + /** + * By default middleware will be added to its step in no guaranteed order. + * In the case that order within the step matters, set this priority. + * + * @defaultValue 'normal' + */ + priority?: Priority; +} +/** + * @public + */ +export type Relation = "before" | "after"; +/** + * @public + */ +export interface RelativeLocation { + /** + * Specify the relation to be before or after a known middleware. + */ + relation: Relation; + /** + * A known middleware name that indicates the inserted middleware's location. + */ + toMiddleware: string; +} +/** + * @public + */ +export type RelativeMiddlewareOptions = RelativeLocation & Omit; +/** + * @public + */ +export interface InitializeHandlerOptions extends HandlerOptions { + step?: "initialize"; +} +/** + * @public + */ +export interface SerializeHandlerOptions extends HandlerOptions { + step: "serialize"; +} +/** + * @public + */ +export interface BuildHandlerOptions extends HandlerOptions { + step: "build"; +} +/** + * @public + */ +export interface FinalizeRequestHandlerOptions extends HandlerOptions { + step: "finalizeRequest"; +} +/** + * @public + */ +export interface DeserializeHandlerOptions extends HandlerOptions { + step: "deserialize"; +} +/** + * @public + * + * A stack storing middleware. It can be resolved into a handler. It supports 2 + * approaches for adding middleware: + * 1. Adding middleware to a specific step with `add()`. The order of middleware + * added into the same step is determined by the order of adding them. If one middleware + * needs to be executed at the front of the step or at the end of the step, set + * `priority` options to `high` or `low`.
+ 2. Adding middleware to a location relative to a known middleware with `addRelativeTo()`. + * This is useful when a given middleware must be executed before or after a specific + * middleware (`toMiddleware`). You can add a middleware relative to another + * middleware which was itself added relatively, but eventually this relative middleware + * chain **must** be 'anchored' by a middleware added using the `add()` API + * with an absolute `step` and `priority`. This method will throw if the specified + * `toMiddleware` is not found. + */ +export interface MiddlewareStack extends Pluggable { + /** + * Add middleware to the stack to be executed during the "initialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: InitializeMiddleware, options?: InitializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "serialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: SerializeMiddleware, options: SerializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "build" step, + * optionally specifying a priority, tags and name + */ + add(middleware: BuildMiddleware, options: BuildHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "finalizeRequest" step, + * optionally specifying a priority, tags and name + */ + add(middleware: FinalizeRequestMiddleware, options: FinalizeRequestHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "deserialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: DeserializeMiddleware, options: DeserializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to a stack position before or after a known middleware, optionally + * specifying name and tags. + */ + addRelativeTo(middleware: MiddlewareType, options: RelativeMiddlewareOptions): void; + /** + * Apply a customization function to mutate the middleware stack, often + * used for customizations that require mutating multiple middleware. + */ + use(pluggable: Pluggable): void; + /** + * Create a shallow clone of this stack. Step bindings and handler priorities + * and tags are preserved in the copy. + */ + clone(): MiddlewareStack; + /** + * Removes middleware from the stack. + * + * If a string is provided, it will be treated as middleware name. If a middleware + * is inserted with the given name, it will be removed. + * + * If a middleware class is provided, all usages thereof will be removed. + */ + remove(toRemove: MiddlewareType | string): boolean; + /** + * Removes middleware that contains a given tag. + * + * Multiple middleware will potentially be removed. + */ + removeByTag(toRemove: string): boolean; + /** + * Create a stack containing the middlewares in this stack as well as the + * middlewares in the `from` stack. Neither source is modified, and step + * bindings and handler priorities and tags are preserved in the copy. + */ + concat(from: MiddlewareStack): MiddlewareStack; + /** + * Returns a list of the current order of middleware in the stack. + * This does not execute the middleware functions, nor does it + * provide a reference to the stack itself. + */ + identify(): string[]; + /** + * @internal + * + * When an operation is called using this stack, + * it will log its list of middleware to the console using + * the identify function. + * + * @param toggle - set whether to log on resolve.
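To make the `add()`/`addRelativeTo()` contracts above concrete, a brief sketch. The client is a placeholder, the middleware names are invented for the example, and the request is type-guarded before use as the handler-argument comments recommend:

```ts
import type { BuildMiddleware } from "@smithy/types";

// A build-step middleware that stamps a custom header on every attempt.
const stampHeader: BuildMiddleware<any, any> = (next) => async (args) => {
  const request = args.request as { headers: Record<string, string> };
  request.headers["x-request-stamp"] = Date.now().toString();
  return next(args);
};

declare const client: { middlewareStack: any };

// Absolute placement: a named middleware on the "build" step.
client.middlewareStack.add(stampHeader, { step: "build", name: "stampHeader" });

// Relative placement, anchored to the absolutely-placed middleware above.
client.middlewareStack.addRelativeTo(stampHeader, {
  relation: "after",
  toMiddleware: "stampHeader",
  name: "stampHeaderAgain",
});
```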
+ * If no argument given, returns the current value. + */ + identifyOnResolve(toggle?: boolean): boolean; + /** + * Builds a single handler function from zero or more middleware classes and + * a core handler. The core handler is meant to send command objects to AWS + * services and return promises that will resolve with the operation result + * or be rejected with an error. + * + * When a composed handler is invoked, the arguments will pass through all + * middleware in a defined order, and the return from the innermost handler + * will pass through all middleware in the reverse of that order. + */ + resolve(handler: DeserializeHandler, context: HandlerExecutionContext): InitializeHandler; +} +/** + * @internal + */ +export declare const SMITHY_CONTEXT_KEY = "__smithy_context"; +/** + * @public + * + * Data and helper objects that are not expected to change from one execution of + * a composed handler to another. + */ +export interface HandlerExecutionContext { + /** + * A logger that may be invoked by any handler during execution of an + * operation. + */ + logger?: Logger; + /** + * Name of the service the operation is being sent to. + */ + clientName?: string; + /** + * Name of the operation being executed. + */ + commandName?: string; + /** + * Additional user agent that inferred by middleware. It can be used to save + * the internal user agent sections without overriding the `customUserAgent` + * config in clients. + */ + userAgent?: UserAgent; + /** + * Resolved by the endpointMiddleware function of `@smithy/middleware-endpoint` + * in the serialization stage. + */ + endpointV2?: EndpointV2; + /** + * Set at the same time as endpointV2. + */ + authSchemes?: AuthScheme[]; + /** + * The current auth configuration that has been set by any auth middleware and + * that will prevent from being set more than once. + */ + currentAuthConfig?: HttpAuthDefinition; + /** + * @deprecated do not extend this field, it is a carryover from AWS SDKs. + * Used by DynamoDbDocumentClient. + */ + dynamoDbDocumentClientOptions?: Partial<{ + overrideInputFilterSensitiveLog(...args: any[]): string | void; + overrideOutputFilterSensitiveLog(...args: any[]): string | void; + }>; + /** + * @internal + * Context for Smithy properties. + */ + [SMITHY_CONTEXT_KEY]?: { + service?: string; + operation?: string; + commandInstance?: Command; + selectedHttpAuthScheme?: SelectedHttpAuthScheme; + features?: SmithyFeatures; + /** + * @deprecated + * Do not assign arbitrary members to the Smithy Context, + * fields should be explicitly declared here to avoid collisions. + */ + [key: string]: unknown; + }; + /** + * @deprecated + * Do not assign arbitrary members to the context, since + * they can interfere with existing functionality. + * + * Additional members should instead be declared on the SMITHY_CONTEXT_KEY + * or other reserved keys. + */ + [key: string]: any; +} +/** + * @public + */ +export interface Pluggable { + /** + * A function that mutate the passed in middleware stack. 
Functions implementing + * this interface can add, remove, modify existing middleware stack from clients + * or commands + */ + applyToStack: (stack: MiddlewareStack) => void; +} diff --git a/node_modules/@smithy/types/dist-types/pagination.d.ts b/node_modules/@smithy/types/dist-types/pagination.d.ts new file mode 100644 index 00000000..e10fddaa --- /dev/null +++ b/node_modules/@smithy/types/dist-types/pagination.d.ts @@ -0,0 +1,33 @@ +import type { Client } from "./client"; +import type { Command } from "./command"; +/** + * @public + * + * Expected type definition of a paginator. + */ +export type Paginator = AsyncGenerator; +/** + * @public + * + * Expected paginator configuration passed to an operation. Services will extend + * this interface definition and may type client further. + */ +export interface PaginationConfiguration { + client: Client; + pageSize?: number; + startingToken?: any; + /** + * For some APIs, such as CloudWatchLogs events, the next page token will always + * be present. + * + * When true, this config field will have the paginator stop when the token doesn't change + * instead of when it is not present. + */ + stopOnSameToken?: boolean; + /** + * @param command - reference to the instantiated command. This callback is executed + * prior to sending the command with the paginator's client. + * @returns the original command or a replacement, defaulting to the original command object. + */ + withCommand?: (command: Command) => typeof command | undefined; +} diff --git a/node_modules/@smithy/types/dist-types/profile.d.ts b/node_modules/@smithy/types/dist-types/profile.d.ts new file mode 100644 index 00000000..b7885d98 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/profile.d.ts @@ -0,0 +1,30 @@ +/** + * @public + */ +export declare enum IniSectionType { + PROFILE = "profile", + SSO_SESSION = "sso-session", + SERVICES = "services" +} +/** + * @public + */ +export type IniSection = Record; +/** + * @public + * + * @deprecated Please use {@link IniSection} + */ +export interface Profile extends IniSection { +} +/** + * @public + */ +export type ParsedIniData = Record; +/** + * @public + */ +export interface SharedConfigFiles { + credentialsFile: ParsedIniData; + configFile: ParsedIniData; +} diff --git a/node_modules/@smithy/types/dist-types/response.d.ts b/node_modules/@smithy/types/dist-types/response.d.ts new file mode 100644 index 00000000..afcfe8f8 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/response.d.ts @@ -0,0 +1,40 @@ +/** + * @public + */ +export interface ResponseMetadata { + /** + * The status code of the last HTTP response received for this operation. + */ + httpStatusCode?: number; + /** + * A unique identifier for the last request sent for this operation. Often + * requested by AWS service teams to aid in debugging. + */ + requestId?: string; + /** + * A secondary identifier for the last request sent. Used for debugging. + */ + extendedRequestId?: string; + /** + * A tertiary identifier for the last request sent. Used for debugging. + */ + cfId?: string; + /** + * The number of times this operation was attempted. + */ + attempts?: number; + /** + * The total amount of time (in milliseconds) that was spent waiting between + * retry attempts. + */ + totalRetryDelay?: number; +} +/** + * @public + */ +export interface MetadataBearer { + /** + * Metadata pertaining to this request. 
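A sketch of consuming a `Paginator` per the `PaginationConfiguration` above. `paginateListThings` and its input/output types are hypothetical stand-ins for any generated paginator:

```ts
import type { PaginationConfiguration, Paginator } from "@smithy/types";

// Hypothetical generated paginator and client.
declare function paginateListThings(
  config: PaginationConfiguration,
  input: { MaxResults?: number }
): Paginator<{ Items?: string[] }>;
declare const client: PaginationConfiguration["client"];

for await (const page of paginateListThings({ client, pageSize: 100 }, {})) {
  for (const item of page.Items ?? []) {
    console.log(item);
  }
}
```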
+ */ + $metadata: ResponseMetadata; +} diff --git a/node_modules/@smithy/types/dist-types/retry.d.ts b/node_modules/@smithy/types/dist-types/retry.d.ts new file mode 100644 index 00000000..7bb58819 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/retry.d.ts @@ -0,0 +1,133 @@ +import { SdkError } from "./shapes"; +/** + * @public + */ +export type RetryErrorType = +/** + * This is a connection level error such as a socket timeout, socket connect + * error, tls negotiation timeout etc... + * Typically these should never be applied for non-idempotent request types + * since in this scenario, it's impossible to know whether the operation had + * a side effect on the server. + */ +"TRANSIENT" +/** + * This is an error where the server explicitly told the client to back off, + * such as a 429 or 503 Http error. + */ + | "THROTTLING" +/** + * This is a server error that isn't explicitly throttling but is considered + * by the client to be something that should be retried. + */ + | "SERVER_ERROR" +/** + * Doesn't count against any budgets. This could be something like a 401 + * challenge in Http. + */ + | "CLIENT_ERROR"; +/** + * @public + */ +export interface RetryErrorInfo { + /** + * The error thrown during the initial request, if available. + */ + error?: SdkError; + errorType: RetryErrorType; + /** + * Protocol hint. This could come from Http's 'retry-after' header or + * something from MQTT or any other protocol that has the ability to convey + * retry info from a peer. + * + * The Date after which a retry should be attempted. + */ + retryAfterHint?: Date; +} +/** + * @public + */ +export interface RetryBackoffStrategy { + /** + * @returns the number of milliseconds to wait before retrying an action. + */ + computeNextBackoffDelay(retryAttempt: number): number; +} +/** + * @public + */ +export interface StandardRetryBackoffStrategy extends RetryBackoffStrategy { + /** + * Sets the delayBase used to compute backoff delays. + * @param delayBase - + */ + setDelayBase(delayBase: number): void; +} +/** + * @public + */ +export interface RetryStrategyOptions { + backoffStrategy: RetryBackoffStrategy; + maxRetriesBase: number; +} +/** + * @public + */ +export interface RetryToken { + /** + * @returns the current count of retry. + */ + getRetryCount(): number; + /** + * @returns the number of milliseconds to wait before retrying an action. + */ + getRetryDelay(): number; +} +/** + * @public + */ +export interface StandardRetryToken extends RetryToken { + /** + * @returns the cost of the last retry attempt. + */ + getRetryCost(): number | undefined; +} +/** + * @public + */ +export interface RetryStrategyV2 { + /** + * Called before any retries (for the first call to the operation). It either + * returns a retry token or an error upon the failure to acquire a token prior. + * + * tokenScope is arbitrary and out of scope for this component. However, + * adding it here offers us a lot of future flexibility for outage detection. + * For example, it could be "us-east-1" on a shared retry strategy, or + * "us-west-2-c:dynamodb". + */ + acquireInitialRetryToken(retryTokenScope: string): Promise; + /** + * After a failed operation call, this function is invoked to refresh the + * retryToken returned by acquireInitialRetryToken(). This function can + * either choose to allow another retry and send a new or updated token, + * or reject the retry attempt and report the error either in an exception + * or returning an error. 
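One possible `RetryBackoffStrategy` implementation for the interfaces above: exponential growth with full jitter. The base and cap constants are illustrative, not defaults from any SDK:

```ts
import type { RetryBackoffStrategy } from "@smithy/types";

const fullJitterBackoff: RetryBackoffStrategy = {
  computeNextBackoffDelay(retryAttempt: number): number {
    const baseMs = 100;
    const capMs = 20_000;
    // Grow exponentially with the attempt count, cap, then jitter down.
    return Math.random() * Math.min(capMs, baseMs * 2 ** retryAttempt);
  },
};
```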
+ */ + refreshRetryTokenForRetry(tokenToRenew: RetryToken, errorInfo: RetryErrorInfo): Promise; + /** + * Upon successful completion of the operation, this function is called + * to record that the operation was successful. + */ + recordSuccess(token: RetryToken): void; +} +/** + * @public + */ +export type ExponentialBackoffJitterType = "DEFAULT" | "NONE" | "FULL" | "DECORRELATED"; +/** + * @public + */ +export interface ExponentialBackoffStrategyOptions { + jitterType: ExponentialBackoffJitterType; + backoffScaleValue?: number; +} diff --git a/node_modules/@smithy/types/dist-types/serde.d.ts b/node_modules/@smithy/types/dist-types/serde.d.ts new file mode 100644 index 00000000..a81314f9 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/serde.d.ts @@ -0,0 +1,112 @@ +import { Endpoint } from "./http"; +import { RequestHandler } from "./transfer"; +import { Decoder, Encoder, Provider } from "./util"; +/** + * @public + * + * Interface for an object that requires an Endpoint to be set. + */ +export interface EndpointBearer { + endpoint: Provider; +} +/** + * @public + */ +export interface StreamCollector { + /** + * A function that converts a stream into an array of bytes. + * + * @param stream - The low-level native stream from the browser or Node.js runtime + */ + (stream: any): Promise; +} +/** + * @public + * + * Request and Response serde util functions and settings for AWS services + */ +export interface SerdeContext extends SerdeFunctions, EndpointBearer { + requestHandler: RequestHandler; + disableHostPrefix: boolean; +} +/** + * @public + * + * Serde functions from the client config. + */ +export interface SerdeFunctions { + base64Encoder: Encoder; + base64Decoder: Decoder; + utf8Encoder: Encoder; + utf8Decoder: Decoder; + streamCollector: StreamCollector; +} +/** + * @public + */ +export interface RequestSerializer { + /** + * Converts the provided `input` into a request object + * + * @param input - The user input to serialize. + * + * @param context - Context containing runtime-specific util functions. + */ + (input: any, context: Context): Promise; +} +/** + * @public + */ +export interface ResponseDeserializer { + /** + * Converts the output of an operation into JavaScript types. + * + * @param output - The HTTP response received from the service + * + * @param context - context containing runtime-specific util functions. + */ + (output: ResponseType, context: Context): Promise; +} +/** + * The interface contains mix-in utility functions to transfer the runtime-specific + * stream implementation to a specified format. Each stream can ONLY be transformed + * once. + * @public + */ +export interface SdkStreamMixin { + transformToByteArray: () => Promise; + transformToString: (encoding?: string) => Promise; + transformToWebStream: () => ReadableStream; +} +/** + * @public + * + * The type describing a runtime-specific stream implementation with mix-in + * utility functions. + */ +export type SdkStream = BaseStream & SdkStreamMixin; +/** + * @public + * + * Indicates that the member of type T with + * key StreamKey has been extended + * with the SdkStreamMixin helper methods. + */ +export type WithSdkStreamMixin = { + [key in keyof T]: key extends StreamKey ?
SdkStream : T[key]; +}; +/** + * Interface for internal function to inject stream utility functions + * implementation + * + * @internal + */ +export interface SdkStreamMixinInjector { + (stream: unknown): SdkStreamMixin; +} +/** + * @internal + */ +export interface SdkStreamSerdeContext { + sdkStreamMixin: SdkStreamMixinInjector; +} diff --git a/node_modules/@smithy/types/dist-types/shapes.d.ts b/node_modules/@smithy/types/dist-types/shapes.d.ts new file mode 100644 index 00000000..a4812fb8 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/shapes.d.ts @@ -0,0 +1,82 @@ +import { HttpResponse } from "./http"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * A document type represents an untyped JSON-like value. + * + * Not all protocols support document types, and the serialization format of a + * document type is protocol specific. All JSON protocols SHOULD support + * document types and they SHOULD serialize document types inline as normal + * JSON values. + */ +export type DocumentType = null | boolean | number | string | DocumentType[] | { + [prop: string]: DocumentType; +}; +/** + * @public + * + * A structure shape with the error trait. + * https://smithy.io/2.0/spec/behavior-traits.html#smithy-api-retryable-trait + */ +export interface RetryableTrait { + /** + * Indicates that the error is a retryable throttling error. + */ + readonly throttling?: boolean; +} +/** + * @public + * + * Type that is implemented by all Smithy shapes marked with the + * error trait. + * @deprecated + */ +export interface SmithyException { + /** + * The shape ID name of the exception. + */ + readonly name: string; + /** + * Whether the client or server are at fault. + */ + readonly $fault: "client" | "server"; + /** + * The service that encountered the exception. + */ + readonly $service?: string; + /** + * Indicates that an error MAY be retried by the client. + */ + readonly $retryable?: RetryableTrait; + /** + * Reference to low-level HTTP response object. + */ + readonly $response?: HttpResponse; +} +/** + * @public + * + * @deprecated See {@link https://aws.amazon.com/blogs/developer/service-error-handling-modular-aws-sdk-js/} + * + * This type should not be used in your application. + * Users of the AWS SDK for JavaScript v3 service clients should prefer to + * use the specific Exception classes corresponding to each operation. + * These can be found as code in the deserializer for the operation's Command class, + * or as declarations in the service model file in codegen/sdk-codegen/aws-models. + * + * If no exceptions are enumerated by a particular Command operation, + * the base exception for the service should be used. Each client exports + * a base ServiceException prefixed with the service name. + */ +export type SdkError = Error & Partial & Partial & { + $metadata?: Partial["$metadata"] & { + /** + * If present, will have value of true and indicates that the error resulted in a + * correction of the clock skew, a.k.a. config.systemClockOffset. + * This is specific to AWS SDK and sigv4. 
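// Illustrative sketch: inspecting the SdkError shape above after a failed call.
// The isThrottlingError helper is an assumption, not a package export.
import type { SdkError } from "@smithy/types";

function isThrottlingError(error: SdkError): boolean {
  // $retryable comes from the Smithy retryable trait; $metadata.httpStatusCode
  // is filled in by the deserializer when an HTTP response was received.
  return error.$retryable?.throttling === true || error.$metadata?.httpStatusCode === 429;
}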
+ */ + readonly clockSkewCorrected?: true; + }; + cause?: Error; +}; diff --git a/node_modules/@smithy/types/dist-types/signature.d.ts b/node_modules/@smithy/types/dist-types/signature.d.ts new file mode 100644 index 00000000..db0039da --- /dev/null +++ b/node_modules/@smithy/types/dist-types/signature.d.ts @@ -0,0 +1,155 @@ +import { Message } from "./eventStream"; +import { HttpRequest } from "./http"; +/** + * @public + * + * A `Date` object, a unix (epoch) timestamp in seconds, or a string that can be + * understood by the JavaScript `Date` constructor. + */ +export type DateInput = number | string | Date; +/** + * @public + */ +export interface SigningArguments { + /** + * The date and time to be used as signature metadata. This value should be + * a Date object, a unix (epoch) timestamp, or a string that can be + * understood by the JavaScript `Date` constructor.If not supplied, the + * value returned by `new Date()` will be used. + */ + signingDate?: DateInput; + /** + * The service signing name. It will override the service name of the signer + * in current invocation + */ + signingService?: string; + /** + * The region name to sign the request. It will override the signing region of the + * signer in current invocation + */ + signingRegion?: string; +} +/** + * @public + */ +export interface RequestSigningArguments extends SigningArguments { + /** + * A set of strings whose members represents headers that cannot be signed. + * All headers in the provided request will have their names converted to + * lower case and then checked for existence in the unsignableHeaders set. + */ + unsignableHeaders?: Set; + /** + * A set of strings whose members represents headers that should be signed. + * Any values passed here will override those provided via unsignableHeaders, + * allowing them to be signed. + * + * All headers in the provided request will have their names converted to + * lower case before signing. + */ + signableHeaders?: Set; +} +/** + * @public + */ +export interface RequestPresigningArguments extends RequestSigningArguments { + /** + * The number of seconds before the presigned URL expires + */ + expiresIn?: number; + /** + * A set of strings whose representing headers that should not be hoisted + * to presigned request's query string. If not supplied, the presigner + * moves all the AWS-specific headers (starting with `x-amz-`) to the request + * query string. If supplied, these headers remain in the presigned request's + * header. + * All headers in the provided request will have their names converted to + * lower case and then checked for existence in the unhoistableHeaders set. + */ + unhoistableHeaders?: Set; + /** + * This overrides any headers with the same name(s) set by unhoistableHeaders. + * These headers will be hoisted into the query string and signed. + */ + hoistableHeaders?: Set; +} +/** + * @public + */ +export interface EventSigningArguments extends SigningArguments { + priorSignature: string; +} +/** + * @public + */ +export interface RequestPresigner { + /** + * Signs a request for future use. + * + * The request will be valid until either the provided `expiration` time has + * passed or the underlying credentials have expired. + * + * @param requestToSign - The request that should be signed. + * @param options - Additional signing options. 
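// Illustrative sketch of the presigning arguments described above; the header
// name passed to unhoistableHeaders is a made-up example.
import type { RequestPresigningArguments } from "@smithy/types";

const presignOptions: RequestPresigningArguments = {
  expiresIn: 900, // seconds until the presigned URL expires
  signingDate: new Date(), // this is also the default when omitted
  // keep this header on the request instead of hoisting it into the query string
  unhoistableHeaders: new Set(["x-amz-meta-owner"]),
};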
+ */ + presign(requestToSign: HttpRequest, options?: RequestPresigningArguments): Promise<HttpRequest>; +} +/** + * @public + * + * An object that signs request objects with AWS credentials using one of the + * AWS authentication protocols. + */ +export interface RequestSigner { + /** + * Sign the provided request for immediate dispatch. + */ + sign(requestToSign: HttpRequest, options?: RequestSigningArguments): Promise<HttpRequest>; +} +/** + * @public + */ +export interface StringSigner { + /** + * Sign the provided `stringToSign` for use outside of the context of + * request signing. Typical uses include signed policy generation. + */ + sign(stringToSign: string, options?: SigningArguments): Promise<string>; +} +/** + * @public + */ +export interface FormattedEvent { + headers: Uint8Array; + payload: Uint8Array; +} +/** + * @public + */ +export interface EventSigner { + /** + * Sign the individual event of the event stream. + */ + sign(event: FormattedEvent, options: EventSigningArguments): Promise<string>; +} +/** + * @public + */ +export interface SignableMessage { + message: Message; + priorSignature: string; +} +/** + * @public + */ +export interface SignedMessage { + message: Message; + signature: string; +} +/** + * @public + */ +export interface MessageSigner { + signMessage(message: SignableMessage, args: SigningArguments): Promise<SignedMessage>; + sign(event: SignableMessage, options: SigningArguments): Promise<SignedMessage>; +} diff --git a/node_modules/@smithy/types/dist-types/stream.d.ts b/node_modules/@smithy/types/dist-types/stream.d.ts new file mode 100644 index 00000000..f305dd9d --- /dev/null +++ b/node_modules/@smithy/types/dist-types/stream.d.ts @@ -0,0 +1,22 @@ +import { ChecksumConstructor } from "./checksum"; +import { HashConstructor, StreamHasher } from "./crypto"; +import { BodyLengthCalculator, Encoder } from "./util"; +/** + * @public + */ +export interface GetAwsChunkedEncodingStreamOptions { + base64Encoder?: Encoder; + bodyLengthChecker: BodyLengthCalculator; + checksumAlgorithmFn?: ChecksumConstructor | HashConstructor; + checksumLocationName?: string; + streamHasher?: StreamHasher<any>; +} +/** + * @public + * + * A function that returns a Readable Stream which follows the aws-chunked encoding stream. + * It optionally adds a checksum if options are provided. + */ +export interface GetAwsChunkedEncodingStream<StreamType = any> { + (readableStream: StreamType, options: GetAwsChunkedEncodingStreamOptions): StreamType; +} diff --git a/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-common-types.d.ts b/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-common-types.d.ts new file mode 100644 index 00000000..92c52dad --- /dev/null +++ b/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-common-types.d.ts @@ -0,0 +1,33 @@ +/// <reference types="node" /> +import type { Readable } from "stream"; +import type { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * This is the union representing the modeled blob type with streaming trait + * in a generic format that does not relate to HTTP input or output payloads. + * + * Note: the non-streaming blob type is represented by Uint8Array, but because + * the streaming blob type is always in the request/response payload, it has + * historically been handled with different types. + * + * @see https://smithy.io/2.0/spec/simple-types.html#blob + * + * For compatibility with its historical representation, it must contain at least + * Readable (Node.js), Blob (browser), and ReadableStream (browser).
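// Illustrative sketch: chaining signatures across an event stream with the
// MessageSigner contract above. The empty-string seed is an assumption; real
// streams seed priorSignature from the HTTP request's signature.
import type { Message, MessageSigner, SignedMessage } from "@smithy/types";

async function signAll(signer: MessageSigner, messages: Message[]): Promise<SignedMessage[]> {
  const signed: SignedMessage[] = [];
  let priorSignature = "";
  for (const message of messages) {
    const result = await signer.signMessage({ message, priorSignature }, {});
    signed.push(result);
    priorSignature = result.signature;
  }
  return signed;
}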
+ * + * @see StreamingPayloadInputTypes for FAQ about mixing types from multiple environments. + */ +export type StreamingBlobTypes = NodeJsRuntimeStreamingBlobTypes | BrowserRuntimeStreamingBlobTypes; +/** + * @public + * + * Node.js streaming blob type. + */ +export type NodeJsRuntimeStreamingBlobTypes = Readable; +/** + * @public + * + * Browser streaming blob types. + */ +export type BrowserRuntimeStreamingBlobTypes = ReadableStreamOptionalType | BlobOptionalType; diff --git a/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-input-types.d.ts b/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-input-types.d.ts new file mode 100644 index 00000000..9bcc1641 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-input-types.d.ts @@ -0,0 +1,63 @@ +/// +/// +/// +import type { Readable } from "stream"; +import type { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * This union represents a superset of the compatible types you + * can use for streaming payload inputs. + * + * FAQ: + * Why does the type union mix mutually exclusive runtime types, namely + * Node.js and browser types? + * + * There are several reasons: + * 1. For backwards compatibility. + * 2. As a convenient compromise solution so that users in either environment may use the types + * without customization. + * 3. The SDK does not have static type information about the exact implementation + * of the HTTP RequestHandler being used in your client(s) (e.g. fetch, XHR, node:http, or node:http2), + * given that it is chosen at runtime. There are multiple possible request handlers + * in both the Node.js and browser runtime environments. + * + * Rather than restricting the type to a known common format (Uint8Array, for example) + * which doesn't include a universal streaming format in the currently supported Node.js versions, + * the type declaration is widened to multiple possible formats. + * It is up to the user to ultimately select a compatible format with the + * runtime and HTTP handler implementation they are using. + * + * Usage: + * The typical solution we expect users to have is to manually narrow the + * type when needed, picking the appropriate one out of the union according to the + * runtime environment and specific request handler. + * There is also the type utility "NodeJsClient", "BrowserClient" and more + * exported from this package. These can be applied at the client level + * to pre-narrow these streaming payload blobs. For usage see the readme.md + * in the root of the \@smithy/types NPM package. + */ +export type StreamingBlobPayloadInputTypes = NodeJsRuntimeStreamingBlobPayloadInputTypes | BrowserRuntimeStreamingBlobPayloadInputTypes; +/** + * @public + * + * Streaming payload input types in the Node.js environment. + * These are derived from the types compatible with the request body used by node:http. + * + * Note: not all types are signable by the standard SignatureV4 signer when + * used as the request body. For example, in Node.js a Readable stream + * is not signable by the default signer. + * They are included in the union because it may be intended in some cases, + * but the expected types are primarily string, Uint8Array, and Buffer. + * + * Additional details may be found in the internal + * function "getPayloadHash" in the SignatureV4 module. 
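// Illustrative values satisfying the streaming payload input union described
// above; "./payload.bin" is a placeholder path.
import { createReadStream } from "fs";
import type { StreamingBlobPayloadInputTypes } from "@smithy/types";

const fromString: StreamingBlobPayloadInputTypes = "hello";
const fromBytes: StreamingBlobPayloadInputTypes = new TextEncoder().encode("hello");
// Accepted, but a Node.js Readable may not be signable by the default SigV4 signer:
const fromStream: StreamingBlobPayloadInputTypes = createReadStream("./payload.bin");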
+ */ +export type NodeJsRuntimeStreamingBlobPayloadInputTypes = string | Uint8Array | Buffer | Readable; +/** + * @public + * + * Streaming payload input types in the browser environment. + * These are derived from the types compatible with fetch's Request.body. + */ +export type BrowserRuntimeStreamingBlobPayloadInputTypes = string | Uint8Array | ReadableStreamOptionalType | BlobOptionalType; diff --git a/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-output-types.d.ts b/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-output-types.d.ts new file mode 100644 index 00000000..b64a8786 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-output-types.d.ts @@ -0,0 +1,53 @@ +/// +/// +import type { IncomingMessage } from "http"; +import type { Readable } from "stream"; +import type { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +import type { SdkStream } from "../serde"; +/** + * @public + * + * This union represents a superset of the types you may receive + * in streaming payload outputs. + * + * @see StreamingPayloadInputTypes for FAQ about mixing types from multiple environments. + * + * To highlight the upstream docs about the SdkStream mixin: + * + * The interface contains mix-in (via Object.assign) methods to transform the runtime-specific + * stream implementation to specified format. Each stream can ONLY be transformed + * once. + * + * The available methods are described on the SdkStream type via SdkStreamMixin. + */ +export type StreamingBlobPayloadOutputTypes = NodeJsRuntimeStreamingBlobPayloadOutputTypes | BrowserRuntimeStreamingBlobPayloadOutputTypes; +/** + * @public + * + * Streaming payload output types in the Node.js environment. + * + * This is by default the IncomingMessage type from node:http responses when + * using the default node-http-handler in Node.js environments. + * + * It can be other Readable types like node:http2's ClientHttp2Stream + * such as when using the node-http2-handler. + * + * The SdkStreamMixin adds methods on this type to help transform (collect) it to + * other formats. + */ +export type NodeJsRuntimeStreamingBlobPayloadOutputTypes = SdkStream; +/** + * @public + * + * Streaming payload output types in the browser environment. + * + * This is by default fetch's Response.body type (ReadableStream) when using + * the default fetch-http-handler in browser-like environments. + * + * It may be a Blob, such as when using the XMLHttpRequest handler + * and receiving an arraybuffer response body. + * + * The SdkStreamMixin adds methods on this type to help transform (collect) it to + * other formats. + */ +export type BrowserRuntimeStreamingBlobPayloadOutputTypes = SdkStream; diff --git a/node_modules/@smithy/types/dist-types/transfer.d.ts b/node_modules/@smithy/types/dist-types/transfer.d.ts new file mode 100644 index 00000000..462ee23c --- /dev/null +++ b/node_modules/@smithy/types/dist-types/transfer.d.ts @@ -0,0 +1,41 @@ +/** + * @public + */ +export type RequestHandlerOutput = { + response: ResponseType; +}; +/** + * @public + */ +export interface RequestHandler { + /** + * metadata contains information of a handler. 
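// Illustrative sketch: consuming a streaming payload output member with the
// SdkStreamMixin helpers noted above. Per the docs, each stream can only be
// transformed once.
import type { StreamingBlobPayloadOutputTypes } from "@smithy/types";

async function bodyToString(body?: StreamingBlobPayloadOutputTypes): Promise<string> {
  return body ? body.transformToString("utf-8") : "";
}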
For example + * 'h2' refers this handler is for handling HTTP/2 requests, + * whereas 'h1' refers handling HTTP1 requests + */ + metadata?: RequestHandlerMetadata; + destroy?: () => void; + handle: (request: RequestType, handlerOptions?: HandlerOptions) => Promise>; +} +/** + * @public + */ +export interface RequestHandlerMetadata { + handlerProtocol: RequestHandlerProtocol | string; +} +/** + * @public + * Values from ALPN Protocol IDs. + * @see https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids + */ +export declare enum RequestHandlerProtocol { + HTTP_0_9 = "http/0.9", + HTTP_1_0 = "http/1.0", + TDS_8_0 = "tds/8.0" +} +/** + * @public + */ +export interface RequestContext { + destination: URL; +} diff --git a/node_modules/@smithy/types/dist-types/transform/client-method-transforms.d.ts b/node_modules/@smithy/types/dist-types/transform/client-method-transforms.d.ts new file mode 100644 index 00000000..f9424c48 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/transform/client-method-transforms.d.ts @@ -0,0 +1,26 @@ +import type { CommandIO } from "../command"; +import type { MetadataBearer } from "../response"; +import type { StreamingBlobPayloadOutputTypes } from "../streaming-payload/streaming-blob-payload-output-types"; +import type { Transform } from "./type-transform"; +/** + * @internal + * + * Narrowed version of InvokeFunction used in Client::send. + */ +export interface NarrowedInvokeFunction { + (command: CommandIO, options?: HttpHandlerOptions): Promise>; + (command: CommandIO, cb: (err: unknown, data?: Transform) => void): void; + (command: CommandIO, options: HttpHandlerOptions, cb: (err: unknown, data?: Transform) => void): void; + (command: CommandIO, options?: HttpHandlerOptions, cb?: (err: unknown, data?: Transform) => void): Promise> | void; +} +/** + * @internal + * + * Narrowed version of InvokeMethod used in aggregated Client methods. 
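// Illustrative sketch of the RequestHandler contract above: a stub handler
// that resolves a canned response. Req and Res are placeholder shapes; real
// handlers (node:http, fetch) perform actual I/O.
import type { RequestHandler, RequestHandlerOutput } from "@smithy/types";

type Req = { url: string };
type Res = { statusCode: number; body: string };

const stubHandler: RequestHandler<Req, Res> = {
  metadata: { handlerProtocol: "http/1.1" },
  async handle(request): Promise<RequestHandlerOutput<Res>> {
    return { response: { statusCode: 200, body: `stub response for ${request.url}` } };
  },
  destroy() {
    // nothing to release in this sketch
  },
};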
+ */ +export interface NarrowedInvokeMethod { + (input: InputType, options?: HttpHandlerOptions): Promise>; + (input: InputType, cb: (err: unknown, data?: Transform) => void): void; + (input: InputType, options: HttpHandlerOptions, cb: (err: unknown, data?: Transform) => void): void; + (input: InputType, options?: HttpHandlerOptions, cb?: (err: unknown, data?: OutputType) => void): Promise> | void; +} diff --git a/node_modules/@smithy/types/dist-types/transform/client-payload-blob-type-narrow.d.ts b/node_modules/@smithy/types/dist-types/transform/client-payload-blob-type-narrow.d.ts new file mode 100644 index 00000000..243a40f4 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/transform/client-payload-blob-type-narrow.d.ts @@ -0,0 +1,79 @@ +/// +/// +import type { IncomingMessage } from "http"; +import type { ClientHttp2Stream } from "http2"; +import type { InvokeMethod } from "../client"; +import type { GetOutputType } from "../command"; +import type { HttpHandlerOptions } from "../http"; +import type { SdkStream } from "../serde"; +import type { BrowserRuntimeStreamingBlobPayloadInputTypes, NodeJsRuntimeStreamingBlobPayloadInputTypes, StreamingBlobPayloadInputTypes } from "../streaming-payload/streaming-blob-payload-input-types"; +import type { StreamingBlobPayloadOutputTypes } from "../streaming-payload/streaming-blob-payload-output-types"; +import type { NarrowedInvokeMethod } from "./client-method-transforms"; +import type { Transform } from "./type-transform"; +/** + * @public + * + * Creates a type with a given client type that narrows payload blob output + * types to SdkStream. + * + * This can be used for clients with the NodeHttpHandler requestHandler, + * the default in Node.js when not using HTTP2. + * + * Usage example: + * ```typescript + * const client = new YourClient({}) as NodeJsClient; + * ``` + */ +export type NodeJsClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * Variant of NodeJsClient for node:http2. + */ +export type NodeJsHttp2Client = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * Creates a type with a given client type that narrows payload blob output + * types to SdkStream. + * + * This can be used for clients with the FetchHttpHandler requestHandler, + * which is the default in browser environments. + * + * Usage example: + * ```typescript + * const client = new YourClient({}) as BrowserClient; + * ``` + */ +export type BrowserClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * Variant of BrowserClient for XMLHttpRequest. + */ +export type BrowserXhrClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * @deprecated use NarrowPayloadBlobTypes. + * + * Narrow a given Client's blob payload outputs to the given type T. + */ +export type NarrowPayloadBlobOutputType = { + [key in keyof ClientType]: [ClientType[key]] extends [ + InvokeMethod + ] ? NarrowedInvokeMethod : ClientType[key]; +} & { + send(command: Command, options?: any): Promise, StreamingBlobPayloadOutputTypes | undefined, T>>; +}; +/** + * @public + * + * Narrow a Client's blob payload input and output types to I and O. + */ +export type NarrowPayloadBlobTypes = { + [key in keyof ClientType]: [ClientType[key]] extends [ + InvokeMethod + ] ? 
NarrowedInvokeMethod, FunctionOutputTypes> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise, StreamingBlobPayloadOutputTypes | undefined, O>>; +}; diff --git a/node_modules/@smithy/types/dist-types/transform/exact.d.ts b/node_modules/@smithy/types/dist-types/transform/exact.d.ts new file mode 100644 index 00000000..c8a15d8d --- /dev/null +++ b/node_modules/@smithy/types/dist-types/transform/exact.d.ts @@ -0,0 +1,6 @@ +/** + * @internal + * + * Checks that A and B extend each other. + */ +export type Exact = [A] extends [B] ? ([B] extends [A] ? true : false) : false; diff --git a/node_modules/@smithy/types/dist-types/transform/no-undefined.d.ts b/node_modules/@smithy/types/dist-types/transform/no-undefined.d.ts new file mode 100644 index 00000000..a0ec72e8 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/transform/no-undefined.d.ts @@ -0,0 +1,68 @@ +import type { InvokeMethod, InvokeMethodOptionalArgs } from "../client"; +import type { GetOutputType } from "../command"; +import type { DocumentType } from "../shapes"; +/** + * @public + * + * This type is intended as a type helper for generated clients. + * When initializing client, cast it to this type by passing + * the client constructor type as the type parameter. + * + * It will then recursively remove "undefined" as a union type from all + * input and output shapes' members. Note, this does not affect + * any member that is optional (?) such as outputs with no required members. + * + * @example + * ```ts + * const client = new Client({}) as AssertiveClient; + * ``` + */ +export type AssertiveClient = NarrowClientIOTypes; +/** + * @public + * + * This is similar to AssertiveClient but additionally changes all + * output types to (recursive) Required so as to bypass all output nullability guards. + */ +export type UncheckedClient = UncheckedClientOutputTypes; +/** + * @internal + * + * Excludes undefined recursively. + */ +export type NoUndefined = T extends Function ? T : T extends DocumentType ? T : [T] extends [object] ? { + [key in keyof T]: NoUndefined; +} : Exclude; +/** + * @internal + * + * Excludes undefined and optional recursively. + */ +export type RecursiveRequired = T extends Function ? T : T extends DocumentType ? T : [T] extends [object] ? { + [key in keyof T]-?: RecursiveRequired; +} : Exclude; +/** + * @internal + * + * Removes undefined from unions. + */ +type NarrowClientIOTypes = { + [key in keyof ClientType]: [ClientType[key]] extends [ + InvokeMethodOptionalArgs + ] ? InvokeMethodOptionalArgs, NoUndefined> : [ClientType[key]] extends [InvokeMethod] ? InvokeMethod, NoUndefined> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise>>; +}; +/** + * @internal + * + * Removes undefined from unions and adds yolo output types. + */ +type UncheckedClientOutputTypes = { + [key in keyof ClientType]: [ClientType[key]] extends [ + InvokeMethodOptionalArgs + ] ? InvokeMethodOptionalArgs, RecursiveRequired> : [ClientType[key]] extends [InvokeMethod] ? InvokeMethod, RecursiveRequired> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise>>>; +}; +export {}; diff --git a/node_modules/@smithy/types/dist-types/transform/type-transform.d.ts b/node_modules/@smithy/types/dist-types/transform/type-transform.d.ts new file mode 100644 index 00000000..90373fb3 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/transform/type-transform.d.ts @@ -0,0 +1,34 @@ +/** + * @public + * + * Transforms any members of the object T having type FromType + * to ToType. 
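// Illustrative sketch of AssertiveClient / UncheckedClient above. YourClient
// and getThing are placeholders for generated code, so the usage stays in
// comments rather than executable form.
import type { AssertiveClient, UncheckedClient } from "@smithy/types";
//
// const strict = new YourClient({}) as AssertiveClient<YourClient>;
// // inputs no longer accept an explicit `undefined`; optional (?) members stay optional
//
// const unchecked = new YourClient({}) as UncheckedClient<YourClient>;
// // outputs become recursively Required, bypassing nullability guards (use with care):
// // const name = (await unchecked.getThing({ Id: "1" })).Thing.Name;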
This applies only to exact type matches. + * + * This is for the case where FromType is a union and only those fields + * matching the same union should be transformed. + */ +export type Transform = ConditionalRecursiveTransformExact; +/** + * @internal + * + * Returns ToType if T matches exactly with FromType. + */ +type TransformExact = [T] extends [FromType] ? ([FromType] extends [T] ? ToType : T) : T; +/** + * @internal + * + * Applies TransformExact to members of an object recursively. + */ +type RecursiveTransformExact = T extends Function ? T : T extends object ? { + [key in keyof T]: [T[key]] extends [FromType] ? [FromType] extends [T[key]] ? ToType : ConditionalRecursiveTransformExact : ConditionalRecursiveTransformExact; +} : TransformExact; +/** + * @internal + * + * Same as RecursiveTransformExact but does not assign to an object + * unless there is a matching transformed member. + */ +type ConditionalRecursiveTransformExact = [T] extends [ + RecursiveTransformExact +] ? [RecursiveTransformExact] extends [T] ? T : RecursiveTransformExact : RecursiveTransformExact; +export {}; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/abort-handler.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/abort-handler.d.ts new file mode 100644 index 00000000..26c068c6 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/abort-handler.d.ts @@ -0,0 +1,7 @@ +import { AbortSignal as DeprecatedAbortSignal } from "./abort"; +/** + * @public + */ +export interface AbortHandler { + (this: AbortSignal | DeprecatedAbortSignal, ev: any): any; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/abort.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/abort.d.ts new file mode 100644 index 00000000..00741af7 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/abort.d.ts @@ -0,0 +1,50 @@ +import { AbortHandler } from "./abort-handler"; +/** + * @public + */ +export { AbortHandler }; +/** + * @public + * @deprecated use platform (global) type for AbortSignal. + * + * Holders of an AbortSignal object may query if the associated operation has + * been aborted and register an onabort handler. + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal + */ +export interface AbortSignal { + /** + * Whether the action represented by this signal has been cancelled. + */ + readonly aborted: boolean; + /** + * A function to be invoked when the action represented by this signal has + * been cancelled. + */ + onabort: AbortHandler | Function | null; +} +/** + * @public + * @deprecated use platform (global) type for AbortController. + * + * The AWS SDK uses a Controller/Signal model to allow for cooperative + * cancellation of asynchronous operations. When initiating such an operation, + * the caller can create an AbortController and then provide linked signal to + * subtasks. This allows a single source to communicate to multiple consumers + * that an action has been aborted without dictating how that cancellation + * should be handled. + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortController + */ +export interface AbortController { + /** + * An object that reports whether the action associated with this + * `AbortController` has been cancelled. + */ + readonly signal: AbortSignal; + /** + * Declares the operation associated with this AbortController to have been + * cancelled. 
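// Illustrative type-level demo of the Transform utility above: only members
// whose type matches FromType exactly are rewritten to ToType, recursively.
import type { Transform } from "@smithy/types";

type Original = { body: Uint8Array | string; meta: { tag: string } };
type Widened = Transform<Original, Uint8Array | string, string>;
// Widened is { body: string; meta: { tag: string } }; only the exact union matched.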
+ */ + abort(): void; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpApiKeyAuth.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpApiKeyAuth.d.ts new file mode 100644 index 00000000..380c8fc1 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpApiKeyAuth.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum HttpApiKeyAuthLocation { + HEADER = "header", + QUERY = "query" +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthScheme.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthScheme.d.ts new file mode 100644 index 00000000..e0d939ed --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthScheme.d.ts @@ -0,0 +1,49 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +import { HandlerExecutionContext } from "../middleware"; +import { HttpSigner } from "./HttpSigner"; +import { IdentityProviderConfig } from "./IdentityProviderConfig"; +/** + * ID for {@link HttpAuthScheme} + * @internal + */ +export type HttpAuthSchemeId = string; +/** + * Interface that defines an HttpAuthScheme + * @internal + */ +export interface HttpAuthScheme { + /** + * ID for an HttpAuthScheme, typically the absolute shape ID of a Smithy auth trait. + */ + schemeId: HttpAuthSchemeId; + /** + * Gets the IdentityProvider corresponding to an HttpAuthScheme. + */ + identityProvider(config: IdentityProviderConfig): IdentityProvider | undefined; + /** + * HttpSigner corresponding to an HttpAuthScheme. + */ + signer: HttpSigner; +} +/** + * Interface that defines the identity and signing properties when selecting + * an HttpAuthScheme. + * @internal + */ +export interface HttpAuthOption { + schemeId: HttpAuthSchemeId; + identityProperties?: Record; + signingProperties?: Record; + propertiesExtractor?: (config: TConfig, context: TContext) => { + identityProperties?: Record; + signingProperties?: Record; + }; +} +/** + * @internal + */ +export interface SelectedHttpAuthScheme { + httpAuthOption: HttpAuthOption; + identity: Identity; + signer: HttpSigner; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthSchemeProvider.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthSchemeProvider.d.ts new file mode 100644 index 00000000..d417aaf7 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthSchemeProvider.d.ts @@ -0,0 +1,20 @@ +import { HandlerExecutionContext } from "../middleware"; +import { HttpAuthOption } from "./HttpAuthScheme"; +/** + * @internal + */ +export interface HttpAuthSchemeParameters { + operation?: string; +} +/** + * @internal + */ +export interface HttpAuthSchemeProvider { + (authParameters: TParameters): HttpAuthOption[]; +} +/** + * @internal + */ +export interface HttpAuthSchemeParametersProvider { + (config: TConfig, context: TContext, input: TInput): Promise; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpSigner.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpSigner.d.ts new file mode 100644 index 00000000..7abcf847 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpSigner.d.ts @@ -0,0 +1,41 @@ +import { HttpRequest, HttpResponse } from "../http"; +import { Identity } from "../identity/identity"; +/** + * @internal + */ +export interface ErrorHandler { + (signingProperties: Record): (error: E) => never; +} +/** + * @internal + */ +export interface SuccessHandler { + (httpResponse: HttpResponse | unknown, signingProperties: Record): void; +} +/** + * 
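// Illustrative sketch of an HttpAuthSchemeProvider per the interfaces above:
// map auth scheme parameters to an ordered list of candidate HttpAuthOptions.
// The "aws.auth#sigv4" scheme ID is an example value.
import type { HttpAuthOption, HttpAuthSchemeParameters, HttpAuthSchemeProvider } from "@smithy/types";

const schemeProvider: HttpAuthSchemeProvider<HttpAuthSchemeParameters> = (params) => {
  const options: HttpAuthOption[] = [
    { schemeId: "aws.auth#sigv4", signingProperties: { operation: params.operation } },
  ];
  return options;
};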
Interface to sign identity and signing properties. + * @internal + */ +export interface HttpSigner { + /** + * Signs an HttpRequest with an identity and signing properties. + * @param httpRequest request to sign + * @param identity identity to sign the request with + * @param signingProperties property bag for signing + * @returns signed request in a promise + */ + sign(httpRequest: HttpRequest, identity: Identity, signingProperties: Record<string, unknown>): Promise<HttpRequest>; + /** + * Handler that executes after the {@link HttpSigner.sign} invocation and corresponding + * middleware throws an error. + * The error handler is expected to throw the error it receives, so the return type of the error handler is `never`. + * @internal + */ + errorHandler?: ErrorHandler; + /** + * Handler that executes after the {@link HttpSigner.sign} invocation and corresponding + * middleware succeeds. + * @internal + */ + successHandler?: SuccessHandler; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/auth/IdentityProviderConfig.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/auth/IdentityProviderConfig.d.ts new file mode 100644 index 00000000..6a50f657 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/auth/IdentityProviderConfig.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +import { HttpAuthSchemeId } from "./HttpAuthScheme"; +/** + * Interface to get an IdentityProvider for a specified HttpAuthScheme + * @internal + */ +export interface IdentityProviderConfig { + /** + * Get the IdentityProvider for a specified HttpAuthScheme. + * @param schemeId schemeId of the HttpAuthScheme + * @returns IdentityProvider or undefined if HttpAuthScheme is not found + */ + getIdentityProvider(schemeId: HttpAuthSchemeId): IdentityProvider<Identity> | undefined; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/auth/auth.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/auth/auth.d.ts new file mode 100644 index 00000000..8241fe3e --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/auth/auth.d.ts @@ -0,0 +1,57 @@ +/** + * @internal + * + * Authentication schemes represent a way that the service will authenticate the customer’s identity. + */ +export interface AuthScheme { + /** + * @example "sigv4a" or "sigv4" + */ + name: "sigv4" | "sigv4a" | string; + /** + * @example "s3" + */ + signingName: string; + /** + * @example "us-east-1" + */ + signingRegion: string; + /** + * @example ["*"] + * @example ["us-west-2", "us-east-1"] + */ + signingRegionSet?: string[]; + /** + * @deprecated this field was renamed to signingRegion. + */ + signingScope?: never; + properties: Record<string, unknown>; +} +/** + * @internal + * @deprecated + */ +export interface HttpAuthDefinition { + /** + * Defines the location of where the Auth is serialized. + */ + in: HttpAuthLocation; + /** + * Defines the name of the HTTP header or query string parameter + * that contains the Auth. + */ + name: string; + /** + * Defines the security scheme to use on the `Authorization` header value. + * This can only be set if the "in" property is set to {@link HttpAuthLocation.HEADER}.
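// Illustrative AuthScheme value matching the interface above, echoing its
// inline @example tags.
import type { AuthScheme } from "@smithy/types";

const sigv4Scheme: AuthScheme = {
  name: "sigv4",
  signingName: "s3",
  signingRegion: "us-east-1",
  properties: {},
};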
+ */ + scheme?: string; +} +/** + * @internal + * @deprecated + */ +export declare enum HttpAuthLocation { + HEADER = "header", + QUERY = "query" +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/auth/index.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/auth/index.d.ts new file mode 100644 index 00000000..fbb845d4 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/auth/index.d.ts @@ -0,0 +1,6 @@ +export * from "./auth"; +export * from "./HttpApiKeyAuth"; +export * from "./HttpAuthScheme"; +export * from "./HttpAuthSchemeProvider"; +export * from "./HttpSigner"; +export * from "./IdentityProviderConfig"; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/blob/blob-payload-input-types.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/blob/blob-payload-input-types.d.ts new file mode 100644 index 00000000..465c9a3f --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/blob/blob-payload-input-types.d.ts @@ -0,0 +1,41 @@ +/// +import { Readable } from "stream"; +import { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * A union of types that can be used as inputs for the service model + * "blob" type when it represents the request's entire payload or body. + * + * For example, in Lambda::invoke, the payload is modeled as a blob type + * and this union applies to it. + * In contrast, in Lambda::createFunction the Zip file option is a blob type, + * but is not the (entire) payload and this union does not apply. + * + * Note: not all types are signable by the standard SignatureV4 signer when + * used as the request body. For example, in Node.js a Readable stream + * is not signable by the default signer. + * They are included in the union because it may work in some cases, + * but the expected types are primarily string and Uint8Array. + * + * Additional details may be found in the internal + * function "getPayloadHash" in the SignatureV4 module. + */ +export type BlobPayloadInputTypes = string | ArrayBuffer | ArrayBufferView | Uint8Array | NodeJsRuntimeBlobTypes | BrowserRuntimeBlobTypes; +/** + * @public + * + * Additional blob types for the Node.js environment. + */ +export type NodeJsRuntimeBlobTypes = Readable | Buffer; +/** + * @public + * + * Additional blob types for the browser environment. + */ +export type BrowserRuntimeBlobTypes = BlobOptionalType | ReadableStreamOptionalType; +/** + * @internal + * @deprecated renamed to BlobPayloadInputTypes. + */ +export type BlobTypes = BlobPayloadInputTypes; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/checksum.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/checksum.d.ts new file mode 100644 index 00000000..dbfff0cf --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/checksum.d.ts @@ -0,0 +1,63 @@ +import { SourceData } from "./crypto"; +/** + * @public + * + * An object that provides a checksum of data provided in chunks to `update`. + * The checksum may be performed incrementally as chunks are received or all + * at once when the checksum is finalized, depending on the underlying + * implementation. + * + * It's recommended to compute checksum incrementally to avoid reading the + * entire payload in memory. + * + * A class that implements this interface may accept an optional secret key in its + * constructor while computing checksum value, when using HMAC. If provided, + * this secret key would be used when computing checksum. 
+ */ +export interface Checksum { + /** + * Constant length of the digest created by the algorithm in bytes. + */ + digestLength?: number; + /** + * Creates a new checksum object that contains a deep copy of the internal + * state of the current `Checksum` object. + */ + copy?(): Checksum; + /** + * Returns the digest of all of the data passed. + */ + digest(): Promise; + /** + * Allows marking a checksum for checksums that support the ability + * to mark and reset. + * + * @param readLimit - The maximum limit of bytes that can be read + * before the mark position becomes invalid. + */ + mark?(readLimit: number): void; + /** + * Resets the checksum to its initial value. + */ + reset(): void; + /** + * Adds a chunk of data for which checksum needs to be computed. + * This can be called many times with new data as it is streamed. + * + * Implementations may override this method which passes second param + * which makes Checksum object stateless. + * + * @param chunk - The buffer to update checksum with. + */ + update(chunk: Uint8Array): void; +} +/** + * @public + * + * A constructor for a Checksum that may be used to calculate an HMAC. Implementing + * classes should not directly hold the provided key in memory beyond the + * lexical scope of the constructor. + */ +export interface ChecksumConstructor { + new (secret?: SourceData): Checksum; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/client.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/client.d.ts new file mode 100644 index 00000000..1d05c041 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/client.d.ts @@ -0,0 +1,57 @@ +import { Command } from "./command"; +import { MiddlewareStack } from "./middleware"; +import { MetadataBearer } from "./response"; +import { OptionalParameter } from "./util"; +/** + * @public + * + * A type which checks if the client configuration is optional. + * If all entries of the client configuration are optional, it allows client creation without passing any config. + */ +export type CheckOptionalClientConfig = OptionalParameter; +/** + * @public + * + * function definition for different overrides of client's 'send' function. + */ +export interface InvokeFunction { + (command: Command, options?: any): Promise; + (command: Command, cb: (err: any, data?: OutputType) => void): void; + (command: Command, options: any, cb: (err: any, data?: OutputType) => void): void; + (command: Command, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * @public + * + * Signature that appears on aggregated clients' methods. + */ +export interface InvokeMethod { + (input: InputType, options?: any): Promise; + (input: InputType, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options: any, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * @public + * + * Signature that appears on aggregated clients' methods when argument is optional. 
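// Illustrative sketch of the Checksum contract above backed by node:crypto
// SHA-256. update() is incremental; reset() recreates the hash state because
// a node:crypto Hash cannot be reused after digest().
import { createHash, type Hash as NodeHash } from "crypto";
import type { Checksum } from "@smithy/types";

class Sha256Checksum implements Checksum {
  digestLength = 32;
  private hash: NodeHash = createHash("sha256");

  update(chunk: Uint8Array): void {
    this.hash.update(chunk);
  }

  async digest(): Promise<Uint8Array> {
    return new Uint8Array(this.hash.digest());
  }

  reset(): void {
    this.hash = createHash("sha256");
  }
}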
+ */ +export interface InvokeMethodOptionalArgs { + (): Promise; + (input: InputType, options?: any): Promise; + (input: InputType, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options: any, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * A general interface for service clients, idempotent to browser or node clients + * This type corresponds to SmithyClient(https://github.com/aws/aws-sdk-js-v3/blob/main/packages/smithy-client/src/client.ts). + * It's provided for using without importing the SmithyClient class. + * @internal + */ +export interface Client { + readonly config: ResolvedClientConfiguration; + middlewareStack: MiddlewareStack; + send: InvokeFunction; + destroy: () => void; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/command.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/command.d.ts new file mode 100644 index 00000000..fb7c5b6d --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/command.d.ts @@ -0,0 +1,23 @@ +import { Handler, MiddlewareStack } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + */ +export interface Command extends CommandIO { + readonly input: InputType; + readonly middlewareStack: MiddlewareStack; + resolveMiddleware(stack: MiddlewareStack, configuration: ResolvedConfiguration, options: any): Handler; +} +/** + * @internal + * + * This is a subset of the Command type used only to detect the i/o types. + */ +export interface CommandIO { + readonly input: InputType; + resolveMiddleware(stack: any, configuration: any, options: any): Handler; +} +/** + * @internal + */ +export type GetOutputType = Command extends CommandIO ? O : never; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/connection/config.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/connection/config.d.ts new file mode 100644 index 00000000..09ed18b5 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/connection/config.d.ts @@ -0,0 +1,10 @@ +/** + * @public + */ +export interface ConnectConfiguration { + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + */ + requestTimeout?: number; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/connection/index.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/connection/index.d.ts new file mode 100644 index 00000000..eaacf8bd --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/connection/index.d.ts @@ -0,0 +1,3 @@ +export * from "./config"; +export * from "./manager"; +export * from "./pool"; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/connection/manager.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/connection/manager.d.ts new file mode 100644 index 00000000..72450283 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/connection/manager.d.ts @@ -0,0 +1,34 @@ +import { RequestContext } from "../transfer"; +import { ConnectConfiguration } from "./config"; +/** + * @public + */ +export interface ConnectionManagerConfiguration { + /** + * Maximum number of allowed concurrent requests per connection. + */ + maxConcurrency?: number; + /** + * Disables concurrent requests per connection. 
+ */ + disableConcurrency?: boolean; +} +/** + * @public + */ +export interface ConnectionManager { + /** + * Retrieves a connection from the connection pool if available, + * otherwise establish a new connection + */ + lease(requestContext: RequestContext, connectionConfiguration: ConnectConfiguration): T; + /** + * Releases the connection back to the pool making it potentially + * re-usable by other requests. + */ + release(requestContext: RequestContext, connection: T): void; + /** + * Destroys the connection manager. All connections will be closed. + */ + destroy(): void; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/connection/pool.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/connection/pool.d.ts new file mode 100644 index 00000000..161094fe --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/connection/pool.d.ts @@ -0,0 +1,32 @@ +/** + * @public + */ +export interface ConnectionPool { + /** + * Retrieve the first connection in the pool + */ + poll(): T | void; + /** + * Release the connection back to the pool making it potentially + * re-usable by other requests. + */ + offerLast(connection: T): void; + /** + * Removes the connection from the pool, and destroys it. + */ + destroy(connection: T): void; + /** + * Implements the iterable protocol and allows arrays to be consumed + * by most syntaxes expecting iterables, such as the spread syntax + * and for...of loops + */ + [Symbol.iterator](): Iterator; +} +/** + * Unused. + * @internal + * @deprecated + */ +export interface CacheKey { + destination: string; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/crypto.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/crypto.d.ts new file mode 100644 index 00000000..467ec865 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/crypto.d.ts @@ -0,0 +1,60 @@ +/** + * @public + */ +export type SourceData = string | ArrayBuffer | ArrayBufferView; +/** + * @public + * + * An object that provides a hash of data provided in chunks to `update`. The + * hash may be performed incrementally as chunks are received or all at once + * when the hash is finalized, depending on the underlying implementation. + * + * @deprecated use {@link Checksum} + */ +export interface Hash { + /** + * Adds a chunk of data to the hash. If a buffer is provided, the `encoding` + * argument will be ignored. If a string is provided without a specified + * encoding, implementations must assume UTF-8 encoding. + * + * Not all encodings are supported on all platforms, though all must support + * UTF-8. + */ + update(toHash: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + /** + * Finalizes the hash and provides a promise that will be fulfilled with the + * raw bytes of the calculated hash. + */ + digest(): Promise; +} +/** + * @public + * + * A constructor for a hash that may be used to calculate an HMAC. Implementing + * classes should not directly hold the provided key in memory beyond the + * lexical scope of the constructor. + * + * @deprecated use {@link ChecksumConstructor} + */ +export interface HashConstructor { + new (secret?: SourceData): Hash; +} +/** + * @public + * + * A function that calculates the hash of a data stream. Determining the hash + * will consume the stream, so only replayable streams should be provided to an + * implementation of this interface. 
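// Illustrative sketch of the ConnectionPool contract above, backed by a plain
// array; TConn is a placeholder connection type with a close() method.
import type { ConnectionPool } from "@smithy/types";

class ArrayConnectionPool<TConn extends { close(): void }> implements ConnectionPool<TConn> {
  private connections: TConn[] = [];

  poll(): TConn | void {
    return this.connections.shift(); // first pooled connection, if any
  }

  offerLast(connection: TConn): void {
    this.connections.push(connection); // back to the pool for reuse
  }

  destroy(connection: TConn): void {
    this.connections = this.connections.filter((c) => c !== connection);
    connection.close();
  }

  [Symbol.iterator](): Iterator<TConn> {
    return this.connections[Symbol.iterator]();
  }
}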
+ */ +export interface StreamHasher { + (hashCtor: HashConstructor, stream: StreamType): Promise; +} +/** + * @public + * + * A function that returns a promise fulfilled with bytes from a + * cryptographically secure pseudorandom number generator. + */ +export interface randomValues { + (byteLength: number): Promise; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/downlevel-ts3.4/transform/type-transform.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/downlevel-ts3.4/transform/type-transform.d.ts new file mode 100644 index 00000000..547303f7 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/downlevel-ts3.4/transform/type-transform.d.ts @@ -0,0 +1,41 @@ +/** + * @public + * + * Transforms any members of the object T having type FromType + * to ToType. This applies only to exact type matches. + * + * This is for the case where FromType is a union and only those fields + * matching the same union should be transformed. + */ +export type Transform = RecursiveTransformExact; +/** + * @internal + * + * Returns ToType if T matches exactly with FromType. + */ +type TransformExact = [ + T +] extends [ + FromType +] ? ([ + FromType +] extends [ + T +] ? ToType : T) : T; +/** + * @internal + * + * Applies TransformExact to members of an object recursively. + */ +type RecursiveTransformExact = T extends Function ? T : T extends object ? { + [key in keyof T]: [ + T[key] + ] extends [ + FromType + ] ? [ + FromType + ] extends [ + T[key] + ] ? ToType : RecursiveTransformExact : RecursiveTransformExact; +} : TransformExact; +export {}; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/encode.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/encode.d.ts new file mode 100644 index 00000000..4714bf90 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/encode.d.ts @@ -0,0 +1,31 @@ +import { Message } from "./eventStream"; +/** + * @public + */ +export interface MessageEncoder { + encode(message: Message): Uint8Array; +} +/** + * @public + */ +export interface MessageDecoder { + decode(message: ArrayBufferView): Message; + feed(message: ArrayBufferView): void; + endOfStream(): void; + getMessage(): AvailableMessage; + getAvailableMessages(): AvailableMessages; +} +/** + * @public + */ +export interface AvailableMessage { + getMessage(): Message | undefined; + isEndOfStream(): boolean; +} +/** + * @public + */ +export interface AvailableMessages { + getMessages(): Message[]; + isEndOfStream(): boolean; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/endpoint.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/endpoint.d.ts new file mode 100644 index 00000000..a1221ee5 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/endpoint.d.ts @@ -0,0 +1,77 @@ +import { AuthScheme } from "./auth/auth"; +/** + * @public + */ +export interface EndpointPartition { + name: string; + dnsSuffix: string; + dualStackDnsSuffix: string; + supportsFIPS: boolean; + supportsDualStack: boolean; +} +/** + * @public + */ +export interface EndpointARN { + partition: string; + service: string; + region: string; + accountId: string; + resourceId: Array; +} +/** + * @public + */ +export declare enum EndpointURLScheme { + HTTP = "http", + HTTPS = "https" +} +/** + * @public + */ +export interface EndpointURL { + /** + * The URL scheme such as http or https. + */ + scheme: EndpointURLScheme; + /** + * The authority is the host and optional port component of the URL. + */ + authority: string; + /** + * The parsed path segment of the URL. 
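// Illustrative sketch of the incremental MessageDecoder protocol above: feed
// raw chunks, drain decoded messages, then signal end of stream.
import type { Message, MessageDecoder } from "@smithy/types";

function decodeAll(decoder: MessageDecoder, chunks: Uint8Array[]): Message[] {
  const messages: Message[] = [];
  for (const chunk of chunks) {
    decoder.feed(chunk);
    messages.push(...decoder.getAvailableMessages().getMessages());
  }
  decoder.endOfStream();
  return messages;
}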
+ * This value is as-is as provided by the user. + */ + path: string; + /** + * The parsed path segment of the URL. + * This value is guaranteed to start and end with a "/". + */ + normalizedPath: string; + /** + * A boolean indicating whether the authority is an IP address. + */ + isIp: boolean; +} +/** + * @public + */ +export type EndpointObjectProperty = string | boolean | { + [key: string]: EndpointObjectProperty; +} | EndpointObjectProperty[]; +/** + * @public + */ +export interface EndpointV2 { + url: URL; + properties?: { + authSchemes?: AuthScheme[]; + } & Record<string, EndpointObjectProperty>; + headers?: Record<string, string[]>; +} +/** + * @public + */ +export type EndpointParameters = { + [name: string]: undefined | boolean | string | string[]; +}; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/endpoints/EndpointRuleObject.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/endpoints/EndpointRuleObject.d.ts new file mode 100644 index 00000000..2c8026b2 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/endpoints/EndpointRuleObject.d.ts @@ -0,0 +1,27 @@ +import { EndpointObjectProperty } from "../endpoint"; +import { ConditionObject, Expression } from "./shared"; +/** + * @public + */ +export type EndpointObjectProperties = Record<string, EndpointObjectProperty>; +/** + * @public + */ +export type EndpointObjectHeaders = Record<string, Expression[]>; +/** + * @public + */ +export type EndpointObject = { + url: Expression; + properties?: EndpointObjectProperties; + headers?: EndpointObjectHeaders; +}; +/** + * @public + */ +export type EndpointRuleObject = { + type: "endpoint"; + conditions?: ConditionObject[]; + endpoint: EndpointObject; + documentation?: string; +}; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/endpoints/ErrorRuleObject.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/endpoints/ErrorRuleObject.d.ts new file mode 100644 index 00000000..98fc7a8f --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/endpoints/ErrorRuleObject.d.ts @@ -0,0 +1,10 @@ +import { ConditionObject, Expression } from "./shared"; +/** + * @public + */ +export type ErrorRuleObject = { + type: "error"; + conditions?: ConditionObject[]; + error: Expression; + documentation?: string; +}; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/endpoints/RuleSetObject.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/endpoints/RuleSetObject.d.ts new file mode 100644 index 00000000..e749fba8 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/endpoints/RuleSetObject.d.ts @@ -0,0 +1,28 @@ +import { RuleSetRules } from "./TreeRuleObject"; +/** + * @public + */ +export type DeprecatedObject = { + message?: string; + since?: string; +}; +/** + * @public + */ +export type ParameterObject = { + type: "String" | "string" | "Boolean" | "boolean"; + default?: string | boolean; + required?: boolean; + documentation?: string; + builtIn?: string; + deprecated?: DeprecatedObject; +}; +/** + * @public + */ +export type RuleSetObject = { + version: string; + serviceId?: string; + parameters: Record<string, ParameterObject>; + rules: RuleSetRules; +}; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/endpoints/TreeRuleObject.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/endpoints/TreeRuleObject.d.ts new file mode 100644 index 00000000..c203eedf --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/endpoints/TreeRuleObject.d.ts @@ -0,0 +1,16 @@ +import { EndpointRuleObject } from "./EndpointRuleObject"; +import { ErrorRuleObject } from "./ErrorRuleObject"; +import { ConditionObject } from "./shared"; +/** + * @public + */ +export type RuleSetRules = Array<EndpointRuleObject | ErrorRuleObject | TreeRuleObject>;
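// Illustrative ruleset literal matching the endpoint rule types above; the
// parameter names and URL template are made up.
import type { RuleSetObject } from "@smithy/types";

const ruleSet: RuleSetObject = {
  version: "1.0",
  parameters: {
    Region: { type: "String", required: true, documentation: "target region" },
    UseFips: { type: "Boolean", default: false },
  },
  rules: [
    {
      type: "endpoint",
      conditions: [{ fn: "isSet", argv: [{ ref: "Region" }] }],
      endpoint: { url: "https://service.{Region}.example.com" },
    },
    { type: "error", error: "Region must be set" },
  ],
};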
+/** + * @public + */ +export type TreeRuleObject = { + type: "tree"; + conditions?: ConditionObject[]; + rules: RuleSetRules; + documentation?: string; +}; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/endpoints/index.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/endpoints/index.d.ts new file mode 100644 index 00000000..8a297895 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/endpoints/index.d.ts @@ -0,0 +1,5 @@ +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./shared"; +export * from "./TreeRuleObject"; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/endpoints/shared.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/endpoints/shared.d.ts new file mode 100644 index 00000000..1c5d4b69 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/endpoints/shared.d.ts @@ -0,0 +1,55 @@ +import { Logger } from "../logger"; +/** + * @public + */ +export type ReferenceObject = { + ref: string; +}; +/** + * @public + */ +export type FunctionObject = { + fn: string; + argv: FunctionArgv; +}; +/** + * @public + */ +export type FunctionArgv = Array; +/** + * @public + */ +export type FunctionReturn = string | boolean | number | { + [key: string]: FunctionReturn; +}; +/** + * @public + */ +export type ConditionObject = FunctionObject & { + assign?: string; +}; +/** + * @public + */ +export type Expression = string | ReferenceObject | FunctionObject; +/** + * @public + */ +export type EndpointParams = Record; +/** + * @public + */ +export type EndpointResolverOptions = { + endpointParams: EndpointParams; + logger?: Logger; +}; +/** + * @public + */ +export type ReferenceRecord = Record; +/** + * @public + */ +export type EvaluateOptions = EndpointResolverOptions & { + referenceRecord: ReferenceRecord; +}; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/eventStream.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/eventStream.d.ts new file mode 100644 index 00000000..49c37c76 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/eventStream.d.ts @@ -0,0 +1,137 @@ +import { HttpRequest } from "./http"; +import { FinalizeHandler, FinalizeHandlerArguments, FinalizeHandlerOutput, HandlerExecutionContext } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * An event stream message. The headers and body properties will always be + * defined, with empty headers represented as an object with no keys and an + * empty body represented as a zero-length Uint8Array. 
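// Illustrative event stream Message per the description above: empty headers
// are an object with no keys; an empty body is a zero-length Uint8Array.
import type { Message, MessageHeaders } from "@smithy/types";

const headers: MessageHeaders = {
  ":message-type": { type: "string", value: "event" },
  ":event-type": { type: "string", value: "chunk" },
};
const chunkEvent: Message = { headers, body: new Uint8Array(0) };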
+ */ +export interface Message { + headers: MessageHeaders; + body: Uint8Array; +} +/** + * @public + */ +export type MessageHeaders = Record<string, MessageHeaderValue>; +/** + * @public + */ +export type HeaderValue<K extends string, V> = { + type: K; + value: V; +}; +/** + * @public + */ +export type BooleanHeaderValue = HeaderValue<"boolean", boolean>; +/** + * @public + */ +export type ByteHeaderValue = HeaderValue<"byte", number>; +/** + * @public + */ +export type ShortHeaderValue = HeaderValue<"short", number>; +/** + * @public + */ +export type IntegerHeaderValue = HeaderValue<"integer", number>; +/** + * @public + */ +export type LongHeaderValue = HeaderValue<"long", Int64>; +/** + * @public + */ +export type BinaryHeaderValue = HeaderValue<"binary", Uint8Array>; +/** + * @public + */ +export type StringHeaderValue = HeaderValue<"string", string>; +/** + * @public + */ +export type TimestampHeaderValue = HeaderValue<"timestamp", Date>; +/** + * @public + */ +export type UuidHeaderValue = HeaderValue<"uuid", string>; +/** + * @public + */ +export type MessageHeaderValue = BooleanHeaderValue | ByteHeaderValue | ShortHeaderValue | IntegerHeaderValue | LongHeaderValue | BinaryHeaderValue | StringHeaderValue | TimestampHeaderValue | UuidHeaderValue; +/** + * @public + */ +export interface Int64 { + readonly bytes: Uint8Array; + valueOf: () => number; + toString: () => string; +} +/** + * @public + * + * Util functions for serializing or deserializing event streams + */ +export interface EventStreamSerdeContext { + eventStreamMarshaller: EventStreamMarshaller; +} +/** + * @public + * + * A function which deserializes a binary event stream message into a modeled shape. + */ +export interface EventStreamMarshallerDeserFn<StreamType> { + <T>(body: StreamType, deserializer: (input: Record<string, Message>) => Promise<T>): AsyncIterable<T>; +} +/** + * @public + * + * A function that serializes a modeled shape into a binary event stream message. + */ +export interface EventStreamMarshallerSerFn<StreamType> { + <T>(input: AsyncIterable<T>, serializer: (event: T) => Message): StreamType; +} +/** + * @public + * + * An interface which provides functions for serializing and deserializing binary event stream + * to/from corresponding modeled shape.
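As a small sketch of these shapes (the header names follow the usual `:message-type`/`:event-type` convention, but the values here are illustrative):

```ts
import type { Message, MessageHeaders } from "@smithy/types";

// A hand-built event stream message with typed headers and a UTF-8 JSON body.
const headers: MessageHeaders = {
  ":message-type": { type: "string", value: "event" },
  ":event-type": { type: "string", value: "chunk" },
};

const message: Message = {
  headers,
  body: new TextEncoder().encode(JSON.stringify({ hello: "world" })),
};
```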
+ */ +export interface EventStreamMarshaller { + deserialize: EventStreamMarshallerDeserFn; + serialize: EventStreamMarshallerSerFn; +} +/** + * @public + */ +export interface EventStreamRequestSigner { + sign(request: HttpRequest): Promise; +} +/** + * @public + */ +export interface EventStreamPayloadHandler { + handle: (next: FinalizeHandler, args: FinalizeHandlerArguments, context?: HandlerExecutionContext) => Promise>; +} +/** + * @public + */ +export interface EventStreamPayloadHandlerProvider { + (options: any): EventStreamPayloadHandler; +} +/** + * @public + */ +export interface EventStreamSerdeProvider { + (options: any): EventStreamMarshaller; +} +/** + * @public + */ +export interface EventStreamSignerProvider { + (options: any): EventStreamRequestSigner; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/extensions/checksum.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/extensions/checksum.d.ts new file mode 100644 index 00000000..8ebbf005 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/extensions/checksum.d.ts @@ -0,0 +1,58 @@ +import { ChecksumConstructor } from "../checksum"; +import { HashConstructor } from "../crypto"; +/** + * @internal + */ +export declare enum AlgorithmId { + MD5 = "md5", + CRC32 = "crc32", + CRC32C = "crc32c", + SHA1 = "sha1", + SHA256 = "sha256" +} +/** + * @internal + */ +export interface ChecksumAlgorithm { + algorithmId(): AlgorithmId; + checksumConstructor(): ChecksumConstructor | HashConstructor; +} +/** + * @deprecated unused. + * @internal + */ +type ChecksumConfigurationLegacy = { + [other in string | number]: any; +}; +/** + * @internal + */ +export interface ChecksumConfiguration extends ChecksumConfigurationLegacy { + addChecksumAlgorithm(algo: ChecksumAlgorithm): void; + checksumAlgorithms(): ChecksumAlgorithm[]; +} +/** + * @deprecated will be removed for implicit type. + * @internal + */ +type GetChecksumConfigurationType = (runtimeConfig: Partial<{ + sha256: ChecksumConstructor | HashConstructor; + md5: ChecksumConstructor | HashConstructor; +}>) => ChecksumConfiguration; +/** + * @internal + * @deprecated will be moved to smithy-client. + */ +export declare const getChecksumConfiguration: GetChecksumConfigurationType; +/** + * @internal + * @deprecated will be removed for implicit type. + */ +type ResolveChecksumRuntimeConfigType = (clientConfig: ChecksumConfiguration) => any; +/** + * @internal + * + * @deprecated will be moved to smithy-client. + */ +export declare const resolveChecksumRuntimeConfig: ResolveChecksumRuntimeConfigType; +export {}; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultClientConfiguration.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultClientConfiguration.d.ts new file mode 100644 index 00000000..40458b45 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultClientConfiguration.d.ts @@ -0,0 +1,33 @@ +import { ChecksumConfiguration } from "./checksum"; +/** + * @deprecated will be replaced by DefaultExtensionConfiguration. + * @internal + * + * Default client configuration consisting various configurations for modifying a service client + */ +export interface DefaultClientConfiguration extends ChecksumConfiguration { +} +/** + * @deprecated will be removed for implicit type. + */ +type GetDefaultConfigurationType = (runtimeConfig: any) => DefaultClientConfiguration; +/** + * @deprecated moving to @smithy/smithy-client. 
+ * @internal + * + * Helper function to resolve default client configuration from runtime config + * + */ +export declare const getDefaultClientConfiguration: GetDefaultConfigurationType; +/** + * @deprecated will be removed for implicit type. + */ +type ResolveDefaultRuntimeConfigType = (clientConfig: DefaultClientConfiguration) => any; +/** + * @deprecated moving to @smithy/smithy-client. + * @internal + * + * Helper function to resolve runtime config from default client configuration + */ +export declare const resolveDefaultRuntimeConfig: ResolveDefaultRuntimeConfigType; +export {}; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts new file mode 100644 index 00000000..55f51373 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { ChecksumConfiguration } from "./checksum"; +import { RetryStrategyConfiguration } from "./retry"; +/** + * @internal + * + * Default extension configuration consisting various configurations for modifying a service client + */ +export interface DefaultExtensionConfiguration extends ChecksumConfiguration, RetryStrategyConfiguration { +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/extensions/index.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/extensions/index.d.ts new file mode 100644 index 00000000..55edb164 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/extensions/index.d.ts @@ -0,0 +1,4 @@ +export * from "./defaultClientConfiguration"; +export * from "./defaultExtensionConfiguration"; +export { AlgorithmId, ChecksumAlgorithm, ChecksumConfiguration } from "./checksum"; +export { RetryStrategyConfiguration } from "./retry"; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/extensions/retry.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/extensions/retry.d.ts new file mode 100644 index 00000000..3471d087 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/extensions/retry.d.ts @@ -0,0 +1,18 @@ +import { RetryStrategyV2 } from "../retry"; +import { Provider, RetryStrategy } from "../util"; +/** + * A configuration interface with methods called by runtime extension + * @internal + */ +export interface RetryStrategyConfiguration { + /** + * Set retry strategy used for all http requests + * @param retryStrategy + */ + setRetryStrategy(retryStrategy: Provider): void; + /** + * Get retry strategy used for all http requests + * @param retryStrategy + */ + retryStrategy(): Provider; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/externals-check/browser-externals-check.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/externals-check/browser-externals-check.d.ts new file mode 100644 index 00000000..b709d7f4 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/externals-check/browser-externals-check.d.ts @@ -0,0 +1,35 @@ +import { Exact } from "../transform/exact"; +/** + * @public + * + * A checked type that resolves to Blob if it is defined as more than a stub, otherwise + * resolves to 'never' so as not to widen the type of unions containing Blob + * excessively. + */ +export type BlobOptionalType = BlobDefined extends true ? Blob : Unavailable; +/** + * @public + * + * A checked type that resolves to ReadableStream if it is defined as more than a stub, otherwise + * resolves to 'never' so as not to widen the type of unions containing ReadableStream + * excessively. 
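A rough sketch of an object satisfying the `RetryStrategyConfiguration` contract above; the factory name is invented, and a real client would wire such an object into its runtime extension configuration:

```ts
import type { Provider, RetryStrategy, RetryStrategyV2 } from "@smithy/types";

// Minimal mutable slot holding the client's retry strategy provider.
const makeRetryStrategyConfig = (initial: Provider<RetryStrategy | RetryStrategyV2>) => {
  let current = initial;
  return {
    setRetryStrategy(retryStrategy: Provider<RetryStrategy | RetryStrategyV2>): void {
      current = retryStrategy;
    },
    retryStrategy(): Provider<RetryStrategy | RetryStrategyV2> {
      return current;
    },
  };
};
```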
+ */ +export type ReadableStreamOptionalType = ReadableStreamDefined extends true ? ReadableStream : Unavailable; +/** + * @public + * + * Indicates a type is unavailable if it resolves to this. + */ +export type Unavailable = never; +/** + * @internal + * + * Whether the global types define more than a stub for ReadableStream. + */ +export type ReadableStreamDefined = Exact<ReadableStream, {}> extends true ? false : true; +/** + * @internal + * + * Whether the global types define more than a stub for Blob. + */ +export type BlobDefined = Exact<Blob, {}> extends true ? false : true; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/feature-ids.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/feature-ids.d.ts new file mode 100644 index 00000000..1a2c157c --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/feature-ids.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + */ +export type SmithyFeatures = Partial<{ + RESOURCE_MODEL: "A"; + WAITER: "B"; + PAGINATOR: "C"; + RETRY_MODE_LEGACY: "D"; + RETRY_MODE_STANDARD: "E"; + RETRY_MODE_ADAPTIVE: "F"; + GZIP_REQUEST_COMPRESSION: "L"; + PROTOCOL_RPC_V2_CBOR: "M"; + ENDPOINT_OVERRIDE: "N"; + SIGV4A_SIGNING: "S"; + CREDENTIALS_CODE: "e"; +}>; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/http.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/http.d.ts new file mode 100644 index 00000000..1e47e4e2 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/http.d.ts @@ -0,0 +1,112 @@ +import { AbortSignal as DeprecatedAbortSignal } from "./abort"; +import { URI } from "./uri"; +/** + * @public + * + * @deprecated use {@link EndpointV2} from `@smithy/types`. + */ +export interface Endpoint { + protocol: string; + hostname: string; + port?: number; + path: string; + query?: QueryParameterBag; +} +/** + * @public + * + * Interface for an HTTP request class. Contains + * addressing information in addition to standard message properties. + */ +export interface HttpRequest extends HttpMessage, URI { + method: string; +} +/** + * @public + * + * Represents an HTTP message as received in reply to a request. Contains a + * numeric status code in addition to standard message properties. + */ +export interface HttpResponse extends HttpMessage { + statusCode: number; + reason?: string; +} +/** + * @public + * + * Represents an HTTP message with headers and an optional static or streaming + * body. body: ArrayBuffer | ArrayBufferView | string | Uint8Array | Readable | ReadableStream; + */ +export interface HttpMessage { + headers: HeaderBag; + body?: any; +} +/** + * @public + * + * A mapping of query parameter names to strings or arrays of strings, with the + * second being used when a parameter contains a list of values. The value can be set + * to null when the query is not in key-value-pair shape. + */ +export type QueryParameterBag = Record<string, string | Array<string> | null>; +/** + * @public + */ +export type FieldOptions = { + name: string; + kind?: FieldPosition; + values?: string[]; +}; +/** + * @public + */ +export declare enum FieldPosition { + HEADER = 0, + TRAILER = 1 +} +/** + * @public + * + * A mapping of header names to string values. Multiple values for the same + * header should be represented as a single string with values separated by + * `, `. + * + * Keys should be considered case insensitive, even if this is not enforced by a + * particular implementation.
For example, given the following HeaderBag, where + * keys differ only in case: + * + * ```json + * { + * 'x-request-date': '2000-01-01T00:00:00Z', + * 'X-Request-Date': '2001-01-01T00:00:00Z' + * } + * ``` + * + * The SDK may at any point during processing remove one of the object + * properties in favor of the other. The headers may or may not be combined, and + * the SDK will not deterministically select which header candidate to use. + */ +export type HeaderBag = Record<string, string>; +/** + * @public + * + * Represents an HTTP message with headers and an optional static or streaming + * body. body: ArrayBuffer | ArrayBufferView | string | Uint8Array | Readable | ReadableStream; + */ +export interface HttpMessage { + headers: HeaderBag; + body?: any; +} +/** + * @public + * + * Represents the options that may be passed to an Http Handler. + */ +export interface HttpHandlerOptions { + abortSignal?: AbortSignal | DeprecatedAbortSignal; + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + */ + requestTimeout?: number; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/http/httpHandlerInitialization.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/http/httpHandlerInitialization.d.ts new file mode 100644 index 00000000..0ee18e44 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/http/httpHandlerInitialization.d.ts @@ -0,0 +1,122 @@ +/// <reference types="node" /> +import { Agent as hAgent, AgentOptions as hAgentOptions } from "http"; +import { Agent as hsAgent, AgentOptions as hsAgentOptions } from "https"; +import { HttpRequest as IHttpRequest } from "../http"; +import { Logger } from "../logger"; +/** + * + * This type represents an alternate client constructor option for the entry + * "requestHandler". Instead of providing an instance of a requestHandler, the user + * may provide the requestHandler's constructor options for either the + * NodeHttpHandler or FetchHttpHandler. + * + * For other RequestHandlers like HTTP2 or WebSocket, + * constructor parameter passthrough is not currently available. + * + * @public + */ +export type RequestHandlerParams = NodeHttpHandlerOptions | FetchHttpHandlerOptions; +/** + * Represents the http options that can be passed to a node http client. + * @public + */ +export interface NodeHttpHandlerOptions { + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + * + * Defaults to 0, which disables the timeout. + */ + connectionTimeout?: number; + /** + * The number of milliseconds a request can take before automatically being terminated. + * Defaults to 0, which disables the timeout. + */ + requestTimeout?: number; + /** + * Delay before the NodeHttpHandler checks for socket exhaustion, + * and emits a warning if the active sockets and enqueued request count is greater than + * 2x the maxSockets count. + * + * Defaults to connectionTimeout + requestTimeout or 3000ms if those are not set. + */ + socketAcquisitionWarningTimeout?: number; + /** + * This field is deprecated, and requestTimeout should be used instead. + * The maximum time in milliseconds that a socket may remain idle before it + * is closed. + * + * @deprecated Use {@link requestTimeout} + */ + socketTimeout?: number; + /** + * You can pass http.Agent or its constructor options. + */ + httpAgent?: hAgent | hAgentOptions; + /** + * You can pass https.Agent or its constructor options.
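For instance, a Node.js client could be configured with handler options like the following (values are arbitrary; as the docs above note, either an agent instance or its constructor options is accepted):

```ts
import { Agent } from "https";
import type { NodeHttpHandlerOptions } from "@smithy/types";

// Keep-alive agent plus conservative timeouts; numbers are illustrative.
const handlerOptions: NodeHttpHandlerOptions = {
  connectionTimeout: 5_000,
  requestTimeout: 30_000,
  httpsAgent: new Agent({ keepAlive: true, maxSockets: 50 }),
};
```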
+ */ + httpsAgent?: hsAgent | hsAgentOptions; + /** + * Optional logger. + */ + logger?: Logger; +} +/** + * Represents the http options that can be passed to a browser http client. + * @public + */ +export interface FetchHttpHandlerOptions { + /** + * The number of milliseconds a request can take before being automatically + * terminated. + */ + requestTimeout?: number; + /** + * Whether to allow the request to outlive the page. Default value is false. + * + * There may be limitations to the payload size, number of concurrent requests, + * request duration etc. when using keepalive in browsers. + * + * These may change over time, so look for up to date information about + * these limitations before enabling keepalive. + */ + keepAlive?: boolean; + /** + * A string indicating whether credentials will be sent with the request always, never, or + * only when sent to a same-origin URL. + * @see https://developer.mozilla.org/en-US/docs/Web/API/Request/credentials + */ + credentials?: "include" | "omit" | "same-origin" | undefined | string; + /** + * Cache settings for fetch. + * @see https://developer.mozilla.org/en-US/docs/Web/API/Request/cache + */ + cache?: "default" | "force-cache" | "no-cache" | "no-store" | "only-if-cached" | "reload"; + /** + * An optional function that produces additional RequestInit + * parameters for each httpRequest. + * + * This is applied last via merging with Object.assign() and overwrites other values + * set from other sources. + * + * @example + * ```js + * new Client({ + * requestHandler: { + * requestInit(httpRequest) { + * return { cache: "no-store" }; + * } + * } + * }); + * ``` + */ + requestInit?: (httpRequest: IHttpRequest) => RequestInit; +} +declare global { + /** + * interface merging stub. + */ + interface RequestInit { + } +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/identity/apiKeyIdentity.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/identity/apiKeyIdentity.d.ts new file mode 100644 index 00000000..4aee7a21 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/identity/apiKeyIdentity.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +/** + * @public + */ +export interface ApiKeyIdentity extends Identity { + /** + * The literal API Key + */ + readonly apiKey: string; +} +/** + * @public + */ +export type ApiKeyIdentityProvider = IdentityProvider; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/identity/awsCredentialIdentity.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/identity/awsCredentialIdentity.d.ts new file mode 100644 index 00000000..9605e4d5 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/identity/awsCredentialIdentity.d.ts @@ -0,0 +1,31 @@ +import { Identity, IdentityProvider } from "./identity"; +/** + * @public + */ +export interface AwsCredentialIdentity extends Identity { + /** + * AWS access key ID + */ + readonly accessKeyId: string; + /** + * AWS secret access key + */ + readonly secretAccessKey: string; + /** + * A security or session token to use with these credentials. Usually + * present for temporary credentials. + */ + readonly sessionToken?: string; + /** + * AWS credential scope for this set of credentials. + */ + readonly credentialScope?: string; + /** + * AWS accountId. 
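A minimal sketch of an `AwsCredentialIdentityProvider`, assuming credentials come from environment variables in a Node.js process:

```ts
import type { AwsCredentialIdentity, AwsCredentialIdentityProvider } from "@smithy/types";

// Static provider reading from the environment; a real provider would
// typically refresh temporary credentials before `expiration`.
const fromEnv: AwsCredentialIdentityProvider = async (): Promise<AwsCredentialIdentity> => ({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID ?? "",
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY ?? "",
  sessionToken: process.env.AWS_SESSION_TOKEN,
});
```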
+ */ + readonly accountId?: string; +} +/** + * @public + */ +export type AwsCredentialIdentityProvider = IdentityProvider; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/identity/identity.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/identity/identity.d.ts new file mode 100644 index 00000000..eaa7e5dc --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/identity/identity.d.ts @@ -0,0 +1,15 @@ +/** + * @public + */ +export interface Identity { + /** + * A `Date` when the identity or credential will no longer be accepted. + */ + readonly expiration?: Date; +} +/** + * @public + */ +export interface IdentityProvider { + (identityProperties?: Record): Promise; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/identity/index.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/identity/index.d.ts new file mode 100644 index 00000000..031a0fe1 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/identity/index.d.ts @@ -0,0 +1,4 @@ +export * from "./apiKeyIdentity"; +export * from "./awsCredentialIdentity"; +export * from "./identity"; +export * from "./tokenIdentity"; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/identity/tokenIdentity.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/identity/tokenIdentity.d.ts new file mode 100644 index 00000000..33783eb1 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/identity/tokenIdentity.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +/** + * @internal + */ +export interface TokenIdentity extends Identity { + /** + * The literal token string + */ + readonly token: string; +} +/** + * @internal + */ +export type TokenIdentityProvider = IdentityProvider; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..85b4e44c --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/index.d.ts @@ -0,0 +1,37 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-payload-input-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./crypto"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./endpoints"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./http"; +export * from "./http/httpHandlerInitialization"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./response"; +export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./streaming-payload/streaming-blob-common-types"; +export * from "./streaming-payload/streaming-blob-payload-input-types"; +export * from "./streaming-payload/streaming-blob-payload-output-types"; +export * from "./transfer"; +export * from "./transform/client-payload-blob-type-narrow"; +export * from "./transform/no-undefined"; +export * from "./transform/type-transform"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/logger.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/logger.d.ts new file mode 100644 index 00000000..cc69a11f --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/logger.d.ts @@ -0,0 +1,13 @@ +/** + * @public 
+ * + * Represents a logger object that is available in HandlerExecutionContext + * throughout the middleware stack. + */ +export interface Logger { + trace?: (...content: any[]) => void; + debug: (...content: any[]) => void; + info: (...content: any[]) => void; + warn: (...content: any[]) => void; + error: (...content: any[]) => void; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/middleware.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/middleware.d.ts new file mode 100644 index 00000000..8b35bbeb --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/middleware.d.ts @@ -0,0 +1,534 @@ +import { AuthScheme, HttpAuthDefinition } from "./auth/auth"; +import { SelectedHttpAuthScheme } from "./auth/HttpAuthScheme"; +import { Command } from "./command"; +import { EndpointV2 } from "./endpoint"; +import { SmithyFeatures } from "./feature-ids"; +import { Logger } from "./logger"; +import { UserAgent } from "./util"; +/** + * @public + */ +export interface InitializeHandlerArguments<Input extends object> { + /** + * User input to a command. Reflects the userland representation of the + * union of data types the command can effectively handle. + */ + input: Input; +} +/** + * @public + */ +export interface InitializeHandlerOutput<Output extends object> extends DeserializeHandlerOutput<Output> { + output: Output; +} +/** + * @public + */ +export interface SerializeHandlerArguments<Input extends object> extends InitializeHandlerArguments<Input> { + /** + * The user input serialized as a request object. The request object is unknown, + * so you cannot modify it directly. When working with the request, you need to guard its + * type, e.g. to HttpRequest, with the 'instanceof' operand. + * + * During the build phase of the execution of a middleware stack, a built + * request may or may not be available. + */ + request?: unknown; +} +/** + * @public + */ +export interface SerializeHandlerOutput<Output extends object> extends InitializeHandlerOutput<Output> { +} +/** + * @public + */ +export interface BuildHandlerArguments<Input extends object> extends FinalizeHandlerArguments<Input> { +} +/** + * @public + */ +export interface BuildHandlerOutput<Output extends object> extends InitializeHandlerOutput<Output> { +} +/** + * @public + */ +export interface FinalizeHandlerArguments<Input extends object> extends SerializeHandlerArguments<Input> { + /** + * The user input serialized as a request. + */ + request: unknown; +} +/** + * @public + */ +export interface FinalizeHandlerOutput<Output extends object> extends InitializeHandlerOutput<Output> { +} +/** + * @public + */ +export interface DeserializeHandlerArguments<Input extends object> extends FinalizeHandlerArguments<Input> { +} +/** + * @public + */ +export interface DeserializeHandlerOutput<Output extends object> { + /** + * The raw response object from the runtime is deserialized to a structured output object. + * The response object is unknown, so you cannot modify it directly. When working with the + * response, you need to guard its type, e.g. to HttpResponse, with the 'instanceof' operand. + * + * During the deserialize phase of the execution of a middleware stack, a deserialized + * response may or may not be available. + */ + response: unknown; + output?: Output; +} +/** + * @public + */ +export interface InitializeHandler<Input extends object, Output extends object> { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing an input to the command as well as any + * associated or previously generated execution artifacts. + */ + (args: InitializeHandlerArguments<Input>): Promise<InitializeHandlerOutput<Output>>; +} +/** + * @public + */ +export type Handler<Input extends object, Output extends object> = InitializeHandler<Input, Output>; +/** + * @public + */ +export interface SerializeHandler<Input extends object, Output extends object> { + /** + * Asynchronously converts an input object into an output object.
+ * + * @param args - An object containing an input to the command as well as any + * associated or previously generated execution artifacts. + */ + (args: SerializeHandlerArguments<Input>): Promise<SerializeHandlerOutput<Output>>; +} +/** + * @public + */ +export interface FinalizeHandler<Input extends object, Output extends object> { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing an input to the command as well as any + * associated or previously generated execution artifacts. + */ + (args: FinalizeHandlerArguments<Input>): Promise<FinalizeHandlerOutput<Output>>; +} +/** + * @public + */ +export interface BuildHandler<Input extends object, Output extends object> { + (args: BuildHandlerArguments<Input>): Promise<BuildHandlerOutput<Output>>; +} +/** + * @public + */ +export interface DeserializeHandler<Input extends object, Output extends object> { + (args: DeserializeHandlerArguments<Input>): Promise<DeserializeHandlerOutput<Output>>; +} +/** + * @public + * + * A factory function that creates functions implementing the `Handler` + * interface. + */ +export interface InitializeMiddleware<Input extends object, Output extends object> { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. + */ + (next: InitializeHandler<Input, Output>, context: HandlerExecutionContext): InitializeHandler<Input, Output>; +} +/** + * @public + * + * A factory function that creates functions implementing the `SerializeHandler` + * interface. + */ +export interface SerializeMiddleware<Input extends object, Output extends object> { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. + */ + (next: SerializeHandler<Input, Output>, context: HandlerExecutionContext): SerializeHandler<Input, Output>; +} +/** + * @public + * + * A factory function that creates functions implementing the `FinalizeHandler` + * interface. + */ +export interface FinalizeRequestMiddleware<Input extends object, Output extends object> { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. + */ + (next: FinalizeHandler<Input, Output>, context: HandlerExecutionContext): FinalizeHandler<Input, Output>; +} +/** + * @public + */ +export interface BuildMiddleware<Input extends object, Output extends object> { + (next: BuildHandler<Input, Output>, context: HandlerExecutionContext): BuildHandler<Input, Output>; +} +/** + * @public + */ +export interface DeserializeMiddleware<Input extends object, Output extends object> { + (next: DeserializeHandler<Input, Output>, context: HandlerExecutionContext): DeserializeHandler<Input, Output>; +} +/** + * @public + */ +export type MiddlewareType<Input extends object, Output extends object> = InitializeMiddleware<Input, Output> | SerializeMiddleware<Input, Output> | BuildMiddleware<Input, Output> | FinalizeRequestMiddleware<Input, Output> | DeserializeMiddleware<Input, Output>; +/** + * @public + * + * A factory function that creates the terminal handler atop which a middleware + * stack sits. + */ +export interface Terminalware { + <Input extends object, Output extends object>(context: HandlerExecutionContext): DeserializeHandler<Input, Output>; +} +/** + * @public + */ +export type Step = "initialize" | "serialize" | "build" | "finalizeRequest" | "deserialize"; +/** + * @public + */ +export type Priority = "high" | "normal" | "low"; +/** + * @public + */ +export interface HandlerOptions { + /** + * Handlers are ordered using a "step" that describes the stage of command + * execution at which the handler will be executed. The available steps are: + * + * - initialize: The input is being prepared. Examples of typical + * initialization tasks include injecting default options and computing + * derived parameters. + * - serialize: The input is complete and ready to be serialized.
Examples + * of typical serialization tasks include input validation and building + * an HTTP request from user input. + * - build: The input has been serialized into an HTTP request, but that + * request may require further modification. Any request alterations + * will be applied to all retries. Examples of typical build tasks + * include injecting HTTP headers that describe a stable aspect of the + * request, such as `Content-Length` or a body checksum. + * - finalizeRequest: The request is being prepared to be sent over the wire. The + * request in this stage should already be semantically complete and + * should therefore only be altered to match the recipient's + * expectations. Examples of typical finalization tasks include request + * signing and injecting hop-by-hop headers. + * - deserialize: The response has arrived; the middleware here will deserialize + * the raw response object to a structured response. + * + * Unlike initialization and build handlers, which are executed once + * per operation execution, finalization and deserialize handlers will be + * executed for each HTTP request sent. + * + * @defaultValue 'initialize' + */ + step?: Step; + /** + * A list of strings that identify the general purpose or important + * characteristics of a given handler. + */ + tags?: Array<string>; + /** + * A unique name to refer to a middleware. + */ + name?: string; + /** + * @internal + * Aliases allows for middleware to be found by multiple names besides {@link HandlerOptions.name}. + * This allows for references to replaced middleware to continue working, e.g. replacing + * multiple auth-specific middleware with a single generic auth middleware. + */ + aliases?: Array<string>; + /** + * A flag to override the existing middleware with the same name. Without + * setting it, adding middleware with a duplicated name will throw an exception. + * @internal + */ + override?: boolean; +} +/** + * @public + */ +export interface AbsoluteLocation { + /** + * By default middleware will be added to an individual step in un-guaranteed order. + * In the case that the order within a step matters, set `priority` to `high` or `low`. + * + * @defaultValue 'normal' + */ + priority?: Priority; +} +/** + * @public + */ +export type Relation = "before" | "after"; +/** + * @public + */ +export interface RelativeLocation { + /** + * Specify the relation to be before or after a known middleware. + */ + relation: Relation; + /** + * A known middleware name that indicates the inserted middleware's location. + */ + toMiddleware: string; +} +/** + * @public + */ +export type RelativeMiddlewareOptions = RelativeLocation & Pick<HandlerOptions, Exclude<keyof HandlerOptions, "step">>; +/** + * @public + */ +export interface InitializeHandlerOptions extends HandlerOptions { + step?: "initialize"; +} +/** + * @public + */ +export interface SerializeHandlerOptions extends HandlerOptions { + step: "serialize"; +} +/** + * @public + */ +export interface BuildHandlerOptions extends HandlerOptions { + step: "build"; +} +/** + * @public + */ +export interface FinalizeRequestHandlerOptions extends HandlerOptions { + step: "finalizeRequest"; +} +/** + * @public + */ +export interface DeserializeHandlerOptions extends HandlerOptions { + step: "deserialize"; +} +/** + * @public + * + * A stack storing middleware. It can be resolved into a handler. It supports 2 + * approaches for adding middleware: + * 1. Adding middleware to a specific step with `add()`. The order of middleware + * added to the same step is determined by the order of adding them.
If one middleware + * needs to be executed at the front of the step or at the end of the step, set + * the `priority` option to `high` or `low`. + * 2. Adding middleware to a location relative to a known middleware with `addRelativeTo()`. + * This is useful when a given middleware must be executed before or after specific + * middleware (`toMiddleware`). You can add a middleware relative to another + * middleware which was also added relatively. But eventually, this relative middleware + * chain **must** be 'anchored' by a middleware that was added using the `add()` API + * with absolute `step` and `priority`. This method will throw if the specified + * `toMiddleware` is not found. + */ +export interface MiddlewareStack<Input extends object, Output extends object> extends Pluggable<Input, Output> { + /** + * Add middleware to the stack to be executed during the "initialize" step, + * optionally specifying a priority, tags and name. + */ + add(middleware: InitializeMiddleware<Input, Output>, options?: InitializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "serialize" step, + * optionally specifying a priority, tags and name. + */ + add(middleware: SerializeMiddleware<Input, Output>, options: SerializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "build" step, + * optionally specifying a priority, tags and name. + */ + add(middleware: BuildMiddleware<Input, Output>, options: BuildHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "finalizeRequest" step, + * optionally specifying a priority, tags and name. + */ + add(middleware: FinalizeRequestMiddleware<Input, Output>, options: FinalizeRequestHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "deserialize" step, + * optionally specifying a priority, tags and name. + */ + add(middleware: DeserializeMiddleware<Input, Output>, options: DeserializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to a stack position before or after a known middleware, optionally + * specifying a name and tags. + */ + addRelativeTo(middleware: MiddlewareType<Input, Output>, options: RelativeMiddlewareOptions): void; + /** + * Apply a customization function to mutate the middleware stack, often + * used for customizations that require mutating multiple middleware. + */ + use(pluggable: Pluggable<Input, Output>): void; + /** + * Create a shallow clone of this stack. Step bindings and handler priorities + * and tags are preserved in the copy. + */ + clone(): MiddlewareStack<Input, Output>; + /** + * Removes middleware from the stack. + * + * If a string is provided, it will be treated as the middleware name. If a middleware + * is inserted with the given name, it will be removed. + * + * If a middleware class is provided, all usages thereof will be removed. + */ + remove(toRemove: MiddlewareType<Input, Output> | string): boolean; + /** + * Removes middleware that contains the given tag. + * + * Multiple middleware will potentially be removed. + */ + removeByTag(toRemove: string): boolean; + /** + * Create a stack containing the middlewares in this stack as well as the + * middlewares in the `from` stack. Neither source is modified, and step + * bindings and handler priorities and tags are preserved in the copy. + */ + concat<InputType extends Input, OutputType extends Output>(from: MiddlewareStack<InputType, OutputType>): MiddlewareStack<InputType, OutputType>; + /** + * Returns a list of the current order of middleware in the stack. + * This does not execute the middleware functions, nor does it + * provide a reference to the stack itself.
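To make the `add()` contract concrete, here is a sketch of a build-step middleware that stamps a header; it assumes `@smithy/protocol-http` for the runtime type guard, and the middleware name and header are invented:

```ts
import { HttpRequest } from "@smithy/protocol-http";
import type { BuildMiddleware } from "@smithy/types";

// The request arrives typed as `unknown`, so guard it before mutating headers.
const addTraceHeader: BuildMiddleware<any, any> = (next) => async (args) => {
  if (HttpRequest.isInstance(args.request)) {
    args.request.headers["x-trace-id"] = "example";
  }
  return next(args);
};

// Usage sketch:
// client.middlewareStack.add(addTraceHeader, { step: "build", name: "addTraceHeader" });
```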
+ */ + identify(): string[]; + /** + * @internal + * + * When an operation is called using this stack, + * it will log its list of middleware to the console using + * the identify function. + * + * @param toggle - set whether to log on resolve. + * If no argument given, returns the current value. + */ + identifyOnResolve(toggle?: boolean): boolean; + /** + * Builds a single handler function from zero or more middleware classes and + * a core handler. The core handler is meant to send command objects to AWS + * services and return promises that will resolve with the operation result + * or be rejected with an error. + * + * When a composed handler is invoked, the arguments will pass through all + * middleware in a defined order, and the return from the innermost handler + * will pass through all middleware in the reverse of that order. + */ + resolve<InputType extends Input, OutputType extends Output>(handler: DeserializeHandler<InputType, OutputType>, context: HandlerExecutionContext): InitializeHandler<InputType, OutputType>; +} +/** + * @internal + */ +export declare const SMITHY_CONTEXT_KEY = "__smithy_context"; +/** + * @public + * + * Data and helper objects that are not expected to change from one execution of + * a composed handler to another. + */ +export interface HandlerExecutionContext { + /** + * A logger that may be invoked by any handler during execution of an + * operation. + */ + logger?: Logger; + /** + * Name of the service the operation is being sent to. + */ + clientName?: string; + /** + * Name of the operation being executed. + */ + commandName?: string; + /** + * Additional user agent that is inferred by middleware. It can be used to save + * the internal user agent sections without overriding the `customUserAgent` + * config in clients. + */ + userAgent?: UserAgent; + /** + * Resolved by the endpointMiddleware function of `@smithy/middleware-endpoint` + * in the serialization stage. + */ + endpointV2?: EndpointV2; + /** + * Set at the same time as endpointV2. + */ + authSchemes?: AuthScheme[]; + /** + * The current auth configuration that has been set by any auth middleware and + * that prevents it from being set more than once. + */ + currentAuthConfig?: HttpAuthDefinition; + /** + * @deprecated do not extend this field, it is a carryover from AWS SDKs. + * Used by DynamoDbDocumentClient. + */ + dynamoDbDocumentClientOptions?: Partial<{ + overrideInputFilterSensitiveLog(...args: any[]): string | void; + overrideOutputFilterSensitiveLog(...args: any[]): string | void; + }>; + /** + * @internal + * Context for Smithy properties. + */ + [SMITHY_CONTEXT_KEY]?: { + service?: string; + operation?: string; + commandInstance?: Command<any, any, any, any, any>; + selectedHttpAuthScheme?: SelectedHttpAuthScheme; + features?: SmithyFeatures; + /** + * @deprecated + * Do not assign arbitrary members to the Smithy Context, + * fields should be explicitly declared here to avoid collisions. + */ + [key: string]: unknown; + }; + /** + * @deprecated + * Do not assign arbitrary members to the context, since + * they can interfere with existing functionality. + * + * Additional members should instead be declared on the SMITHY_CONTEXT_KEY + * or other reserved keys. + */ + [key: string]: any; +} +/** + * @public + */ +export interface Pluggable<Input extends object, Output extends object> { + /** + * A function that mutates the passed-in middleware stack.
Functions implementing + * this interface can add, remove, or modify the existing middleware of clients + * or commands. + */ + applyToStack: (stack: MiddlewareStack<Input, Output>) => void; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/pagination.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/pagination.d.ts new file mode 100644 index 00000000..c9d1c927 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/pagination.d.ts @@ -0,0 +1,33 @@ +import { Client } from "./client"; +import { Command } from "./command"; +/** + * @public + * + * Expected type definition of a paginator. + */ +export type Paginator<T> = AsyncGenerator<T, T, unknown>; +/** + * @public + * + * Expected paginator configuration passed to an operation. Services will extend + * this interface definition and may type the client further. + */ +export interface PaginationConfiguration { + client: Client<any, any, any>; + pageSize?: number; + startingToken?: any; + /** + * For some APIs, such as CloudWatchLogs events, the next page token will always + * be present. + * + * When true, this config field will have the paginator stop when the token doesn't change + * instead of when it is not present. + */ + stopOnSameToken?: boolean; + /** + * @param command - reference to the instantiated command. This callback is executed + * prior to sending the command with the paginator's client. + * @returns the original command or a replacement, defaulting to the original command object. + */ + withCommand?: (command: Command<any, any, any, any, any>) => typeof command | undefined; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/profile.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/profile.d.ts new file mode 100644 index 00000000..1b3dba79 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/profile.d.ts @@ -0,0 +1,30 @@ +/** + * @public + */ +export declare enum IniSectionType { + PROFILE = "profile", + SSO_SESSION = "sso-session", + SERVICES = "services" +} +/** + * @public + */ +export type IniSection = Record<string, string | undefined>; +/** + * @public + * + * @deprecated Please use {@link IniSection} + */ +export interface Profile extends IniSection { +} +/** + * @public + */ +export type ParsedIniData = Record<string, IniSection>; +/** + * @public + */ +export interface SharedConfigFiles { + credentialsFile: ParsedIniData; + configFile: ParsedIniData; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/response.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/response.d.ts new file mode 100644 index 00000000..3d8a45a2 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/response.d.ts @@ -0,0 +1,40 @@ +/** + * @public + */ +export interface ResponseMetadata { + /** + * The status code of the last HTTP response received for this operation. + */ + httpStatusCode?: number; + /** + * A unique identifier for the last request sent for this operation. Often + * requested by AWS service teams to aid in debugging. + */ + requestId?: string; + /** + * A secondary identifier for the last request sent. Used for debugging. + */ + extendedRequestId?: string; + /** + * A tertiary identifier for the last request sent. Used for debugging. + */ + cfId?: string; + /** + * The number of times this operation was attempted. + */ + attempts?: number; + /** + * The total amount of time (in milliseconds) that was spent waiting between + * retry attempts. + */ + totalRetryDelay?: number; +} +/** + * @public + */ +export interface MetadataBearer { + /** + * Metadata pertaining to this request.
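A toy illustration of the `Paginator` shape (real paginators are code-generated per operation and drive a client internally; the page contents here are invented):

```ts
import type { Paginator } from "@smithy/types";

// Yields two pages, then returns a final page, matching AsyncGenerator<T, T, unknown>.
async function* paginateNumbers(): Paginator<number[]> {
  yield [1, 2];
  yield [3, 4];
  return [5];
}

const consume = async () => {
  for await (const page of paginateNumbers()) {
    console.log(page);
  }
};
```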
+ */ + $metadata: ResponseMetadata; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/retry.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/retry.d.ts new file mode 100644 index 00000000..8436c9a7 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/retry.d.ts @@ -0,0 +1,133 @@ +import { SdkError } from "./shapes"; +/** + * @public + */ +export type RetryErrorType = +/** + * This is a connection level error such as a socket timeout, socket connect + * error, tls negotiation timeout etc... + * Typically these should never be applied for non-idempotent request types + * since in this scenario, it's impossible to know whether the operation had + * a side effect on the server. + */ +"TRANSIENT" +/** + * This is an error where the server explicitly told the client to back off, + * such as a 429 or 503 Http error. + */ + | "THROTTLING" +/** + * This is a server error that isn't explicitly throttling but is considered + * by the client to be something that should be retried. + */ + | "SERVER_ERROR" +/** + * Doesn't count against any budgets. This could be something like a 401 + * challenge in Http. + */ + | "CLIENT_ERROR"; +/** + * @public + */ +export interface RetryErrorInfo { + /** + * The error thrown during the initial request, if available. + */ + error?: SdkError; + errorType: RetryErrorType; + /** + * Protocol hint. This could come from Http's 'retry-after' header or + * something from MQTT or any other protocol that has the ability to convey + * retry info from a peer. + * + * The Date after which a retry should be attempted. + */ + retryAfterHint?: Date; +} +/** + * @public + */ +export interface RetryBackoffStrategy { + /** + * @returns the number of milliseconds to wait before retrying an action. + */ + computeNextBackoffDelay(retryAttempt: number): number; +} +/** + * @public + */ +export interface StandardRetryBackoffStrategy extends RetryBackoffStrategy { + /** + * Sets the delayBase used to compute backoff delays. + * @param delayBase - + */ + setDelayBase(delayBase: number): void; +} +/** + * @public + */ +export interface RetryStrategyOptions { + backoffStrategy: RetryBackoffStrategy; + maxRetriesBase: number; +} +/** + * @public + */ +export interface RetryToken { + /** + * @returns the current count of retry. + */ + getRetryCount(): number; + /** + * @returns the number of milliseconds to wait before retrying an action. + */ + getRetryDelay(): number; +} +/** + * @public + */ +export interface StandardRetryToken extends RetryToken { + /** + * @returns the cost of the last retry attempt. + */ + getRetryCost(): number | undefined; +} +/** + * @public + */ +export interface RetryStrategyV2 { + /** + * Called before any retries (for the first call to the operation). It either + * returns a retry token or an error upon the failure to acquire a token prior. + * + * tokenScope is arbitrary and out of scope for this component. However, + * adding it here offers us a lot of future flexibility for outage detection. + * For example, it could be "us-east-1" on a shared retry strategy, or + * "us-west-2-c:dynamodb". + */ + acquireInitialRetryToken(retryTokenScope: string): Promise; + /** + * After a failed operation call, this function is invoked to refresh the + * retryToken returned by acquireInitialRetryToken(). This function can + * either choose to allow another retry and send a new or updated token, + * or reject the retry attempt and report the error either in an exception + * or returning an error. 
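As a sketch, a `StandardRetryBackoffStrategy` with exponential growth and an adjustable base might look like this (the cap and base values are arbitrary, and the factory name is invented):

```ts
import type { StandardRetryBackoffStrategy } from "@smithy/types";

const makeExponentialBackoff = (initialDelayBase = 100): StandardRetryBackoffStrategy => {
  let delayBase = initialDelayBase;
  return {
    setDelayBase(newDelayBase: number): void {
      delayBase = newDelayBase;
    },
    // Exponential growth per attempt, capped at 20 seconds.
    computeNextBackoffDelay(retryAttempt: number): number {
      return Math.min(20_000, delayBase * 2 ** retryAttempt);
    },
  };
};
```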
+ */ + refreshRetryTokenForRetry(tokenToRenew: RetryToken, errorInfo: RetryErrorInfo): Promise; + /** + * Upon successful completion of the operation, this function is called + * to record that the operation was successful. + */ + recordSuccess(token: RetryToken): void; +} +/** + * @public + */ +export type ExponentialBackoffJitterType = "DEFAULT" | "NONE" | "FULL" | "DECORRELATED"; +/** + * @public + */ +export interface ExponentialBackoffStrategyOptions { + jitterType: ExponentialBackoffJitterType; + backoffScaleValue?: number; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/serde.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/serde.d.ts new file mode 100644 index 00000000..d2d7ea97 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/serde.d.ts @@ -0,0 +1,112 @@ +import { Endpoint } from "./http"; +import { RequestHandler } from "./transfer"; +import { Decoder, Encoder, Provider } from "./util"; +/** + * @public + * + * Interface for object requires an Endpoint set. + */ +export interface EndpointBearer { + endpoint: Provider; +} +/** + * @public + */ +export interface StreamCollector { + /** + * A function that converts a stream into an array of bytes. + * + * @param stream - The low-level native stream from browser or Nodejs runtime + */ + (stream: any): Promise; +} +/** + * @public + * + * Request and Response serde util functions and settings for AWS services + */ +export interface SerdeContext extends SerdeFunctions, EndpointBearer { + requestHandler: RequestHandler; + disableHostPrefix: boolean; +} +/** + * @public + * + * Serde functions from the client config. + */ +export interface SerdeFunctions { + base64Encoder: Encoder; + base64Decoder: Decoder; + utf8Encoder: Encoder; + utf8Decoder: Decoder; + streamCollector: StreamCollector; +} +/** + * @public + */ +export interface RequestSerializer { + /** + * Converts the provided `input` into a request object + * + * @param input - The user input to serialize. + * + * @param context - Context containing runtime-specific util functions. + */ + (input: any, context: Context): Promise; +} +/** + * @public + */ +export interface ResponseDeserializer { + /** + * Converts the output of an operation into JavaScript types. + * + * @param output - The HTTP response received from the service + * + * @param context - context containing runtime-specific util functions. + */ + (output: ResponseType, context: Context): Promise; +} +/** + * The interface contains mix-in utility functions to transfer the runtime-specific + * stream implementation to specified format. Each stream can ONLY be transformed + * once. + * @public + */ +export interface SdkStreamMixin { + transformToByteArray: () => Promise; + transformToString: (encoding?: string) => Promise; + transformToWebStream: () => ReadableStream; +} +/** + * @public + * + * The type describing a runtime-specific stream implementation with mix-in + * utility functions. + */ +export type SdkStream = BaseStream & SdkStreamMixin; +/** + * @public + * + * Indicates that the member of type T with + * key StreamKey have been extended + * with the SdkStreamMixin helper methods. + */ +export type WithSdkStreamMixin = { + [key in keyof T]: key extends StreamKey ? 
SdkStream : T[key]; +}; +/** + * Interface for internal function to inject stream utility functions + * implementation + * + * @internal + */ +export interface SdkStreamMixinInjector { + (stream: unknown): SdkStreamMixin; +} +/** + * @internal + */ +export interface SdkStreamSerdeContext { + sdkStreamMixin: SdkStreamMixinInjector; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/shapes.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/shapes.d.ts new file mode 100644 index 00000000..a81cbf1b --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/shapes.d.ts @@ -0,0 +1,82 @@ +import { HttpResponse } from "./http"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * A document type represents an untyped JSON-like value. + * + * Not all protocols support document types, and the serialization format of a + * document type is protocol specific. All JSON protocols SHOULD support + * document types and they SHOULD serialize document types inline as normal + * JSON values. + */ +export type DocumentType = null | boolean | number | string | DocumentType[] | { + [prop: string]: DocumentType; +}; +/** + * @public + * + * A structure shape with the error trait. + * https://smithy.io/2.0/spec/behavior-traits.html#smithy-api-retryable-trait + */ +export interface RetryableTrait { + /** + * Indicates that the error is a retryable throttling error. + */ + readonly throttling?: boolean; +} +/** + * @public + * + * Type that is implemented by all Smithy shapes marked with the + * error trait. + * @deprecated + */ +export interface SmithyException { + /** + * The shape ID name of the exception. + */ + readonly name: string; + /** + * Whether the client or server are at fault. + */ + readonly $fault: "client" | "server"; + /** + * The service that encountered the exception. + */ + readonly $service?: string; + /** + * Indicates that an error MAY be retried by the client. + */ + readonly $retryable?: RetryableTrait; + /** + * Reference to low-level HTTP response object. + */ + readonly $response?: HttpResponse; +} +/** + * @public + * + * @deprecated See {@link https://aws.amazon.com/blogs/developer/service-error-handling-modular-aws-sdk-js/} + * + * This type should not be used in your application. + * Users of the AWS SDK for JavaScript v3 service clients should prefer to + * use the specific Exception classes corresponding to each operation. + * These can be found as code in the deserializer for the operation's Command class, + * or as declarations in the service model file in codegen/sdk-codegen/aws-models. + * + * If no exceptions are enumerated by a particular Command operation, + * the base exception for the service should be used. Each client exports + * a base ServiceException prefixed with the service name. + */ +export type SdkError = Error & Partial & Partial & { + $metadata?: Partial["$metadata"] & { + /** + * If present, will have value of true and indicates that the error resulted in a + * correction of the clock skew, a.k.a. config.systemClockOffset. + * This is specific to AWS SDK and sigv4. 
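In practice, code catching an `SdkError` narrows it by probing the optional members; a minimal sketch:

```ts
import type { SdkError } from "@smithy/types";

// Classify an error thrown by a client send() call; the helper name is illustrative.
const describeSdkError = (e: unknown): string => {
  const err = e as SdkError;
  if (err.$metadata?.httpStatusCode !== undefined) {
    return `HTTP ${err.$metadata.httpStatusCode} (${err.$fault ?? "unknown"} fault): ${err.name}`;
  }
  return `non-service error: ${err.message}`;
};
```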
+ readonly clockSkewCorrected?: true; + }; + cause?: Error; +}; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/signature.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/signature.d.ts new file mode 100644 index 00000000..bbaecde5 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/signature.d.ts @@ -0,0 +1,155 @@ +import { Message } from "./eventStream"; +import { HttpRequest } from "./http"; +/** + * @public + * + * A `Date` object, a unix (epoch) timestamp in seconds, or a string that can be + * understood by the JavaScript `Date` constructor. + */ +export type DateInput = number | string | Date; +/** + * @public + */ +export interface SigningArguments { + /** + * The date and time to be used as signature metadata. This value should be + * a Date object, a unix (epoch) timestamp, or a string that can be + * understood by the JavaScript `Date` constructor. If not supplied, the + * value returned by `new Date()` will be used. + */ + signingDate?: DateInput; + /** + * The service signing name. It will override the service name of the signer + * in the current invocation. + */ + signingService?: string; + /** + * The region name to sign the request. It will override the signing region of the + * signer in the current invocation. + */ + signingRegion?: string; +} +/** + * @public + */ +export interface RequestSigningArguments extends SigningArguments { + /** + * A set of strings whose members represent headers that cannot be signed. + * All headers in the provided request will have their names converted to + * lower case and then checked for existence in the unsignableHeaders set. + */ + unsignableHeaders?: Set<string>; + /** + * A set of strings whose members represent headers that should be signed. + * Any values passed here will override those provided via unsignableHeaders, + * allowing them to be signed. + * + * All headers in the provided request will have their names converted to + * lower case before signing. + */ + signableHeaders?: Set<string>; +} +/** + * @public + */ +export interface RequestPresigningArguments extends RequestSigningArguments { + /** + * The number of seconds before the presigned URL expires. + */ + expiresIn?: number; + /** + * A set of strings representing headers that should not be hoisted + * to the presigned request's query string. If not supplied, the presigner + * moves all the AWS-specific headers (starting with `x-amz-`) to the request + * query string. If supplied, these headers remain in the presigned request's + * header. + * All headers in the provided request will have their names converted to + * lower case and then checked for existence in the unhoistableHeaders set. + */ + unhoistableHeaders?: Set<string>; + /** + * This overrides any headers with the same name(s) set by unhoistableHeaders. + * These headers will be hoisted into the query string and signed. + */ + hoistableHeaders?: Set<string>; +} +/** + * @public + */ +export interface EventSigningArguments extends SigningArguments { + priorSignature: string; +} +/** + * @public + */ +export interface RequestPresigner { + /** + * Signs a request for future use. + * + * The request will be valid until either the provided `expiration` time has + * passed or the underlying credentials have expired. + * + * @param requestToSign - The request that should be signed. + * @param options - Additional signing options.
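Usage sketch (the presigner instance is assumed to come from an implementation such as an S3 request presigner; the header name is invented):

```ts
import type { HttpRequest, RequestPresigner } from "@smithy/types";

declare const presigner: RequestPresigner; // implementation assumed
declare const request: HttpRequest;

// Presign for 15 minutes, keeping one custom header out of the query string.
const presign = async () =>
  presigner.presign(request, {
    expiresIn: 900,
    unhoistableHeaders: new Set(["x-custom-header"]),
  });
```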
+ */ + presign(requestToSign: HttpRequest, options?: RequestPresigningArguments): Promise<HttpRequest>; +} +/** + * @public + * + * An object that signs request objects with AWS credentials using one of the + * AWS authentication protocols. + */ +export interface RequestSigner { + /** + * Sign the provided request for immediate dispatch. + */ + sign(requestToSign: HttpRequest, options?: RequestSigningArguments): Promise<HttpRequest>; +} +/** + * @public + */ +export interface StringSigner { + /** + * Sign the provided `stringToSign` for use outside of the context of + * request signing. Typical uses include signed policy generation. + */ + sign(stringToSign: string, options?: SigningArguments): Promise<string>; +} +/** + * @public + */ +export interface FormattedEvent { + headers: Uint8Array; + payload: Uint8Array; +} +/** + * @public + */ +export interface EventSigner { + /** + * Sign the individual event of the event stream. + */ + sign(event: FormattedEvent, options: EventSigningArguments): Promise<string>; +} +/** + * @public + */ +export interface SignableMessage { + message: Message; + priorSignature: string; +} +/** + * @public + */ +export interface SignedMessage { + message: Message; + signature: string; +} +/** + * @public + */ +export interface MessageSigner { + signMessage(message: SignableMessage, args: SigningArguments): Promise<SignedMessage>; + sign(event: SignableMessage, options: SigningArguments): Promise<SignedMessage>; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/stream.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/stream.d.ts new file mode 100644 index 00000000..1e2b85d7 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/stream.d.ts @@ -0,0 +1,22 @@ +import { ChecksumConstructor } from "./checksum"; +import { HashConstructor, StreamHasher } from "./crypto"; +import { BodyLengthCalculator, Encoder } from "./util"; +/** + * @public + */ +export interface GetAwsChunkedEncodingStreamOptions { + base64Encoder?: Encoder; + bodyLengthChecker: BodyLengthCalculator; + checksumAlgorithmFn?: ChecksumConstructor | HashConstructor; + checksumLocationName?: string; + streamHasher?: StreamHasher; +} +/** + * @public + * + * A function that returns a Readable Stream which follows the aws-chunked encoding. + * It optionally adds a checksum if options are provided. + */ +export interface GetAwsChunkedEncodingStream<StreamType = any> { + (readableStream: StreamType, options: GetAwsChunkedEncodingStreamOptions): StreamType; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-common-types.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-common-types.d.ts new file mode 100644 index 00000000..27088db2 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-common-types.d.ts @@ -0,0 +1,33 @@ +/// <reference types="node" /> +import { Readable } from "stream"; +import { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * This is the union representing the modeled blob type with streaming trait + * in a generic format that does not relate to HTTP input or output payloads. + * + * Note: the non-streaming blob type is represented by Uint8Array, but because + * the streaming blob type is always in the request/response payload, it has + * historically been handled with different types.
+ * + * @see https://smithy.io/2.0/spec/simple-types.html#blob + * + * For compatibility with its historical representation, it must contain at least + * Readable (Node.js), Blob (browser), and ReadableStream (browser). + * + * @see StreamingPayloadInputTypes for FAQ about mixing types from multiple environments. + */ +export type StreamingBlobTypes = NodeJsRuntimeStreamingBlobTypes | BrowserRuntimeStreamingBlobTypes; +/** + * @public + * + * Node.js streaming blob type. + */ +export type NodeJsRuntimeStreamingBlobTypes = Readable; +/** + * @public + * + * Browser streaming blob types. + */ +export type BrowserRuntimeStreamingBlobTypes = ReadableStreamOptionalType | BlobOptionalType; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-input-types.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-input-types.d.ts new file mode 100644 index 00000000..1a86dea8 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-input-types.d.ts @@ -0,0 +1,61 @@ +/// <reference types="node" /> +import { Readable } from "stream"; +import { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * This union represents a superset of the compatible types you + * can use for streaming payload inputs. + * + * FAQ: + * Why does the type union mix mutually exclusive runtime types, namely + * Node.js and browser types? + * + * There are several reasons: + * 1. For backwards compatibility. + * 2. As a convenient compromise solution so that users in either environment may use the types + * without customization. + * 3. The SDK does not have static type information about the exact implementation + * of the HTTP RequestHandler being used in your client(s) (e.g. fetch, XHR, node:http, or node:http2), + * given that it is chosen at runtime. There are multiple possible request handlers + * in both the Node.js and browser runtime environments. + * + * Rather than restricting the type to a known common format (Uint8Array, for example) + * which doesn't include a universal streaming format in the currently supported Node.js versions, + * the type declaration is widened to multiple possible formats. + * It is up to the user to ultimately select a format compatible with the + * runtime and HTTP handler implementation they are using. + * + * Usage: + * The typical solution we expect users to have is to manually narrow the + * type when needed, picking the appropriate one out of the union according to the + * runtime environment and specific request handler. + * There are also the type utilities "NodeJsClient", "BrowserClient", and more + * exported from this package. These can be applied at the client level + * to pre-narrow these streaming payload blobs. For usage see the readme.md + * in the root of the \@smithy/types NPM package. + */ +export type StreamingBlobPayloadInputTypes = NodeJsRuntimeStreamingBlobPayloadInputTypes | BrowserRuntimeStreamingBlobPayloadInputTypes; +/** + * @public + * + * Streaming payload input types in the Node.js environment. + * These are derived from the types compatible with the request body used by node:http. + * + * Note: not all types are signable by the standard SignatureV4 signer when + * used as the request body. For example, in Node.js a Readable stream + * is not signable by the default signer.
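To make the input union above concrete, here is a small sketch showing several of its members. It assumes `StreamingBlobPayloadInputTypes` is imported from `@smithy/types` as declared above; the values themselves are placeholders.

```ts
import { Readable } from "stream";
import type { StreamingBlobPayloadInputTypes } from "@smithy/types";

// Each assignment is one member of the input union; pick whichever
// matches your runtime and request handler.
const asString: StreamingBlobPayloadInputTypes = "hello";
const asBytes: StreamingBlobPayloadInputTypes = new Uint8Array([104, 101, 108, 108, 111]);
const asNodeStream: StreamingBlobPayloadInputTypes = Readable.from(["hel", "lo"]);
```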
+ * They are included in the union because it may be intended in some cases, + * but the expected types are primarily string, Uint8Array, and Buffer. + * + * Additional details may be found in the internal + * function "getPayloadHash" in the SignatureV4 module. + */ +export type NodeJsRuntimeStreamingBlobPayloadInputTypes = string | Uint8Array | Buffer | Readable; +/** + * @public + * + * Streaming payload input types in the browser environment. + * These are derived from the types compatible with fetch's Request.body. + */ +export type BrowserRuntimeStreamingBlobPayloadInputTypes = string | Uint8Array | ReadableStreamOptionalType | BlobOptionalType; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-output-types.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-output-types.d.ts new file mode 100644 index 00000000..e344a46a --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-output-types.d.ts @@ -0,0 +1,52 @@ +/// <reference types="node" /> +import { IncomingMessage } from "http"; +import { Readable } from "stream"; +import { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +import { SdkStream } from "../serde"; +/** + * @public + * + * This union represents a superset of the types you may receive + * in streaming payload outputs. + * + * @see StreamingPayloadInputTypes for FAQ about mixing types from multiple environments. + * + * To highlight the upstream docs about the SdkStream mixin: + * + * The interface contains mix-in (via Object.assign) methods to transform the runtime-specific + * stream implementation to the specified format. Each stream can ONLY be transformed + * once. + * + * The available methods are described on the SdkStream type via SdkStreamMixin. + */ +export type StreamingBlobPayloadOutputTypes = NodeJsRuntimeStreamingBlobPayloadOutputTypes | BrowserRuntimeStreamingBlobPayloadOutputTypes; +/** + * @public + * + * Streaming payload output types in the Node.js environment. + * + * This is by default the IncomingMessage type from node:http responses when + * using the default node-http-handler in Node.js environments. + * + * It can be other Readable types like node:http2's ClientHttp2Stream, + * such as when using the node-http2-handler. + * + * The SdkStreamMixin adds methods on this type to help transform (collect) it to + * other formats. + */ +export type NodeJsRuntimeStreamingBlobPayloadOutputTypes = SdkStream<IncomingMessage | Readable>; +/** + * @public + * + * Streaming payload output types in the browser environment. + * + * This is by default fetch's Response.body type (ReadableStream) when using + * the default fetch-http-handler in browser-like environments. + * + * It may be a Blob, such as when using the XMLHttpRequest handler + * and receiving an arraybuffer response body. + * + * The SdkStreamMixin adds methods on this type to help transform (collect) it to + * other formats. + */ +export type BrowserRuntimeStreamingBlobPayloadOutputTypes = SdkStream<ReadableStreamOptionalType | BlobOptionalType>; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/transfer.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/transfer.d.ts new file mode 100644 index 00000000..f37ddb7c --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/transfer.d.ts @@ -0,0 +1,41 @@ +/** + * @public + */ +export type RequestHandlerOutput<ResponseType> = { + response: ResponseType; +}; +/** + * @public + */ +export interface RequestHandler<RequestType, ResponseType, HandlerOptions = {}> { + /** + * metadata contains information about a handler.
For example, + * 'h2' means this handler handles HTTP/2 requests, + * whereas 'h1' means it handles HTTP/1.1 requests. + */ + metadata?: RequestHandlerMetadata; + destroy?: () => void; + handle: (request: RequestType, handlerOptions?: HandlerOptions) => Promise<RequestHandlerOutput<ResponseType>>; +} +/** + * @public + */ +export interface RequestHandlerMetadata { + handlerProtocol: RequestHandlerProtocol | string; +} +/** + * @public + * Values from ALPN Protocol IDs. + * @see https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids + */ +export declare enum RequestHandlerProtocol { + HTTP_0_9 = "http/0.9", + HTTP_1_0 = "http/1.0", + TDS_8_0 = "tds/8.0" +} +/** + * @public + */ +export interface RequestContext { + destination: URL; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/transform/client-method-transforms.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/transform/client-method-transforms.d.ts new file mode 100644 index 00000000..f1aecf39 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/transform/client-method-transforms.d.ts @@ -0,0 +1,26 @@ +import { CommandIO } from "../command"; +import { MetadataBearer } from "../response"; +import { StreamingBlobPayloadOutputTypes } from "../streaming-payload/streaming-blob-payload-output-types"; +import { Transform } from "./type-transform"; +/** + * @internal + * + * Narrowed version of InvokeFunction used in Client::send. + */ +export interface NarrowedInvokeFunction { + (command: CommandIO, options?: HttpHandlerOptions): Promise>; + (command: CommandIO, cb: (err: unknown, data?: Transform) => void): void; + (command: CommandIO, options: HttpHandlerOptions, cb: (err: unknown, data?: Transform) => void): void; + (command: CommandIO, options?: HttpHandlerOptions, cb?: (err: unknown, data?: Transform) => void): Promise> | void; +} +/** + * @internal + * + * Narrowed version of InvokeMethod used in aggregated Client methods. + */ +export interface NarrowedInvokeMethod { + (input: InputType, options?: HttpHandlerOptions): Promise>; + (input: InputType, cb: (err: unknown, data?: Transform) => void): void; + (input: InputType, options: HttpHandlerOptions, cb: (err: unknown, data?: Transform) => void): void; + (input: InputType, options?: HttpHandlerOptions, cb?: (err: unknown, data?: OutputType) => void): Promise> | void; +} diff --git a/node_modules/@smithy/types/dist-types/ts3.4/transform/client-payload-blob-type-narrow.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/transform/client-payload-blob-type-narrow.d.ts new file mode 100644 index 00000000..e9516e23 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/transform/client-payload-blob-type-narrow.d.ts @@ -0,0 +1,82 @@ +/// <reference types="node" /> +import { IncomingMessage } from "http"; +import { ClientHttp2Stream } from "http2"; +import { InvokeMethod } from "../client"; +import { GetOutputType } from "../command"; +import { HttpHandlerOptions } from "../http"; +import { SdkStream } from "../serde"; +import { BrowserRuntimeStreamingBlobPayloadInputTypes, NodeJsRuntimeStreamingBlobPayloadInputTypes, StreamingBlobPayloadInputTypes } from "../streaming-payload/streaming-blob-payload-input-types"; +import { StreamingBlobPayloadOutputTypes } from "../streaming-payload/streaming-blob-payload-output-types"; +import { NarrowedInvokeMethod } from "./client-method-transforms"; +import { Transform } from "./type-transform"; +/** + * @public + * + * Creates a type with a given client type that narrows payload blob output + * types to SdkStream.
+ * + * This can be used for clients with the NodeHttpHandler requestHandler, + * the default in Node.js when not using HTTP2. + * + * Usage example: + * ```typescript + * const client = new YourClient({}) as NodeJsClient; + * ``` + */ +export type NodeJsClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * Variant of NodeJsClient for node:http2. + */ +export type NodeJsHttp2Client = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * Creates a type with a given client type that narrows payload blob output + * types to SdkStream. + * + * This can be used for clients with the FetchHttpHandler requestHandler, + * which is the default in browser environments. + * + * Usage example: + * ```typescript + * const client = new YourClient({}) as BrowserClient; + * ``` + */ +export type BrowserClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * Variant of BrowserClient for XMLHttpRequest. + */ +export type BrowserXhrClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * @deprecated use NarrowPayloadBlobTypes. + * + * Narrow a given Client's blob payload outputs to the given type T. + */ +export type NarrowPayloadBlobOutputType = { + [key in keyof ClientType]: [ + ClientType[key] + ] extends [ + InvokeMethod + ] ? NarrowedInvokeMethod : ClientType[key]; +} & { + send(command: Command, options?: any): Promise, StreamingBlobPayloadOutputTypes | undefined, T>>; +}; +/** + * @public + * + * Narrow a Client's blob payload input and output types to I and O. + */ +export type NarrowPayloadBlobTypes = { + [key in keyof ClientType]: [ + ClientType[key] + ] extends [ + InvokeMethod + ] ? NarrowedInvokeMethod, FunctionOutputTypes> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise, StreamingBlobPayloadOutputTypes | undefined, O>>; +}; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/transform/exact.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/transform/exact.d.ts new file mode 100644 index 00000000..3a812df3 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/transform/exact.d.ts @@ -0,0 +1,14 @@ +/** + * @internal + * + * Checks that A and B extend each other. + */ +export type Exact = [ + A +] extends [ + B +] ? ([ + B +] extends [ + A +] ? true : false) : false; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/transform/no-undefined.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/transform/no-undefined.d.ts new file mode 100644 index 00000000..6a7f6d85 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/transform/no-undefined.d.ts @@ -0,0 +1,88 @@ +import { InvokeMethod, InvokeMethodOptionalArgs } from "../client"; +import { GetOutputType } from "../command"; +import { DocumentType } from "../shapes"; +/** + * @public + * + * This type is intended as a type helper for generated clients. + * When initializing client, cast it to this type by passing + * the client constructor type as the type parameter. + * + * It will then recursively remove "undefined" as a union type from all + * input and output shapes' members. Note, this does not affect + * any member that is optional (?) such as outputs with no required members. + * + * @example + * ```ts + * const client = new Client({}) as AssertiveClient; + * ``` + */ +export type AssertiveClient = NarrowClientIOTypes; +/** + * @public + * + * This is similar to AssertiveClient but additionally changes all + * output types to (recursive) Required so as to bypass all output nullability guards. 
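As a sketch of how the narrowing helpers described above are applied, the example below assumes an `S3Client` generated by the AWS SDK (any Smithy-generated client works the same way); the bucket and key names are placeholders.

```ts
import { GetObjectCommand, S3Client } from "@aws-sdk/client-s3";
import type { AssertiveClient, NodeJsClient } from "@smithy/types";

// Pre-narrow streaming payload blobs to the Node.js output type.
const s3 = new S3Client({}) as NodeJsClient<S3Client>;

const { Body } = await s3.send(
  new GetObjectCommand({ Bucket: "example-bucket", Key: "example-key" })
);
// Body is no longer the cross-runtime union, so the SdkStreamMixin
// transform methods are usable without manual casts.
const text = await Body?.transformToString();

// AssertiveClient instead strips `undefined` from input/output unions.
const strict = new S3Client({}) as AssertiveClient<S3Client>;
```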
+ */ +export type UncheckedClient<ClientType extends object> = UncheckedClientOutputTypes<ClientType>; +/** + * @internal + * + * Excludes undefined recursively. + */ +export type NoUndefined<T> = T extends Function ? T : T extends DocumentType ? T : [ + T +] extends [ + object +] ? { + [key in keyof T]: NoUndefined<T[key]>; +} : Exclude<T, undefined>; +/** + * @internal + * + * Excludes undefined and optional recursively. + */ +export type RecursiveRequired<T> = T extends Function ? T : T extends DocumentType ? T : [ + T +] extends [ + object +] ? { + [key in keyof T]-?: RecursiveRequired<T[key]>; +} : Exclude<T, undefined>; +/** + * @internal + * + * Removes undefined from unions. + */ +type NarrowClientIOTypes<ClientType extends object> = { + [key in keyof ClientType]: [ + ClientType[key] + ] extends [ + InvokeMethodOptionalArgs<infer FunctionInputTypes, infer FunctionOutputTypes> + ] ? InvokeMethodOptionalArgs<NoUndefined<FunctionInputTypes>, NoUndefined<FunctionOutputTypes>> : [ + ClientType[key] + ] extends [ + InvokeMethod<infer FunctionInputTypes, infer FunctionOutputTypes> + ] ? InvokeMethod<NoUndefined<FunctionInputTypes>, NoUndefined<FunctionOutputTypes>> : ClientType[key]; +} & { + send<Command>(command: Command, options?: any): Promise<NoUndefined<GetOutputType<Command>>>; +}; +/** + * @internal + * + * Removes undefined from unions and adds yolo output types. + */ +type UncheckedClientOutputTypes<ClientType extends object> = { + [key in keyof ClientType]: [ + ClientType[key] + ] extends [ + InvokeMethodOptionalArgs<infer FunctionInputTypes, infer FunctionOutputTypes> + ] ? InvokeMethodOptionalArgs<NoUndefined<FunctionInputTypes>, RecursiveRequired<FunctionOutputTypes>> : [ + ClientType[key] + ] extends [ + InvokeMethod<infer FunctionInputTypes, infer FunctionOutputTypes> + ] ? InvokeMethod<NoUndefined<FunctionInputTypes>, RecursiveRequired<FunctionOutputTypes>> : ClientType[key]; +} & { + send<Command>(command: Command, options?: any): Promise<RecursiveRequired<NoUndefined<GetOutputType<Command>>>>; +}; +export {}; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/transform/type-transform.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/transform/type-transform.d.ts new file mode 100644 index 00000000..547303f7 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/transform/type-transform.d.ts @@ -0,0 +1,41 @@ +/** + * @public + * + * Transforms any members of the object T having type FromType + * to ToType. This applies only to exact type matches. + * + * This is for the case where FromType is a union and only those fields + * matching the same union should be transformed. + */ +export type Transform<T, FromType, ToType> = RecursiveTransformExact<T, FromType, ToType>; +/** + * @internal + * + * Returns ToType if T matches exactly with FromType. + */ +type TransformExact<T, FromType, ToType> = [ + T +] extends [ + FromType +] ? ([ + FromType +] extends [ + T +] ? ToType : T) : T; +/** + * @internal + * + * Applies TransformExact to members of an object recursively. + */ +type RecursiveTransformExact<T, FromType, ToType> = T extends Function ? T : T extends object ? { + [key in keyof T]: [ + T[key] + ] extends [ + FromType + ] ? [ + FromType + ] extends [ + T[key] + ] ? ToType : RecursiveTransformExact<T[key], FromType, ToType> : RecursiveTransformExact<T[key], FromType, ToType>; +} : TransformExact<T, FromType, ToType>; +export {}; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/uri.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/uri.d.ts new file mode 100644 index 00000000..4e7adb41 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/uri.d.ts @@ -0,0 +1,17 @@ +import { QueryParameterBag } from "./http"; +/** + * @internal + * + * Represents the component parts of a Uniform Resource Identifier used to + * construct the target location of a Request.
+ */ +export type URI = { + protocol: string; + hostname: string; + port?: number; + path: string; + query?: QueryParameterBag; + username?: string; + password?: string; + fragment?: string; +}; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/util.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/util.d.ts new file mode 100644 index 00000000..7c700af4 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/util.d.ts @@ -0,0 +1,192 @@ +import { Endpoint } from "./http"; +import { FinalizeHandler, FinalizeHandlerArguments, FinalizeHandlerOutput } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * A generic which checks if Type1 is exactly the same as Type2. + */ +export type Exact<Type1, Type2> = [ + Type1 +] extends [ + Type2 +] ? ([ + Type2 +] extends [ + Type1 +] ? true : false) : false; +/** + * @public + * + * A function that, given a Uint8Array of bytes, can produce a string + * representation thereof. The function may optionally attempt to + * convert other input types to Uint8Array before encoding. + * + * @example An encoder function that converts bytes to hexadecimal + * representation would return `'68656c6c6f'` when given + * `new Uint8Array([104, 101, 108, 108, 111])`. + */ +export interface Encoder { + /** + * Caution: the `any` type on the input is for backwards compatibility. + * Runtime support is limited to Uint8Array and string by default. + * + * You may choose to support more encoder input types if overriding the default + * implementations. + */ + (input: Uint8Array | string | any): string; +} +/** + * @public + * + * A function that, given a string, can derive the bytes represented by that + * string. + * + * @example A decoder function that converts hexadecimal strings to bytes would + * return `new Uint8Array([104, 101, 108, 108, 111])` when + * given the string `'68656c6c6f'`. + */ +export interface Decoder { + (input: string): Uint8Array; +} +/** + * @public + * + * A function that, when invoked, returns a promise that will be fulfilled with + * a value of type T. + * + * @example A function that reads credentials from shared SDK configuration + * files, assuming roles and collecting MFA tokens as necessary. + */ +export interface Provider<T> { + (): Promise<T>; +} +/** + * @public + * + * A tuple that represents an API name and optional version + * of a library built using the AWS SDK. + */ +export type UserAgentPair = [ + /*name*/ string, + /*version*/ string +]; +/** + * @public + * + * User agent data to be put into the request's user + * agent. + */ +export type UserAgent = UserAgentPair[]; +/** + * @public + * + * Parses a URL in string form into an Endpoint object. + */ +export interface UrlParser { + (url: string | URL): Endpoint; +} +/** + * @public + * + * A function that, when invoked, returns a promise that will be fulfilled with + * a value of type T. It memoizes the result from the previous invocation + * instead of calling the underlying resources every time. + * + * You can force the provider to refresh the memoized value by invoking the + * function with an optional parameter hash containing a `forceRefresh` boolean key + * set to `true`. + * + * @example A function that reads credentials from the IMDS service that could + * return expired credentials. The SDK will keep using the expired credentials + * until an unretryable service error requires a force refresh of the + * credentials.
+ */ +export interface MemoizedProvider<T> { + (options?: { + forceRefresh?: boolean; + }): Promise<T>; +} +/** + * @public + * + * A function that, given a request body, determines the + * length of the body. This is used to determine the Content-Length + * that should be sent with a request. + * + * @example A function that reads a file stream and calculates + * the size of the file. + */ +export interface BodyLengthCalculator { + (body: any): number | undefined; +} +/** + * @public + * + * Object containing regionalization information of + * AWS services. + */ +export interface RegionInfo { + hostname: string; + partition: string; + path?: string; + signingService?: string; + signingRegion?: string; +} +/** + * @public + * + * Options to pass when calling {@link RegionInfoProvider} + */ +export interface RegionInfoProviderOptions { + /** + * Enables IPv6/IPv4 dualstack endpoint. + * @defaultValue false + */ + useDualstackEndpoint: boolean; + /** + * Enables FIPS compatible endpoints. + * @defaultValue false + */ + useFipsEndpoint: boolean; +} +/** + * @public + * + * A function that returns a designated service's regionalization + * information for a given region. Each service client + * comes with its regionalization provider. It serves + * to provide the default values of related configurations. + */ +export interface RegionInfoProvider { + (region: string, options?: RegionInfoProviderOptions): Promise<RegionInfo | undefined>; +} +/** + * @public + * + * Interface that specifies the retry behavior. + */ +export interface RetryStrategy { + /** + * The retry mode describing how the retry strategy controls the traffic flow. + */ + mode?: string; + /** + * The retry behavior that will invoke the next handler and handle the retry accordingly. + * This function should also update the $metadata from the response accordingly. + * @see {@link ResponseMetadata} + */ + retry: <Input extends object, Output extends MetadataBearer>(next: FinalizeHandler<Input, Output>, args: FinalizeHandlerArguments<Input>) => Promise<FinalizeHandlerOutput<Output>>; +} +/** + * @public + * + * Indicates the parameter may be omitted if the parameter object T + * is equivalent to a Partial<T>, i.e. all properties optional. + */ +export type OptionalParameter<T> = Exact<Partial<T>, T> extends true ? [ +] | [ + T +] : [ + T +]; diff --git a/node_modules/@smithy/types/dist-types/ts3.4/waiter.d.ts b/node_modules/@smithy/types/dist-types/ts3.4/waiter.d.ts new file mode 100644 index 00000000..2cc2fff6 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/ts3.4/waiter.d.ts @@ -0,0 +1,35 @@ +import { AbortController as DeprecatedAbortController } from "./abort"; +/** + * @public + */ +export interface WaiterConfiguration<Client> { + /** + * Required service client. + */ + client: Client; + /** + * The amount of time in seconds a user is willing to wait for a waiter to complete. + */ + maxWaitTime: number; + /** + * @deprecated Use abortSignal + * Abort controller. Used for ending the waiter early. + */ + abortController?: AbortController | DeprecatedAbortController; + /** + * Abort Signal. Used for ending the waiter early. + */ + abortSignal?: AbortController["signal"] | DeprecatedAbortController["signal"]; + /** + * The minimum amount of time to delay between retries in seconds. This is the + * floor of the exponential backoff. This value defaults to the service default + * if not specified. This value MUST be less than or equal to maxDelay and greater than 0. + */ + minDelay?: number; + /** + * The maximum amount of time to delay between retries in seconds. This is the + * ceiling of the exponential backoff. This value defaults to the service default + * if not specified.
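To illustrate the `MemoizedProvider` contract above, here is a minimal sketch of wrapping a `Provider<T>` so it caches its result and honors `forceRefresh`. This is not the SDK's actual memoize implementation (which lives in a separate package); the wrapped provider here is a placeholder.

```ts
import type { MemoizedProvider, Provider } from "@smithy/types";

// Wrap a Provider<T> so the underlying call runs once and is reused,
// unless the caller passes { forceRefresh: true }.
function memoizeSketch<T>(provider: Provider<T>): MemoizedProvider<T> {
  let cached: Promise<T> | undefined;
  return (options) => {
    if (cached === undefined || options?.forceRefresh) {
      cached = provider();
    }
    return cached;
  };
}

// Usage: the second call reuses the first promise; the third forces a refresh.
const getValue = memoizeSketch(async () => Date.now());
await getValue();
await getValue();
await getValue({ forceRefresh: true });
```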
If specified, this value MUST be greater than or equal to 1. + */ + maxDelay?: number; +} diff --git a/node_modules/@smithy/types/dist-types/uri.d.ts b/node_modules/@smithy/types/dist-types/uri.d.ts new file mode 100644 index 00000000..d7b874c5 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/uri.d.ts @@ -0,0 +1,17 @@ +import { QueryParameterBag } from "./http"; +/** + * @internal + * + * Represents the component parts of a Uniform Resource Identifier used to + * construct the target location of a Request. + */ +export type URI = { + protocol: string; + hostname: string; + port?: number; + path: string; + query?: QueryParameterBag; + username?: string; + password?: string; + fragment?: string; +}; diff --git a/node_modules/@smithy/types/dist-types/util.d.ts b/node_modules/@smithy/types/dist-types/util.d.ts new file mode 100644 index 00000000..b15045ca --- /dev/null +++ b/node_modules/@smithy/types/dist-types/util.d.ts @@ -0,0 +1,176 @@ +import { Endpoint } from "./http"; +import { FinalizeHandler, FinalizeHandlerArguments, FinalizeHandlerOutput } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * A generic which checks if Type1 is exactly the same as Type2. + */ +export type Exact<Type1, Type2> = [Type1] extends [Type2] ? ([Type2] extends [Type1] ? true : false) : false; +/** + * @public + * + * A function that, given a Uint8Array of bytes, can produce a string + * representation thereof. The function may optionally attempt to + * convert other input types to Uint8Array before encoding. + * + * @example An encoder function that converts bytes to hexadecimal + * representation would return `'68656c6c6f'` when given + * `new Uint8Array([104, 101, 108, 108, 111])`. + */ +export interface Encoder { + /** + * Caution: the `any` type on the input is for backwards compatibility. + * Runtime support is limited to Uint8Array and string by default. + * + * You may choose to support more encoder input types if overriding the default + * implementations. + */ + (input: Uint8Array | string | any): string; +} +/** + * @public + * + * A function that, given a string, can derive the bytes represented by that + * string. + * + * @example A decoder function that converts hexadecimal strings to bytes would + * return `new Uint8Array([104, 101, 108, 108, 111])` when + * given the string `'68656c6c6f'`. + */ +export interface Decoder { + (input: string): Uint8Array; +} +/** + * @public + * + * A function that, when invoked, returns a promise that will be fulfilled with + * a value of type T. + * + * @example A function that reads credentials from shared SDK configuration + * files, assuming roles and collecting MFA tokens as necessary. + */ +export interface Provider<T> { + (): Promise<T>; +} +/** + * @public + * + * A tuple that represents an API name and optional version + * of a library built using the AWS SDK. + */ +export type UserAgentPair = [name: string, version?: string]; +/** + * @public + * + * User agent data to be put into the request's user + * agent. + */ +export type UserAgent = UserAgentPair[]; +/** + * @public + * + * Parses a URL in string form into an Endpoint object. + */ +export interface UrlParser { + (url: string | URL): Endpoint; +} +/** + * @public + * + * A function that, when invoked, returns a promise that will be fulfilled with + * a value of type T. It memoizes the result from the previous invocation + * instead of calling the underlying resources every time.
+ * + * You can force the provider to refresh the memoized value by invoking the + * function with an optional parameter hash containing a `forceRefresh` boolean key + * set to `true`. + * + * @example A function that reads credentials from the IMDS service that could + * return expired credentials. The SDK will keep using the expired credentials + * until an unretryable service error requires a force refresh of the + * credentials. + */ +export interface MemoizedProvider<T> { + (options?: { + forceRefresh?: boolean; + }): Promise<T>; +} +/** + * @public + * + * A function that, given a request body, determines the + * length of the body. This is used to determine the Content-Length + * that should be sent with a request. + * + * @example A function that reads a file stream and calculates + * the size of the file. + */ +export interface BodyLengthCalculator { + (body: any): number | undefined; +} +/** + * @public + * + * Object containing regionalization information of + * AWS services. + */ +export interface RegionInfo { + hostname: string; + partition: string; + path?: string; + signingService?: string; + signingRegion?: string; +} +/** + * @public + * + * Options to pass when calling {@link RegionInfoProvider} + */ +export interface RegionInfoProviderOptions { + /** + * Enables IPv6/IPv4 dualstack endpoint. + * @defaultValue false + */ + useDualstackEndpoint: boolean; + /** + * Enables FIPS compatible endpoints. + * @defaultValue false + */ + useFipsEndpoint: boolean; +} +/** + * @public + * + * A function that returns a designated service's regionalization + * information for a given region. Each service client + * comes with its regionalization provider. It serves + * to provide the default values of related configurations. + */ +export interface RegionInfoProvider { + (region: string, options?: RegionInfoProviderOptions): Promise<RegionInfo | undefined>; +} +/** + * @public + * + * Interface that specifies the retry behavior. + */ +export interface RetryStrategy { + /** + * The retry mode describing how the retry strategy controls the traffic flow. + */ + mode?: string; + /** + * The retry behavior that will invoke the next handler and handle the retry accordingly. + * This function should also update the $metadata from the response accordingly. + * @see {@link ResponseMetadata} + */ + retry: <Input extends object, Output extends MetadataBearer>(next: FinalizeHandler<Input, Output>, args: FinalizeHandlerArguments<Input>) => Promise<FinalizeHandlerOutput<Output>>; +} +/** + * @public + * + * Indicates the parameter may be omitted if the parameter object T + * is equivalent to a Partial<T>, i.e. all properties optional. + */ +export type OptionalParameter<T> = Exact<Partial<T>, T> extends true ? [] | [T] : [T]; diff --git a/node_modules/@smithy/types/dist-types/waiter.d.ts b/node_modules/@smithy/types/dist-types/waiter.d.ts new file mode 100644 index 00000000..59418322 --- /dev/null +++ b/node_modules/@smithy/types/dist-types/waiter.d.ts @@ -0,0 +1,35 @@ +import { AbortController as DeprecatedAbortController } from "./abort"; +/** + * @public + */ +export interface WaiterConfiguration<Client> { + /** + * Required service client. + */ + client: Client; + /** + * The amount of time in seconds a user is willing to wait for a waiter to complete. + */ + maxWaitTime: number; + /** + * @deprecated Use abortSignal + * Abort controller. Used for ending the waiter early. + */ + abortController?: AbortController | DeprecatedAbortController; + /** + * Abort Signal. Used for ending the waiter early. + */ + abortSignal?: AbortController["signal"] | DeprecatedAbortController["signal"]; + /** + * The minimum amount of time to delay between retries in seconds.
This is the + * floor of the exponential backoff. This value defaults to service default + * if not specified. This value MUST be less than or equal to maxDelay and greater than 0. + */ + minDelay?: number; + /** + * The maximum amount of time to delay between retries in seconds. This is the + * ceiling of the exponential backoff. This value defaults to service default + * if not specified. If specified, this value MUST be greater than or equal to 1. + */ + maxDelay?: number; +} diff --git a/node_modules/@smithy/types/package.json b/node_modules/@smithy/types/package.json new file mode 100644 index 00000000..87c5ad0f --- /dev/null +++ b/node_modules/@smithy/types/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/types", + "version": "4.2.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline types", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4 && node scripts/downlevel", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:tsc -p tsconfig.test.json", + "extract:docs": "api-extractor run --local" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS Smithy Team", + "email": "", + "url": "https://smithy.io" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<=4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/types", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/types" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/url-parser/LICENSE b/node_modules/@smithy/url-parser/LICENSE new file mode 100644 index 00000000..dd65ae06 --- /dev/null +++ b/node_modules/@smithy/url-parser/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/node_modules/@smithy/url-parser/README.md b/node_modules/@smithy/url-parser/README.md new file mode 100644 index 00000000..0d8d61ef --- /dev/null +++ b/node_modules/@smithy/url-parser/README.md @@ -0,0 +1,10 @@ +# @smithy/url-parser + +[![NPM version](https://img.shields.io/npm/v/@smithy/url-parser/latest.svg)](https://www.npmjs.com/package/@smithy/url-parser) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/url-parser.svg)](https://www.npmjs.com/package/@smithy/url-parser) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@smithy/url-parser/dist-cjs/index.js b/node_modules/@smithy/url-parser/dist-cjs/index.js new file mode 100644 index 00000000..ab81787c --- /dev/null +++ b/node_modules/@smithy/url-parser/dist-cjs/index.js @@ -0,0 +1,49 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + parseUrl: () => parseUrl +}); +module.exports = __toCommonJS(src_exports); +var import_querystring_parser = require("@smithy/querystring-parser"); +var parseUrl = /* @__PURE__ */ __name((url) => { + if (typeof url === "string") { + return parseUrl(new URL(url)); + } + const { hostname, pathname, port, protocol, search } = url; + let query; + if (search) { + query = (0, import_querystring_parser.parseQueryString)(search); + } + return { + hostname, + port: port ? parseInt(port) : void 0, + protocol, + path: pathname, + query + }; +}, "parseUrl"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + parseUrl +}); + diff --git a/node_modules/@smithy/url-parser/dist-es/index.js b/node_modules/@smithy/url-parser/dist-es/index.js new file mode 100644 index 00000000..811f8bf8 --- /dev/null +++ b/node_modules/@smithy/url-parser/dist-es/index.js @@ -0,0 +1,18 @@ +import { parseQueryString } from "@smithy/querystring-parser"; +export const parseUrl = (url) => { + if (typeof url === "string") { + return parseUrl(new URL(url)); + } + const { hostname, pathname, port, protocol, search } = url; + let query; + if (search) { + query = parseQueryString(search); + } + return { + hostname, + port: port ? 
parseInt(port) : undefined, + protocol, + path: pathname, + query, + }; +}; diff --git a/node_modules/@smithy/url-parser/dist-types/index.d.ts b/node_modules/@smithy/url-parser/dist-types/index.d.ts new file mode 100644 index 00000000..b0d91c9b --- /dev/null +++ b/node_modules/@smithy/url-parser/dist-types/index.d.ts @@ -0,0 +1,5 @@ +import { UrlParser } from "@smithy/types"; +/** + * @internal + */ +export declare const parseUrl: UrlParser; diff --git a/node_modules/@smithy/url-parser/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/url-parser/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..d6f0ec5f --- /dev/null +++ b/node_modules/@smithy/url-parser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +import { UrlParser } from "@smithy/types"; +/** + * @internal + */ +export declare const parseUrl: UrlParser; diff --git a/node_modules/@smithy/url-parser/package.json b/node_modules/@smithy/url-parser/package.json new file mode 100644 index 00000000..10aebb85 --- /dev/null +++ b/node_modules/@smithy/url-parser/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/url-parser", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline url-parser", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/querystring-parser": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/url-parser", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/url-parser" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/util-base64/LICENSE b/node_modules/@smithy/util-base64/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/util-base64/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
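The `parseUrl` implementation shown in the url-parser diff above is small enough to demonstrate directly. A usage sketch, mirroring that code (string inputs are wrapped in a `URL` and destructured into the `Endpoint` shape):

```ts
import { parseUrl } from "@smithy/url-parser";

const endpoint = parseUrl("https://example.com:8443/path?x=1&y=2");
// => { protocol: "https:", hostname: "example.com", port: 8443,
//      path: "/path", query: { x: "1", y: "2" } }
```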
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/node_modules/@smithy/util-base64/README.md b/node_modules/@smithy/util-base64/README.md new file mode 100644 index 00000000..c9b6c87f --- /dev/null +++ b/node_modules/@smithy/util-base64/README.md @@ -0,0 +1,4 @@ +# @smithy/util-base64 + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-base64/latest.svg)](https://www.npmjs.com/package/@smithy/util-base64) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-base64.svg)](https://www.npmjs.com/package/@smithy/util-base64) diff --git a/node_modules/@smithy/util-base64/dist-cjs/constants.browser.js b/node_modules/@smithy/util-base64/dist-cjs/constants.browser.js new file mode 100644 index 00000000..d35d09fd --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-cjs/constants.browser.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.maxLetterValue = exports.bitsPerByte = exports.bitsPerLetter = exports.alphabetByValue = exports.alphabetByEncoding = void 0; +const alphabetByEncoding = {}; +exports.alphabetByEncoding = alphabetByEncoding; +const alphabetByValue = new Array(64); +exports.alphabetByValue = alphabetByValue; +for (let i = 0, start = "A".charCodeAt(0), limit = "Z".charCodeAt(0); i + start <= limit; i++) { + const char = String.fromCharCode(i + start); + alphabetByEncoding[char] = i; + alphabetByValue[i] = char; +} +for (let i = 0, start = "a".charCodeAt(0), limit = "z".charCodeAt(0); i + start <= limit; i++) { + const char = String.fromCharCode(i + start); + const index = i + 26; + alphabetByEncoding[char] = index; + alphabetByValue[index] = char; +} +for (let i = 0; i < 10; i++) { + alphabetByEncoding[i.toString(10)] = i + 52; + const char = i.toString(10); + const index = i + 52; + alphabetByEncoding[char] = index; + alphabetByValue[index] = char; +} +alphabetByEncoding["+"] = 62; +alphabetByValue[62] = "+"; +alphabetByEncoding["/"] = 63; +alphabetByValue[63] = "/"; +const bitsPerLetter = 6; +exports.bitsPerLetter = bitsPerLetter; +const bitsPerByte = 8; +exports.bitsPerByte = bitsPerByte; +const maxLetterValue = 0b111111; +exports.maxLetterValue = maxLetterValue; diff --git a/node_modules/@smithy/util-base64/dist-cjs/fromBase64.browser.js b/node_modules/@smithy/util-base64/dist-cjs/fromBase64.browser.js new file mode 100644 index 00000000..a5baffd0 --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-cjs/fromBase64.browser.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromBase64 = void 0; +const constants_browser_1 = require("./constants.browser"); +const fromBase64 = (input) => { + let totalByteLength = (input.length / 4) * 3; + if (input.slice(-2) === "==") { + totalByteLength -= 2; + } + else if (input.slice(-1) === "=") { + totalByteLength--; + } + const out = new ArrayBuffer(totalByteLength); + const dataView = new DataView(out); + for (let i = 0; i < input.length; i += 4) { + let bits = 0; + let bitLength = 0; + for (let j = i, limit = i + 3; j <= limit; j++) { + if (input[j] !== "=") { + if (!(input[j] in constants_browser_1.alphabetByEncoding)) { + throw new TypeError(`Invalid character ${input[j]} in base64 string.`); + } + bits |= constants_browser_1.alphabetByEncoding[input[j]] << ((limit - j) * constants_browser_1.bitsPerLetter); + bitLength += constants_browser_1.bitsPerLetter; + } + else { + bits >>= constants_browser_1.bitsPerLetter; + } + } + const chunkOffset = (i / 4) * 3; + bits >>= bitLength % constants_browser_1.bitsPerByte; + 
const byteLength = Math.floor(bitLength / constants_browser_1.bitsPerByte); + for (let k = 0; k < byteLength; k++) { + const offset = (byteLength - k - 1) * constants_browser_1.bitsPerByte; + dataView.setUint8(chunkOffset + k, (bits & (255 << offset)) >> offset); + } + } + return new Uint8Array(out); +}; +exports.fromBase64 = fromBase64; diff --git a/node_modules/@smithy/util-base64/dist-cjs/fromBase64.js b/node_modules/@smithy/util-base64/dist-cjs/fromBase64.js new file mode 100644 index 00000000..b06a7b87 --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-cjs/fromBase64.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromBase64 = void 0; +const util_buffer_from_1 = require("@smithy/util-buffer-from"); +const BASE64_REGEX = /^[A-Za-z0-9+/]*={0,2}$/; +const fromBase64 = (input) => { + if ((input.length * 3) % 4 !== 0) { + throw new TypeError(`Incorrect padding on base64 string.`); + } + if (!BASE64_REGEX.exec(input)) { + throw new TypeError(`Invalid base64 string.`); + } + const buffer = (0, util_buffer_from_1.fromString)(input, "base64"); + return new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); +}; +exports.fromBase64 = fromBase64; diff --git a/node_modules/@smithy/util-base64/dist-cjs/index.js b/node_modules/@smithy/util-base64/dist-cjs/index.js new file mode 100644 index 00000000..02848d02 --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-cjs/index.js @@ -0,0 +1,27 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +module.exports = __toCommonJS(src_exports); +__reExport(src_exports, require("././fromBase64"), module.exports); +__reExport(src_exports, require("././toBase64"), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromBase64, + toBase64 +}); + diff --git a/node_modules/@smithy/util-base64/dist-cjs/toBase64.browser.js b/node_modules/@smithy/util-base64/dist-cjs/toBase64.browser.js new file mode 100644 index 00000000..e294f3fb --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-cjs/toBase64.browser.js @@ -0,0 +1,39 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toBase64 = void 0; +const util_utf8_1 = require("@smithy/util-utf8"); +const constants_browser_1 = require("./constants.browser"); +function toBase64(_input) { + let input; + if (typeof _input === "string") { + input = (0, util_utf8_1.fromUtf8)(_input); + } + else { + input = _input; + } + const isArrayLike = typeof input === "object" && typeof input.length === "number"; + const isUint8Array = typeof input === "object" && + typeof input.byteOffset === "number" && + typeof input.byteLength === "number"; + if (!isArrayLike && !isUint8Array) { 
+ throw new Error("@smithy/util-base64: toBase64 encoder function only accepts string | Uint8Array."); + } + let str = ""; + for (let i = 0; i < input.length; i += 3) { + let bits = 0; + let bitLength = 0; + for (let j = i, limit = Math.min(i + 3, input.length); j < limit; j++) { + bits |= input[j] << ((limit - j - 1) * constants_browser_1.bitsPerByte); + bitLength += constants_browser_1.bitsPerByte; + } + const bitClusterCount = Math.ceil(bitLength / constants_browser_1.bitsPerLetter); + bits <<= bitClusterCount * constants_browser_1.bitsPerLetter - bitLength; + for (let k = 1; k <= bitClusterCount; k++) { + const offset = (bitClusterCount - k) * constants_browser_1.bitsPerLetter; + str += constants_browser_1.alphabetByValue[(bits & (constants_browser_1.maxLetterValue << offset)) >> offset]; + } + str += "==".slice(0, 4 - bitClusterCount); + } + return str; +} +exports.toBase64 = toBase64; diff --git a/node_modules/@smithy/util-base64/dist-cjs/toBase64.js b/node_modules/@smithy/util-base64/dist-cjs/toBase64.js new file mode 100644 index 00000000..0590ce3f --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-cjs/toBase64.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toBase64 = void 0; +const util_buffer_from_1 = require("@smithy/util-buffer-from"); +const util_utf8_1 = require("@smithy/util-utf8"); +const toBase64 = (_input) => { + let input; + if (typeof _input === "string") { + input = (0, util_utf8_1.fromUtf8)(_input); + } + else { + input = _input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-base64: toBase64 encoder function only accepts string | Uint8Array."); + } + return (0, util_buffer_from_1.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("base64"); +}; +exports.toBase64 = toBase64; diff --git a/node_modules/@smithy/util-base64/dist-es/constants.browser.js b/node_modules/@smithy/util-base64/dist-es/constants.browser.js new file mode 100644 index 00000000..fd4df4dc --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-es/constants.browser.js @@ -0,0 +1,28 @@ +const alphabetByEncoding = {}; +const alphabetByValue = new Array(64); +for (let i = 0, start = "A".charCodeAt(0), limit = "Z".charCodeAt(0); i + start <= limit; i++) { + const char = String.fromCharCode(i + start); + alphabetByEncoding[char] = i; + alphabetByValue[i] = char; +} +for (let i = 0, start = "a".charCodeAt(0), limit = "z".charCodeAt(0); i + start <= limit; i++) { + const char = String.fromCharCode(i + start); + const index = i + 26; + alphabetByEncoding[char] = index; + alphabetByValue[index] = char; +} +for (let i = 0; i < 10; i++) { + alphabetByEncoding[i.toString(10)] = i + 52; + const char = i.toString(10); + const index = i + 52; + alphabetByEncoding[char] = index; + alphabetByValue[index] = char; +} +alphabetByEncoding["+"] = 62; +alphabetByValue[62] = "+"; +alphabetByEncoding["/"] = 63; +alphabetByValue[63] = "/"; +const bitsPerLetter = 6; +const bitsPerByte = 8; +const maxLetterValue = 0b111111; +export { alphabetByEncoding, alphabetByValue, bitsPerLetter, bitsPerByte, maxLetterValue }; diff --git a/node_modules/@smithy/util-base64/dist-es/fromBase64.browser.js b/node_modules/@smithy/util-base64/dist-es/fromBase64.browser.js new file mode 100644 index 00000000..c2c6a66d --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-es/fromBase64.browser.js @@ -0,0 +1,36 @@ +import { alphabetByEncoding, 
bitsPerByte, bitsPerLetter } from "./constants.browser"; +export const fromBase64 = (input) => { + let totalByteLength = (input.length / 4) * 3; + if (input.slice(-2) === "==") { + totalByteLength -= 2; + } + else if (input.slice(-1) === "=") { + totalByteLength--; + } + const out = new ArrayBuffer(totalByteLength); + const dataView = new DataView(out); + for (let i = 0; i < input.length; i += 4) { + let bits = 0; + let bitLength = 0; + for (let j = i, limit = i + 3; j <= limit; j++) { + if (input[j] !== "=") { + if (!(input[j] in alphabetByEncoding)) { + throw new TypeError(`Invalid character ${input[j]} in base64 string.`); + } + bits |= alphabetByEncoding[input[j]] << ((limit - j) * bitsPerLetter); + bitLength += bitsPerLetter; + } + else { + bits >>= bitsPerLetter; + } + } + const chunkOffset = (i / 4) * 3; + bits >>= bitLength % bitsPerByte; + const byteLength = Math.floor(bitLength / bitsPerByte); + for (let k = 0; k < byteLength; k++) { + const offset = (byteLength - k - 1) * bitsPerByte; + dataView.setUint8(chunkOffset + k, (bits & (255 << offset)) >> offset); + } + } + return new Uint8Array(out); +}; diff --git a/node_modules/@smithy/util-base64/dist-es/fromBase64.js b/node_modules/@smithy/util-base64/dist-es/fromBase64.js new file mode 100644 index 00000000..5197e93b --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-es/fromBase64.js @@ -0,0 +1,12 @@ +import { fromString } from "@smithy/util-buffer-from"; +const BASE64_REGEX = /^[A-Za-z0-9+/]*={0,2}$/; +export const fromBase64 = (input) => { + if ((input.length * 3) % 4 !== 0) { + throw new TypeError(`Incorrect padding on base64 string.`); + } + if (!BASE64_REGEX.exec(input)) { + throw new TypeError(`Invalid base64 string.`); + } + const buffer = fromString(input, "base64"); + return new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); +}; diff --git a/node_modules/@smithy/util-base64/dist-es/index.js b/node_modules/@smithy/util-base64/dist-es/index.js new file mode 100644 index 00000000..594bd435 --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./fromBase64"; +export * from "./toBase64"; diff --git a/node_modules/@smithy/util-base64/dist-es/toBase64.browser.js b/node_modules/@smithy/util-base64/dist-es/toBase64.browser.js new file mode 100644 index 00000000..2a03a9d0 --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-es/toBase64.browser.js @@ -0,0 +1,35 @@ +import { fromUtf8 } from "@smithy/util-utf8"; +import { alphabetByValue, bitsPerByte, bitsPerLetter, maxLetterValue } from "./constants.browser"; +export function toBase64(_input) { + let input; + if (typeof _input === "string") { + input = fromUtf8(_input); + } + else { + input = _input; + } + const isArrayLike = typeof input === "object" && typeof input.length === "number"; + const isUint8Array = typeof input === "object" && + typeof input.byteOffset === "number" && + typeof input.byteLength === "number"; + if (!isArrayLike && !isUint8Array) { + throw new Error("@smithy/util-base64: toBase64 encoder function only accepts string | Uint8Array."); + } + let str = ""; + for (let i = 0; i < input.length; i += 3) { + let bits = 0; + let bitLength = 0; + for (let j = i, limit = Math.min(i + 3, input.length); j < limit; j++) { + bits |= input[j] << ((limit - j - 1) * bitsPerByte); + bitLength += bitsPerByte; + } + const bitClusterCount = Math.ceil(bitLength / bitsPerLetter); + bits <<= bitClusterCount * bitsPerLetter - bitLength; + for (let k = 1; k <= bitClusterCount; k++) { + const offset = 
(bitClusterCount - k) * bitsPerLetter; + str += alphabetByValue[(bits & (maxLetterValue << offset)) >> offset]; + } + str += "==".slice(0, 4 - bitClusterCount); + } + return str; +} diff --git a/node_modules/@smithy/util-base64/dist-es/toBase64.js b/node_modules/@smithy/util-base64/dist-es/toBase64.js new file mode 100644 index 00000000..61f03ce6 --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-es/toBase64.js @@ -0,0 +1,15 @@ +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +import { fromUtf8 } from "@smithy/util-utf8"; +export const toBase64 = (_input) => { + let input; + if (typeof _input === "string") { + input = fromUtf8(_input); + } + else { + input = _input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-base64: toBase64 encoder function only accepts string | Uint8Array."); + } + return fromArrayBuffer(input.buffer, input.byteOffset, input.byteLength).toString("base64"); +}; diff --git a/node_modules/@smithy/util-base64/dist-types/constants.browser.d.ts b/node_modules/@smithy/util-base64/dist-types/constants.browser.d.ts new file mode 100644 index 00000000..eb750ea1 --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-types/constants.browser.d.ts @@ -0,0 +1,6 @@ +declare const alphabetByEncoding: Record<string, number>; +declare const alphabetByValue: Array<string>; +declare const bitsPerLetter = 6; +declare const bitsPerByte = 8; +declare const maxLetterValue = 63; +export { alphabetByEncoding, alphabetByValue, bitsPerLetter, bitsPerByte, maxLetterValue }; diff --git a/node_modules/@smithy/util-base64/dist-types/fromBase64.browser.d.ts b/node_modules/@smithy/util-base64/dist-types/fromBase64.browser.d.ts new file mode 100644 index 00000000..6a640f14 --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-types/fromBase64.browser.d.ts @@ -0,0 +1,8 @@ +/** + * Converts a base-64 encoded string to a Uint8Array of bytes. + * + * @param input The base-64 encoded string + * + * @see https://tools.ietf.org/html/rfc4648#section-4 + */ +export declare const fromBase64: (input: string) => Uint8Array; diff --git a/node_modules/@smithy/util-base64/dist-types/fromBase64.d.ts b/node_modules/@smithy/util-base64/dist-types/fromBase64.d.ts new file mode 100644 index 00000000..1878a891 --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-types/fromBase64.d.ts @@ -0,0 +1,7 @@ +/** + * Converts a base-64 encoded string to a Uint8Array of bytes using Node.JS's + * `buffer` module. + * + * @param input The base-64 encoded string + */ +export declare const fromBase64: (input: string) => Uint8Array; diff --git a/node_modules/@smithy/util-base64/dist-types/index.d.ts b/node_modules/@smithy/util-base64/dist-types/index.d.ts new file mode 100644 index 00000000..594bd435 --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./fromBase64"; +export * from "./toBase64"; diff --git a/node_modules/@smithy/util-base64/dist-types/toBase64.browser.d.ts b/node_modules/@smithy/util-base64/dist-types/toBase64.browser.d.ts new file mode 100644 index 00000000..5f5615e8 --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-types/toBase64.browser.d.ts @@ -0,0 +1,9 @@ +/** + * Converts a Uint8Array of binary data or a utf-8 string to a base-64 encoded string. + * + * @param _input - the binary data or string to encode. + * @returns base64 string.
+ * + * @see https://tools.ietf.org/html/rfc4648#section-4 + */ +export declare function toBase64(_input: Uint8Array | string): string; diff --git a/node_modules/@smithy/util-base64/dist-types/toBase64.d.ts b/node_modules/@smithy/util-base64/dist-types/toBase64.d.ts new file mode 100644 index 00000000..96bd0edd --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-types/toBase64.d.ts @@ -0,0 +1,8 @@ +/** + * Converts a Uint8Array of binary data or a utf-8 string to a base-64 encoded string using + * Node.JS's `buffer` module. + * + * @param _input - the binary data or string to encode. + * @returns base64 string. + */ +export declare const toBase64: (_input: Uint8Array | string) => string; diff --git a/node_modules/@smithy/util-base64/dist-types/ts3.4/constants.browser.d.ts b/node_modules/@smithy/util-base64/dist-types/ts3.4/constants.browser.d.ts new file mode 100644 index 00000000..61c36c81 --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-types/ts3.4/constants.browser.d.ts @@ -0,0 +1,6 @@ +declare const alphabetByEncoding: Record<string, number>; +declare const alphabetByValue: Array<string>; +declare const bitsPerLetter = 6; +declare const bitsPerByte = 8; +declare const maxLetterValue = 63; +export { alphabetByEncoding, alphabetByValue, bitsPerLetter, bitsPerByte, maxLetterValue }; diff --git a/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.browser.d.ts b/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.browser.d.ts new file mode 100644 index 00000000..3a500068 --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.browser.d.ts @@ -0,0 +1,8 @@ +/** + * Converts a base-64 encoded string to a Uint8Array of bytes. + * + * @param input The base-64 encoded string + * + * @see https://tools.ietf.org/html/rfc4648#section-4 + */ +export declare const fromBase64: (input: string) => Uint8Array; diff --git a/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.d.ts b/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.d.ts new file mode 100644 index 00000000..f84c7c60 --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.d.ts @@ -0,0 +1,7 @@ +/** + * Converts a base-64 encoded string to a Uint8Array of bytes using Node.JS's + * `buffer` module. + * + * @param input The base-64 encoded string + */ +export declare const fromBase64: (input: string) => Uint8Array; diff --git a/node_modules/@smithy/util-base64/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/util-base64/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..c4e1d039 --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./fromBase64"; +export * from "./toBase64"; diff --git a/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.browser.d.ts b/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.browser.d.ts new file mode 100644 index 00000000..260f6969 --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.browser.d.ts @@ -0,0 +1,9 @@ +/** + * Converts a Uint8Array of binary data or a utf-8 string to a base-64 encoded string. + * + * @param _input - the binary data or string to encode. + * @returns base64 string.
+ * + * @see https://tools.ietf.org/html/rfc4648#section-4 + */ +export declare function toBase64(_input: Uint8Array | string): string; diff --git a/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.d.ts b/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.d.ts new file mode 100644 index 00000000..7e8bb709 --- /dev/null +++ b/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.d.ts @@ -0,0 +1,8 @@ +/** + * Converts a Uint8Array of binary data or a utf-8 string to a base-64 encoded string using + * Node.JS's `buffer` module. + * + * @param _input - the binary data or string to encode. + * @returns base64 string. + */ +export declare const toBase64: (_input: Uint8Array | string) => string; diff --git a/node_modules/@smithy/util-base64/package.json b/node_modules/@smithy/util-base64/package.json new file mode 100644 index 00000000..e122233a --- /dev/null +++ b/node_modules/@smithy/util-base64/package.json @@ -0,0 +1,73 @@ +{ + "name": "@smithy/util-base64", + "version": "4.0.0", + "description": "A Base64 <-> UInt8Array converter", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-base64", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/fromBase64": "./dist-es/fromBase64.browser", + "./dist-es/toBase64": "./dist-es/toBase64.browser" + }, + "react-native": { + "./dist-es/fromBase64": "./dist-es/fromBase64.browser", + "./dist-es/toBase64": "./dist-es/toBase64.browser", + "./dist-cjs/fromBase64": "./dist-cjs/fromBase64.browser", + "./dist-cjs/toBase64": "./dist-cjs/toBase64.browser" + }, + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-base64", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-base64" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/util-body-length-browser/LICENSE b/node_modules/@smithy/util-body-length-browser/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ 
b/node_modules/@smithy/util-body-length-browser/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + [Full Apache License 2.0 text and closing AWS copyright notice, a verbatim duplicate of the license text above.] \ No newline at end of file
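For orientation, here is a minimal usage sketch of the @smithy/util-base64 pair vendored above. It is an illustrative example, not part of the vendored sources, and it assumes a Node.js 18+ runtime where the Buffer-backed dist builds are resolved rather than the .browser variants:

    import { fromBase64, toBase64 } from "@smithy/util-base64";

    // toBase64 accepts a UTF-8 string or a Uint8Array; a string is first
    // converted to its raw UTF-8 bytes, so the result is the base64 of those bytes.
    const encoded: string = toBase64("hello"); // "aGVsbG8="

    // fromBase64 validates padding and the base64 alphabet, then returns
    // the decoded bytes as a Uint8Array.
    const decoded: Uint8Array = fromBase64(encoded); // Uint8Array [104, 101, 108, 108, 111]

The browser builds implement the same contract with the 6-bit alphabet tables from constants.browser instead of Buffer, which is why the package.json above remaps dist-es/fromBase64 and dist-es/toBase64 to their .browser counterparts for the browser and react-native targets.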
diff --git a/node_modules/@smithy/util-body-length-browser/README.md b/node_modules/@smithy/util-body-length-browser/README.md new file mode 100644 index 00000000..460d0929 --- /dev/null +++ b/node_modules/@smithy/util-body-length-browser/README.md @@ -0,0 +1,12 @@ +# @smithy/util-body-length-browser + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-body-length-browser/latest.svg)](https://www.npmjs.com/package/@smithy/util-body-length-browser) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-body-length-browser.svg)](https://www.npmjs.com/package/@smithy/util-body-length-browser) + +Determines the length of a request body in browsers + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@smithy/util-body-length-browser/dist-cjs/calculateBodyLength.js b/node_modules/@smithy/util-body-length-browser/dist-cjs/calculateBodyLength.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-body-length-browser/dist-cjs/calculateBodyLength.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-body-length-browser/dist-cjs/index.js b/node_modules/@smithy/util-body-length-browser/dist-cjs/index.js new file mode 100644 index 00000000..9e872bc3 --- /dev/null +++ b/node_modules/@smithy/util-body-length-browser/dist-cjs/index.js @@ -0,0 +1,57 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + calculateBodyLength: () => calculateBodyLength +}); +module.exports = __toCommonJS(src_exports); + +// src/calculateBodyLength.ts +var TEXT_ENCODER = typeof TextEncoder == "function" ?
new TextEncoder() : null; +var calculateBodyLength = /* @__PURE__ */ __name((body) => { + if (typeof body === "string") { + if (TEXT_ENCODER) { + return TEXT_ENCODER.encode(body).byteLength; + } + let len = body.length; + for (let i = len - 1; i >= 0; i--) { + const code = body.charCodeAt(i); + if (code > 127 && code <= 2047) + len++; + else if (code > 2047 && code <= 65535) + len += 2; + if (code >= 56320 && code <= 57343) + i--; + } + return len; + } else if (typeof body.byteLength === "number") { + return body.byteLength; + } else if (typeof body.size === "number") { + return body.size; + } + throw new Error(`Body Length computation failed for ${body}`); +}, "calculateBodyLength"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + calculateBodyLength +}); + diff --git a/node_modules/@smithy/util-body-length-browser/dist-es/calculateBodyLength.js b/node_modules/@smithy/util-body-length-browser/dist-es/calculateBodyLength.js new file mode 100644 index 00000000..6b994ca2 --- /dev/null +++ b/node_modules/@smithy/util-body-length-browser/dist-es/calculateBodyLength.js @@ -0,0 +1,26 @@ +const TEXT_ENCODER = typeof TextEncoder == "function" ? new TextEncoder() : null; +export const calculateBodyLength = (body) => { + if (typeof body === "string") { + if (TEXT_ENCODER) { + return TEXT_ENCODER.encode(body).byteLength; + } + let len = body.length; + for (let i = len - 1; i >= 0; i--) { + const code = body.charCodeAt(i); + if (code > 0x7f && code <= 0x7ff) + len++; + else if (code > 0x7ff && code <= 0xffff) + len += 2; + if (code >= 0xdc00 && code <= 0xdfff) + i--; + } + return len; + } + else if (typeof body.byteLength === "number") { + return body.byteLength; + } + else if (typeof body.size === "number") { + return body.size; + } + throw new Error(`Body Length computation failed for ${body}`); +}; diff --git a/node_modules/@smithy/util-body-length-browser/dist-es/index.js b/node_modules/@smithy/util-body-length-browser/dist-es/index.js new file mode 100644 index 00000000..16ba478e --- /dev/null +++ b/node_modules/@smithy/util-body-length-browser/dist-es/index.js @@ -0,0 +1 @@ +export * from "./calculateBodyLength"; diff --git a/node_modules/@smithy/util-body-length-browser/dist-types/calculateBodyLength.d.ts b/node_modules/@smithy/util-body-length-browser/dist-types/calculateBodyLength.d.ts new file mode 100644 index 00000000..8e1bdb01 --- /dev/null +++ b/node_modules/@smithy/util-body-length-browser/dist-types/calculateBodyLength.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const calculateBodyLength: (body: any) => number | undefined; diff --git a/node_modules/@smithy/util-body-length-browser/dist-types/index.d.ts b/node_modules/@smithy/util-body-length-browser/dist-types/index.d.ts new file mode 100644 index 00000000..7b4a0d7f --- /dev/null +++ b/node_modules/@smithy/util-body-length-browser/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./calculateBodyLength"; diff --git a/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/calculateBodyLength.d.ts b/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/calculateBodyLength.d.ts new file mode 100644 index 00000000..32605368 --- /dev/null +++ b/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/calculateBodyLength.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const calculateBodyLength: (body: any) => number | undefined; diff --git a/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/index.d.ts 
b/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..ab6cb834 --- /dev/null +++ b/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./calculateBodyLength"; diff --git a/node_modules/@smithy/util-body-length-browser/package.json b/node_modules/@smithy/util-body-length-browser/package.json new file mode 100644 index 00000000..b571489e --- /dev/null +++ b/node_modules/@smithy/util-body-length-browser/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/util-body-length-browser", + "description": "Determines the length of a request body in browsers", + "version": "4.0.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-body-length-browser", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-body-length-browser", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-body-length-browser" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/util-body-length-node/LICENSE b/node_modules/@smithy/util-body-length-node/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/util-body-length-node/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + [Full Apache License 2.0 text and closing AWS copyright notice, a verbatim duplicate of the license text above.] \ No newline at end of file
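Before the node variant of the same utility below, a short sketch of how the browser calculateBodyLength vendored above resolves a length for each supported body shape. This is an illustrative example, not part of the vendored sources, assuming a runtime that provides TextEncoder and Blob:

    import { calculateBodyLength } from "@smithy/util-body-length-browser";

    // Strings are measured in UTF-8 bytes (via TextEncoder when available,
    // falling back to a manual code-point scan), not in UTF-16 code units.
    calculateBodyLength("héllo"); // 6, because "é" encodes to two bytes

    // ArrayBuffers and their views report byteLength directly.
    calculateBodyLength(new Uint8Array(16)); // 16

    // Blobs, and anything else exposing a numeric size, use that value.
    calculateBodyLength(new Blob(["abc"])); // 3

The node implementation that follows extends the same cascade to file-backed bodies: stream ranges (end + 1 - start), file paths via lstatSync, and raw file descriptors via fstatSync.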
\ No newline at end of file diff --git a/node_modules/@smithy/util-body-length-node/README.md b/node_modules/@smithy/util-body-length-node/README.md new file mode 100644 index 00000000..9a80efe5 --- /dev/null +++ b/node_modules/@smithy/util-body-length-node/README.md @@ -0,0 +1,12 @@ +# @smithy/util-body-length-node + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-body-length-node/latest.svg)](https://www.npmjs.com/package/@smithy/util-body-length-node) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-body-length-node.svg)](https://www.npmjs.com/package/@smithy/util-body-length-node) + +Determines the length of a request body in node.js + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@smithy/util-body-length-node/dist-cjs/calculateBodyLength.js b/node_modules/@smithy/util-body-length-node/dist-cjs/calculateBodyLength.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-body-length-node/dist-cjs/calculateBodyLength.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-body-length-node/dist-cjs/index.js b/node_modules/@smithy/util-body-length-node/dist-cjs/index.js new file mode 100644 index 00000000..1ecdc793 --- /dev/null +++ b/node_modules/@smithy/util-body-length-node/dist-cjs/index.js @@ -0,0 +1,53 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + calculateBodyLength: () => calculateBodyLength +}); +module.exports = __toCommonJS(src_exports); + +// src/calculateBodyLength.ts +var import_fs = require("fs"); +var calculateBodyLength = /* @__PURE__ */ __name((body) => { + if (!body) { + return 0; + } + if (typeof body === "string") { + return Buffer.byteLength(body); + } else if (typeof body.byteLength === "number") { + return body.byteLength; + } else if (typeof body.size === "number") { + return body.size; + } else if (typeof body.start === "number" && typeof body.end === "number") { + return body.end + 1 - body.start; + } else if (typeof body.path === "string" || Buffer.isBuffer(body.path)) { + return (0, import_fs.lstatSync)(body.path).size; + } else if (typeof body.fd === "number") { + return (0, import_fs.fstatSync)(body.fd).size; + } + throw new Error(`Body Length computation failed for ${body}`); +}, "calculateBodyLength"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + calculateBodyLength +}); + diff --git a/node_modules/@smithy/util-body-length-node/dist-es/calculateBodyLength.js b/node_modules/@smithy/util-body-length-node/dist-es/calculateBodyLength.js new file 
mode 100644 index 00000000..857cff59 --- /dev/null +++ b/node_modules/@smithy/util-body-length-node/dist-es/calculateBodyLength.js @@ -0,0 +1,25 @@ +import { fstatSync, lstatSync } from "fs"; +export const calculateBodyLength = (body) => { + if (!body) { + return 0; + } + if (typeof body === "string") { + return Buffer.byteLength(body); + } + else if (typeof body.byteLength === "number") { + return body.byteLength; + } + else if (typeof body.size === "number") { + return body.size; + } + else if (typeof body.start === "number" && typeof body.end === "number") { + return body.end + 1 - body.start; + } + else if (typeof body.path === "string" || Buffer.isBuffer(body.path)) { + return lstatSync(body.path).size; + } + else if (typeof body.fd === "number") { + return fstatSync(body.fd).size; + } + throw new Error(`Body Length computation failed for ${body}`); +}; diff --git a/node_modules/@smithy/util-body-length-node/dist-es/index.js b/node_modules/@smithy/util-body-length-node/dist-es/index.js new file mode 100644 index 00000000..16ba478e --- /dev/null +++ b/node_modules/@smithy/util-body-length-node/dist-es/index.js @@ -0,0 +1 @@ +export * from "./calculateBodyLength"; diff --git a/node_modules/@smithy/util-body-length-node/dist-types/calculateBodyLength.d.ts b/node_modules/@smithy/util-body-length-node/dist-types/calculateBodyLength.d.ts new file mode 100644 index 00000000..8e1bdb01 --- /dev/null +++ b/node_modules/@smithy/util-body-length-node/dist-types/calculateBodyLength.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const calculateBodyLength: (body: any) => number | undefined; diff --git a/node_modules/@smithy/util-body-length-node/dist-types/index.d.ts b/node_modules/@smithy/util-body-length-node/dist-types/index.d.ts new file mode 100644 index 00000000..7b4a0d7f --- /dev/null +++ b/node_modules/@smithy/util-body-length-node/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./calculateBodyLength"; diff --git a/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/calculateBodyLength.d.ts b/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/calculateBodyLength.d.ts new file mode 100644 index 00000000..32605368 --- /dev/null +++ b/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/calculateBodyLength.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const calculateBodyLength: (body: any) => number | undefined; diff --git a/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..ab6cb834 --- /dev/null +++ b/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./calculateBodyLength"; diff --git a/node_modules/@smithy/util-body-length-node/package.json b/node_modules/@smithy/util-body-length-node/package.json new file mode 100644 index 00000000..25b0f7ac --- /dev/null +++ b/node_modules/@smithy/util-body-length-node/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/util-body-length-node", + "description": "Determines the length of a request body in node.js", + "version": "4.0.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-body-length-node", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 
&& downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-body-length-node", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-body-length-node" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/util-buffer-from/LICENSE b/node_modules/@smithy/util-buffer-from/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/util-buffer-from/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@smithy/util-buffer-from/README.md b/node_modules/@smithy/util-buffer-from/README.md new file mode 100644 index 00000000..c896b04a --- /dev/null +++ b/node_modules/@smithy/util-buffer-from/README.md @@ -0,0 +1,10 @@ +# @smithy/util-buffer-from + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-buffer-from/latest.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-buffer-from.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
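Reviewer note: the README above says only that @smithy/util-buffer-from is an internal package, so here is a minimal usage sketch based solely on the dist-types and dist-es sources added in this diff. The variable names and sample values are hypothetical; this block is an illustration for review, not part of the patch.

```ts
// Sketch of the two exports, per the dist-types/index.d.ts added below.
import { fromArrayBuffer, fromString } from "@smithy/util-buffer-from";

// fromArrayBuffer wraps an ArrayBuffer (optionally a sub-range) in a
// Buffer without copying; offset/length follow Buffer.from semantics.
const ab = new ArrayBuffer(8);
const view = fromArrayBuffer(ab, 2, 4); // Buffer over bytes 2..5 of ab

// fromString decodes a string, defaulting to utf8 when no encoding is given.
const plain = fromString("hello");
const fromHex = fromString("68656c6c6f", "hex"); // also "hello"

console.log(view.length, plain.toString(), fromHex.toString()); // 4 hello hello
```

Both helpers throw a TypeError when handed the wrong input type, as the dist-cjs/index.js below shows.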
diff --git a/node_modules/@smithy/util-buffer-from/dist-cjs/index.js b/node_modules/@smithy/util-buffer-from/dist-cjs/index.js new file mode 100644 index 00000000..c6738d94 --- /dev/null +++ b/node_modules/@smithy/util-buffer-from/dist-cjs/index.js @@ -0,0 +1,47 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromArrayBuffer: () => fromArrayBuffer, + fromString: () => fromString +}); +module.exports = __toCommonJS(src_exports); +var import_is_array_buffer = require("@smithy/is-array-buffer"); +var import_buffer = require("buffer"); +var fromArrayBuffer = /* @__PURE__ */ __name((input, offset = 0, length = input.byteLength - offset) => { + if (!(0, import_is_array_buffer.isArrayBuffer)(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); + } + return import_buffer.Buffer.from(input, offset, length); +}, "fromArrayBuffer"); +var fromString = /* @__PURE__ */ __name((input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? import_buffer.Buffer.from(input, encoding) : import_buffer.Buffer.from(input); +}, "fromString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromArrayBuffer, + fromString +}); + diff --git a/node_modules/@smithy/util-buffer-from/dist-es/index.js b/node_modules/@smithy/util-buffer-from/dist-es/index.js new file mode 100644 index 00000000..718f8315 --- /dev/null +++ b/node_modules/@smithy/util-buffer-from/dist-es/index.js @@ -0,0 +1,14 @@ +import { isArrayBuffer } from "@smithy/is-array-buffer"; +import { Buffer } from "buffer"; +export const fromArrayBuffer = (input, offset = 0, length = input.byteLength - offset) => { + if (!isArrayBuffer(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); + } + return Buffer.from(input, offset, length); +}; +export const fromString = (input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? 
Buffer.from(input, encoding) : Buffer.from(input); +}; diff --git a/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts b/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts new file mode 100644 index 00000000..a523134a --- /dev/null +++ b/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..f9173f74 --- /dev/null +++ b/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/node_modules/@smithy/util-buffer-from/package.json b/node_modules/@smithy/util-buffer-from/package.json new file mode 100644 index 00000000..08698997 --- /dev/null +++ b/node_modules/@smithy/util-buffer-from/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/util-buffer-from", + "version": "4.0.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-buffer-from", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-buffer-from", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-buffer-from" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + 
"publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/util-config-provider/LICENSE b/node_modules/@smithy/util-config-provider/LICENSE new file mode 100644 index 00000000..74d4e5c3 --- /dev/null +++ b/node_modules/@smithy/util-config-provider/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@smithy/util-config-provider/README.md b/node_modules/@smithy/util-config-provider/README.md new file mode 100644 index 00000000..5b0341d0 --- /dev/null +++ b/node_modules/@smithy/util-config-provider/README.md @@ -0,0 +1,4 @@ +# @smithy/util-config-provider + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-config-provider/latest.svg)](https://www.npmjs.com/package/@smithy/util-config-provider) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-config-provider.svg)](https://www.npmjs.com/package/@smithy/util-config-provider) diff --git a/node_modules/@smithy/util-config-provider/dist-cjs/booleanSelector.js b/node_modules/@smithy/util-config-provider/dist-cjs/booleanSelector.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-config-provider/dist-cjs/booleanSelector.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-config-provider/dist-cjs/index.js b/node_modules/@smithy/util-config-provider/dist-cjs/index.js new file mode 100644 index 00000000..210d40d7 --- /dev/null +++ b/node_modules/@smithy/util-config-provider/dist-cjs/index.js @@ -0,0 +1,64 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + SelectorType: () => SelectorType, + booleanSelector: () => booleanSelector, + numberSelector: () => numberSelector +}); +module.exports = __toCommonJS(src_exports); + +// src/booleanSelector.ts +var booleanSelector = /* @__PURE__ */ __name((obj, key, type) => { + if (!(key in obj)) + return void 0; + if (obj[key] === "true") + return true; + if (obj[key] === "false") + return false; + throw new Error(`Cannot load ${type} "${key}". 
Expected "true" or "false", got ${obj[key]}.`); +}, "booleanSelector"); + +// src/numberSelector.ts +var numberSelector = /* @__PURE__ */ __name((obj, key, type) => { + if (!(key in obj)) + return void 0; + const numberValue = parseInt(obj[key], 10); + if (Number.isNaN(numberValue)) { + throw new TypeError(`Cannot load ${type} '${key}'. Expected number, got '${obj[key]}'.`); + } + return numberValue; +}, "numberSelector"); + +// src/types.ts +var SelectorType = /* @__PURE__ */ ((SelectorType2) => { + SelectorType2["ENV"] = "env"; + SelectorType2["CONFIG"] = "shared config entry"; + return SelectorType2; +})(SelectorType || {}); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + booleanSelector, + numberSelector, + SelectorType +}); + diff --git a/node_modules/@smithy/util-config-provider/dist-cjs/numberSelector.js b/node_modules/@smithy/util-config-provider/dist-cjs/numberSelector.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-config-provider/dist-cjs/numberSelector.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-config-provider/dist-cjs/types.js b/node_modules/@smithy/util-config-provider/dist-cjs/types.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-config-provider/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-config-provider/dist-es/booleanSelector.js b/node_modules/@smithy/util-config-provider/dist-es/booleanSelector.js new file mode 100644 index 00000000..6ba2261f --- /dev/null +++ b/node_modules/@smithy/util-config-provider/dist-es/booleanSelector.js @@ -0,0 +1,9 @@ +export const booleanSelector = (obj, key, type) => { + if (!(key in obj)) + return undefined; + if (obj[key] === "true") + return true; + if (obj[key] === "false") + return false; + throw new Error(`Cannot load ${type} "${key}". Expected "true" or "false", got ${obj[key]}.`); +}; diff --git a/node_modules/@smithy/util-config-provider/dist-es/index.js b/node_modules/@smithy/util-config-provider/dist-es/index.js new file mode 100644 index 00000000..a926de84 --- /dev/null +++ b/node_modules/@smithy/util-config-provider/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./booleanSelector"; +export * from "./numberSelector"; +export * from "./types"; diff --git a/node_modules/@smithy/util-config-provider/dist-es/numberSelector.js b/node_modules/@smithy/util-config-provider/dist-es/numberSelector.js new file mode 100644 index 00000000..81cfe404 --- /dev/null +++ b/node_modules/@smithy/util-config-provider/dist-es/numberSelector.js @@ -0,0 +1,9 @@ +export const numberSelector = (obj, key, type) => { + if (!(key in obj)) + return undefined; + const numberValue = parseInt(obj[key], 10); + if (Number.isNaN(numberValue)) { + throw new TypeError(`Cannot load ${type} '${key}'. 
Expected number, got '${obj[key]}'.`); + } + return numberValue; +}; diff --git a/node_modules/@smithy/util-config-provider/dist-es/types.js b/node_modules/@smithy/util-config-provider/dist-es/types.js new file mode 100644 index 00000000..5b10fb55 --- /dev/null +++ b/node_modules/@smithy/util-config-provider/dist-es/types.js @@ -0,0 +1,5 @@ +export var SelectorType; +(function (SelectorType) { + SelectorType["ENV"] = "env"; + SelectorType["CONFIG"] = "shared config entry"; +})(SelectorType || (SelectorType = {})); diff --git a/node_modules/@smithy/util-config-provider/dist-types/booleanSelector.d.ts b/node_modules/@smithy/util-config-provider/dist-types/booleanSelector.d.ts new file mode 100644 index 00000000..d4977cbc --- /dev/null +++ b/node_modules/@smithy/util-config-provider/dist-types/booleanSelector.d.ts @@ -0,0 +1,10 @@ +import { SelectorType } from "./types"; +/** + * Returns boolean value true/false for string value "true"/"false", + * if the string is defined in obj[key] + * Returns undefined, if obj[key] is not defined. + * Throws error for all other cases. + * + * @internal + */ +export declare const booleanSelector: (obj: Record<string, string | undefined>, key: string, type: SelectorType) => boolean | undefined; diff --git a/node_modules/@smithy/util-config-provider/dist-types/index.d.ts b/node_modules/@smithy/util-config-provider/dist-types/index.d.ts new file mode 100644 index 00000000..a926de84 --- /dev/null +++ b/node_modules/@smithy/util-config-provider/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./booleanSelector"; +export * from "./numberSelector"; +export * from "./types"; diff --git a/node_modules/@smithy/util-config-provider/dist-types/numberSelector.d.ts b/node_modules/@smithy/util-config-provider/dist-types/numberSelector.d.ts new file mode 100644 index 00000000..9e0cbf90 --- /dev/null +++ b/node_modules/@smithy/util-config-provider/dist-types/numberSelector.d.ts @@ -0,0 +1,9 @@ +import { SelectorType } from "./types"; +/** + * Returns number value for string value, if the string is defined in obj[key]. + * Returns undefined, if obj[key] is not defined. + * Throws error for all other cases. + * + * @internal + */ +export declare const numberSelector: (obj: Record<string, string | undefined>, key: string, type: SelectorType) => number | undefined; diff --git a/node_modules/@smithy/util-config-provider/dist-types/ts3.4/booleanSelector.d.ts b/node_modules/@smithy/util-config-provider/dist-types/ts3.4/booleanSelector.d.ts new file mode 100644 index 00000000..0b854520 --- /dev/null +++ b/node_modules/@smithy/util-config-provider/dist-types/ts3.4/booleanSelector.d.ts @@ -0,0 +1,10 @@ +import { SelectorType } from "./types"; +/** + * Returns boolean value true/false for string value "true"/"false", + * if the string is defined in obj[key] + * Returns undefined, if obj[key] is not defined. + * Throws error for all other cases. 
+ * + * @internal + */ +export declare const booleanSelector: (obj: Record<string, string | undefined>, key: string, type: SelectorType) => boolean | undefined; diff --git a/node_modules/@smithy/util-config-provider/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/util-config-provider/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..02fd81d2 --- /dev/null +++ b/node_modules/@smithy/util-config-provider/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./booleanSelector"; +export * from "./numberSelector"; +export * from "./types"; diff --git a/node_modules/@smithy/util-config-provider/dist-types/ts3.4/numberSelector.d.ts b/node_modules/@smithy/util-config-provider/dist-types/ts3.4/numberSelector.d.ts new file mode 100644 index 00000000..3a346719 --- /dev/null +++ b/node_modules/@smithy/util-config-provider/dist-types/ts3.4/numberSelector.d.ts @@ -0,0 +1,9 @@ +import { SelectorType } from "./types"; +/** + * Returns number value for string value, if the string is defined in obj[key]. + * Returns undefined, if obj[key] is not defined. + * Throws error for all other cases. + * + * @internal + */ +export declare const numberSelector: (obj: Record<string, string | undefined>, key: string, type: SelectorType) => number | undefined; diff --git a/node_modules/@smithy/util-config-provider/dist-types/ts3.4/types.d.ts b/node_modules/@smithy/util-config-provider/dist-types/ts3.4/types.d.ts new file mode 100644 index 00000000..e01c128e --- /dev/null +++ b/node_modules/@smithy/util-config-provider/dist-types/ts3.4/types.d.ts @@ -0,0 +1,4 @@ +export declare enum SelectorType { + ENV = "env", + CONFIG = "shared config entry" +} diff --git a/node_modules/@smithy/util-config-provider/dist-types/types.d.ts b/node_modules/@smithy/util-config-provider/dist-types/types.d.ts new file mode 100644 index 00000000..caa65d7f --- /dev/null +++ b/node_modules/@smithy/util-config-provider/dist-types/types.d.ts @@ -0,0 +1,4 @@ +export declare enum SelectorType { + ENV = "env", + CONFIG = "shared config entry" +} diff --git a/node_modules/@smithy/util-config-provider/package.json b/node_modules/@smithy/util-config-provider/package.json new file mode 100644 index 00000000..50796be6 --- /dev/null +++ b/node_modules/@smithy/util-config-provider/package.json @@ -0,0 +1,62 @@ +{ + "name": "@smithy/util-config-provider", + "version": "4.0.0", + "description": "Utilities package for configuration providers", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-config-provider", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "email": "", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "devDependencies": { 
+ "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-config-provider", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-config-provider" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/util-defaults-mode-browser/LICENSE b/node_modules/@smithy/util-defaults-mode-browser/LICENSE new file mode 100644 index 00000000..dd65ae06 --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-browser/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@smithy/util-defaults-mode-browser/README.md b/node_modules/@smithy/util-defaults-mode-browser/README.md new file mode 100644 index 00000000..f2f1cc07 --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-browser/README.md @@ -0,0 +1,10 @@ +# @smithy/util-defaults-mode-browser + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-defaults-mode-browser/latest.svg)](https://www.npmjs.com/package/@smithy/util-defaults-mode-browser) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-defaults-mode-browser.svg)](https://www.npmjs.com/package/@smithy/util-defaults-mode-browser) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
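A minimal usage sketch for the package above, inferred from the compiled dist files that follow; the README flags it as internal, so this is illustrative only and the wrapper name detectDefaultsMode is hypothetical:

    import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-browser";

    // Hypothetical consumer; SDK clients normally call this internally.
    async function detectDefaultsMode() {
      // Returns a memoized async provider: detection runs once and
      // subsequent calls reuse the cached promise.
      const provider = resolveDefaultsModeConfig({ defaultsMode: "auto" });
      return provider(); // "mobile" on phone/tablet user agents, otherwise "standard"
    }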
diff --git a/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/constants.js b/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/constants.js new file mode 100644 index 00000000..37335062 --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/constants.js @@ -0,0 +1,4 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DEFAULTS_MODE_OPTIONS = void 0; +exports.DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; diff --git a/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/index.js b/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/index.js new file mode 100644 index 00000000..4624ef1a --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/index.js @@ -0,0 +1,25 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +module.exports = __toCommonJS(src_exports); +__reExport(src_exports, require("././resolveDefaultsModeConfig"), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + resolveDefaultsModeConfig +}); + diff --git a/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.js b/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.js new file mode 100644 index 00000000..f23368cb --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.js @@ -0,0 +1,33 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveDefaultsModeConfig = void 0; +const tslib_1 = require("tslib"); +const property_provider_1 = require("@smithy/property-provider"); +const bowser_1 = tslib_1.__importDefault(require("bowser")); +const constants_1 = require("./constants"); +const resolveDefaultsModeConfig = ({ defaultsMode, } = {}) => (0, property_provider_1.memoize)(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode === null || mode === void 0 ? void 0 : mode.toLowerCase()) { + case "auto": + return Promise.resolve(isMobileBrowser() ? "mobile" : "standard"); + case "mobile": + case "in-region": + case "cross-region": + case "standard": + case "legacy": + return Promise.resolve(mode === null || mode === void 0 ? 
void 0 : mode.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${constants_1.DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +}); +exports.resolveDefaultsModeConfig = resolveDefaultsModeConfig; +const isMobileBrowser = () => { + var _a, _b; + const parsedUA = typeof window !== "undefined" && ((_a = window === null || window === void 0 ? void 0 : window.navigator) === null || _a === void 0 ? void 0 : _a.userAgent) + ? bowser_1.default.parse(window.navigator.userAgent) + : undefined; + const platform = (_b = parsedUA === null || parsedUA === void 0 ? void 0 : parsedUA.platform) === null || _b === void 0 ? void 0 : _b.type; + return platform === "tablet" || platform === "mobile"; +}; diff --git a/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.native.js b/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.native.js new file mode 100644 index 00000000..fc6be33c --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.native.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveDefaultsModeConfig = void 0; +const property_provider_1 = require("@smithy/property-provider"); +const constants_1 = require("./constants"); +const resolveDefaultsModeConfig = ({ defaultsMode, } = {}) => (0, property_provider_1.memoize)(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode === null || mode === void 0 ? void 0 : mode.toLowerCase()) { + case "auto": + return Promise.resolve("mobile"); + case "mobile": + case "in-region": + case "cross-region": + case "standard": + case "legacy": + return Promise.resolve(mode === null || mode === void 0 ? 
void 0 : mode.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${constants_1.DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +}); +exports.resolveDefaultsModeConfig = resolveDefaultsModeConfig; diff --git a/node_modules/@smithy/util-defaults-mode-browser/dist-es/constants.js b/node_modules/@smithy/util-defaults-mode-browser/dist-es/constants.js new file mode 100644 index 00000000..d58e11f4 --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-browser/dist-es/constants.js @@ -0,0 +1 @@ +export const DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; diff --git a/node_modules/@smithy/util-defaults-mode-browser/dist-es/index.js b/node_modules/@smithy/util-defaults-mode-browser/dist-es/index.js new file mode 100644 index 00000000..05aa8183 --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-browser/dist-es/index.js @@ -0,0 +1 @@ +export * from "./resolveDefaultsModeConfig"; diff --git a/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.js b/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.js new file mode 100644 index 00000000..940ab631 --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.js @@ -0,0 +1,27 @@ +import { memoize } from "@smithy/property-provider"; +import bowser from "bowser"; +import { DEFAULTS_MODE_OPTIONS } from "./constants"; +export const resolveDefaultsModeConfig = ({ defaultsMode, } = {}) => memoize(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode?.toLowerCase()) { + case "auto": + return Promise.resolve(isMobileBrowser() ? "mobile" : "standard"); + case "mobile": + case "in-region": + case "cross-region": + case "standard": + case "legacy": + return Promise.resolve(mode?.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +}); +const isMobileBrowser = () => { + const parsedUA = typeof window !== "undefined" && window?.navigator?.userAgent + ? bowser.parse(window.navigator.userAgent) + : undefined; + const platform = parsedUA?.platform?.type; + return platform === "tablet" || platform === "mobile"; +}; diff --git a/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.native.js b/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.native.js new file mode 100644 index 00000000..31641910 --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.native.js @@ -0,0 +1,19 @@ +import { memoize } from "@smithy/property-provider"; +import { DEFAULTS_MODE_OPTIONS } from "./constants"; +export const resolveDefaultsModeConfig = ({ defaultsMode, } = {}) => memoize(async () => { + const mode = typeof defaultsMode === "function" ? 
await defaultsMode() : defaultsMode; + switch (mode?.toLowerCase()) { + case "auto": + return Promise.resolve("mobile"); + case "mobile": + case "in-region": + case "cross-region": + case "standard": + case "legacy": + return Promise.resolve(mode?.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +});
diff --git a/node_modules/@smithy/util-defaults-mode-browser/dist-types/constants.d.ts b/node_modules/@smithy/util-defaults-mode-browser/dist-types/constants.d.ts new file mode 100644 index 00000000..18dbe6cc --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-browser/dist-types/constants.d.ts @@ -0,0 +1,12 @@ +import type { DefaultsMode } from "@smithy/smithy-client"; +import type { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const DEFAULTS_MODE_OPTIONS: string[]; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider<DefaultsMode>; +}
diff --git a/node_modules/@smithy/util-defaults-mode-browser/dist-types/index.d.ts b/node_modules/@smithy/util-defaults-mode-browser/dist-types/index.d.ts new file mode 100644 index 00000000..003de26f --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-browser/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./resolveDefaultsModeConfig";
diff --git a/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.d.ts b/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.d.ts new file mode 100644 index 00000000..e4cc1b7f --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.d.ts @@ -0,0 +1,17 @@ +import type { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import type { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider<DefaultsMode>; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "mobile" if the app is running in a mobile browser, + * otherwise it resolves to "standard". + * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider<ResolvedDefaultsMode>;
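The Provider generics in the declarations above come from "@smithy/types". A rough sketch of the assumed shapes, for orientation only (the exact union is an assumption based on DEFAULTS_MODE_OPTIONS, not copied from the @smithy sources):

    // Assumed shape: a Provider<T> is a niladic async factory.
    type Provider<T> = () => Promise<T>;
    // Assumed: "auto" is accepted as input but resolved away, so the
    // resolver only ever returns one of the concrete modes.
    type DefaultsMode = "auto" | "standard" | "in-region" | "cross-region" | "mobile" | "legacy";
    type ResolvedDefaultsMode = Exclude<DefaultsMode, "auto">;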
+ * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/constants.d.ts b/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/constants.d.ts new file mode 100644 index 00000000..fc886026 --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,12 @@ +import { DefaultsMode } from "@smithy/smithy-client"; +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const DEFAULTS_MODE_OPTIONS: string[]; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} diff --git a/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..4ab48b45 --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./resolveDefaultsModeConfig"; diff --git a/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts b/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts new file mode 100644 index 00000000..d4684787 --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts @@ -0,0 +1,17 @@ +import { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "mobile" if the app is running in a mobile browser, + * otherwise it resolves to "standard". + * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.native.d.ts b/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.native.d.ts new file mode 100644 index 00000000..86fe4b7f --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.native.d.ts @@ -0,0 +1,16 @@ +import { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "mobile". 
+ * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/node_modules/@smithy/util-defaults-mode-browser/package.json b/node_modules/@smithy/util-defaults-mode-browser/package.json new file mode 100644 index 00000000..954040bb --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-browser/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-defaults-mode-browser", + "version": "4.0.9", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-defaults-mode-browser", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "react-native": {}, + "browser": {}, + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/util-defaults-mode-node", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-defaults-mode-node" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/util-defaults-mode-node/LICENSE b/node_modules/@smithy/util-defaults-mode-node/LICENSE new file mode 100644 index 00000000..dd65ae06 --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-node/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/node_modules/@smithy/util-defaults-mode-node/README.md b/node_modules/@smithy/util-defaults-mode-node/README.md new file mode 100644 index 00000000..bfae0bd6 --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-node/README.md @@ -0,0 +1,10 @@ +# @smithy/util-defaults-mode-node + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-defaults-mode-node/latest.svg)](https://www.npmjs.com/package/@smithy/util-defaults-mode-node) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-defaults-mode-node.svg)](https://www.npmjs.com/package/@smithy/util-defaults-mode-node) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@smithy/util-defaults-mode-node/dist-cjs/constants.js b/node_modules/@smithy/util-defaults-mode-node/dist-cjs/constants.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-node/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-defaults-mode-node/dist-cjs/defaultsModeConfig.js b/node_modules/@smithy/util-defaults-mode-node/dist-cjs/defaultsModeConfig.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-node/dist-cjs/defaultsModeConfig.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-defaults-mode-node/dist-cjs/index.js b/node_modules/@smithy/util-defaults-mode-node/dist-cjs/index.js new file mode 100644 index 00000000..ddd06848 --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-node/dist-cjs/index.js @@ -0,0 +1,119 @@ +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + resolveDefaultsModeConfig: () => resolveDefaultsModeConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/resolveDefaultsModeConfig.ts +var import_config_resolver = require("@smithy/config-resolver"); +var import_node_config_provider = require("@smithy/node-config-provider"); +var import_property_provider = require("@smithy/property-provider"); + +// src/constants.ts +var AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; +var AWS_REGION_ENV = "AWS_REGION"; +var AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; +var ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +var DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; +var IMDS_REGION_PATH = "/latest/meta-data/placement/region"; + +// src/defaultsModeConfig.ts +var AWS_DEFAULTS_MODE_ENV = "AWS_DEFAULTS_MODE"; +var AWS_DEFAULTS_MODE_CONFIG = "defaults_mode"; +var NODE_DEFAULTS_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + return env[AWS_DEFAULTS_MODE_ENV]; + }, + configFileSelector: (profile) => { + return profile[AWS_DEFAULTS_MODE_CONFIG]; + }, + default: "legacy" +}; + +// src/resolveDefaultsModeConfig.ts +var resolveDefaultsModeConfig = /* @__PURE__ */ __name(({ + region = (0, import_node_config_provider.loadConfig)(import_config_resolver.NODE_REGION_CONFIG_OPTIONS), + defaultsMode = (0, import_node_config_provider.loadConfig)(NODE_DEFAULTS_MODE_CONFIG_OPTIONS) +} = {}) => (0, import_property_provider.memoize)(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode?.toLowerCase()) { + case "auto": + return resolveNodeDefaultsModeAuto(region); + case "in-region": + case "cross-region": + case "mobile": + case "standard": + case "legacy": + return Promise.resolve(mode?.toLocaleLowerCase()); + case void 0: + return Promise.resolve("legacy"); + default: + throw new Error( + `Invalid parameter for "defaultsMode", expect ${DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}` + ); + } +}), "resolveDefaultsModeConfig"); +var resolveNodeDefaultsModeAuto = /* @__PURE__ */ __name(async (clientRegion) => { + if (clientRegion) { + const resolvedRegion = typeof clientRegion === "function" ? await clientRegion() : clientRegion; + const inferredRegion = await inferPhysicalRegion(); + if (!inferredRegion) { + return "standard"; + } + if (resolvedRegion === inferredRegion) { + return "in-region"; + } else { + return "cross-region"; + } + } + return "standard"; +}, "resolveNodeDefaultsModeAuto"); +var inferPhysicalRegion = /* @__PURE__ */ __name(async () => { + if (process.env[AWS_EXECUTION_ENV] && (process.env[AWS_REGION_ENV] || process.env[AWS_DEFAULT_REGION_ENV])) { + return process.env[AWS_REGION_ENV] ?? 
process.env[AWS_DEFAULT_REGION_ENV]; + } + if (!process.env[ENV_IMDS_DISABLED]) { + try { + const { getInstanceMetadataEndpoint, httpRequest } = await Promise.resolve().then(() => __toESM(require("@smithy/credential-provider-imds"))); + const endpoint = await getInstanceMetadataEndpoint(); + return (await httpRequest({ ...endpoint, path: IMDS_REGION_PATH })).toString(); + } catch (e) { + } + } +}, "inferPhysicalRegion"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + resolveDefaultsModeConfig +}); + diff --git a/node_modules/@smithy/util-defaults-mode-node/dist-cjs/resolveDefaultsModeConfig.js b/node_modules/@smithy/util-defaults-mode-node/dist-cjs/resolveDefaultsModeConfig.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-node/dist-cjs/resolveDefaultsModeConfig.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-defaults-mode-node/dist-es/constants.js b/node_modules/@smithy/util-defaults-mode-node/dist-es/constants.js new file mode 100644 index 00000000..69361a3f --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-node/dist-es/constants.js @@ -0,0 +1,6 @@ +export const AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; +export const AWS_REGION_ENV = "AWS_REGION"; +export const AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; +export const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +export const DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; +export const IMDS_REGION_PATH = "/latest/meta-data/placement/region"; diff --git a/node_modules/@smithy/util-defaults-mode-node/dist-es/defaultsModeConfig.js b/node_modules/@smithy/util-defaults-mode-node/dist-es/defaultsModeConfig.js new file mode 100644 index 00000000..f43b5708 --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-node/dist-es/defaultsModeConfig.js @@ -0,0 +1,11 @@ +const AWS_DEFAULTS_MODE_ENV = "AWS_DEFAULTS_MODE"; +const AWS_DEFAULTS_MODE_CONFIG = "defaults_mode"; +export const NODE_DEFAULTS_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + return env[AWS_DEFAULTS_MODE_ENV]; + }, + configFileSelector: (profile) => { + return profile[AWS_DEFAULTS_MODE_CONFIG]; + }, + default: "legacy", +}; diff --git a/node_modules/@smithy/util-defaults-mode-node/dist-es/index.js b/node_modules/@smithy/util-defaults-mode-node/dist-es/index.js new file mode 100644 index 00000000..05aa8183 --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-node/dist-es/index.js @@ -0,0 +1 @@ +export * from "./resolveDefaultsModeConfig"; diff --git a/node_modules/@smithy/util-defaults-mode-node/dist-es/resolveDefaultsModeConfig.js b/node_modules/@smithy/util-defaults-mode-node/dist-es/resolveDefaultsModeConfig.js new file mode 100644 index 00000000..8c9d050b --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-node/dist-es/resolveDefaultsModeConfig.js @@ -0,0 +1,52 @@ +import { NODE_REGION_CONFIG_OPTIONS } from "@smithy/config-resolver"; +import { loadConfig } from "@smithy/node-config-provider"; +import { memoize } from "@smithy/property-provider"; +import { AWS_DEFAULT_REGION_ENV, AWS_EXECUTION_ENV, AWS_REGION_ENV, DEFAULTS_MODE_OPTIONS, ENV_IMDS_DISABLED, IMDS_REGION_PATH, } from "./constants"; +import { NODE_DEFAULTS_MODE_CONFIG_OPTIONS } from "./defaultsModeConfig"; +export const resolveDefaultsModeConfig = ({ region = loadConfig(NODE_REGION_CONFIG_OPTIONS), defaultsMode = 
loadConfig(NODE_DEFAULTS_MODE_CONFIG_OPTIONS), } = {}) => memoize(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode?.toLowerCase()) { + case "auto": + return resolveNodeDefaultsModeAuto(region); + case "in-region": + case "cross-region": + case "mobile": + case "standard": + case "legacy": + return Promise.resolve(mode?.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +}); +const resolveNodeDefaultsModeAuto = async (clientRegion) => { + if (clientRegion) { + const resolvedRegion = typeof clientRegion === "function" ? await clientRegion() : clientRegion; + const inferredRegion = await inferPhysicalRegion(); + if (!inferredRegion) { + return "standard"; + } + if (resolvedRegion === inferredRegion) { + return "in-region"; + } + else { + return "cross-region"; + } + } + return "standard"; +}; +const inferPhysicalRegion = async () => { + if (process.env[AWS_EXECUTION_ENV] && (process.env[AWS_REGION_ENV] || process.env[AWS_DEFAULT_REGION_ENV])) { + return process.env[AWS_REGION_ENV] ?? process.env[AWS_DEFAULT_REGION_ENV]; + } + if (!process.env[ENV_IMDS_DISABLED]) { + try { + const { getInstanceMetadataEndpoint, httpRequest } = await import("@smithy/credential-provider-imds"); + const endpoint = await getInstanceMetadataEndpoint(); + return (await httpRequest({ ...endpoint, path: IMDS_REGION_PATH })).toString(); + } + catch (e) { + } + } +};
diff --git a/node_modules/@smithy/util-defaults-mode-node/dist-types/constants.d.ts b/node_modules/@smithy/util-defaults-mode-node/dist-types/constants.d.ts new file mode 100644 index 00000000..a2db2834 --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-node/dist-types/constants.d.ts @@ -0,0 +1,24 @@ +/** + * @internal + */ +export declare const AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; +/** + * @internal + */ +export declare const AWS_REGION_ENV = "AWS_REGION"; +/** + * @internal + */ +export declare const AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; +/** + * @internal + */ +export declare const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +/** + * @internal + */ +export declare const DEFAULTS_MODE_OPTIONS: string[]; +/** + * @internal + */ +export declare const IMDS_REGION_PATH = "/latest/meta-data/placement/region";
diff --git a/node_modules/@smithy/util-defaults-mode-node/dist-types/defaultsModeConfig.d.ts b/node_modules/@smithy/util-defaults-mode-node/dist-types/defaultsModeConfig.d.ts new file mode 100644 index 00000000..12f4dae0 --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-node/dist-types/defaultsModeConfig.d.ts @@ -0,0 +1,6 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import type { DefaultsMode } from "@smithy/smithy-client"; +/** + * @internal + */ +export declare const NODE_DEFAULTS_MODE_CONFIG_OPTIONS: LoadedConfigSelectors<DefaultsMode>;
diff --git a/node_modules/@smithy/util-defaults-mode-node/dist-types/index.d.ts b/node_modules/@smithy/util-defaults-mode-node/dist-types/index.d.ts new file mode 100644 index 00000000..003de26f --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-node/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./resolveDefaultsModeConfig";
diff --git a/node_modules/@smithy/util-defaults-mode-node/dist-types/resolveDefaultsModeConfig.d.ts
b/node_modules/@smithy/util-defaults-mode-node/dist-types/resolveDefaultsModeConfig.d.ts new file mode 100644 index 00000000..8f343715 --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-node/dist-types/resolveDefaultsModeConfig.d.ts @@ -0,0 +1,17 @@ +import type { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import type { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider<DefaultsMode>; + region?: string | Provider<string>; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "in-region", "cross-region", or "standard". + * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ region, defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider<ResolvedDefaultsMode>;
diff --git a/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/constants.d.ts b/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/constants.d.ts new file mode 100644 index 00000000..b847dc2a --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,24 @@ +/** + * @internal + */ +export declare const AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; +/** + * @internal + */ +export declare const AWS_REGION_ENV = "AWS_REGION"; +/** + * @internal + */ +export declare const AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; +/** + * @internal + */ +export declare const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +/** + * @internal + */ +export declare const DEFAULTS_MODE_OPTIONS: string[]; +/** + * @internal + */ +export declare const IMDS_REGION_PATH = "/latest/meta-data/placement/region";
diff --git a/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/defaultsModeConfig.d.ts b/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/defaultsModeConfig.d.ts new file mode 100644 index 00000000..76c3d0da --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/defaultsModeConfig.d.ts @@ -0,0 +1,6 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { DefaultsMode } from "@smithy/smithy-client"; +/** + * @internal + */ +export declare const NODE_DEFAULTS_MODE_CONFIG_OPTIONS: LoadedConfigSelectors<DefaultsMode>;
diff --git a/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..4ab48b45 --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./resolveDefaultsModeConfig";
diff --git a/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts b/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts new file mode 100644 index 00000000..4daa927b --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts @@ -0,0 +1,17 @@ +import { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider<DefaultsMode>; + region?: string | Provider<string>; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "in-region", "cross-region", or "standard". + * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ region, defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider<ResolvedDefaultsMode>;
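The node variant declared above additionally accepts a region, which drives the "in-region" vs. "cross-region" outcome under "auto" (compared against the region inferred from AWS_REGION/AWS_DEFAULT_REGION or IMDS, per the dist sources earlier in this diff). A hedged usage sketch; the region literal and function name are hypothetical:

    import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-node";

    async function pickDefaultsMode() {
      const provider = resolveDefaultsModeConfig({
        region: "us-east-1",  // a string or an async provider of the client region
        defaultsMode: "auto", // resolves to "in-region", "cross-region", or "standard"
      });
      return provider();
    }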
+ * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ region, defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/node_modules/@smithy/util-defaults-mode-node/package.json b/node_modules/@smithy/util-defaults-mode-node/package.json new file mode 100644 index 00000000..690becae --- /dev/null +++ b/node_modules/@smithy/util-defaults-mode-node/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-defaults-mode-node", + "version": "4.0.9", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-defaults-mode-node", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/config-resolver": "^4.1.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/util-defaults-mode-node", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-defaults-mode-node" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/LICENSE b/node_modules/@smithy/util-endpoints/LICENSE new file mode 100644 index 00000000..a1895fac --- /dev/null +++ b/node_modules/@smithy/util-endpoints/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/README.md b/node_modules/@smithy/util-endpoints/README.md new file mode 100644 index 00000000..85d60b31 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/README.md @@ -0,0 +1,10 @@ +# @smithy/util-endpoints + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-endpoints/latest.svg)](https://www.npmjs.com/package/@smithy/util-endpoints) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-endpoints.svg)](https://www.npmjs.com/package/@smithy/util-endpoints) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/cache/EndpointCache.js b/node_modules/@smithy/util-endpoints/dist-cjs/cache/EndpointCache.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/cache/EndpointCache.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/debug/debugId.js b/node_modules/@smithy/util-endpoints/dist-cjs/debug/debugId.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/debug/debugId.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/debug/index.js b/node_modules/@smithy/util-endpoints/dist-cjs/debug/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/debug/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/debug/toDebugString.js b/node_modules/@smithy/util-endpoints/dist-cjs/debug/toDebugString.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/debug/toDebugString.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/getEndpointUrlConfig.js b/node_modules/@smithy/util-endpoints/dist-cjs/getEndpointUrlConfig.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/getEndpointUrlConfig.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/index.js b/node_modules/@smithy/util-endpoints/dist-cjs/index.js new file mode 100644 index 00000000..3bc5a7dd --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/index.js @@ -0,0 +1,544 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, 
"__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + EndpointCache: () => EndpointCache, + EndpointError: () => EndpointError, + customEndpointFunctions: () => customEndpointFunctions, + isIpAddress: () => isIpAddress, + isValidHostLabel: () => isValidHostLabel, + resolveEndpoint: () => resolveEndpoint +}); +module.exports = __toCommonJS(src_exports); + +// src/cache/EndpointCache.ts +var EndpointCache = class { + /** + * @param [size] - desired average maximum capacity. A buffer of 10 additional keys will be allowed + * before keys are dropped. + * @param [params] - list of params to consider as part of the cache key. + * + * If the params list is not populated, no caching will happen. + * This may be out of order depending on how the object is created and arrives to this class. + */ + constructor({ size, params }) { + this.data = /* @__PURE__ */ new Map(); + this.parameters = []; + this.capacity = size ?? 50; + if (params) { + this.parameters = params; + } + } + static { + __name(this, "EndpointCache"); + } + /** + * @param endpointParams - query for endpoint. + * @param resolver - provider of the value if not present. + * @returns endpoint corresponding to the query. + */ + get(endpointParams, resolver) { + const key = this.hash(endpointParams); + if (key === false) { + return resolver(); + } + if (!this.data.has(key)) { + if (this.data.size > this.capacity + 10) { + const keys = this.data.keys(); + let i = 0; + while (true) { + const { value, done } = keys.next(); + this.data.delete(value); + if (done || ++i > 10) { + break; + } + } + } + this.data.set(key, resolver()); + } + return this.data.get(key); + } + size() { + return this.data.size; + } + /** + * @returns cache key or false if not cachable. + */ + hash(endpointParams) { + let buffer = ""; + const { parameters } = this; + if (parameters.length === 0) { + return false; + } + for (const param of parameters) { + const val = String(endpointParams[param] ?? 
""); + if (val.includes("|;")) { + return false; + } + buffer += val + "|;"; + } + return buffer; + } +}; + +// src/lib/isIpAddress.ts +var IP_V4_REGEX = new RegExp( + `^(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}$` +); +var isIpAddress = /* @__PURE__ */ __name((value) => IP_V4_REGEX.test(value) || value.startsWith("[") && value.endsWith("]"), "isIpAddress"); + +// src/lib/isValidHostLabel.ts +var VALID_HOST_LABEL_REGEX = new RegExp(`^(?!.*-$)(?!-)[a-zA-Z0-9-]{1,63}$`); +var isValidHostLabel = /* @__PURE__ */ __name((value, allowSubDomains = false) => { + if (!allowSubDomains) { + return VALID_HOST_LABEL_REGEX.test(value); + } + const labels = value.split("."); + for (const label of labels) { + if (!isValidHostLabel(label)) { + return false; + } + } + return true; +}, "isValidHostLabel"); + +// src/utils/customEndpointFunctions.ts +var customEndpointFunctions = {}; + +// src/debug/debugId.ts +var debugId = "endpoints"; + +// src/debug/toDebugString.ts +function toDebugString(input) { + if (typeof input !== "object" || input == null) { + return input; + } + if ("ref" in input) { + return `$${toDebugString(input.ref)}`; + } + if ("fn" in input) { + return `${input.fn}(${(input.argv || []).map(toDebugString).join(", ")})`; + } + return JSON.stringify(input, null, 2); +} +__name(toDebugString, "toDebugString"); + +// src/types/EndpointError.ts +var EndpointError = class extends Error { + static { + __name(this, "EndpointError"); + } + constructor(message) { + super(message); + this.name = "EndpointError"; + } +}; + +// src/lib/booleanEquals.ts +var booleanEquals = /* @__PURE__ */ __name((value1, value2) => value1 === value2, "booleanEquals"); + +// src/lib/getAttrPathList.ts +var getAttrPathList = /* @__PURE__ */ __name((path) => { + const parts = path.split("."); + const pathList = []; + for (const part of parts) { + const squareBracketIndex = part.indexOf("["); + if (squareBracketIndex !== -1) { + if (part.indexOf("]") !== part.length - 1) { + throw new EndpointError(`Path: '${path}' does not end with ']'`); + } + const arrayIndex = part.slice(squareBracketIndex + 1, -1); + if (Number.isNaN(parseInt(arrayIndex))) { + throw new EndpointError(`Invalid array index: '${arrayIndex}' in path: '${path}'`); + } + if (squareBracketIndex !== 0) { + pathList.push(part.slice(0, squareBracketIndex)); + } + pathList.push(arrayIndex); + } else { + pathList.push(part); + } + } + return pathList; +}, "getAttrPathList"); + +// src/lib/getAttr.ts +var getAttr = /* @__PURE__ */ __name((value, path) => getAttrPathList(path).reduce((acc, index) => { + if (typeof acc !== "object") { + throw new EndpointError(`Index '${index}' in '${path}' not found in '${JSON.stringify(value)}'`); + } else if (Array.isArray(acc)) { + return acc[parseInt(index)]; + } + return acc[index]; +}, value), "getAttr"); + +// src/lib/isSet.ts +var isSet = /* @__PURE__ */ __name((value) => value != null, "isSet"); + +// src/lib/not.ts +var not = /* @__PURE__ */ __name((value) => !value, "not"); + +// src/lib/parseURL.ts +var import_types3 = require("@smithy/types"); +var DEFAULT_PORTS = { + [import_types3.EndpointURLScheme.HTTP]: 80, + [import_types3.EndpointURLScheme.HTTPS]: 443 +}; +var parseURL = /* @__PURE__ */ __name((value) => { + const whatwgURL = (() => { + try { + if (value instanceof URL) { + return value; + } + if (typeof value === "object" && "hostname" in value) { + const { hostname: hostname2, port, protocol: protocol2 = "", path = "", query = {} } = value; + const url = new 
URL(`${protocol2}//${hostname2}${port ? `:${port}` : ""}${path}`); + url.search = Object.entries(query).map(([k, v]) => `${k}=${v}`).join("&"); + return url; + } + return new URL(value); + } catch (error) { + return null; + } + })(); + if (!whatwgURL) { + console.error(`Unable to parse ${JSON.stringify(value)} as a whatwg URL.`); + return null; + } + const urlString = whatwgURL.href; + const { host, hostname, pathname, protocol, search } = whatwgURL; + if (search) { + return null; + } + const scheme = protocol.slice(0, -1); + if (!Object.values(import_types3.EndpointURLScheme).includes(scheme)) { + return null; + } + const isIp = isIpAddress(hostname); + const inputContainsDefaultPort = urlString.includes(`${host}:${DEFAULT_PORTS[scheme]}`) || typeof value === "string" && value.includes(`${host}:${DEFAULT_PORTS[scheme]}`); + const authority = `${host}${inputContainsDefaultPort ? `:${DEFAULT_PORTS[scheme]}` : ``}`; + return { + scheme, + authority, + path: pathname, + normalizedPath: pathname.endsWith("/") ? pathname : `${pathname}/`, + isIp + }; +}, "parseURL"); + +// src/lib/stringEquals.ts +var stringEquals = /* @__PURE__ */ __name((value1, value2) => value1 === value2, "stringEquals"); + +// src/lib/substring.ts +var substring = /* @__PURE__ */ __name((input, start, stop, reverse) => { + if (start >= stop || input.length < stop) { + return null; + } + if (!reverse) { + return input.substring(start, stop); + } + return input.substring(input.length - stop, input.length - start); +}, "substring"); + +// src/lib/uriEncode.ts +var uriEncode = /* @__PURE__ */ __name((value) => encodeURIComponent(value).replace(/[!*'()]/g, (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`), "uriEncode"); + +// src/utils/endpointFunctions.ts +var endpointFunctions = { + booleanEquals, + getAttr, + isSet, + isValidHostLabel, + not, + parseURL, + stringEquals, + substring, + uriEncode +}; + +// src/utils/evaluateTemplate.ts +var evaluateTemplate = /* @__PURE__ */ __name((template, options) => { + const evaluatedTemplateArr = []; + const templateContext = { + ...options.endpointParams, + ...options.referenceRecord + }; + let currentIndex = 0; + while (currentIndex < template.length) { + const openingBraceIndex = template.indexOf("{", currentIndex); + if (openingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(currentIndex)); + break; + } + evaluatedTemplateArr.push(template.slice(currentIndex, openingBraceIndex)); + const closingBraceIndex = template.indexOf("}", openingBraceIndex); + if (closingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(openingBraceIndex)); + break; + } + if (template[openingBraceIndex + 1] === "{" && template[closingBraceIndex + 1] === "}") { + evaluatedTemplateArr.push(template.slice(openingBraceIndex + 1, closingBraceIndex)); + currentIndex = closingBraceIndex + 2; + } + const parameterName = template.substring(openingBraceIndex + 1, closingBraceIndex); + if (parameterName.includes("#")) { + const [refName, attrName] = parameterName.split("#"); + evaluatedTemplateArr.push(getAttr(templateContext[refName], attrName)); + } else { + evaluatedTemplateArr.push(templateContext[parameterName]); + } + currentIndex = closingBraceIndex + 1; + } + return evaluatedTemplateArr.join(""); +}, "evaluateTemplate"); + +// src/utils/getReferenceValue.ts +var getReferenceValue = /* @__PURE__ */ __name(({ ref }, options) => { + const referenceRecord = { + ...options.endpointParams, + ...options.referenceRecord + }; + return referenceRecord[ref]; +}, "getReferenceValue"); + 
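+// The evaluators below are mutually recursive: evaluateExpression delegates
+// { fn: ... } nodes to callFunction, which evaluates each argument via
+// evaluateExpression again, so nested rule expressions resolve depth-first.
+// For example, a condition such as
+//   { fn: "isSet", argv: [{ ref: "Endpoint" }] }
+// first resolves the { ref } argument against the endpoint params and
+// reference record, then invokes endpointFunctions.isSet on the result.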
+// src/utils/evaluateExpression.ts +var evaluateExpression = /* @__PURE__ */ __name((obj, keyName, options) => { + if (typeof obj === "string") { + return evaluateTemplate(obj, options); + } else if (obj["fn"]) { + return callFunction(obj, options); + } else if (obj["ref"]) { + return getReferenceValue(obj, options); + } + throw new EndpointError(`'${keyName}': ${String(obj)} is not a string, function or reference.`); +}, "evaluateExpression"); + +// src/utils/callFunction.ts +var callFunction = /* @__PURE__ */ __name(({ fn, argv }, options) => { + const evaluatedArgs = argv.map( + (arg) => ["boolean", "number"].includes(typeof arg) ? arg : evaluateExpression(arg, "arg", options) + ); + const fnSegments = fn.split("."); + if (fnSegments[0] in customEndpointFunctions && fnSegments[1] != null) { + return customEndpointFunctions[fnSegments[0]][fnSegments[1]](...evaluatedArgs); + } + return endpointFunctions[fn](...evaluatedArgs); +}, "callFunction"); + +// src/utils/evaluateCondition.ts +var evaluateCondition = /* @__PURE__ */ __name(({ assign, ...fnArgs }, options) => { + if (assign && assign in options.referenceRecord) { + throw new EndpointError(`'${assign}' is already defined in Reference Record.`); + } + const value = callFunction(fnArgs, options); + options.logger?.debug?.(`${debugId} evaluateCondition: ${toDebugString(fnArgs)} = ${toDebugString(value)}`); + return { + result: value === "" ? true : !!value, + ...assign != null && { toAssign: { name: assign, value } } + }; +}, "evaluateCondition"); + +// src/utils/evaluateConditions.ts +var evaluateConditions = /* @__PURE__ */ __name((conditions = [], options) => { + const conditionsReferenceRecord = {}; + for (const condition of conditions) { + const { result, toAssign } = evaluateCondition(condition, { + ...options, + referenceRecord: { + ...options.referenceRecord, + ...conditionsReferenceRecord + } + }); + if (!result) { + return { result }; + } + if (toAssign) { + conditionsReferenceRecord[toAssign.name] = toAssign.value; + options.logger?.debug?.(`${debugId} assign: ${toAssign.name} := ${toDebugString(toAssign.value)}`); + } + } + return { result: true, referenceRecord: conditionsReferenceRecord }; +}, "evaluateConditions"); + +// src/utils/getEndpointHeaders.ts +var getEndpointHeaders = /* @__PURE__ */ __name((headers, options) => Object.entries(headers).reduce( + (acc, [headerKey, headerVal]) => ({ + ...acc, + [headerKey]: headerVal.map((headerValEntry) => { + const processedExpr = evaluateExpression(headerValEntry, "Header value entry", options); + if (typeof processedExpr !== "string") { + throw new EndpointError(`Header '${headerKey}' value '${processedExpr}' is not a string`); + } + return processedExpr; + }) + }), + {} +), "getEndpointHeaders"); + +// src/utils/getEndpointProperty.ts +var getEndpointProperty = /* @__PURE__ */ __name((property, options) => { + if (Array.isArray(property)) { + return property.map((propertyEntry) => getEndpointProperty(propertyEntry, options)); + } + switch (typeof property) { + case "string": + return evaluateTemplate(property, options); + case "object": + if (property === null) { + throw new EndpointError(`Unexpected endpoint property: ${property}`); + } + return getEndpointProperties(property, options); + case "boolean": + return property; + default: + throw new EndpointError(`Unexpected endpoint property type: ${typeof property}`); + } +}, "getEndpointProperty"); + +// src/utils/getEndpointProperties.ts +var getEndpointProperties = /* @__PURE__ */ __name((properties, options) => 
Object.entries(properties).reduce( + (acc, [propertyKey, propertyVal]) => ({ + ...acc, + [propertyKey]: getEndpointProperty(propertyVal, options) + }), + {} +), "getEndpointProperties"); + +// src/utils/getEndpointUrl.ts +var getEndpointUrl = /* @__PURE__ */ __name((endpointUrl, options) => { + const expression = evaluateExpression(endpointUrl, "Endpoint URL", options); + if (typeof expression === "string") { + try { + return new URL(expression); + } catch (error) { + console.error(`Failed to construct URL with ${expression}`, error); + throw error; + } + } + throw new EndpointError(`Endpoint URL must be a string, got ${typeof expression}`); +}, "getEndpointUrl"); + +// src/utils/evaluateEndpointRule.ts +var evaluateEndpointRule = /* @__PURE__ */ __name((endpointRule, options) => { + const { conditions, endpoint } = endpointRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + const endpointRuleOptions = { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord } + }; + const { url, properties, headers } = endpoint; + options.logger?.debug?.(`${debugId} Resolving endpoint from template: ${toDebugString(endpoint)}`); + return { + ...headers != void 0 && { + headers: getEndpointHeaders(headers, endpointRuleOptions) + }, + ...properties != void 0 && { + properties: getEndpointProperties(properties, endpointRuleOptions) + }, + url: getEndpointUrl(url, endpointRuleOptions) + }; +}, "evaluateEndpointRule"); + +// src/utils/evaluateErrorRule.ts +var evaluateErrorRule = /* @__PURE__ */ __name((errorRule, options) => { + const { conditions, error } = errorRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + throw new EndpointError( + evaluateExpression(error, "Error", { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord } + }) + ); +}, "evaluateErrorRule"); + +// src/utils/evaluateTreeRule.ts +var evaluateTreeRule = /* @__PURE__ */ __name((treeRule, options) => { + const { conditions, rules } = treeRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + return evaluateRules(rules, { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord } + }); +}, "evaluateTreeRule"); + +// src/utils/evaluateRules.ts +var evaluateRules = /* @__PURE__ */ __name((rules, options) => { + for (const rule of rules) { + if (rule.type === "endpoint") { + const endpointOrUndefined = evaluateEndpointRule(rule, options); + if (endpointOrUndefined) { + return endpointOrUndefined; + } + } else if (rule.type === "error") { + evaluateErrorRule(rule, options); + } else if (rule.type === "tree") { + const endpointOrUndefined = evaluateTreeRule(rule, options); + if (endpointOrUndefined) { + return endpointOrUndefined; + } + } else { + throw new EndpointError(`Unknown endpoint rule: ${rule}`); + } + } + throw new EndpointError(`Rules evaluation failed`); +}, "evaluateRules"); + +// src/resolveEndpoint.ts +var resolveEndpoint = /* @__PURE__ */ __name((ruleSetObject, options) => { + const { endpointParams, logger } = options; + const { parameters, rules } = ruleSetObject; + options.logger?.debug?.(`${debugId} Initial EndpointParams: ${toDebugString(endpointParams)}`); + const paramsWithDefault = Object.entries(parameters).filter(([, v]) => v.default != null).map(([k, v]) => [k, v.default]); + if (paramsWithDefault.length > 0) { + for (const [paramKey, 
paramDefaultValue] of paramsWithDefault) { + endpointParams[paramKey] = endpointParams[paramKey] ?? paramDefaultValue; + } + } + const requiredParams = Object.entries(parameters).filter(([, v]) => v.required).map(([k]) => k); + for (const requiredParam of requiredParams) { + if (endpointParams[requiredParam] == null) { + throw new EndpointError(`Missing required parameter: '${requiredParam}'`); + } + } + const endpoint = evaluateRules(rules, { endpointParams, logger, referenceRecord: {} }); + options.logger?.debug?.(`${debugId} Resolved endpoint: ${toDebugString(endpoint)}`); + return endpoint; +}, "resolveEndpoint"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + EndpointCache, + isIpAddress, + isValidHostLabel, + customEndpointFunctions, + resolveEndpoint, + EndpointError +}); + diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/lib/booleanEquals.js b/node_modules/@smithy/util-endpoints/dist-cjs/lib/booleanEquals.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/lib/booleanEquals.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttr.js b/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttr.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttr.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttrPathList.js b/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttrPathList.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttrPathList.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/lib/index.js b/node_modules/@smithy/util-endpoints/dist-cjs/lib/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/lib/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/lib/isIpAddress.js b/node_modules/@smithy/util-endpoints/dist-cjs/lib/isIpAddress.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/lib/isIpAddress.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/lib/isSet.js b/node_modules/@smithy/util-endpoints/dist-cjs/lib/isSet.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/lib/isSet.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/lib/isValidHostLabel.js b/node_modules/@smithy/util-endpoints/dist-cjs/lib/isValidHostLabel.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/lib/isValidHostLabel.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/lib/not.js b/node_modules/@smithy/util-endpoints/dist-cjs/lib/not.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ 
b/node_modules/@smithy/util-endpoints/dist-cjs/lib/not.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/lib/parseURL.js b/node_modules/@smithy/util-endpoints/dist-cjs/lib/parseURL.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/lib/parseURL.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/lib/stringEquals.js b/node_modules/@smithy/util-endpoints/dist-cjs/lib/stringEquals.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/lib/stringEquals.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/lib/substring.js b/node_modules/@smithy/util-endpoints/dist-cjs/lib/substring.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/lib/substring.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/lib/uriEncode.js b/node_modules/@smithy/util-endpoints/dist-cjs/lib/uriEncode.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/lib/uriEncode.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/resolveEndpoint.js b/node_modules/@smithy/util-endpoints/dist-cjs/resolveEndpoint.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/resolveEndpoint.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointError.js b/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointError.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointError.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointFunctions.js b/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointFunctions.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointFunctions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointRuleObject.js b/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointRuleObject.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/types/ErrorRuleObject.js b/node_modules/@smithy/util-endpoints/dist-cjs/types/ErrorRuleObject.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/types/ErrorRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/types/RuleSetObject.js 
b/node_modules/@smithy/util-endpoints/dist-cjs/types/RuleSetObject.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/types/RuleSetObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/types/TreeRuleObject.js b/node_modules/@smithy/util-endpoints/dist-cjs/types/TreeRuleObject.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/types/TreeRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/types/index.js b/node_modules/@smithy/util-endpoints/dist-cjs/types/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/types/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/types/shared.js b/node_modules/@smithy/util-endpoints/dist-cjs/types/shared.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/types/shared.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/utils/callFunction.js b/node_modules/@smithy/util-endpoints/dist-cjs/utils/callFunction.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/utils/callFunction.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/utils/customEndpointFunctions.js b/node_modules/@smithy/util-endpoints/dist-cjs/utils/customEndpointFunctions.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/utils/customEndpointFunctions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/utils/endpointFunctions.js b/node_modules/@smithy/util-endpoints/dist-cjs/utils/endpointFunctions.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/utils/endpointFunctions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateCondition.js b/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateCondition.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateCondition.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateConditions.js b/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateConditions.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateConditions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateEndpointRule.js b/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateEndpointRule.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ 
b/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateEndpointRule.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateErrorRule.js b/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateErrorRule.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateErrorRule.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateExpression.js b/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateExpression.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateExpression.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateRules.js b/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateRules.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateRules.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTemplate.js b/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTemplate.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTemplate.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTreeRule.js b/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTreeRule.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTreeRule.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointHeaders.js b/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointHeaders.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointHeaders.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperties.js b/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperties.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperties.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperty.js b/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperty.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperty.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointUrl.js b/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointUrl.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointUrl.js @@ -0,0 +1 @@ +module.exports = 
require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/utils/getReferenceValue.js b/node_modules/@smithy/util-endpoints/dist-cjs/utils/getReferenceValue.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/utils/getReferenceValue.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-cjs/utils/index.js b/node_modules/@smithy/util-endpoints/dist-cjs/utils/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-cjs/utils/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-endpoints/dist-es/cache/EndpointCache.js b/node_modules/@smithy/util-endpoints/dist-es/cache/EndpointCache.js new file mode 100644 index 00000000..ddc7b0df --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/cache/EndpointCache.js @@ -0,0 +1,49 @@ +export class EndpointCache { + constructor({ size, params }) { + this.data = new Map(); + this.parameters = []; + this.capacity = size ?? 50; + if (params) { + this.parameters = params; + } + } + get(endpointParams, resolver) { + const key = this.hash(endpointParams); + if (key === false) { + return resolver(); + } + if (!this.data.has(key)) { + if (this.data.size > this.capacity + 10) { + const keys = this.data.keys(); + let i = 0; + while (true) { + const { value, done } = keys.next(); + this.data.delete(value); + if (done || ++i > 10) { + break; + } + } + } + this.data.set(key, resolver()); + } + return this.data.get(key); + } + size() { + return this.data.size; + } + hash(endpointParams) { + let buffer = ""; + const { parameters } = this; + if (parameters.length === 0) { + return false; + } + for (const param of parameters) { + const val = String(endpointParams[param] ?? 
""); + if (val.includes("|;")) { + return false; + } + buffer += val + "|;"; + } + return buffer; + } +} diff --git a/node_modules/@smithy/util-endpoints/dist-es/debug/debugId.js b/node_modules/@smithy/util-endpoints/dist-es/debug/debugId.js new file mode 100644 index 00000000..0d4e27e0 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/debug/debugId.js @@ -0,0 +1 @@ +export const debugId = "endpoints"; diff --git a/node_modules/@smithy/util-endpoints/dist-es/debug/index.js b/node_modules/@smithy/util-endpoints/dist-es/debug/index.js new file mode 100644 index 00000000..70d3b15c --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/debug/index.js @@ -0,0 +1,2 @@ +export * from "./debugId"; +export * from "./toDebugString"; diff --git a/node_modules/@smithy/util-endpoints/dist-es/debug/toDebugString.js b/node_modules/@smithy/util-endpoints/dist-es/debug/toDebugString.js new file mode 100644 index 00000000..33c8fcbb --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/debug/toDebugString.js @@ -0,0 +1,12 @@ +export function toDebugString(input) { + if (typeof input !== "object" || input == null) { + return input; + } + if ("ref" in input) { + return `$${toDebugString(input.ref)}`; + } + if ("fn" in input) { + return `${input.fn}(${(input.argv || []).map(toDebugString).join(", ")})`; + } + return JSON.stringify(input, null, 2); +} diff --git a/node_modules/@smithy/util-endpoints/dist-es/getEndpointUrlConfig.js b/node_modules/@smithy/util-endpoints/dist-es/getEndpointUrlConfig.js new file mode 100644 index 00000000..5069030f --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/getEndpointUrlConfig.js @@ -0,0 +1,21 @@ +const ENV_ENDPOINT_URL = "AWS_ENDPOINT_URL"; +const CONFIG_ENDPOINT_URL = "endpoint_url"; +export const getEndpointUrlConfig = (serviceId) => ({ + environmentVariableSelector: (env) => { + const serviceEndpointUrlSections = [ENV_ENDPOINT_URL, ...serviceId.split(" ").map((w) => w.toUpperCase())]; + const serviceEndpointUrl = env[serviceEndpointUrlSections.join("_")]; + if (serviceEndpointUrl) + return serviceEndpointUrl; + const endpointUrl = env[ENV_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + configFileSelector: (profile) => { + const endpointUrl = profile[CONFIG_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + default: undefined, +}); diff --git a/node_modules/@smithy/util-endpoints/dist-es/index.js b/node_modules/@smithy/util-endpoints/dist-es/index.js new file mode 100644 index 00000000..c39ed2b7 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/index.js @@ -0,0 +1,6 @@ +export * from "./cache/EndpointCache"; +export * from "./lib/isIpAddress"; +export * from "./lib/isValidHostLabel"; +export * from "./utils/customEndpointFunctions"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/node_modules/@smithy/util-endpoints/dist-es/lib/booleanEquals.js b/node_modules/@smithy/util-endpoints/dist-es/lib/booleanEquals.js new file mode 100644 index 00000000..730cbd3b --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/lib/booleanEquals.js @@ -0,0 +1 @@ +export const booleanEquals = (value1, value2) => value1 === value2; diff --git a/node_modules/@smithy/util-endpoints/dist-es/lib/getAttr.js b/node_modules/@smithy/util-endpoints/dist-es/lib/getAttr.js new file mode 100644 index 00000000..d77f1657 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/lib/getAttr.js @@ -0,0 +1,11 @@ +import { EndpointError } from 
"../types"; +import { getAttrPathList } from "./getAttrPathList"; +export const getAttr = (value, path) => getAttrPathList(path).reduce((acc, index) => { + if (typeof acc !== "object") { + throw new EndpointError(`Index '${index}' in '${path}' not found in '${JSON.stringify(value)}'`); + } + else if (Array.isArray(acc)) { + return acc[parseInt(index)]; + } + return acc[index]; +}, value); diff --git a/node_modules/@smithy/util-endpoints/dist-es/lib/getAttrPathList.js b/node_modules/@smithy/util-endpoints/dist-es/lib/getAttrPathList.js new file mode 100644 index 00000000..5817a2de --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/lib/getAttrPathList.js @@ -0,0 +1,25 @@ +import { EndpointError } from "../types"; +export const getAttrPathList = (path) => { + const parts = path.split("."); + const pathList = []; + for (const part of parts) { + const squareBracketIndex = part.indexOf("["); + if (squareBracketIndex !== -1) { + if (part.indexOf("]") !== part.length - 1) { + throw new EndpointError(`Path: '${path}' does not end with ']'`); + } + const arrayIndex = part.slice(squareBracketIndex + 1, -1); + if (Number.isNaN(parseInt(arrayIndex))) { + throw new EndpointError(`Invalid array index: '${arrayIndex}' in path: '${path}'`); + } + if (squareBracketIndex !== 0) { + pathList.push(part.slice(0, squareBracketIndex)); + } + pathList.push(arrayIndex); + } + else { + pathList.push(part); + } + } + return pathList; +}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/lib/index.js b/node_modules/@smithy/util-endpoints/dist-es/lib/index.js new file mode 100644 index 00000000..99a08449 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/lib/index.js @@ -0,0 +1,9 @@ +export * from "./booleanEquals"; +export * from "./getAttr"; +export * from "./isSet"; +export * from "./isValidHostLabel"; +export * from "./not"; +export * from "./parseURL"; +export * from "./stringEquals"; +export * from "./substring"; +export * from "./uriEncode"; diff --git a/node_modules/@smithy/util-endpoints/dist-es/lib/isIpAddress.js b/node_modules/@smithy/util-endpoints/dist-es/lib/isIpAddress.js new file mode 100644 index 00000000..20be5a3e --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/lib/isIpAddress.js @@ -0,0 +1,2 @@ +const IP_V4_REGEX = new RegExp(`^(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}$`); +export const isIpAddress = (value) => IP_V4_REGEX.test(value) || (value.startsWith("[") && value.endsWith("]")); diff --git a/node_modules/@smithy/util-endpoints/dist-es/lib/isSet.js b/node_modules/@smithy/util-endpoints/dist-es/lib/isSet.js new file mode 100644 index 00000000..83ccc7a5 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/lib/isSet.js @@ -0,0 +1 @@ +export const isSet = (value) => value != null; diff --git a/node_modules/@smithy/util-endpoints/dist-es/lib/isValidHostLabel.js b/node_modules/@smithy/util-endpoints/dist-es/lib/isValidHostLabel.js new file mode 100644 index 00000000..78585986 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/lib/isValidHostLabel.js @@ -0,0 +1,13 @@ +const VALID_HOST_LABEL_REGEX = new RegExp(`^(?!.*-$)(?!-)[a-zA-Z0-9-]{1,63}$`); +export const isValidHostLabel = (value, allowSubDomains = false) => { + if (!allowSubDomains) { + return VALID_HOST_LABEL_REGEX.test(value); + } + const labels = value.split("."); + for (const label of labels) { + if (!isValidHostLabel(label)) { + return false; + } + } + return true; +}; diff --git 
a/node_modules/@smithy/util-endpoints/dist-es/lib/not.js b/node_modules/@smithy/util-endpoints/dist-es/lib/not.js new file mode 100644 index 00000000..180e5dd3 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/lib/not.js @@ -0,0 +1 @@ +export const not = (value) => !value; diff --git a/node_modules/@smithy/util-endpoints/dist-es/lib/parseURL.js b/node_modules/@smithy/util-endpoints/dist-es/lib/parseURL.js new file mode 100644 index 00000000..79f9b24e --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/lib/parseURL.js @@ -0,0 +1,51 @@ +import { EndpointURLScheme } from "@smithy/types"; +import { isIpAddress } from "./isIpAddress"; +const DEFAULT_PORTS = { + [EndpointURLScheme.HTTP]: 80, + [EndpointURLScheme.HTTPS]: 443, +}; +export const parseURL = (value) => { + const whatwgURL = (() => { + try { + if (value instanceof URL) { + return value; + } + if (typeof value === "object" && "hostname" in value) { + const { hostname, port, protocol = "", path = "", query = {} } = value; + const url = new URL(`${protocol}//${hostname}${port ? `:${port}` : ""}${path}`); + url.search = Object.entries(query) + .map(([k, v]) => `${k}=${v}`) + .join("&"); + return url; + } + return new URL(value); + } + catch (error) { + return null; + } + })(); + if (!whatwgURL) { + console.error(`Unable to parse ${JSON.stringify(value)} as a whatwg URL.`); + return null; + } + const urlString = whatwgURL.href; + const { host, hostname, pathname, protocol, search } = whatwgURL; + if (search) { + return null; + } + const scheme = protocol.slice(0, -1); + if (!Object.values(EndpointURLScheme).includes(scheme)) { + return null; + } + const isIp = isIpAddress(hostname); + const inputContainsDefaultPort = urlString.includes(`${host}:${DEFAULT_PORTS[scheme]}`) || + (typeof value === "string" && value.includes(`${host}:${DEFAULT_PORTS[scheme]}`)); + const authority = `${host}${inputContainsDefaultPort ? `:${DEFAULT_PORTS[scheme]}` : ``}`; + return { + scheme, + authority, + path: pathname, + normalizedPath: pathname.endsWith("/") ? 
pathname : `${pathname}/`, + isIp, + }; +}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/lib/stringEquals.js b/node_modules/@smithy/util-endpoints/dist-es/lib/stringEquals.js new file mode 100644 index 00000000..ee414269 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/lib/stringEquals.js @@ -0,0 +1 @@ +export const stringEquals = (value1, value2) => value1 === value2; diff --git a/node_modules/@smithy/util-endpoints/dist-es/lib/substring.js b/node_modules/@smithy/util-endpoints/dist-es/lib/substring.js new file mode 100644 index 00000000..942dde4d --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/lib/substring.js @@ -0,0 +1,9 @@ +export const substring = (input, start, stop, reverse) => { + if (start >= stop || input.length < stop) { + return null; + } + if (!reverse) { + return input.substring(start, stop); + } + return input.substring(input.length - stop, input.length - start); +}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/lib/uriEncode.js b/node_modules/@smithy/util-endpoints/dist-es/lib/uriEncode.js new file mode 100644 index 00000000..ae226dc7 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/lib/uriEncode.js @@ -0,0 +1 @@ +export const uriEncode = (value) => encodeURIComponent(value).replace(/[!*'()]/g, (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`); diff --git a/node_modules/@smithy/util-endpoints/dist-es/resolveEndpoint.js b/node_modules/@smithy/util-endpoints/dist-es/resolveEndpoint.js new file mode 100644 index 00000000..ac12096b --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/resolveEndpoint.js @@ -0,0 +1,27 @@ +import { debugId, toDebugString } from "./debug"; +import { EndpointError } from "./types"; +import { evaluateRules } from "./utils"; +export const resolveEndpoint = (ruleSetObject, options) => { + const { endpointParams, logger } = options; + const { parameters, rules } = ruleSetObject; + options.logger?.debug?.(`${debugId} Initial EndpointParams: ${toDebugString(endpointParams)}`); + const paramsWithDefault = Object.entries(parameters) + .filter(([, v]) => v.default != null) + .map(([k, v]) => [k, v.default]); + if (paramsWithDefault.length > 0) { + for (const [paramKey, paramDefaultValue] of paramsWithDefault) { + endpointParams[paramKey] = endpointParams[paramKey] ?? 
paramDefaultValue; + } + } + const requiredParams = Object.entries(parameters) + .filter(([, v]) => v.required) + .map(([k]) => k); + for (const requiredParam of requiredParams) { + if (endpointParams[requiredParam] == null) { + throw new EndpointError(`Missing required parameter: '${requiredParam}'`); + } + } + const endpoint = evaluateRules(rules, { endpointParams, logger, referenceRecord: {} }); + options.logger?.debug?.(`${debugId} Resolved endpoint: ${toDebugString(endpoint)}`); + return endpoint; +}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/types/EndpointError.js b/node_modules/@smithy/util-endpoints/dist-es/types/EndpointError.js new file mode 100644 index 00000000..1ce597d7 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/types/EndpointError.js @@ -0,0 +1,6 @@ +export class EndpointError extends Error { + constructor(message) { + super(message); + this.name = "EndpointError"; + } +} diff --git a/node_modules/@smithy/util-endpoints/dist-es/types/EndpointFunctions.js b/node_modules/@smithy/util-endpoints/dist-es/types/EndpointFunctions.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/types/EndpointFunctions.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/types/EndpointRuleObject.js b/node_modules/@smithy/util-endpoints/dist-es/types/EndpointRuleObject.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/types/EndpointRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/types/ErrorRuleObject.js b/node_modules/@smithy/util-endpoints/dist-es/types/ErrorRuleObject.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/types/ErrorRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/types/RuleSetObject.js b/node_modules/@smithy/util-endpoints/dist-es/types/RuleSetObject.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/types/RuleSetObject.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/types/TreeRuleObject.js b/node_modules/@smithy/util-endpoints/dist-es/types/TreeRuleObject.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/types/TreeRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/types/index.js b/node_modules/@smithy/util-endpoints/dist-es/types/index.js new file mode 100644 index 00000000..a49f9840 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/types/index.js @@ -0,0 +1,7 @@ +export * from "./EndpointError"; +export * from "./EndpointFunctions"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/node_modules/@smithy/util-endpoints/dist-es/types/shared.js b/node_modules/@smithy/util-endpoints/dist-es/types/shared.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/types/shared.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/utils/callFunction.js b/node_modules/@smithy/util-endpoints/dist-es/utils/callFunction.js new file mode 100644 index 00000000..bf0747a3 --- /dev/null +++ 
b/node_modules/@smithy/util-endpoints/dist-es/utils/callFunction.js @@ -0,0 +1,11 @@ +import { customEndpointFunctions } from "./customEndpointFunctions"; +import { endpointFunctions } from "./endpointFunctions"; +import { evaluateExpression } from "./evaluateExpression"; +export const callFunction = ({ fn, argv }, options) => { + const evaluatedArgs = argv.map((arg) => ["boolean", "number"].includes(typeof arg) ? arg : evaluateExpression(arg, "arg", options)); + const fnSegments = fn.split("."); + if (fnSegments[0] in customEndpointFunctions && fnSegments[1] != null) { + return customEndpointFunctions[fnSegments[0]][fnSegments[1]](...evaluatedArgs); + } + return endpointFunctions[fn](...evaluatedArgs); +}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/utils/customEndpointFunctions.js b/node_modules/@smithy/util-endpoints/dist-es/utils/customEndpointFunctions.js new file mode 100644 index 00000000..0c26493a --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/utils/customEndpointFunctions.js @@ -0,0 +1 @@ +export const customEndpointFunctions = {}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/utils/endpointFunctions.js b/node_modules/@smithy/util-endpoints/dist-es/utils/endpointFunctions.js new file mode 100644 index 00000000..e2215ff5 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/utils/endpointFunctions.js @@ -0,0 +1,12 @@ +import { booleanEquals, getAttr, isSet, isValidHostLabel, not, parseURL, stringEquals, substring, uriEncode, } from "../lib"; +export const endpointFunctions = { + booleanEquals, + getAttr, + isSet, + isValidHostLabel, + not, + parseURL, + stringEquals, + substring, + uriEncode, +}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateCondition.js b/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateCondition.js new file mode 100644 index 00000000..8e84f087 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateCondition.js @@ -0,0 +1,14 @@ +import { debugId, toDebugString } from "../debug"; +import { EndpointError } from "../types"; +import { callFunction } from "./callFunction"; +export const evaluateCondition = ({ assign, ...fnArgs }, options) => { + if (assign && assign in options.referenceRecord) { + throw new EndpointError(`'${assign}' is already defined in Reference Record.`); + } + const value = callFunction(fnArgs, options); + options.logger?.debug?.(`${debugId} evaluateCondition: ${toDebugString(fnArgs)} = ${toDebugString(value)}`); + return { + result: value === "" ? 
true : !!value, + ...(assign != null && { toAssign: { name: assign, value } }), + }; +}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateConditions.js b/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateConditions.js new file mode 100644 index 00000000..55420761 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateConditions.js @@ -0,0 +1,22 @@ +import { debugId, toDebugString } from "../debug"; +import { evaluateCondition } from "./evaluateCondition"; +export const evaluateConditions = (conditions = [], options) => { + const conditionsReferenceRecord = {}; + for (const condition of conditions) { + const { result, toAssign } = evaluateCondition(condition, { + ...options, + referenceRecord: { + ...options.referenceRecord, + ...conditionsReferenceRecord, + }, + }); + if (!result) { + return { result }; + } + if (toAssign) { + conditionsReferenceRecord[toAssign.name] = toAssign.value; + options.logger?.debug?.(`${debugId} assign: ${toAssign.name} := ${toDebugString(toAssign.value)}`); + } + } + return { result: true, referenceRecord: conditionsReferenceRecord }; +}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateEndpointRule.js b/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateEndpointRule.js new file mode 100644 index 00000000..ba6307b1 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateEndpointRule.js @@ -0,0 +1,27 @@ +import { debugId, toDebugString } from "../debug"; +import { evaluateConditions } from "./evaluateConditions"; +import { getEndpointHeaders } from "./getEndpointHeaders"; +import { getEndpointProperties } from "./getEndpointProperties"; +import { getEndpointUrl } from "./getEndpointUrl"; +export const evaluateEndpointRule = (endpointRule, options) => { + const { conditions, endpoint } = endpointRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + const endpointRuleOptions = { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + }; + const { url, properties, headers } = endpoint; + options.logger?.debug?.(`${debugId} Resolving endpoint from template: ${toDebugString(endpoint)}`); + return { + ...(headers != undefined && { + headers: getEndpointHeaders(headers, endpointRuleOptions), + }), + ...(properties != undefined && { + properties: getEndpointProperties(properties, endpointRuleOptions), + }), + url: getEndpointUrl(url, endpointRuleOptions), + }; +}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateErrorRule.js b/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateErrorRule.js new file mode 100644 index 00000000..1a578604 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateErrorRule.js @@ -0,0 +1,14 @@ +import { EndpointError } from "../types"; +import { evaluateConditions } from "./evaluateConditions"; +import { evaluateExpression } from "./evaluateExpression"; +export const evaluateErrorRule = (errorRule, options) => { + const { conditions, error } = errorRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + throw new EndpointError(evaluateExpression(error, "Error", { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + })); +}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateExpression.js b/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateExpression.js new file mode 100644 index 
00000000..7f69658e --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateExpression.js @@ -0,0 +1,16 @@ +import { EndpointError } from "../types"; +import { callFunction } from "./callFunction"; +import { evaluateTemplate } from "./evaluateTemplate"; +import { getReferenceValue } from "./getReferenceValue"; +export const evaluateExpression = (obj, keyName, options) => { + if (typeof obj === "string") { + return evaluateTemplate(obj, options); + } + else if (obj["fn"]) { + return callFunction(obj, options); + } + else if (obj["ref"]) { + return getReferenceValue(obj, options); + } + throw new EndpointError(`'${keyName}': ${String(obj)} is not a string, function or reference.`); +}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateRules.js b/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateRules.js new file mode 100644 index 00000000..58a40a08 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateRules.js @@ -0,0 +1,27 @@ +import { EndpointError } from "../types"; +import { evaluateEndpointRule } from "./evaluateEndpointRule"; +import { evaluateErrorRule } from "./evaluateErrorRule"; +import { evaluateTreeRule } from "./evaluateTreeRule"; +export const evaluateRules = (rules, options) => { + for (const rule of rules) { + if (rule.type === "endpoint") { + const endpointOrUndefined = evaluateEndpointRule(rule, options); + if (endpointOrUndefined) { + return endpointOrUndefined; + } + } + else if (rule.type === "error") { + evaluateErrorRule(rule, options); + } + else if (rule.type === "tree") { + const endpointOrUndefined = evaluateTreeRule(rule, options); + if (endpointOrUndefined) { + return endpointOrUndefined; + } + } + else { + throw new EndpointError(`Unknown endpoint rule: ${rule}`); + } + } + throw new EndpointError(`Rules evaluation failed`); +}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTemplate.js b/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTemplate.js new file mode 100644 index 00000000..70058091 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTemplate.js @@ -0,0 +1,36 @@ +import { getAttr } from "../lib"; +export const evaluateTemplate = (template, options) => { + const evaluatedTemplateArr = []; + const templateContext = { + ...options.endpointParams, + ...options.referenceRecord, + }; + let currentIndex = 0; + while (currentIndex < template.length) { + const openingBraceIndex = template.indexOf("{", currentIndex); + if (openingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(currentIndex)); + break; + } + evaluatedTemplateArr.push(template.slice(currentIndex, openingBraceIndex)); + const closingBraceIndex = template.indexOf("}", openingBraceIndex); + if (closingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(openingBraceIndex)); + break; + } + if (template[openingBraceIndex + 1] === "{" && template[closingBraceIndex + 1] === "}") { + evaluatedTemplateArr.push(template.slice(openingBraceIndex + 1, closingBraceIndex)); + currentIndex = closingBraceIndex + 2; + } + const parameterName = template.substring(openingBraceIndex + 1, closingBraceIndex); + if (parameterName.includes("#")) { + const [refName, attrName] = parameterName.split("#"); + evaluatedTemplateArr.push(getAttr(templateContext[refName], attrName)); + } + else { + evaluatedTemplateArr.push(templateContext[parameterName]); + } + currentIndex = closingBraceIndex + 1; + } + return evaluatedTemplateArr.join(""); +}; diff --git 
a/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTreeRule.js b/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTreeRule.js new file mode 100644 index 00000000..427c1fa9 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTreeRule.js @@ -0,0 +1,13 @@ +import { evaluateConditions } from "./evaluateConditions"; +import { evaluateRules } from "./evaluateRules"; +export const evaluateTreeRule = (treeRule, options) => { + const { conditions, rules } = treeRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + return evaluateRules(rules, { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + }); +}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointHeaders.js b/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointHeaders.js new file mode 100644 index 00000000..f94cf553 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointHeaders.js @@ -0,0 +1,12 @@ +import { EndpointError } from "../types"; +import { evaluateExpression } from "./evaluateExpression"; +export const getEndpointHeaders = (headers, options) => Object.entries(headers).reduce((acc, [headerKey, headerVal]) => ({ + ...acc, + [headerKey]: headerVal.map((headerValEntry) => { + const processedExpr = evaluateExpression(headerValEntry, "Header value entry", options); + if (typeof processedExpr !== "string") { + throw new EndpointError(`Header '${headerKey}' value '${processedExpr}' is not a string`); + } + return processedExpr; + }), +}), {}); diff --git a/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperties.js b/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperties.js new file mode 100644 index 00000000..e7afe888 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperties.js @@ -0,0 +1,5 @@ +import { getEndpointProperty } from "./getEndpointProperty"; +export const getEndpointProperties = (properties, options) => Object.entries(properties).reduce((acc, [propertyKey, propertyVal]) => ({ + ...acc, + [propertyKey]: getEndpointProperty(propertyVal, options), +}), {}); diff --git a/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperty.js b/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperty.js new file mode 100644 index 00000000..06009699 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperty.js @@ -0,0 +1,21 @@ +import { EndpointError } from "../types"; +import { evaluateTemplate } from "./evaluateTemplate"; +import { getEndpointProperties } from "./getEndpointProperties"; +export const getEndpointProperty = (property, options) => { + if (Array.isArray(property)) { + return property.map((propertyEntry) => getEndpointProperty(propertyEntry, options)); + } + switch (typeof property) { + case "string": + return evaluateTemplate(property, options); + case "object": + if (property === null) { + throw new EndpointError(`Unexpected endpoint property: ${property}`); + } + return getEndpointProperties(property, options); + case "boolean": + return property; + default: + throw new EndpointError(`Unexpected endpoint property type: ${typeof property}`); + } +}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointUrl.js b/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointUrl.js new file mode 100644 index 00000000..8f1301e2 --- /dev/null +++ 
b/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointUrl.js @@ -0,0 +1,15 @@ +import { EndpointError } from "../types"; +import { evaluateExpression } from "./evaluateExpression"; +export const getEndpointUrl = (endpointUrl, options) => { + const expression = evaluateExpression(endpointUrl, "Endpoint URL", options); + if (typeof expression === "string") { + try { + return new URL(expression); + } + catch (error) { + console.error(`Failed to construct URL with ${expression}`, error); + throw error; + } + } + throw new EndpointError(`Endpoint URL must be a string, got ${typeof expression}`); +}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/utils/getReferenceValue.js b/node_modules/@smithy/util-endpoints/dist-es/utils/getReferenceValue.js new file mode 100644 index 00000000..759f4d40 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/utils/getReferenceValue.js @@ -0,0 +1,7 @@ +export const getReferenceValue = ({ ref }, options) => { + const referenceRecord = { + ...options.endpointParams, + ...options.referenceRecord, + }; + return referenceRecord[ref]; +}; diff --git a/node_modules/@smithy/util-endpoints/dist-es/utils/index.js b/node_modules/@smithy/util-endpoints/dist-es/utils/index.js new file mode 100644 index 00000000..b571d021 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-es/utils/index.js @@ -0,0 +1,2 @@ +export * from "./customEndpointFunctions"; +export * from "./evaluateRules"; diff --git a/node_modules/@smithy/util-endpoints/dist-types/cache/EndpointCache.d.ts b/node_modules/@smithy/util-endpoints/dist-types/cache/EndpointCache.d.ts new file mode 100644 index 00000000..19a338fd --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/cache/EndpointCache.d.ts @@ -0,0 +1,34 @@ +import type { EndpointParams, EndpointV2 } from "@smithy/types"; +/** + * @internal + * + * Cache for endpoint ruleSet resolution. + */ +export declare class EndpointCache { + private capacity; + private data; + private parameters; + /** + * @param [size] - desired average maximum capacity. A buffer of 10 additional keys will be allowed + * before keys are dropped. + * @param [params] - list of params to consider as part of the cache key. + * + * If the params list is not populated, no caching will happen. + * This may be out of order depending on how the object is created and arrives to this class. + */ + constructor({ size, params }: { + size?: number; + params?: string[]; + }); + /** + * @param endpointParams - query for endpoint. + * @param resolver - provider of the value if not present. + * @returns endpoint corresponding to the query. + */ + get(endpointParams: EndpointParams, resolver: () => EndpointV2): EndpointV2; + size(): number; + /** + * @returns cache key or false if not cachable. 
+ */ + private hash; +} diff --git a/node_modules/@smithy/util-endpoints/dist-types/debug/debugId.d.ts b/node_modules/@smithy/util-endpoints/dist-types/debug/debugId.d.ts new file mode 100644 index 00000000..d39f408f --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/debug/debugId.d.ts @@ -0,0 +1 @@ +export declare const debugId = "endpoints"; diff --git a/node_modules/@smithy/util-endpoints/dist-types/debug/index.d.ts b/node_modules/@smithy/util-endpoints/dist-types/debug/index.d.ts new file mode 100644 index 00000000..70d3b15c --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/debug/index.d.ts @@ -0,0 +1,2 @@ +export * from "./debugId"; +export * from "./toDebugString"; diff --git a/node_modules/@smithy/util-endpoints/dist-types/debug/toDebugString.d.ts b/node_modules/@smithy/util-endpoints/dist-types/debug/toDebugString.d.ts new file mode 100644 index 00000000..6bf1d3a1 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/debug/toDebugString.d.ts @@ -0,0 +1,9 @@ +import { EndpointParameters, EndpointV2 } from "@smithy/types"; +import { GetAttrValue } from "../lib"; +import { EndpointObject, FunctionObject, FunctionReturn } from "../types"; +export declare function toDebugString(input: EndpointParameters): string; +export declare function toDebugString(input: EndpointV2): string; +export declare function toDebugString(input: GetAttrValue): string; +export declare function toDebugString(input: FunctionObject): string; +export declare function toDebugString(input: FunctionReturn): string; +export declare function toDebugString(input: EndpointObject): string; diff --git a/node_modules/@smithy/util-endpoints/dist-types/getEndpointUrlConfig.d.ts b/node_modules/@smithy/util-endpoints/dist-types/getEndpointUrlConfig.d.ts new file mode 100644 index 00000000..0971010f --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/getEndpointUrlConfig.d.ts @@ -0,0 +1,2 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const getEndpointUrlConfig: (serviceId: string) => LoadedConfigSelectors<string | undefined>; diff --git a/node_modules/@smithy/util-endpoints/dist-types/index.d.ts b/node_modules/@smithy/util-endpoints/dist-types/index.d.ts new file mode 100644 index 00000000..c39ed2b7 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/index.d.ts @@ -0,0 +1,6 @@ +export * from "./cache/EndpointCache"; +export * from "./lib/isIpAddress"; +export * from "./lib/isValidHostLabel"; +export * from "./utils/customEndpointFunctions"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/node_modules/@smithy/util-endpoints/dist-types/lib/booleanEquals.d.ts b/node_modules/@smithy/util-endpoints/dist-types/lib/booleanEquals.d.ts new file mode 100644 index 00000000..7eac5613 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/lib/booleanEquals.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates two boolean values value1 and value2 for equality and returns + * true if both values match.
+ */ +export declare const booleanEquals: (value1: boolean, value2: boolean) => boolean; diff --git a/node_modules/@smithy/util-endpoints/dist-types/lib/getAttr.d.ts b/node_modules/@smithy/util-endpoints/dist-types/lib/getAttr.d.ts new file mode 100644 index 00000000..a8088c5c --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/lib/getAttr.d.ts @@ -0,0 +1,7 @@ +export type GetAttrValue = string | boolean | { + [key: string]: GetAttrValue; +} | Array<GetAttrValue>; +/** + * Returns value corresponding to pathing string for an array or object. + */ +export declare const getAttr: (value: GetAttrValue, path: string) => GetAttrValue; diff --git a/node_modules/@smithy/util-endpoints/dist-types/lib/getAttrPathList.d.ts b/node_modules/@smithy/util-endpoints/dist-types/lib/getAttrPathList.d.ts new file mode 100644 index 00000000..e6c49797 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/lib/getAttrPathList.d.ts @@ -0,0 +1,4 @@ +/** + * Parses path as a getAttr expression, returning a list of strings. + */ +export declare const getAttrPathList: (path: string) => Array<string>; diff --git a/node_modules/@smithy/util-endpoints/dist-types/lib/index.d.ts b/node_modules/@smithy/util-endpoints/dist-types/lib/index.d.ts new file mode 100644 index 00000000..99a08449 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/lib/index.d.ts @@ -0,0 +1,9 @@ +export * from "./booleanEquals"; +export * from "./getAttr"; +export * from "./isSet"; +export * from "./isValidHostLabel"; +export * from "./not"; +export * from "./parseURL"; +export * from "./stringEquals"; +export * from "./substring"; +export * from "./uriEncode"; diff --git a/node_modules/@smithy/util-endpoints/dist-types/lib/isIpAddress.d.ts b/node_modules/@smithy/util-endpoints/dist-types/lib/isIpAddress.d.ts new file mode 100644 index 00000000..28aba976 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/lib/isIpAddress.d.ts @@ -0,0 +1,4 @@ +/** + * Validates if the provided value is an IP address. + */ +export declare const isIpAddress: (value: string) => boolean; diff --git a/node_modules/@smithy/util-endpoints/dist-types/lib/isSet.d.ts b/node_modules/@smithy/util-endpoints/dist-types/lib/isSet.d.ts new file mode 100644 index 00000000..7c74ec53 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/lib/isSet.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates whether a value is set (aka not null or undefined). + * Returns true if the value is set, otherwise returns false. + */ +export declare const isSet: (value: unknown) => boolean; diff --git a/node_modules/@smithy/util-endpoints/dist-types/lib/isValidHostLabel.d.ts b/node_modules/@smithy/util-endpoints/dist-types/lib/isValidHostLabel.d.ts new file mode 100644 index 00000000..c05f9e98 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/lib/isValidHostLabel.d.ts @@ -0,0 +1,7 @@ +/** + * Evaluates whether one or more string values are valid host labels per RFC 1123. + * + * If allowSubDomains is true, then the provided value may be zero or more dotted + * subdomains which are each validated per RFC 1123.
+ */ +export declare const isValidHostLabel: (value: string, allowSubDomains?: boolean) => boolean; diff --git a/node_modules/@smithy/util-endpoints/dist-types/lib/not.d.ts b/node_modules/@smithy/util-endpoints/dist-types/lib/not.d.ts new file mode 100644 index 00000000..1e8e7284 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/lib/not.d.ts @@ -0,0 +1,5 @@ +/** + * Performs logical negation on the provided boolean value, + * returning the negated value. + */ +export declare const not: (value: boolean) => boolean; diff --git a/node_modules/@smithy/util-endpoints/dist-types/lib/parseURL.d.ts b/node_modules/@smithy/util-endpoints/dist-types/lib/parseURL.d.ts new file mode 100644 index 00000000..3e0dce38 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/lib/parseURL.d.ts @@ -0,0 +1,5 @@ +import { Endpoint, EndpointURL } from "@smithy/types"; +/** + * Parses a string, URL, or Endpoint into it’s Endpoint URL components. + */ +export declare const parseURL: (value: string | URL | Endpoint) => EndpointURL | null; diff --git a/node_modules/@smithy/util-endpoints/dist-types/lib/stringEquals.d.ts b/node_modules/@smithy/util-endpoints/dist-types/lib/stringEquals.d.ts new file mode 100644 index 00000000..bdfc98de --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/lib/stringEquals.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates two string values value1 and value2 for equality and returns + * true if both values match. + */ +export declare const stringEquals: (value1: string, value2: string) => boolean; diff --git a/node_modules/@smithy/util-endpoints/dist-types/lib/substring.d.ts b/node_modules/@smithy/util-endpoints/dist-types/lib/substring.d.ts new file mode 100644 index 00000000..5d700355 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/lib/substring.d.ts @@ -0,0 +1,7 @@ +/** + * Computes the substring of a given string, conditionally indexing from the end of the string. + * When the string is long enough to fully include the substring, return the substring. + * Otherwise, return None. The start index is inclusive and the stop index is exclusive. + * The length of the returned string will always be stop-start. + */ +export declare const substring: (input: string, start: number, stop: number, reverse: boolean) => string | null; diff --git a/node_modules/@smithy/util-endpoints/dist-types/lib/uriEncode.d.ts b/node_modules/@smithy/util-endpoints/dist-types/lib/uriEncode.d.ts new file mode 100644 index 00000000..c2a720c7 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/lib/uriEncode.d.ts @@ -0,0 +1,4 @@ +/** + * Performs percent-encoding per RFC3986 section 2.1 + */ +export declare const uriEncode: (value: string) => string; diff --git a/node_modules/@smithy/util-endpoints/dist-types/resolveEndpoint.d.ts b/node_modules/@smithy/util-endpoints/dist-types/resolveEndpoint.d.ts new file mode 100644 index 00000000..b02188b5 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/resolveEndpoint.d.ts @@ -0,0 +1,6 @@ +import { EndpointV2 } from "@smithy/types"; +import { EndpointResolverOptions, RuleSetObject } from "./types"; +/** + * Resolves an endpoint URL by processing the endpoints ruleset and options. 
+ */ +export declare const resolveEndpoint: (ruleSetObject: RuleSetObject, options: EndpointResolverOptions) => EndpointV2; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/cache/EndpointCache.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/cache/EndpointCache.d.ts new file mode 100644 index 00000000..9d622aec --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/cache/EndpointCache.d.ts @@ -0,0 +1,34 @@ +import { EndpointParams, EndpointV2 } from "@smithy/types"; +/** + * @internal + * + * Cache for endpoint ruleSet resolution. + */ +export declare class EndpointCache { + private capacity; + private data; + private parameters; + /** + * @param [size] - desired average maximum capacity. A buffer of 10 additional keys will be allowed + * before keys are dropped. + * @param [params] - list of params to consider as part of the cache key. + * + * If the params list is not populated, no caching will happen. + * This may be out of order depending on how the object is created and arrives to this class. + */ + constructor({ size, params }: { + size?: number; + params?: string[]; + }); + /** + * @param endpointParams - query for endpoint. + * @param resolver - provider of the value if not present. + * @returns endpoint corresponding to the query. + */ + get(endpointParams: EndpointParams, resolver: () => EndpointV2): EndpointV2; + size(): number; + /** + * @returns cache key or false if not cachable. + */ + private hash; +} diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/debugId.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/debugId.d.ts new file mode 100644 index 00000000..f674b8a6 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/debugId.d.ts @@ -0,0 +1 @@ +export declare const debugId = "endpoints"; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/index.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/index.d.ts new file mode 100644 index 00000000..1eb0bf4b --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/index.d.ts @@ -0,0 +1,2 @@ +export * from "./debugId"; +export * from "./toDebugString"; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/toDebugString.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/toDebugString.d.ts new file mode 100644 index 00000000..e295ca0d --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/toDebugString.d.ts @@ -0,0 +1,9 @@ +import { EndpointParameters, EndpointV2 } from "@smithy/types"; +import { GetAttrValue } from "../lib"; +import { EndpointObject, FunctionObject, FunctionReturn } from "../types"; +export declare function toDebugString(input: EndpointParameters): string; +export declare function toDebugString(input: EndpointV2): string; +export declare function toDebugString(input: GetAttrValue): string; +export declare function toDebugString(input: FunctionObject): string; +export declare function toDebugString(input: FunctionReturn): string; +export declare function toDebugString(input: EndpointObject): string; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/getEndpointUrlConfig.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/getEndpointUrlConfig.d.ts new file mode 100644 index 00000000..7b9d0689 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/getEndpointUrlConfig.d.ts @@ -0,0 +1,2 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; 
+export declare const getEndpointUrlConfig: (serviceId: string) => LoadedConfigSelectors<string | undefined>; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..7b367cff --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/index.d.ts @@ -0,0 +1,6 @@ +export * from "./cache/EndpointCache"; +export * from "./lib/isIpAddress"; +export * from "./lib/isValidHostLabel"; +export * from "./utils/customEndpointFunctions"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/booleanEquals.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/booleanEquals.d.ts new file mode 100644 index 00000000..7aec0018 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/booleanEquals.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates two boolean values value1 and value2 for equality and returns + * true if both values match. + */ +export declare const booleanEquals: (value1: boolean, value2: boolean) => boolean; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttr.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttr.d.ts new file mode 100644 index 00000000..e2f5b431 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttr.d.ts @@ -0,0 +1,7 @@ +export type GetAttrValue = string | boolean | { + [key: string]: GetAttrValue; +} | Array<GetAttrValue>; +/** + * Returns value corresponding to pathing string for an array or object. + */ +export declare const getAttr: (value: GetAttrValue, path: string) => GetAttrValue; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttrPathList.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttrPathList.d.ts new file mode 100644 index 00000000..93bbf31d --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttrPathList.d.ts @@ -0,0 +1,4 @@ +/** + * Parses path as a getAttr expression, returning a list of strings. + */ +export declare const getAttrPathList: (path: string) => Array<string>; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/index.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/index.d.ts new file mode 100644 index 00000000..a28ecaa2 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/index.d.ts @@ -0,0 +1,9 @@ +export * from "./booleanEquals"; +export * from "./getAttr"; +export * from "./isSet"; +export * from "./isValidHostLabel"; +export * from "./not"; +export * from "./parseURL"; +export * from "./stringEquals"; +export * from "./substring"; +export * from "./uriEncode"; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts new file mode 100644 index 00000000..9f378930 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts @@ -0,0 +1,4 @@ +/** + * Validates if the provided value is an IP address.
+ */ +export declare const isIpAddress: (value: string) => boolean; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isSet.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isSet.d.ts new file mode 100644 index 00000000..6b102dd0 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isSet.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates whether a value is set (aka not null or undefined). + * Returns true if the value is set, otherwise returns false. + */ +export declare const isSet: (value: unknown) => boolean; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isValidHostLabel.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isValidHostLabel.d.ts new file mode 100644 index 00000000..01f7eb9c --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isValidHostLabel.d.ts @@ -0,0 +1,7 @@ +/** + * Evaluates whether one or more string values are valid host labels per RFC 1123. + * + * If allowSubDomains is true, then the provided value may be zero or more dotted + * subdomains which are each validated per RFC 1123. + */ +export declare const isValidHostLabel: (value: string, allowSubDomains?: boolean) => boolean; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/not.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/not.d.ts new file mode 100644 index 00000000..b4e84ac6 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/not.d.ts @@ -0,0 +1,5 @@ +/** + * Performs logical negation on the provided boolean value, + * returning the negated value. + */ +export declare const not: (value: boolean) => boolean; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/parseURL.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/parseURL.d.ts new file mode 100644 index 00000000..0f540666 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/parseURL.d.ts @@ -0,0 +1,5 @@ +import { Endpoint, EndpointURL } from "@smithy/types"; +/** + * Parses a string, URL, or Endpoint into it’s Endpoint URL components. + */ +export declare const parseURL: (value: string | URL | Endpoint) => EndpointURL | null; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/stringEquals.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/stringEquals.d.ts new file mode 100644 index 00000000..9acb10c8 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/stringEquals.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates two string values value1 and value2 for equality and returns + * true if both values match. + */ +export declare const stringEquals: (value1: string, value2: string) => boolean; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/substring.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/substring.d.ts new file mode 100644 index 00000000..a99025cf --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/substring.d.ts @@ -0,0 +1,7 @@ +/** + * Computes the substring of a given string, conditionally indexing from the end of the string. + * When the string is long enough to fully include the substring, return the substring. + * Otherwise, return None. The start index is inclusive and the stop index is exclusive. + * The length of the returned string will always be stop-start. 
+ */ +export declare const substring: (input: string, start: number, stop: number, reverse: boolean) => string | null; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/uriEncode.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/uriEncode.d.ts new file mode 100644 index 00000000..acb75bba --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/uriEncode.d.ts @@ -0,0 +1,4 @@ +/** + * Performs percent-encoding per RFC3986 section 2.1 + */ +export declare const uriEncode: (value: string) => string; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts new file mode 100644 index 00000000..5469fa2a --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts @@ -0,0 +1,6 @@ +import { EndpointV2 } from "@smithy/types"; +import { EndpointResolverOptions, RuleSetObject } from "./types"; +/** + * Resolves an endpoint URL by processing the endpoints ruleset and options. + */ +export declare const resolveEndpoint: (ruleSetObject: RuleSetObject, options: EndpointResolverOptions) => EndpointV2; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts new file mode 100644 index 00000000..4f3c5382 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts @@ -0,0 +1,3 @@ +export declare class EndpointError extends Error { + constructor(message: string); +} diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointFunctions.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointFunctions.d.ts new file mode 100644 index 00000000..7b3cf42b --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointFunctions.d.ts @@ -0,0 +1,2 @@ +import { FunctionReturn } from "./shared"; +export type EndpointFunctions = Record<string, (...args: any[]) => FunctionReturn>; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts new file mode 100644 index 00000000..436001e3 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts @@ -0,0 +1,5 @@ +import { EndpointObject as __EndpointObject, EndpointObjectHeaders as __EndpointObjectHeaders, EndpointObjectProperties as __EndpointObjectProperties, EndpointRuleObject as __EndpointRuleObject } from "@smithy/types"; +export type EndpointObjectProperties = __EndpointObjectProperties; +export type EndpointObjectHeaders = __EndpointObjectHeaders; +export type EndpointObject = __EndpointObject; +export type EndpointRuleObject = __EndpointRuleObject; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts new file mode 100644 index 00000000..1540835a --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts @@ -0,0 +1,2 @@ +import { ErrorRuleObject as __ErrorRuleObject } from "@smithy/types"; +export type ErrorRuleObject = __ErrorRuleObject; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts new file mode 100644 index 00000000..227b269d --- /dev/null +++ 
b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts @@ -0,0 +1,4 @@ +import { DeprecatedObject as __DeprecatedObject, ParameterObject as __ParameterObject, RuleSetObject as __RuleSetObject } from "@smithy/types"; +export type DeprecatedObject = __DeprecatedObject; +export type ParameterObject = __ParameterObject; +export type RuleSetObject = __RuleSetObject; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts new file mode 100644 index 00000000..ecdb6b4c --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts @@ -0,0 +1,3 @@ +import { RuleSetRules as __RuleSetRules, TreeRuleObject as __TreeRuleObject } from "@smithy/types"; +export type RuleSetRules = __RuleSetRules; +export type TreeRuleObject = __TreeRuleObject; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/index.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/index.d.ts new file mode 100644 index 00000000..f89fb63d --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/index.d.ts @@ -0,0 +1,7 @@ +export * from "./EndpointError"; +export * from "./EndpointFunctions"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/shared.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/shared.d.ts new file mode 100644 index 00000000..052dcf3d --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/shared.d.ts @@ -0,0 +1,25 @@ +import { EndpointARN, EndpointPartition, Logger } from "@smithy/types"; +export type ReferenceObject = { + ref: string; +}; +export type FunctionObject = { + fn: string; + argv: FunctionArgv; +}; +export type FunctionArgv = Array<Expression | boolean | number>; +export type FunctionReturn = string | boolean | number | EndpointARN | EndpointPartition | { + [key: string]: FunctionReturn; +} | null; +export type ConditionObject = FunctionObject & { + assign?: string; +}; +export type Expression = string | ReferenceObject | FunctionObject; +export type EndpointParams = Record<string, string | boolean>; +export type EndpointResolverOptions = { + endpointParams: EndpointParams; + logger?: Logger; +}; +export type ReferenceRecord = Record<string, FunctionReturn>; +export type EvaluateOptions = EndpointResolverOptions & { + referenceRecord: ReferenceRecord; +}; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/callFunction.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/callFunction.d.ts new file mode 100644 index 00000000..bfdf5436 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/callFunction.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, FunctionObject, FunctionReturn } from "../types"; +export declare const callFunction: ({ fn, argv }: FunctionObject, options: EvaluateOptions) => FunctionReturn; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/customEndpointFunctions.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/customEndpointFunctions.d.ts new file mode 100644 index 00000000..1cd2240b --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/customEndpointFunctions.d.ts @@ -0,0 +1,4 @@ +import { EndpointFunctions } from "../types/EndpointFunctions"; +export declare const customEndpointFunctions: { 
+ [key: string]: EndpointFunctions; +}; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/endpointFunctions.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/endpointFunctions.d.ts new file mode 100644 index 00000000..cde57d19 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/endpointFunctions.d.ts @@ -0,0 +1,11 @@ +export declare const endpointFunctions: { + booleanEquals: (value1: boolean, value2: boolean) => boolean; + getAttr: (value: import("../lib").GetAttrValue, path: string) => import("../lib").GetAttrValue; + isSet: (value: unknown) => boolean; + isValidHostLabel: (value: string, allowSubDomains?: boolean) => boolean; + not: (value: boolean) => boolean; + parseURL: (value: string | URL | import("@smithy/types").Endpoint) => import("@smithy/types").EndpointURL | null; + stringEquals: (value1: string, value2: string) => boolean; + substring: (input: string, start: number, stop: number, reverse: boolean) => string | null; + uriEncode: (value: string) => string; +}; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateCondition.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateCondition.d.ts new file mode 100644 index 00000000..ba2c0bef --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateCondition.d.ts @@ -0,0 +1,8 @@ +import { ConditionObject, EvaluateOptions } from "../types"; +export declare const evaluateCondition: ({ assign, ...fnArgs }: ConditionObject, options: EvaluateOptions) => { + toAssign?: { + name: string; + value: import("../types").FunctionReturn; + } | undefined; + result: boolean; +}; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateConditions.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateConditions.d.ts new file mode 100644 index 00000000..a7fbc5f2 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateConditions.d.ts @@ -0,0 +1,8 @@ +import { ConditionObject, EvaluateOptions, FunctionReturn } from "../types"; +export declare const evaluateConditions: (conditions: ConditionObject[] | undefined, options: EvaluateOptions) => { + result: false; + referenceRecord?: undefined; +} | { + result: boolean; + referenceRecord: Record<string, FunctionReturn>; +}; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateEndpointRule.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateEndpointRule.d.ts new file mode 100644 index 00000000..32f23ffb --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateEndpointRule.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EndpointRuleObject, EvaluateOptions } from "../types"; +export declare const evaluateEndpointRule: (endpointRule: EndpointRuleObject, options: EvaluateOptions) => EndpointV2 | undefined; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateErrorRule.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateErrorRule.d.ts new file mode 100644 index 00000000..eef15e3f --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateErrorRule.d.ts @@ -0,0 +1,2 @@ +import { ErrorRuleObject, EvaluateOptions } from "../types"; +export declare const evaluateErrorRule: (errorRule: ErrorRuleObject, options: EvaluateOptions) => void; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateExpression.d.ts 
b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateExpression.d.ts new file mode 100644 index 00000000..8bbd358c --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateExpression.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, Expression } from "../types"; +export declare const evaluateExpression: (obj: Expression, keyName: string, options: EvaluateOptions) => import("../types").FunctionReturn; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateRules.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateRules.d.ts new file mode 100644 index 00000000..a37fe07e --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateRules.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EvaluateOptions } from "../types"; +export declare const evaluateRules: (rules: import("@smithy/types").RuleSetRules, options: EvaluateOptions) => EndpointV2; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTemplate.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTemplate.d.ts new file mode 100644 index 00000000..e6ae9c37 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTemplate.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions } from "../types"; +export declare const evaluateTemplate: (template: string, options: EvaluateOptions) => string; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTreeRule.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTreeRule.d.ts new file mode 100644 index 00000000..8518f7bd --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTreeRule.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EvaluateOptions, TreeRuleObject } from "../types"; +export declare const evaluateTreeRule: (treeRule: TreeRuleObject, options: EvaluateOptions) => EndpointV2 | undefined; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointHeaders.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointHeaders.d.ts new file mode 100644 index 00000000..2775159c --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointHeaders.d.ts @@ -0,0 +1,2 @@ +import { EndpointObjectHeaders, EvaluateOptions } from "../types"; +export declare const getEndpointHeaders: (headers: EndpointObjectHeaders, options: EvaluateOptions) => {}; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperties.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperties.d.ts new file mode 100644 index 00000000..944b39d4 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperties.d.ts @@ -0,0 +1,2 @@ +import { EndpointObjectProperties, EvaluateOptions } from "../types"; +export declare const getEndpointProperties: (properties: EndpointObjectProperties, options: EvaluateOptions) => {}; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperty.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperty.d.ts new file mode 100644 index 00000000..50023777 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperty.d.ts @@ -0,0 +1,3 @@ +import { EndpointObjectProperty } from "@smithy/types"; +import { EvaluateOptions } from 
"../types"; +export declare const getEndpointProperty: (property: EndpointObjectProperty, options: EvaluateOptions) => EndpointObjectProperty; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointUrl.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointUrl.d.ts new file mode 100644 index 00000000..9c93422e --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointUrl.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, Expression } from "../types"; +export declare const getEndpointUrl: (endpointUrl: Expression, options: EvaluateOptions) => URL; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getReferenceValue.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getReferenceValue.d.ts new file mode 100644 index 00000000..2ebfda3c --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getReferenceValue.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, ReferenceObject } from "../types"; +export declare const getReferenceValue: ({ ref }: ReferenceObject, options: EvaluateOptions) => import("../types").FunctionReturn; diff --git a/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/index.d.ts b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/index.d.ts new file mode 100644 index 00000000..bd481df4 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/index.d.ts @@ -0,0 +1,2 @@ +export * from "./customEndpointFunctions"; +export * from "./evaluateRules"; diff --git a/node_modules/@smithy/util-endpoints/dist-types/types/EndpointError.d.ts b/node_modules/@smithy/util-endpoints/dist-types/types/EndpointError.d.ts new file mode 100644 index 00000000..89132f21 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/types/EndpointError.d.ts @@ -0,0 +1,3 @@ +export declare class EndpointError extends Error { + constructor(message: string); +} diff --git a/node_modules/@smithy/util-endpoints/dist-types/types/EndpointFunctions.d.ts b/node_modules/@smithy/util-endpoints/dist-types/types/EndpointFunctions.d.ts new file mode 100644 index 00000000..33b1a0bd --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/types/EndpointFunctions.d.ts @@ -0,0 +1,2 @@ +import { FunctionReturn } from "./shared"; +export type EndpointFunctions = Record FunctionReturn>; diff --git a/node_modules/@smithy/util-endpoints/dist-types/types/EndpointRuleObject.d.ts b/node_modules/@smithy/util-endpoints/dist-types/types/EndpointRuleObject.d.ts new file mode 100644 index 00000000..d24545ff --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/types/EndpointRuleObject.d.ts @@ -0,0 +1,5 @@ +import { EndpointObject as __EndpointObject, EndpointObjectHeaders as __EndpointObjectHeaders, EndpointObjectProperties as __EndpointObjectProperties, EndpointRuleObject as __EndpointRuleObject } from "@smithy/types"; +export type EndpointObjectProperties = __EndpointObjectProperties; +export type EndpointObjectHeaders = __EndpointObjectHeaders; +export type EndpointObject = __EndpointObject; +export type EndpointRuleObject = __EndpointRuleObject; diff --git a/node_modules/@smithy/util-endpoints/dist-types/types/ErrorRuleObject.d.ts b/node_modules/@smithy/util-endpoints/dist-types/types/ErrorRuleObject.d.ts new file mode 100644 index 00000000..51fe1386 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/types/ErrorRuleObject.d.ts @@ -0,0 +1,2 @@ +import { ErrorRuleObject as __ErrorRuleObject } from "@smithy/types"; 
+export type ErrorRuleObject = __ErrorRuleObject; diff --git a/node_modules/@smithy/util-endpoints/dist-types/types/RuleSetObject.d.ts b/node_modules/@smithy/util-endpoints/dist-types/types/RuleSetObject.d.ts new file mode 100644 index 00000000..3335b80d --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/types/RuleSetObject.d.ts @@ -0,0 +1,4 @@ +import { DeprecatedObject as __DeprecatedObject, ParameterObject as __ParameterObject, RuleSetObject as __RuleSetObject } from "@smithy/types"; +export type DeprecatedObject = __DeprecatedObject; +export type ParameterObject = __ParameterObject; +export type RuleSetObject = __RuleSetObject; diff --git a/node_modules/@smithy/util-endpoints/dist-types/types/TreeRuleObject.d.ts b/node_modules/@smithy/util-endpoints/dist-types/types/TreeRuleObject.d.ts new file mode 100644 index 00000000..3d902d0e --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/types/TreeRuleObject.d.ts @@ -0,0 +1,3 @@ +import { RuleSetRules as __RuleSetRules, TreeRuleObject as __TreeRuleObject } from "@smithy/types"; +export type RuleSetRules = __RuleSetRules; +export type TreeRuleObject = __TreeRuleObject; diff --git a/node_modules/@smithy/util-endpoints/dist-types/types/index.d.ts b/node_modules/@smithy/util-endpoints/dist-types/types/index.d.ts new file mode 100644 index 00000000..a49f9840 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/types/index.d.ts @@ -0,0 +1,7 @@ +export * from "./EndpointError"; +export * from "./EndpointFunctions"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/node_modules/@smithy/util-endpoints/dist-types/types/shared.d.ts b/node_modules/@smithy/util-endpoints/dist-types/types/shared.d.ts new file mode 100644 index 00000000..8351a928 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/types/shared.d.ts @@ -0,0 +1,25 @@ +import { EndpointARN, EndpointPartition, Logger } from "@smithy/types"; +export type ReferenceObject = { + ref: string; +}; +export type FunctionObject = { + fn: string; + argv: FunctionArgv; +}; +export type FunctionArgv = Array<Expression | boolean | number>; +export type FunctionReturn = string | boolean | number | EndpointARN | EndpointPartition | { + [key: string]: FunctionReturn; +} | null; +export type ConditionObject = FunctionObject & { + assign?: string; +}; +export type Expression = string | ReferenceObject | FunctionObject; +export type EndpointParams = Record<string, string | boolean>; +export type EndpointResolverOptions = { + endpointParams: EndpointParams; + logger?: Logger; +}; +export type ReferenceRecord = Record<string, FunctionReturn>; +export type EvaluateOptions = EndpointResolverOptions & { + referenceRecord: ReferenceRecord; +}; diff --git a/node_modules/@smithy/util-endpoints/dist-types/utils/callFunction.d.ts b/node_modules/@smithy/util-endpoints/dist-types/utils/callFunction.d.ts new file mode 100644 index 00000000..729a206b --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/utils/callFunction.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, FunctionObject, FunctionReturn } from "../types"; +export declare const callFunction: ({ fn, argv }: FunctionObject, options: EvaluateOptions) => FunctionReturn; diff --git a/node_modules/@smithy/util-endpoints/dist-types/utils/customEndpointFunctions.d.ts b/node_modules/@smithy/util-endpoints/dist-types/utils/customEndpointFunctions.d.ts new file mode 100644 index 00000000..d8971d0d --- /dev/null +++ 
b/node_modules/@smithy/util-endpoints/dist-types/utils/customEndpointFunctions.d.ts @@ -0,0 +1,4 @@ +import { EndpointFunctions } from "../types/EndpointFunctions"; +export declare const customEndpointFunctions: { + [key: string]: EndpointFunctions; +}; diff --git a/node_modules/@smithy/util-endpoints/dist-types/utils/endpointFunctions.d.ts b/node_modules/@smithy/util-endpoints/dist-types/utils/endpointFunctions.d.ts new file mode 100644 index 00000000..12d75b97 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/utils/endpointFunctions.d.ts @@ -0,0 +1,11 @@ +export declare const endpointFunctions: { + booleanEquals: (value1: boolean, value2: boolean) => boolean; + getAttr: (value: import("../lib").GetAttrValue, path: string) => import("../lib").GetAttrValue; + isSet: (value: unknown) => boolean; + isValidHostLabel: (value: string, allowSubDomains?: boolean) => boolean; + not: (value: boolean) => boolean; + parseURL: (value: string | URL | import("@smithy/types").Endpoint) => import("@smithy/types").EndpointURL | null; + stringEquals: (value1: string, value2: string) => boolean; + substring: (input: string, start: number, stop: number, reverse: boolean) => string | null; + uriEncode: (value: string) => string; +}; diff --git a/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateCondition.d.ts b/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateCondition.d.ts new file mode 100644 index 00000000..5fbe59f5 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateCondition.d.ts @@ -0,0 +1,8 @@ +import { ConditionObject, EvaluateOptions } from "../types"; +export declare const evaluateCondition: ({ assign, ...fnArgs }: ConditionObject, options: EvaluateOptions) => { + toAssign?: { + name: string; + value: import("../types").FunctionReturn; + } | undefined; + result: boolean; +}; diff --git a/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateConditions.d.ts b/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateConditions.d.ts new file mode 100644 index 00000000..4131beba --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateConditions.d.ts @@ -0,0 +1,8 @@ +import { ConditionObject, EvaluateOptions, FunctionReturn } from "../types"; +export declare const evaluateConditions: (conditions: ConditionObject[] | undefined, options: EvaluateOptions) => { + result: false; + referenceRecord?: undefined; +} | { + result: boolean; + referenceRecord: Record<string, FunctionReturn>; +}; diff --git a/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateEndpointRule.d.ts b/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateEndpointRule.d.ts new file mode 100644 index 00000000..da9496ea --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateEndpointRule.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EndpointRuleObject, EvaluateOptions } from "../types"; +export declare const evaluateEndpointRule: (endpointRule: EndpointRuleObject, options: EvaluateOptions) => EndpointV2 | undefined; diff --git a/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateErrorRule.d.ts b/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateErrorRule.d.ts new file mode 100644 index 00000000..df4973da --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateErrorRule.d.ts @@ -0,0 +1,2 @@ +import { ErrorRuleObject, EvaluateOptions } from "../types"; +export declare const evaluateErrorRule: (errorRule: ErrorRuleObject, options: EvaluateOptions) => 
void; diff --git a/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateExpression.d.ts b/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateExpression.d.ts new file mode 100644 index 00000000..25419605 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateExpression.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, Expression } from "../types"; +export declare const evaluateExpression: (obj: Expression, keyName: string, options: EvaluateOptions) => import("../types").FunctionReturn; diff --git a/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateRules.d.ts b/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateRules.d.ts new file mode 100644 index 00000000..d38c8beb --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateRules.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EvaluateOptions } from "../types"; +export declare const evaluateRules: (rules: import("@smithy/types").RuleSetRules, options: EvaluateOptions) => EndpointV2; diff --git a/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTemplate.d.ts b/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTemplate.d.ts new file mode 100644 index 00000000..9b0b9ad5 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTemplate.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions } from "../types"; +export declare const evaluateTemplate: (template: string, options: EvaluateOptions) => string; diff --git a/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTreeRule.d.ts b/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTreeRule.d.ts new file mode 100644 index 00000000..2564388a --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTreeRule.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EvaluateOptions, TreeRuleObject } from "../types"; +export declare const evaluateTreeRule: (treeRule: TreeRuleObject, options: EvaluateOptions) => EndpointV2 | undefined; diff --git a/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointHeaders.d.ts b/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointHeaders.d.ts new file mode 100644 index 00000000..a8025657 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointHeaders.d.ts @@ -0,0 +1,2 @@ +import { EndpointObjectHeaders, EvaluateOptions } from "../types"; +export declare const getEndpointHeaders: (headers: EndpointObjectHeaders, options: EvaluateOptions) => {}; diff --git a/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperties.d.ts b/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperties.d.ts new file mode 100644 index 00000000..9c83bb0c --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperties.d.ts @@ -0,0 +1,2 @@ +import { EndpointObjectProperties, EvaluateOptions } from "../types"; +export declare const getEndpointProperties: (properties: EndpointObjectProperties, options: EvaluateOptions) => {}; diff --git a/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperty.d.ts b/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperty.d.ts new file mode 100644 index 00000000..7bc5b82c --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperty.d.ts @@ -0,0 +1,3 @@ +import { EndpointObjectProperty } from "@smithy/types"; +import { EvaluateOptions } from "../types"; +export declare const 
getEndpointProperty: (property: EndpointObjectProperty, options: EvaluateOptions) => EndpointObjectProperty; diff --git a/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointUrl.d.ts b/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointUrl.d.ts new file mode 100644 index 00000000..4ab22895 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointUrl.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, Expression } from "../types"; +export declare const getEndpointUrl: (endpointUrl: Expression, options: EvaluateOptions) => URL; diff --git a/node_modules/@smithy/util-endpoints/dist-types/utils/getReferenceValue.d.ts b/node_modules/@smithy/util-endpoints/dist-types/utils/getReferenceValue.d.ts new file mode 100644 index 00000000..3699ec1d --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/utils/getReferenceValue.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, ReferenceObject } from "../types"; +export declare const getReferenceValue: ({ ref }: ReferenceObject, options: EvaluateOptions) => import("../types").FunctionReturn; diff --git a/node_modules/@smithy/util-endpoints/dist-types/utils/index.d.ts b/node_modules/@smithy/util-endpoints/dist-types/utils/index.d.ts new file mode 100644 index 00000000..b571d021 --- /dev/null +++ b/node_modules/@smithy/util-endpoints/dist-types/utils/index.d.ts @@ -0,0 +1,2 @@ +export * from "./customEndpointFunctions"; +export * from "./evaluateRules"; diff --git a/node_modules/@smithy/util-endpoints/package.json b/node_modules/@smithy/util-endpoints/package.json new file mode 100644 index 00000000..7c6f9b1f --- /dev/null +++ b/node_modules/@smithy/util-endpoints/package.json @@ -0,0 +1,68 @@ +{ + "name": "@smithy/util-endpoints", + "version": "3.0.2", + "description": "Utilities to help with endpoint resolution.", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-endpoints", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "keywords": [ + "endpoint" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "types/*": [ + "types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": 
"https://github.com/smithy-lang/smithy-typescript/tree/master/packages/util-endpoints", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-endpoints" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/util-hex-encoding/LICENSE b/node_modules/@smithy/util-hex-encoding/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/util-hex-encoding/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@smithy/util-hex-encoding/README.md b/node_modules/@smithy/util-hex-encoding/README.md new file mode 100644 index 00000000..67e4499b --- /dev/null +++ b/node_modules/@smithy/util-hex-encoding/README.md @@ -0,0 +1,4 @@ +# @smithy/util-hex-encoding + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-hex-encoding/latest.svg)](https://www.npmjs.com/package/@smithy/util-hex-encoding) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-hex-encoding.svg)](https://www.npmjs.com/package/@smithy/util-hex-encoding) diff --git a/node_modules/@smithy/util-hex-encoding/dist-cjs/index.js b/node_modules/@smithy/util-hex-encoding/dist-cjs/index.js new file mode 100644 index 00000000..78a59ea8 --- /dev/null +++ b/node_modules/@smithy/util-hex-encoding/dist-cjs/index.js @@ -0,0 +1,67 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromHex: () => fromHex, + toHex: () => toHex +}); +module.exports = __toCommonJS(src_exports); +var SHORT_TO_HEX = {}; +var HEX_TO_SHORT = {}; +for (let i = 0; i < 256; i++) { + let encodedByte = i.toString(16).toLowerCase(); + if (encodedByte.length === 1) { + encodedByte = `0${encodedByte}`; + } + SHORT_TO_HEX[i] = encodedByte; + HEX_TO_SHORT[encodedByte] = i; +} +function fromHex(encoded) { + if (encoded.length % 2 !== 0) { + throw new Error("Hex encoded strings must have an even number length"); + } + const out = new Uint8Array(encoded.length / 
2); + for (let i = 0; i < encoded.length; i += 2) { + const encodedByte = encoded.slice(i, i + 2).toLowerCase(); + if (encodedByte in HEX_TO_SHORT) { + out[i / 2] = HEX_TO_SHORT[encodedByte]; + } else { + throw new Error(`Cannot decode unrecognized sequence ${encodedByte} as hexadecimal`); + } + } + return out; +} +__name(fromHex, "fromHex"); +function toHex(bytes) { + let out = ""; + for (let i = 0; i < bytes.byteLength; i++) { + out += SHORT_TO_HEX[bytes[i]]; + } + return out; +} +__name(toHex, "toHex"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromHex, + toHex +}); + diff --git a/node_modules/@smithy/util-hex-encoding/dist-es/index.js b/node_modules/@smithy/util-hex-encoding/dist-es/index.js new file mode 100644 index 00000000..e47b3aa2 --- /dev/null +++ b/node_modules/@smithy/util-hex-encoding/dist-es/index.js @@ -0,0 +1,33 @@ +const SHORT_TO_HEX = {}; +const HEX_TO_SHORT = {}; +for (let i = 0; i < 256; i++) { + let encodedByte = i.toString(16).toLowerCase(); + if (encodedByte.length === 1) { + encodedByte = `0${encodedByte}`; + } + SHORT_TO_HEX[i] = encodedByte; + HEX_TO_SHORT[encodedByte] = i; +} +export function fromHex(encoded) { + if (encoded.length % 2 !== 0) { + throw new Error("Hex encoded strings must have an even number length"); + } + const out = new Uint8Array(encoded.length / 2); + for (let i = 0; i < encoded.length; i += 2) { + const encodedByte = encoded.slice(i, i + 2).toLowerCase(); + if (encodedByte in HEX_TO_SHORT) { + out[i / 2] = HEX_TO_SHORT[encodedByte]; + } + else { + throw new Error(`Cannot decode unrecognized sequence ${encodedByte} as hexadecimal`); + } + } + return out; +} +export function toHex(bytes) { + let out = ""; + for (let i = 0; i < bytes.byteLength; i++) { + out += SHORT_TO_HEX[bytes[i]]; + } + return out; +} diff --git a/node_modules/@smithy/util-hex-encoding/dist-types/index.d.ts b/node_modules/@smithy/util-hex-encoding/dist-types/index.d.ts new file mode 100644 index 00000000..9d4307ad --- /dev/null +++ b/node_modules/@smithy/util-hex-encoding/dist-types/index.d.ts @@ -0,0 +1,12 @@ +/** + * Converts a hexadecimal encoded string to a Uint8Array of bytes. + * + * @param encoded The hexadecimal encoded string + */ +export declare function fromHex(encoded: string): Uint8Array; +/** + * Converts a Uint8Array of binary data to a hexadecimal encoded string. + * + * @param bytes The binary data to encode + */ +export declare function toHex(bytes: Uint8Array): string; diff --git a/node_modules/@smithy/util-hex-encoding/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/util-hex-encoding/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..02a88489 --- /dev/null +++ b/node_modules/@smithy/util-hex-encoding/dist-types/ts3.4/index.d.ts @@ -0,0 +1,12 @@ +/** + * Converts a hexadecimal encoded string to a Uint8Array of bytes. + * + * @param encoded The hexadecimal encoded string + */ +export declare function fromHex(encoded: string): Uint8Array; +/** + * Converts a Uint8Array of binary data to a hexadecimal encoded string. 
+ * + * @param bytes The binary data to encode + */ +export declare function toHex(bytes: Uint8Array): string; diff --git a/node_modules/@smithy/util-hex-encoding/package.json b/node_modules/@smithy/util-hex-encoding/package.json new file mode 100644 index 00000000..2c1ba3db --- /dev/null +++ b/node_modules/@smithy/util-hex-encoding/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/util-hex-encoding", + "version": "4.0.0", + "description": "Converts binary buffers to and from lowercase hexadecimal encoding", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-hex-encoding", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "dependencies": { + "tslib": "^2.6.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-hex-encoding", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-hex-encoding" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/util-middleware/LICENSE b/node_modules/@smithy/util-middleware/LICENSE new file mode 100644 index 00000000..a1895fac --- /dev/null +++ b/node_modules/@smithy/util-middleware/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/node_modules/@smithy/util-middleware/README.md b/node_modules/@smithy/util-middleware/README.md new file mode 100644 index 00000000..f043cfa2 --- /dev/null +++ b/node_modules/@smithy/util-middleware/README.md @@ -0,0 +1,12 @@ +# @smithy/util-middleware + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-middleware/latest.svg)](https://www.npmjs.com/package/@smithy/util-middleware) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-middleware.svg)](https://www.npmjs.com/package/@smithy/util-middleware) + +> An internal package + +This package provides shared utilities for middleware. + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@smithy/util-middleware/dist-cjs/getSmithyContext.js b/node_modules/@smithy/util-middleware/dist-cjs/getSmithyContext.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-middleware/dist-cjs/getSmithyContext.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-middleware/dist-cjs/index.js b/node_modules/@smithy/util-middleware/dist-cjs/index.js new file mode 100644 index 00000000..dfccf176 --- /dev/null +++ b/node_modules/@smithy/util-middleware/dist-cjs/index.js @@ -0,0 +1,45 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + getSmithyContext: () => getSmithyContext, + normalizeProvider: () => normalizeProvider +}); +module.exports = __toCommonJS(src_exports); + +// src/getSmithyContext.ts +var import_types = require("@smithy/types"); +var getSmithyContext = /* @__PURE__ */ __name((context) => context[import_types.SMITHY_CONTEXT_KEY] || (context[import_types.SMITHY_CONTEXT_KEY] = {}), "getSmithyContext"); + +// src/normalizeProvider.ts +var normalizeProvider = /* @__PURE__ */ __name((input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}, "normalizeProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getSmithyContext, + normalizeProvider +}); + diff --git a/node_modules/@smithy/util-middleware/dist-cjs/normalizeProvider.js b/node_modules/@smithy/util-middleware/dist-cjs/normalizeProvider.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-middleware/dist-cjs/normalizeProvider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-middleware/dist-es/getSmithyContext.js b/node_modules/@smithy/util-middleware/dist-es/getSmithyContext.js 
new file mode 100644 index 00000000..3848a0c2 --- /dev/null +++ b/node_modules/@smithy/util-middleware/dist-es/getSmithyContext.js @@ -0,0 +1,2 @@ +import { SMITHY_CONTEXT_KEY } from "@smithy/types"; +export const getSmithyContext = (context) => context[SMITHY_CONTEXT_KEY] || (context[SMITHY_CONTEXT_KEY] = {}); diff --git a/node_modules/@smithy/util-middleware/dist-es/index.js b/node_modules/@smithy/util-middleware/dist-es/index.js new file mode 100644 index 00000000..484290d4 --- /dev/null +++ b/node_modules/@smithy/util-middleware/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./getSmithyContext"; +export * from "./normalizeProvider"; diff --git a/node_modules/@smithy/util-middleware/dist-es/normalizeProvider.js b/node_modules/@smithy/util-middleware/dist-es/normalizeProvider.js new file mode 100644 index 00000000..a83ea99e --- /dev/null +++ b/node_modules/@smithy/util-middleware/dist-es/normalizeProvider.js @@ -0,0 +1,6 @@ +export const normalizeProvider = (input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}; diff --git a/node_modules/@smithy/util-middleware/dist-types/getSmithyContext.d.ts b/node_modules/@smithy/util-middleware/dist-types/getSmithyContext.d.ts new file mode 100644 index 00000000..523ee47d --- /dev/null +++ b/node_modules/@smithy/util-middleware/dist-types/getSmithyContext.d.ts @@ -0,0 +1,5 @@ +import { HandlerExecutionContext } from "@smithy/types"; +/** + * @internal + */ +export declare const getSmithyContext: (context: HandlerExecutionContext) => Record<string, unknown>; diff --git a/node_modules/@smithy/util-middleware/dist-types/index.d.ts b/node_modules/@smithy/util-middleware/dist-types/index.d.ts new file mode 100644 index 00000000..3869284e --- /dev/null +++ b/node_modules/@smithy/util-middleware/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./getSmithyContext"; +/** + * @internal + */ +export * from "./normalizeProvider"; diff --git a/node_modules/@smithy/util-middleware/dist-types/normalizeProvider.d.ts b/node_modules/@smithy/util-middleware/dist-types/normalizeProvider.d.ts new file mode 100644 index 00000000..4fe2d9ad --- /dev/null +++ b/node_modules/@smithy/util-middleware/dist-types/normalizeProvider.d.ts @@ -0,0 +1,7 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * @returns a provider function for the input value if it isn't already one.
+ */ +export declare const normalizeProvider: <T>(input: T | Provider<T>) => Provider<T>; diff --git a/node_modules/@smithy/util-middleware/dist-types/ts3.4/getSmithyContext.d.ts b/node_modules/@smithy/util-middleware/dist-types/ts3.4/getSmithyContext.d.ts new file mode 100644 index 00000000..14cd7c4e --- /dev/null +++ b/node_modules/@smithy/util-middleware/dist-types/ts3.4/getSmithyContext.d.ts @@ -0,0 +1,5 @@ +import { HandlerExecutionContext } from "@smithy/types"; +/** + * @internal + */ +export declare const getSmithyContext: (context: HandlerExecutionContext) => Record<string, unknown>; diff --git a/node_modules/@smithy/util-middleware/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/util-middleware/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..ab071596 --- /dev/null +++ b/node_modules/@smithy/util-middleware/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./getSmithyContext"; +/** + * @internal + */ +export * from "./normalizeProvider"; diff --git a/node_modules/@smithy/util-middleware/dist-types/ts3.4/normalizeProvider.d.ts b/node_modules/@smithy/util-middleware/dist-types/ts3.4/normalizeProvider.d.ts new file mode 100644 index 00000000..594e8fa8 --- /dev/null +++ b/node_modules/@smithy/util-middleware/dist-types/ts3.4/normalizeProvider.d.ts @@ -0,0 +1,7 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * @returns a provider function for the input value if it isn't already one. + */ +export declare const normalizeProvider: <T>(input: T | Provider<T>) => Provider<T>; diff --git a/node_modules/@smithy/util-middleware/package.json b/node_modules/@smithy/util-middleware/package.json new file mode 100644 index 00000000..b9aa172b --- /dev/null +++ b/node_modules/@smithy/util-middleware/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-middleware", + "version": "4.0.2", + "description": "Shared utilities to be used in middleware packages.", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-middleware", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "middleware" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "types/*": [ + "types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/master/packages/util-middleware",
"repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-middleware" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/util-retry/LICENSE b/node_modules/@smithy/util-retry/LICENSE new file mode 100644 index 00000000..a1895fac --- /dev/null +++ b/node_modules/@smithy/util-retry/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@smithy/util-retry/README.md b/node_modules/@smithy/util-retry/README.md new file mode 100644 index 00000000..bcf11a94 --- /dev/null +++ b/node_modules/@smithy/util-retry/README.md @@ -0,0 +1,78 @@ +# @smithy/util-retry + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-retry/latest.svg)](https://www.npmjs.com/package/@smithy/util-retry) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-retry.svg)](https://www.npmjs.com/package/@smithy/util-retry) + +This package provides shared utilities for retries. + +## Usage + +### Default + +By default, each client already has a default retry strategy. The default maximum number of attempts is 3, and +only retryable errors will be retried. + +[AWS Documentation: Retry behavior](https://docs.aws.amazon.com/sdkref/latest/guide/feature-retry-behavior.html). + +```js +import { S3Client } from "@aws-sdk/client-s3"; + +const client = new S3Client({}); // default retry strategy included. +``` + +### MaxAttempts + +If you want to change the number of attempts, you can provide `maxAttempts` configuration during client creation. + +```js +import { S3Client } from "@aws-sdk/client-s3"; + +const client = new S3Client({ maxAttempts: 4 }); +``` + +This is recommended because the `StandardRetryStrategy` includes backoff calculation, +deciding whether an error should be retried, and a retry token counter. + +### MaxAttempts and BackoffComputation + +If you want to change the number of attempts and use a custom delay +computation, you can use the `ConfiguredRetryStrategy` from `@smithy/util-retry`. + +```ts +import { S3Client } from "@aws-sdk/client-s3"; +import { ConfiguredRetryStrategy } from "@smithy/util-retry"; + +const client = new S3Client({ + retryStrategy: new ConfiguredRetryStrategy( + 4, // max attempts. + (attempt: number) => 100 + attempt * 1000 // backoff function. + ), +}); +``` + +This example sets the backoff at 100ms plus 1s per attempt. + +### MaxAttempts and RetryStrategy + +If you provide both `maxAttempts` and `retryStrategy`, the `retryStrategy` +takes precedence because it is more specific. + +```ts +import { S3Client } from "@aws-sdk/client-s3"; +import { ConfiguredRetryStrategy } from "@smithy/util-retry"; + +const client = new S3Client({ + maxAttempts: 2, // ignored.
+ retryStrategy: new ConfiguredRetryStrategy( + 4, // used. + (attempt: number) => 100 + attempt * 1000 // backoff function. + ), +}); +``` + +### Further customization + +You can implement the `RetryStrategyV2` interface; a minimal sketch is included below. + +Source: https://github.com/smithy-lang/smithy-typescript/blob/main/packages/types/src/retry.ts +API Docs: https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-smithy-types/Interface/RetryStrategyV2/ diff --git a/node_modules/@smithy/util-retry/dist-cjs/AdaptiveRetryStrategy.js b/node_modules/@smithy/util-retry/dist-cjs/AdaptiveRetryStrategy.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-cjs/AdaptiveRetryStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-retry/dist-cjs/ConfiguredRetryStrategy.js b/node_modules/@smithy/util-retry/dist-cjs/ConfiguredRetryStrategy.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-cjs/ConfiguredRetryStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-retry/dist-cjs/DefaultRateLimiter.js b/node_modules/@smithy/util-retry/dist-cjs/DefaultRateLimiter.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-cjs/DefaultRateLimiter.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-retry/dist-cjs/StandardRetryStrategy.js b/node_modules/@smithy/util-retry/dist-cjs/StandardRetryStrategy.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-cjs/StandardRetryStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-retry/dist-cjs/config.js b/node_modules/@smithy/util-retry/dist-cjs/config.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-cjs/config.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-retry/dist-cjs/constants.js b/node_modules/@smithy/util-retry/dist-cjs/constants.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-retry/dist-cjs/defaultRetryBackoffStrategy.js b/node_modules/@smithy/util-retry/dist-cjs/defaultRetryBackoffStrategy.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-cjs/defaultRetryBackoffStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-retry/dist-cjs/defaultRetryToken.js b/node_modules/@smithy/util-retry/dist-cjs/defaultRetryToken.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-cjs/defaultRetryToken.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-retry/dist-cjs/index.js b/node_modules/@smithy/util-retry/dist-cjs/index.js new file mode 100644 index 00000000..699447a7 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-cjs/index.js @@ -0,0 +1,358 @@ +var __defProp = Object.defineProperty; +var
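The README's "Further customization" section above only references the `RetryStrategyV2` interface, so here is a minimal sketch of a custom strategy written against the `@smithy/types` retry interfaces used throughout this diff. The `FixedDelayRetryStrategy` name, its constructor parameters, and the 250 ms default delay are illustrative assumptions, not part of `@smithy/util-retry`.

```ts
import type { RetryErrorInfo, RetryStrategyV2, RetryToken } from "@smithy/types";

// A minimal fixed-delay retry strategy (hypothetical example, not part of the
// package): it retries up to maxAttempts, waiting a constant delay between
// attempts and honoring a server-provided retry-after hint when present.
class FixedDelayRetryStrategy implements RetryStrategyV2 {
  constructor(
    private readonly maxAttempts = 3,
    private readonly delayMs = 250
  ) {}

  async acquireInitialRetryToken(retryTokenScope: string): Promise<RetryToken> {
    // The initial token carries no delay; the first request is sent immediately.
    return this.makeToken(0, 0);
  }

  async refreshRetryTokenForRetry(
    tokenToRenew: RetryToken,
    errorInfo: RetryErrorInfo
  ): Promise<RetryToken> {
    const retryCount = tokenToRenew.getRetryCount() + 1;
    if (retryCount >= this.maxAttempts) {
      // Throwing signals that no further retries should be attempted.
      throw new Error("No retry token available");
    }
    const delay = errorInfo.retryAfterHint
      ? Math.max(errorInfo.retryAfterHint.getTime() - Date.now(), this.delayMs)
      : this.delayMs;
    return this.makeToken(retryCount, delay);
  }

  recordSuccess(token: RetryToken): void {
    // No capacity bookkeeping in this simple strategy.
  }

  private makeToken(retryCount: number, retryDelay: number): RetryToken {
    return {
      getRetryCount: () => retryCount,
      getRetryDelay: () => retryDelay,
    };
  }
}
```

As with the `ConfiguredRetryStrategy` examples in the README above, such a class would be passed to a client as `retryStrategy: new FixedDelayRetryStrategy(4, 500)`.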
__getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AdaptiveRetryStrategy: () => AdaptiveRetryStrategy, + ConfiguredRetryStrategy: () => ConfiguredRetryStrategy, + DEFAULT_MAX_ATTEMPTS: () => DEFAULT_MAX_ATTEMPTS, + DEFAULT_RETRY_DELAY_BASE: () => DEFAULT_RETRY_DELAY_BASE, + DEFAULT_RETRY_MODE: () => DEFAULT_RETRY_MODE, + DefaultRateLimiter: () => DefaultRateLimiter, + INITIAL_RETRY_TOKENS: () => INITIAL_RETRY_TOKENS, + INVOCATION_ID_HEADER: () => INVOCATION_ID_HEADER, + MAXIMUM_RETRY_DELAY: () => MAXIMUM_RETRY_DELAY, + NO_RETRY_INCREMENT: () => NO_RETRY_INCREMENT, + REQUEST_HEADER: () => REQUEST_HEADER, + RETRY_COST: () => RETRY_COST, + RETRY_MODES: () => RETRY_MODES, + StandardRetryStrategy: () => StandardRetryStrategy, + THROTTLING_RETRY_DELAY_BASE: () => THROTTLING_RETRY_DELAY_BASE, + TIMEOUT_RETRY_COST: () => TIMEOUT_RETRY_COST +}); +module.exports = __toCommonJS(src_exports); + +// src/config.ts +var RETRY_MODES = /* @__PURE__ */ ((RETRY_MODES2) => { + RETRY_MODES2["STANDARD"] = "standard"; + RETRY_MODES2["ADAPTIVE"] = "adaptive"; + return RETRY_MODES2; +})(RETRY_MODES || {}); +var DEFAULT_MAX_ATTEMPTS = 3; +var DEFAULT_RETRY_MODE = "standard" /* STANDARD */; + +// src/DefaultRateLimiter.ts +var import_service_error_classification = require("@smithy/service-error-classification"); +var DefaultRateLimiter = class _DefaultRateLimiter { + constructor(options) { + // Pre-set state variables + this.currentCapacity = 0; + this.enabled = false; + this.lastMaxRate = 0; + this.measuredTxRate = 0; + this.requestCount = 0; + this.lastTimestamp = 0; + this.timeWindow = 0; + this.beta = options?.beta ?? 0.7; + this.minCapacity = options?.minCapacity ?? 1; + this.minFillRate = options?.minFillRate ?? 0.5; + this.scaleConstant = options?.scaleConstant ?? 0.4; + this.smooth = options?.smooth ?? 0.8; + const currentTimeInSeconds = this.getCurrentTimeInSeconds(); + this.lastThrottleTime = currentTimeInSeconds; + this.lastTxRateBucket = Math.floor(this.getCurrentTimeInSeconds()); + this.fillRate = this.minFillRate; + this.maxCapacity = this.minCapacity; + } + static { + __name(this, "DefaultRateLimiter"); + } + static { + /** + * Only used in testing. 
+ */ + this.setTimeoutFn = setTimeout; + } + getCurrentTimeInSeconds() { + return Date.now() / 1e3; + } + async getSendToken() { + return this.acquireTokenBucket(1); + } + async acquireTokenBucket(amount) { + if (!this.enabled) { + return; + } + this.refillTokenBucket(); + if (amount > this.currentCapacity) { + const delay = (amount - this.currentCapacity) / this.fillRate * 1e3; + await new Promise((resolve) => _DefaultRateLimiter.setTimeoutFn(resolve, delay)); + } + this.currentCapacity = this.currentCapacity - amount; + } + refillTokenBucket() { + const timestamp = this.getCurrentTimeInSeconds(); + if (!this.lastTimestamp) { + this.lastTimestamp = timestamp; + return; + } + const fillAmount = (timestamp - this.lastTimestamp) * this.fillRate; + this.currentCapacity = Math.min(this.maxCapacity, this.currentCapacity + fillAmount); + this.lastTimestamp = timestamp; + } + updateClientSendingRate(response) { + let calculatedRate; + this.updateMeasuredRate(); + if ((0, import_service_error_classification.isThrottlingError)(response)) { + const rateToUse = !this.enabled ? this.measuredTxRate : Math.min(this.measuredTxRate, this.fillRate); + this.lastMaxRate = rateToUse; + this.calculateTimeWindow(); + this.lastThrottleTime = this.getCurrentTimeInSeconds(); + calculatedRate = this.cubicThrottle(rateToUse); + this.enableTokenBucket(); + } else { + this.calculateTimeWindow(); + calculatedRate = this.cubicSuccess(this.getCurrentTimeInSeconds()); + } + const newRate = Math.min(calculatedRate, 2 * this.measuredTxRate); + this.updateTokenBucketRate(newRate); + } + calculateTimeWindow() { + this.timeWindow = this.getPrecise(Math.pow(this.lastMaxRate * (1 - this.beta) / this.scaleConstant, 1 / 3)); + } + cubicThrottle(rateToUse) { + return this.getPrecise(rateToUse * this.beta); + } + cubicSuccess(timestamp) { + return this.getPrecise( + this.scaleConstant * Math.pow(timestamp - this.lastThrottleTime - this.timeWindow, 3) + this.lastMaxRate + ); + } + enableTokenBucket() { + this.enabled = true; + } + updateTokenBucketRate(newRate) { + this.refillTokenBucket(); + this.fillRate = Math.max(newRate, this.minFillRate); + this.maxCapacity = Math.max(newRate, this.minCapacity); + this.currentCapacity = Math.min(this.currentCapacity, this.maxCapacity); + } + updateMeasuredRate() { + const t = this.getCurrentTimeInSeconds(); + const timeBucket = Math.floor(t * 2) / 2; + this.requestCount++; + if (timeBucket > this.lastTxRateBucket) { + const currentRate = this.requestCount / (timeBucket - this.lastTxRateBucket); + this.measuredTxRate = this.getPrecise(currentRate * this.smooth + this.measuredTxRate * (1 - this.smooth)); + this.requestCount = 0; + this.lastTxRateBucket = timeBucket; + } + } + getPrecise(num) { + return parseFloat(num.toFixed(8)); + } +}; + +// src/constants.ts +var DEFAULT_RETRY_DELAY_BASE = 100; +var MAXIMUM_RETRY_DELAY = 20 * 1e3; +var THROTTLING_RETRY_DELAY_BASE = 500; +var INITIAL_RETRY_TOKENS = 500; +var RETRY_COST = 5; +var TIMEOUT_RETRY_COST = 10; +var NO_RETRY_INCREMENT = 1; +var INVOCATION_ID_HEADER = "amz-sdk-invocation-id"; +var REQUEST_HEADER = "amz-sdk-request"; + +// src/defaultRetryBackoffStrategy.ts +var getDefaultRetryBackoffStrategy = /* @__PURE__ */ __name(() => { + let delayBase = DEFAULT_RETRY_DELAY_BASE; + const computeNextBackoffDelay = /* @__PURE__ */ __name((attempts) => { + return Math.floor(Math.min(MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); + }, "computeNextBackoffDelay"); + const setDelayBase = /* @__PURE__ */ __name((delay) => { + delayBase = 
delay; + }, "setDelayBase"); + return { + computeNextBackoffDelay, + setDelayBase + }; +}, "getDefaultRetryBackoffStrategy"); + +// src/defaultRetryToken.ts +var createDefaultRetryToken = /* @__PURE__ */ __name(({ + retryDelay, + retryCount, + retryCost +}) => { + const getRetryCount = /* @__PURE__ */ __name(() => retryCount, "getRetryCount"); + const getRetryDelay = /* @__PURE__ */ __name(() => Math.min(MAXIMUM_RETRY_DELAY, retryDelay), "getRetryDelay"); + const getRetryCost = /* @__PURE__ */ __name(() => retryCost, "getRetryCost"); + return { + getRetryCount, + getRetryDelay, + getRetryCost + }; +}, "createDefaultRetryToken"); + +// src/StandardRetryStrategy.ts +var StandardRetryStrategy = class { + constructor(maxAttempts) { + this.maxAttempts = maxAttempts; + this.mode = "standard" /* STANDARD */; + this.capacity = INITIAL_RETRY_TOKENS; + this.retryBackoffStrategy = getDefaultRetryBackoffStrategy(); + this.maxAttemptsProvider = typeof maxAttempts === "function" ? maxAttempts : async () => maxAttempts; + } + static { + __name(this, "StandardRetryStrategy"); + } + // eslint-disable-next-line @typescript-eslint/no-unused-vars + async acquireInitialRetryToken(retryTokenScope) { + return createDefaultRetryToken({ + retryDelay: DEFAULT_RETRY_DELAY_BASE, + retryCount: 0 + }); + } + async refreshRetryTokenForRetry(token, errorInfo) { + const maxAttempts = await this.getMaxAttempts(); + if (this.shouldRetry(token, errorInfo, maxAttempts)) { + const errorType = errorInfo.errorType; + this.retryBackoffStrategy.setDelayBase( + errorType === "THROTTLING" ? THROTTLING_RETRY_DELAY_BASE : DEFAULT_RETRY_DELAY_BASE + ); + const delayFromErrorType = this.retryBackoffStrategy.computeNextBackoffDelay(token.getRetryCount()); + const retryDelay = errorInfo.retryAfterHint ? Math.max(errorInfo.retryAfterHint.getTime() - Date.now() || 0, delayFromErrorType) : delayFromErrorType; + const capacityCost = this.getCapacityCost(errorType); + this.capacity -= capacityCost; + return createDefaultRetryToken({ + retryDelay, + retryCount: token.getRetryCount() + 1, + retryCost: capacityCost + }); + } + throw new Error("No retry token available"); + } + recordSuccess(token) { + this.capacity = Math.max(INITIAL_RETRY_TOKENS, this.capacity + (token.getRetryCost() ?? NO_RETRY_INCREMENT)); + } + /** + * @returns the current available retry capacity. + * + * This number decreases when retries are executed and refills when requests or retries succeed. + */ + getCapacity() { + return this.capacity; + } + async getMaxAttempts() { + try { + return await this.maxAttemptsProvider(); + } catch (error) { + console.warn(`Max attempts provider could not resolve. Using default of ${DEFAULT_MAX_ATTEMPTS}`); + return DEFAULT_MAX_ATTEMPTS; + } + } + shouldRetry(tokenToRenew, errorInfo, maxAttempts) { + const attempts = tokenToRenew.getRetryCount() + 1; + return attempts < maxAttempts && this.capacity >= this.getCapacityCost(errorInfo.errorType) && this.isRetryableError(errorInfo.errorType); + } + getCapacityCost(errorType) { + return errorType === "TRANSIENT" ? TIMEOUT_RETRY_COST : RETRY_COST; + } + isRetryableError(errorType) { + return errorType === "THROTTLING" || errorType === "TRANSIENT"; + } +}; + +// src/AdaptiveRetryStrategy.ts +var AdaptiveRetryStrategy = class { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = "adaptive" /* ADAPTIVE */; + const { rateLimiter } = options ?? {}; + this.rateLimiter = rateLimiter ?? 
new DefaultRateLimiter(); + this.standardRetryStrategy = new StandardRetryStrategy(maxAttemptsProvider); + } + static { + __name(this, "AdaptiveRetryStrategy"); + } + async acquireInitialRetryToken(retryTokenScope) { + await this.rateLimiter.getSendToken(); + return this.standardRetryStrategy.acquireInitialRetryToken(retryTokenScope); + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + this.rateLimiter.updateClientSendingRate(errorInfo); + return this.standardRetryStrategy.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + } + recordSuccess(token) { + this.rateLimiter.updateClientSendingRate({}); + this.standardRetryStrategy.recordSuccess(token); + } +}; + +// src/ConfiguredRetryStrategy.ts +var ConfiguredRetryStrategy = class extends StandardRetryStrategy { + static { + __name(this, "ConfiguredRetryStrategy"); + } + /** + * @param maxAttempts - the maximum number of retry attempts allowed. + * e.g., if set to 3, then 4 total requests are possible. + * @param computeNextBackoffDelay - a millisecond delay for each retry or a function that takes the retry attempt + * and returns the delay. + * + * @example exponential backoff. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, (attempt) => attempt ** 2) + * }); + * ``` + * @example constant delay. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, 2000) + * }); + * ``` + */ + constructor(maxAttempts, computeNextBackoffDelay = DEFAULT_RETRY_DELAY_BASE) { + super(typeof maxAttempts === "function" ? maxAttempts : async () => maxAttempts); + if (typeof computeNextBackoffDelay === "number") { + this.computeNextBackoffDelay = () => computeNextBackoffDelay; + } else { + this.computeNextBackoffDelay = computeNextBackoffDelay; + } + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + const token = await super.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + token.getRetryDelay = () => this.computeNextBackoffDelay(token.getRetryCount()); + return token; + } +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + AdaptiveRetryStrategy, + ConfiguredRetryStrategy, + DefaultRateLimiter, + StandardRetryStrategy, + RETRY_MODES, + DEFAULT_MAX_ATTEMPTS, + DEFAULT_RETRY_MODE, + DEFAULT_RETRY_DELAY_BASE, + MAXIMUM_RETRY_DELAY, + THROTTLING_RETRY_DELAY_BASE, + INITIAL_RETRY_TOKENS, + RETRY_COST, + TIMEOUT_RETRY_COST, + NO_RETRY_INCREMENT, + INVOCATION_ID_HEADER, + REQUEST_HEADER +}); + diff --git a/node_modules/@smithy/util-retry/dist-cjs/types.js b/node_modules/@smithy/util-retry/dist-cjs/types.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-retry/dist-es/AdaptiveRetryStrategy.js b/node_modules/@smithy/util-retry/dist-es/AdaptiveRetryStrategy.js new file mode 100644 index 00000000..e20cf0f8 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-es/AdaptiveRetryStrategy.js @@ -0,0 +1,24 @@ +import { RETRY_MODES } from "./config"; +import { DefaultRateLimiter } from "./DefaultRateLimiter"; +import { StandardRetryStrategy } from "./StandardRetryStrategy"; +export class AdaptiveRetryStrategy { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = RETRY_MODES.ADAPTIVE; + const { rateLimiter } = options ?? {}; + this.rateLimiter = rateLimiter ?? 
new DefaultRateLimiter(); + this.standardRetryStrategy = new StandardRetryStrategy(maxAttemptsProvider); + } + async acquireInitialRetryToken(retryTokenScope) { + await this.rateLimiter.getSendToken(); + return this.standardRetryStrategy.acquireInitialRetryToken(retryTokenScope); + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + this.rateLimiter.updateClientSendingRate(errorInfo); + return this.standardRetryStrategy.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + } + recordSuccess(token) { + this.rateLimiter.updateClientSendingRate({}); + this.standardRetryStrategy.recordSuccess(token); + } +} diff --git a/node_modules/@smithy/util-retry/dist-es/ConfiguredRetryStrategy.js b/node_modules/@smithy/util-retry/dist-es/ConfiguredRetryStrategy.js new file mode 100644 index 00000000..541bdb24 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-es/ConfiguredRetryStrategy.js @@ -0,0 +1,18 @@ +import { DEFAULT_RETRY_DELAY_BASE } from "./constants"; +import { StandardRetryStrategy } from "./StandardRetryStrategy"; +export class ConfiguredRetryStrategy extends StandardRetryStrategy { + constructor(maxAttempts, computeNextBackoffDelay = DEFAULT_RETRY_DELAY_BASE) { + super(typeof maxAttempts === "function" ? maxAttempts : async () => maxAttempts); + if (typeof computeNextBackoffDelay === "number") { + this.computeNextBackoffDelay = () => computeNextBackoffDelay; + } + else { + this.computeNextBackoffDelay = computeNextBackoffDelay; + } + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + const token = await super.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + token.getRetryDelay = () => this.computeNextBackoffDelay(token.getRetryCount()); + return token; + } +} diff --git a/node_modules/@smithy/util-retry/dist-es/DefaultRateLimiter.js b/node_modules/@smithy/util-retry/dist-es/DefaultRateLimiter.js new file mode 100644 index 00000000..15240c84 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-es/DefaultRateLimiter.js @@ -0,0 +1,100 @@ +import { isThrottlingError } from "@smithy/service-error-classification"; +export class DefaultRateLimiter { + constructor(options) { + this.currentCapacity = 0; + this.enabled = false; + this.lastMaxRate = 0; + this.measuredTxRate = 0; + this.requestCount = 0; + this.lastTimestamp = 0; + this.timeWindow = 0; + this.beta = options?.beta ?? 0.7; + this.minCapacity = options?.minCapacity ?? 1; + this.minFillRate = options?.minFillRate ?? 0.5; + this.scaleConstant = options?.scaleConstant ?? 0.4; + this.smooth = options?.smooth ?? 
0.8; + const currentTimeInSeconds = this.getCurrentTimeInSeconds(); + this.lastThrottleTime = currentTimeInSeconds; + this.lastTxRateBucket = Math.floor(this.getCurrentTimeInSeconds()); + this.fillRate = this.minFillRate; + this.maxCapacity = this.minCapacity; + } + getCurrentTimeInSeconds() { + return Date.now() / 1000; + } + async getSendToken() { + return this.acquireTokenBucket(1); + } + async acquireTokenBucket(amount) { + if (!this.enabled) { + return; + } + this.refillTokenBucket(); + if (amount > this.currentCapacity) { + const delay = ((amount - this.currentCapacity) / this.fillRate) * 1000; + await new Promise((resolve) => DefaultRateLimiter.setTimeoutFn(resolve, delay)); + } + this.currentCapacity = this.currentCapacity - amount; + } + refillTokenBucket() { + const timestamp = this.getCurrentTimeInSeconds(); + if (!this.lastTimestamp) { + this.lastTimestamp = timestamp; + return; + } + const fillAmount = (timestamp - this.lastTimestamp) * this.fillRate; + this.currentCapacity = Math.min(this.maxCapacity, this.currentCapacity + fillAmount); + this.lastTimestamp = timestamp; + } + updateClientSendingRate(response) { + let calculatedRate; + this.updateMeasuredRate(); + if (isThrottlingError(response)) { + const rateToUse = !this.enabled ? this.measuredTxRate : Math.min(this.measuredTxRate, this.fillRate); + this.lastMaxRate = rateToUse; + this.calculateTimeWindow(); + this.lastThrottleTime = this.getCurrentTimeInSeconds(); + calculatedRate = this.cubicThrottle(rateToUse); + this.enableTokenBucket(); + } + else { + this.calculateTimeWindow(); + calculatedRate = this.cubicSuccess(this.getCurrentTimeInSeconds()); + } + const newRate = Math.min(calculatedRate, 2 * this.measuredTxRate); + this.updateTokenBucketRate(newRate); + } + calculateTimeWindow() { + this.timeWindow = this.getPrecise(Math.pow((this.lastMaxRate * (1 - this.beta)) / this.scaleConstant, 1 / 3)); + } + cubicThrottle(rateToUse) { + return this.getPrecise(rateToUse * this.beta); + } + cubicSuccess(timestamp) { + return this.getPrecise(this.scaleConstant * Math.pow(timestamp - this.lastThrottleTime - this.timeWindow, 3) + this.lastMaxRate); + } + enableTokenBucket() { + this.enabled = true; + } + updateTokenBucketRate(newRate) { + this.refillTokenBucket(); + this.fillRate = Math.max(newRate, this.minFillRate); + this.maxCapacity = Math.max(newRate, this.minCapacity); + this.currentCapacity = Math.min(this.currentCapacity, this.maxCapacity); + } + updateMeasuredRate() { + const t = this.getCurrentTimeInSeconds(); + const timeBucket = Math.floor(t * 2) / 2; + this.requestCount++; + if (timeBucket > this.lastTxRateBucket) { + const currentRate = this.requestCount / (timeBucket - this.lastTxRateBucket); + this.measuredTxRate = this.getPrecise(currentRate * this.smooth + this.measuredTxRate * (1 - this.smooth)); + this.requestCount = 0; + this.lastTxRateBucket = timeBucket; + } + } + getPrecise(num) { + return parseFloat(num.toFixed(8)); + } +} +DefaultRateLimiter.setTimeoutFn = setTimeout; diff --git a/node_modules/@smithy/util-retry/dist-es/StandardRetryStrategy.js b/node_modules/@smithy/util-retry/dist-es/StandardRetryStrategy.js new file mode 100644 index 00000000..07adde02 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-es/StandardRetryStrategy.js @@ -0,0 +1,65 @@ +import { DEFAULT_MAX_ATTEMPTS, RETRY_MODES } from "./config"; +import { DEFAULT_RETRY_DELAY_BASE, INITIAL_RETRY_TOKENS, NO_RETRY_INCREMENT, RETRY_COST, THROTTLING_RETRY_DELAY_BASE, TIMEOUT_RETRY_COST, } from "./constants"; +import { 
getDefaultRetryBackoffStrategy } from "./defaultRetryBackoffStrategy"; +import { createDefaultRetryToken } from "./defaultRetryToken"; +export class StandardRetryStrategy { + constructor(maxAttempts) { + this.maxAttempts = maxAttempts; + this.mode = RETRY_MODES.STANDARD; + this.capacity = INITIAL_RETRY_TOKENS; + this.retryBackoffStrategy = getDefaultRetryBackoffStrategy(); + this.maxAttemptsProvider = typeof maxAttempts === "function" ? maxAttempts : async () => maxAttempts; + } + async acquireInitialRetryToken(retryTokenScope) { + return createDefaultRetryToken({ + retryDelay: DEFAULT_RETRY_DELAY_BASE, + retryCount: 0, + }); + } + async refreshRetryTokenForRetry(token, errorInfo) { + const maxAttempts = await this.getMaxAttempts(); + if (this.shouldRetry(token, errorInfo, maxAttempts)) { + const errorType = errorInfo.errorType; + this.retryBackoffStrategy.setDelayBase(errorType === "THROTTLING" ? THROTTLING_RETRY_DELAY_BASE : DEFAULT_RETRY_DELAY_BASE); + const delayFromErrorType = this.retryBackoffStrategy.computeNextBackoffDelay(token.getRetryCount()); + const retryDelay = errorInfo.retryAfterHint + ? Math.max(errorInfo.retryAfterHint.getTime() - Date.now() || 0, delayFromErrorType) + : delayFromErrorType; + const capacityCost = this.getCapacityCost(errorType); + this.capacity -= capacityCost; + return createDefaultRetryToken({ + retryDelay, + retryCount: token.getRetryCount() + 1, + retryCost: capacityCost, + }); + } + throw new Error("No retry token available"); + } + recordSuccess(token) { + this.capacity = Math.max(INITIAL_RETRY_TOKENS, this.capacity + (token.getRetryCost() ?? NO_RETRY_INCREMENT)); + } + getCapacity() { + return this.capacity; + } + async getMaxAttempts() { + try { + return await this.maxAttemptsProvider(); + } + catch (error) { + console.warn(`Max attempts provider could not resolve. Using default of ${DEFAULT_MAX_ATTEMPTS}`); + return DEFAULT_MAX_ATTEMPTS; + } + } + shouldRetry(tokenToRenew, errorInfo, maxAttempts) { + const attempts = tokenToRenew.getRetryCount() + 1; + return (attempts < maxAttempts && + this.capacity >= this.getCapacityCost(errorInfo.errorType) && + this.isRetryableError(errorInfo.errorType)); + } + getCapacityCost(errorType) { + return errorType === "TRANSIENT" ? 
TIMEOUT_RETRY_COST : RETRY_COST; + } + isRetryableError(errorType) { + return errorType === "THROTTLING" || errorType === "TRANSIENT"; + } +} diff --git a/node_modules/@smithy/util-retry/dist-es/config.js b/node_modules/@smithy/util-retry/dist-es/config.js new file mode 100644 index 00000000..438d42da --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-es/config.js @@ -0,0 +1,7 @@ +export var RETRY_MODES; +(function (RETRY_MODES) { + RETRY_MODES["STANDARD"] = "standard"; + RETRY_MODES["ADAPTIVE"] = "adaptive"; +})(RETRY_MODES || (RETRY_MODES = {})); +export const DEFAULT_MAX_ATTEMPTS = 3; +export const DEFAULT_RETRY_MODE = RETRY_MODES.STANDARD; diff --git a/node_modules/@smithy/util-retry/dist-es/constants.js b/node_modules/@smithy/util-retry/dist-es/constants.js new file mode 100644 index 00000000..0876f8e2 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-es/constants.js @@ -0,0 +1,9 @@ +export const DEFAULT_RETRY_DELAY_BASE = 100; +export const MAXIMUM_RETRY_DELAY = 20 * 1000; +export const THROTTLING_RETRY_DELAY_BASE = 500; +export const INITIAL_RETRY_TOKENS = 500; +export const RETRY_COST = 5; +export const TIMEOUT_RETRY_COST = 10; +export const NO_RETRY_INCREMENT = 1; +export const INVOCATION_ID_HEADER = "amz-sdk-invocation-id"; +export const REQUEST_HEADER = "amz-sdk-request"; diff --git a/node_modules/@smithy/util-retry/dist-es/defaultRetryBackoffStrategy.js b/node_modules/@smithy/util-retry/dist-es/defaultRetryBackoffStrategy.js new file mode 100644 index 00000000..ce04bc5e --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-es/defaultRetryBackoffStrategy.js @@ -0,0 +1,14 @@ +import { DEFAULT_RETRY_DELAY_BASE, MAXIMUM_RETRY_DELAY } from "./constants"; +export const getDefaultRetryBackoffStrategy = () => { + let delayBase = DEFAULT_RETRY_DELAY_BASE; + const computeNextBackoffDelay = (attempts) => { + return Math.floor(Math.min(MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); + }; + const setDelayBase = (delay) => { + delayBase = delay; + }; + return { + computeNextBackoffDelay, + setDelayBase, + }; +}; diff --git a/node_modules/@smithy/util-retry/dist-es/defaultRetryToken.js b/node_modules/@smithy/util-retry/dist-es/defaultRetryToken.js new file mode 100644 index 00000000..203bb662 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-es/defaultRetryToken.js @@ -0,0 +1,11 @@ +import { MAXIMUM_RETRY_DELAY } from "./constants"; +export const createDefaultRetryToken = ({ retryDelay, retryCount, retryCost, }) => { + const getRetryCount = () => retryCount; + const getRetryDelay = () => Math.min(MAXIMUM_RETRY_DELAY, retryDelay); + const getRetryCost = () => retryCost; + return { + getRetryCount, + getRetryDelay, + getRetryCost, + }; +}; diff --git a/node_modules/@smithy/util-retry/dist-es/index.js b/node_modules/@smithy/util-retry/dist-es/index.js new file mode 100644 index 00000000..8637ced0 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-es/index.js @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./ConfiguredRetryStrategy"; +export * from "./DefaultRateLimiter"; +export * from "./StandardRetryStrategy"; +export * from "./config"; +export * from "./constants"; +export * from "./types"; diff --git a/node_modules/@smithy/util-retry/dist-es/types.js b/node_modules/@smithy/util-retry/dist-es/types.js new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@smithy/util-retry/dist-types/AdaptiveRetryStrategy.d.ts 
b/node_modules/@smithy/util-retry/dist-types/AdaptiveRetryStrategy.d.ts new file mode 100644 index 00000000..80925193 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-types/AdaptiveRetryStrategy.d.ts @@ -0,0 +1,33 @@ +import { Provider, RetryErrorInfo, RetryStrategyV2, RetryToken, StandardRetryToken } from "@smithy/types"; +import { RateLimiter } from "./types"; +/** + * @public + * + * Strategy options to be passed to AdaptiveRetryStrategy + */ +export interface AdaptiveRetryStrategyOptions { + rateLimiter?: RateLimiter; +} +/** + * @public + * + * The AdaptiveRetryStrategy is a retry strategy for executing against a very + * resource constrained set of resources. Care should be taken when using this + * retry strategy. By default, it uses a dynamic backoff delay based on load + * currently perceived against the downstream resource and performs circuit + * breaking to disable retries in the event of high downstream failures using + * the DefaultRateLimiter. + * + * @see {@link StandardRetryStrategy} + * @see {@link DefaultRateLimiter } + */ +export declare class AdaptiveRetryStrategy implements RetryStrategyV2 { + private readonly maxAttemptsProvider; + private rateLimiter; + private standardRetryStrategy; + readonly mode: string; + constructor(maxAttemptsProvider: Provider<number>, options?: AdaptiveRetryStrategyOptions); + acquireInitialRetryToken(retryTokenScope: string): Promise<RetryToken>; + refreshRetryTokenForRetry(tokenToRenew: StandardRetryToken, errorInfo: RetryErrorInfo): Promise<RetryToken>; + recordSuccess(token: StandardRetryToken): void; +} diff --git a/node_modules/@smithy/util-retry/dist-types/ConfiguredRetryStrategy.d.ts b/node_modules/@smithy/util-retry/dist-types/ConfiguredRetryStrategy.d.ts new file mode 100644 index 00000000..3250c6d5 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-types/ConfiguredRetryStrategy.d.ts @@ -0,0 +1,32 @@ +import type { Provider, RetryBackoffStrategy, RetryErrorInfo, RetryStrategyV2, StandardRetryToken } from "@smithy/types"; +import { StandardRetryStrategy } from "./StandardRetryStrategy"; +/** + * @public + * + * This extension of the StandardRetryStrategy allows customizing the + * backoff computation. + */ +export declare class ConfiguredRetryStrategy extends StandardRetryStrategy implements RetryStrategyV2 { + private readonly computeNextBackoffDelay; + /** + * @param maxAttempts - the maximum number of retry attempts allowed. + * e.g., if set to 3, then 4 total requests are possible. + * @param computeNextBackoffDelay - a millisecond delay for each retry or a function that takes the retry attempt + * and returns the delay. + * + * @example exponential backoff. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, (attempt) => attempt ** 2) + * }); + * ``` + * @example constant delay.
+ * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, 2000) + * }); + * ``` + */ + constructor(maxAttempts: number | Provider<number>, computeNextBackoffDelay?: number | RetryBackoffStrategy["computeNextBackoffDelay"]); + refreshRetryTokenForRetry(tokenToRenew: StandardRetryToken, errorInfo: RetryErrorInfo): Promise<StandardRetryToken>; +} diff --git a/node_modules/@smithy/util-retry/dist-types/DefaultRateLimiter.d.ts b/node_modules/@smithy/util-retry/dist-types/DefaultRateLimiter.d.ts new file mode 100644 index 00000000..9d689fc9 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-types/DefaultRateLimiter.d.ts @@ -0,0 +1,49 @@ +import { RateLimiter } from "./types"; +/** + * @public + */ +export interface DefaultRateLimiterOptions { + beta?: number; + minCapacity?: number; + minFillRate?: number; + scaleConstant?: number; + smooth?: number; +} +/** + * @public + */ +export declare class DefaultRateLimiter implements RateLimiter { + /** + * Only used in testing. + */ + private static setTimeoutFn; + private beta; + private minCapacity; + private minFillRate; + private scaleConstant; + private smooth; + private currentCapacity; + private enabled; + private lastMaxRate; + private measuredTxRate; + private requestCount; + private fillRate; + private lastThrottleTime; + private lastTimestamp; + private lastTxRateBucket; + private maxCapacity; + private timeWindow; + constructor(options?: DefaultRateLimiterOptions); + private getCurrentTimeInSeconds; + getSendToken(): Promise<void>; + private acquireTokenBucket; + private refillTokenBucket; + updateClientSendingRate(response: any): void; + private calculateTimeWindow; + private cubicThrottle; + private cubicSuccess; + private enableTokenBucket; + private updateTokenBucketRate; + private updateMeasuredRate; + private getPrecise; +} diff --git a/node_modules/@smithy/util-retry/dist-types/StandardRetryStrategy.d.ts b/node_modules/@smithy/util-retry/dist-types/StandardRetryStrategy.d.ts new file mode 100644 index 00000000..c100ebca --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-types/StandardRetryStrategy.d.ts @@ -0,0 +1,26 @@ +import { Provider, RetryErrorInfo, RetryStrategyV2, StandardRetryToken } from "@smithy/types"; +/** + * @public + */ +export declare class StandardRetryStrategy implements RetryStrategyV2 { + private readonly maxAttempts; + readonly mode: string; + private capacity; + private readonly retryBackoffStrategy; + private readonly maxAttemptsProvider; + constructor(maxAttempts: number); + constructor(maxAttemptsProvider: Provider<number>); + acquireInitialRetryToken(retryTokenScope: string): Promise<StandardRetryToken>; + refreshRetryTokenForRetry(token: StandardRetryToken, errorInfo: RetryErrorInfo): Promise<StandardRetryToken>; + recordSuccess(token: StandardRetryToken): void; + /** + * @returns the current available retry capacity. + * + * This number decreases when retries are executed and refills when requests or retries succeed.
+ */ + getCapacity(): number; + private getMaxAttempts; + private shouldRetry; + private getCapacityCost; + private isRetryableError; +} diff --git a/node_modules/@smithy/util-retry/dist-types/config.d.ts b/node_modules/@smithy/util-retry/dist-types/config.d.ts new file mode 100644 index 00000000..e4e74b3a --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-types/config.d.ts @@ -0,0 +1,20 @@ +/** + * @public + */ +export declare enum RETRY_MODES { + STANDARD = "standard", + ADAPTIVE = "adaptive" +} +/** + * @public + * + * The default value for how many HTTP requests an SDK should make for a + * single SDK operation invocation before giving up + */ +export declare const DEFAULT_MAX_ATTEMPTS = 3; +/** + * @public + * + * The default retry algorithm to use. + */ +export declare const DEFAULT_RETRY_MODE = RETRY_MODES.STANDARD; diff --git a/node_modules/@smithy/util-retry/dist-types/constants.d.ts b/node_modules/@smithy/util-retry/dist-types/constants.d.ts new file mode 100644 index 00000000..bc7fec85 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-types/constants.d.ts @@ -0,0 +1,59 @@ +/** + * @public + * + * The base number of milliseconds to use in calculating a suitable cool-down + * time when a retryable error is encountered. + */ +export declare const DEFAULT_RETRY_DELAY_BASE = 100; +/** + * @public + * + * The maximum amount of time (in milliseconds) that will be used as a delay + * between retry attempts. + */ +export declare const MAXIMUM_RETRY_DELAY: number; +/** + * @public + * + * The retry delay base (in milliseconds) to use when a throttling error is + * encountered. + */ +export declare const THROTTLING_RETRY_DELAY_BASE = 500; +/** + * @public + * + * Initial number of retry tokens in Retry Quota + */ +export declare const INITIAL_RETRY_TOKENS = 500; +/** + * @public + * + * The total amount of retry tokens to be decremented from retry token balance. + */ +export declare const RETRY_COST = 5; +/** + * @public + * + * The total amount of retry tokens to be decremented from retry token balance + * when a throttling error is encountered. + */ +export declare const TIMEOUT_RETRY_COST = 10; +/** + * @public + * + * The total amount of retry token to be incremented from retry token balance + * if an SDK operation invocation succeeds without requiring a retry request. + */ +export declare const NO_RETRY_INCREMENT = 1; +/** + * @public + * + * Header name for SDK invocation ID + */ +export declare const INVOCATION_ID_HEADER = "amz-sdk-invocation-id"; +/** + * @public + * + * Header name for request retry information. 
+ */ +export declare const REQUEST_HEADER = "amz-sdk-request"; diff --git a/node_modules/@smithy/util-retry/dist-types/defaultRetryBackoffStrategy.d.ts b/node_modules/@smithy/util-retry/dist-types/defaultRetryBackoffStrategy.d.ts new file mode 100644 index 00000000..b70eb2dd --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-types/defaultRetryBackoffStrategy.d.ts @@ -0,0 +1,5 @@ +import { StandardRetryBackoffStrategy } from "@smithy/types"; +/** + * @internal + */ +export declare const getDefaultRetryBackoffStrategy: () => StandardRetryBackoffStrategy; diff --git a/node_modules/@smithy/util-retry/dist-types/defaultRetryToken.d.ts b/node_modules/@smithy/util-retry/dist-types/defaultRetryToken.d.ts new file mode 100644 index 00000000..947b68fa --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-types/defaultRetryToken.d.ts @@ -0,0 +1,9 @@ +import { StandardRetryToken } from "@smithy/types"; +/** + * @internal + */ +export declare const createDefaultRetryToken: ({ retryDelay, retryCount, retryCost, }: { + retryDelay: number; + retryCount: number; + retryCost?: number | undefined; +}) => StandardRetryToken; diff --git a/node_modules/@smithy/util-retry/dist-types/index.d.ts b/node_modules/@smithy/util-retry/dist-types/index.d.ts new file mode 100644 index 00000000..8637ced0 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-types/index.d.ts @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./ConfiguredRetryStrategy"; +export * from "./DefaultRateLimiter"; +export * from "./StandardRetryStrategy"; +export * from "./config"; +export * from "./constants"; +export * from "./types"; diff --git a/node_modules/@smithy/util-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts b/node_modules/@smithy/util-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts new file mode 100644 index 00000000..f6b0ef41 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts @@ -0,0 +1,33 @@ +import { Provider, RetryErrorInfo, RetryStrategyV2, RetryToken, StandardRetryToken } from "@smithy/types"; +import { RateLimiter } from "./types"; +/** + * @public + * + * Strategy options to be passed to AdaptiveRetryStrategy + */ +export interface AdaptiveRetryStrategyOptions { + rateLimiter?: RateLimiter; +} +/** + * @public + * + * The AdaptiveRetryStrategy is a retry strategy for executing against a very + * resource constrained set of resources. Care should be taken when using this + * retry strategy. By default, it uses a dynamic backoff delay based on load + * currently perceived against the downstream resource and performs circuit + * breaking to disable retries in the event of high downstream failures using + * the DefaultRateLimiter. 
+ * + * @see {@link StandardRetryStrategy} + * @see {@link DefaultRateLimiter } + */ +export declare class AdaptiveRetryStrategy implements RetryStrategyV2 { + private readonly maxAttemptsProvider; + private rateLimiter; + private standardRetryStrategy; + readonly mode: string; + constructor(maxAttemptsProvider: Provider<number>, options?: AdaptiveRetryStrategyOptions); + acquireInitialRetryToken(retryTokenScope: string): Promise<RetryToken>; + refreshRetryTokenForRetry(tokenToRenew: StandardRetryToken, errorInfo: RetryErrorInfo): Promise<RetryToken>; + recordSuccess(token: StandardRetryToken): void; +} diff --git a/node_modules/@smithy/util-retry/dist-types/ts3.4/ConfiguredRetryStrategy.d.ts b/node_modules/@smithy/util-retry/dist-types/ts3.4/ConfiguredRetryStrategy.d.ts new file mode 100644 index 00000000..7df29833 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-types/ts3.4/ConfiguredRetryStrategy.d.ts @@ -0,0 +1,32 @@ +import { Provider, RetryBackoffStrategy, RetryErrorInfo, RetryStrategyV2, StandardRetryToken } from "@smithy/types"; +import { StandardRetryStrategy } from "./StandardRetryStrategy"; +/** + * @public + * + * This extension of the StandardRetryStrategy allows customizing the + * backoff computation. + */ +export declare class ConfiguredRetryStrategy extends StandardRetryStrategy implements RetryStrategyV2 { + private readonly computeNextBackoffDelay; + /** + * @param maxAttempts - the maximum number of retry attempts allowed. + * e.g., if set to 3, then 4 total requests are possible. + * @param computeNextBackoffDelay - a millisecond delay for each retry or a function that takes the retry attempt + * and returns the delay. + * + * @example exponential backoff. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, (attempt) => attempt ** 2) + * }); + * ``` + * @example constant delay. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, 2000) + * }); + * ``` + */ + constructor(maxAttempts: number | Provider<number>, computeNextBackoffDelay?: number | RetryBackoffStrategy["computeNextBackoffDelay"]); + refreshRetryTokenForRetry(tokenToRenew: StandardRetryToken, errorInfo: RetryErrorInfo): Promise<StandardRetryToken>; +} diff --git a/node_modules/@smithy/util-retry/dist-types/ts3.4/DefaultRateLimiter.d.ts b/node_modules/@smithy/util-retry/dist-types/ts3.4/DefaultRateLimiter.d.ts new file mode 100644 index 00000000..9c239d62 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-types/ts3.4/DefaultRateLimiter.d.ts @@ -0,0 +1,49 @@ +import { RateLimiter } from "./types"; +/** + * @public + */ +export interface DefaultRateLimiterOptions { + beta?: number; + minCapacity?: number; + minFillRate?: number; + scaleConstant?: number; + smooth?: number; +} +/** + * @public + */ +export declare class DefaultRateLimiter implements RateLimiter { + /** + * Only used in testing.
+ */ + private static setTimeoutFn; + private beta; + private minCapacity; + private minFillRate; + private scaleConstant; + private smooth; + private currentCapacity; + private enabled; + private lastMaxRate; + private measuredTxRate; + private requestCount; + private fillRate; + private lastThrottleTime; + private lastTimestamp; + private lastTxRateBucket; + private maxCapacity; + private timeWindow; + constructor(options?: DefaultRateLimiterOptions); + private getCurrentTimeInSeconds; + getSendToken(): Promise<void>; + private acquireTokenBucket; + private refillTokenBucket; + updateClientSendingRate(response: any): void; + private calculateTimeWindow; + private cubicThrottle; + private cubicSuccess; + private enableTokenBucket; + private updateTokenBucketRate; + private updateMeasuredRate; + private getPrecise; +} diff --git a/node_modules/@smithy/util-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts b/node_modules/@smithy/util-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts new file mode 100644 index 00000000..c22f8b84 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts @@ -0,0 +1,26 @@ +import { Provider, RetryErrorInfo, RetryStrategyV2, StandardRetryToken } from "@smithy/types"; +/** + * @public + */ +export declare class StandardRetryStrategy implements RetryStrategyV2 { + private readonly maxAttempts; + readonly mode: string; + private capacity; + private readonly retryBackoffStrategy; + private readonly maxAttemptsProvider; + constructor(maxAttempts: number); + constructor(maxAttemptsProvider: Provider<number>); + acquireInitialRetryToken(retryTokenScope: string): Promise<StandardRetryToken>; + refreshRetryTokenForRetry(token: StandardRetryToken, errorInfo: RetryErrorInfo): Promise<StandardRetryToken>; + recordSuccess(token: StandardRetryToken): void; + /** + * @returns the current available retry capacity. + * + * This number decreases when retries are executed and refills when requests or retries succeed. + */ + getCapacity(): number; + private getMaxAttempts; + private shouldRetry; + private getCapacityCost; + private isRetryableError; +} diff --git a/node_modules/@smithy/util-retry/dist-types/ts3.4/config.d.ts b/node_modules/@smithy/util-retry/dist-types/ts3.4/config.d.ts new file mode 100644 index 00000000..6727a380 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-types/ts3.4/config.d.ts @@ -0,0 +1,20 @@ +/** + * @public + */ +export declare enum RETRY_MODES { + STANDARD = "standard", + ADAPTIVE = "adaptive" +} +/** + * @public + * + * The default value for how many HTTP requests an SDK should make for a + * single SDK operation invocation before giving up + */ +export declare const DEFAULT_MAX_ATTEMPTS = 3; +/** + * @public + * + * The default retry algorithm to use. + */ +export declare const DEFAULT_RETRY_MODE = RETRY_MODES.STANDARD; diff --git a/node_modules/@smithy/util-retry/dist-types/ts3.4/constants.d.ts b/node_modules/@smithy/util-retry/dist-types/ts3.4/constants.d.ts new file mode 100644 index 00000000..5c1a5ce2 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,59 @@ +/** + * @public + * + * The base number of milliseconds to use in calculating a suitable cool-down + * time when a retryable error is encountered. + */ +export declare const DEFAULT_RETRY_DELAY_BASE = 100; +/** + * @public + * + * The maximum amount of time (in milliseconds) that will be used as a delay + * between retry attempts.
+ */ +export declare const MAXIMUM_RETRY_DELAY: number; +/** + * @public + * + * The retry delay base (in milliseconds) to use when a throttling error is + * encountered. + */ +export declare const THROTTLING_RETRY_DELAY_BASE = 500; +/** + * @public + * + * Initial number of retry tokens in Retry Quota + */ +export declare const INITIAL_RETRY_TOKENS = 500; +/** + * @public + * + * The total amount of retry tokens to be decremented from retry token balance. + */ +export declare const RETRY_COST = 5; +/** + * @public + * + * The total amount of retry tokens to be decremented from retry token balance + * when a throttling error is encountered. + */ +export declare const TIMEOUT_RETRY_COST = 10; +/** + * @public + * + * The total amount of retry token to be incremented from retry token balance + * if an SDK operation invocation succeeds without requiring a retry request. + */ +export declare const NO_RETRY_INCREMENT = 1; +/** + * @public + * + * Header name for SDK invocation ID + */ +export declare const INVOCATION_ID_HEADER = "amz-sdk-invocation-id"; +/** + * @public + * + * Header name for request retry information. + */ +export declare const REQUEST_HEADER = "amz-sdk-request"; diff --git a/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryBackoffStrategy.d.ts b/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryBackoffStrategy.d.ts new file mode 100644 index 00000000..1d632caa --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryBackoffStrategy.d.ts @@ -0,0 +1,5 @@ +import { StandardRetryBackoffStrategy } from "@smithy/types"; +/** + * @internal + */ +export declare const getDefaultRetryBackoffStrategy: () => StandardRetryBackoffStrategy; diff --git a/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryToken.d.ts b/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryToken.d.ts new file mode 100644 index 00000000..fd4b75e6 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryToken.d.ts @@ -0,0 +1,9 @@ +import { StandardRetryToken } from "@smithy/types"; +/** + * @internal + */ +export declare const createDefaultRetryToken: ({ retryDelay, retryCount, retryCost, }: { + retryDelay: number; + retryCount: number; + retryCost?: number | undefined; +}) => StandardRetryToken; diff --git a/node_modules/@smithy/util-retry/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/util-retry/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..de9af3d9 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-types/ts3.4/index.d.ts @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./ConfiguredRetryStrategy"; +export * from "./DefaultRateLimiter"; +export * from "./StandardRetryStrategy"; +export * from "./config"; +export * from "./constants"; +export * from "./types"; diff --git a/node_modules/@smithy/util-retry/dist-types/ts3.4/types.d.ts b/node_modules/@smithy/util-retry/dist-types/ts3.4/types.d.ts new file mode 100644 index 00000000..5a20c01b --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-types/ts3.4/types.d.ts @@ -0,0 +1,19 @@ +/** + * @internal + */ +export interface RateLimiter { + /** + * If there is sufficient capacity (tokens) available, it immediately returns. + * If there is not sufficient capacity, it will either sleep a certain amount + * of time until the rate limiter can retrieve a token from its token bucket + * or raise an exception indicating there is insufficient capacity. 
+ */ + getSendToken: () => Promise<void>; + /** + * Updates the client sending rate based on response. + * If the response was successful, the capacity and fill rate are increased. + * If the response was a throttling response, the capacity and fill rate are + * decreased. Transient errors do not affect the rate limiter. + */ + updateClientSendingRate: (response: any) => void; +} diff --git a/node_modules/@smithy/util-retry/dist-types/types.d.ts b/node_modules/@smithy/util-retry/dist-types/types.d.ts new file mode 100644 index 00000000..b3f2bd14 --- /dev/null +++ b/node_modules/@smithy/util-retry/dist-types/types.d.ts @@ -0,0 +1,19 @@ +/** + * @internal + */ +export interface RateLimiter { + /** + * If there is sufficient capacity (tokens) available, it immediately returns. + * If there is not sufficient capacity, it will either sleep a certain amount + * of time until the rate limiter can retrieve a token from its token bucket + * or raise an exception indicating there is insufficient capacity. + */ + getSendToken: () => Promise<void>; + /** + * Updates the client sending rate based on response. + * If the response was successful, the capacity and fill rate are increased. + * If the response was a throttling response, the capacity and fill rate are + * decreased. Transient errors do not affect the rate limiter. + */ + updateClientSendingRate: (response: any) => void; +} diff --git a/node_modules/@smithy/util-retry/package.json b/node_modules/@smithy/util-retry/package.json new file mode 100644 index 00000000..63797270 --- /dev/null +++ b/node_modules/@smithy/util-retry/package.json @@ -0,0 +1,68 @@ +{ + "name": "@smithy/util-retry", + "version": "4.0.3", + "description": "Shared retry utilities to be used in middleware packages.", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-retry", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "retry" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/service-error-classification": "^4.0.3", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "types/*": [ + "types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/master/packages/util-retry", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git",
"directory": "packages/util-retry" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/util-stream/LICENSE b/node_modules/@smithy/util-stream/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/util-stream/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@smithy/util-stream/README.md b/node_modules/@smithy/util-stream/README.md new file mode 100644 index 00000000..6fcd9f6e --- /dev/null +++ b/node_modules/@smithy/util-stream/README.md @@ -0,0 +1,6 @@ +# @smithy/util-stream + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-stream/latest.svg)](https://www.npmjs.com/package/@smithy/util-stream) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-stream.svg)](https://www.npmjs.com/package/@smithy/util-stream) + +Package with utilities to operate on streams. diff --git a/node_modules/@smithy/util-stream/dist-cjs/ByteArrayCollector.js b/node_modules/@smithy/util-stream/dist-cjs/ByteArrayCollector.js new file mode 100644 index 00000000..ea8baaca --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-cjs/ByteArrayCollector.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ByteArrayCollector = void 0; +class ByteArrayCollector { + constructor(allocByteArray) { + this.allocByteArray = allocByteArray; + this.byteLength = 0; + this.byteArrays = []; + } + push(byteArray) { + this.byteArrays.push(byteArray); + this.byteLength += byteArray.byteLength; + } + flush() { + if (this.byteArrays.length === 1) { + const bytes = this.byteArrays[0]; + this.reset(); + return bytes; + } + const aggregation = this.allocByteArray(this.byteLength); + let cursor = 0; + for (let i = 0; i < this.byteArrays.length; ++i) { + const bytes = this.byteArrays[i]; + aggregation.set(bytes, cursor); + cursor += bytes.byteLength; + } + this.reset(); + return aggregation; + } + reset() { + this.byteArrays = []; + this.byteLength = 0; + } +} +exports.ByteArrayCollector = ByteArrayCollector; diff --git a/node_modules/@smithy/util-stream/dist-cjs/blob/Uint8ArrayBlobAdapter.js b/node_modules/@smithy/util-stream/dist-cjs/blob/Uint8ArrayBlobAdapter.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-cjs/blob/Uint8ArrayBlobAdapter.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-stream/dist-cjs/blob/transforms.js b/node_modules/@smithy/util-stream/dist-cjs/blob/transforms.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-cjs/blob/transforms.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.browser.js b/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.browser.js new file mode 100644 index 00000000..b73363a5 --- /dev/null +++ 
b/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.browser.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ChecksumStream = void 0; +const ReadableStreamRef = typeof ReadableStream === "function" ? ReadableStream : function () { }; +class ChecksumStream extends ReadableStreamRef { +} +exports.ChecksumStream = ChecksumStream; diff --git a/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.js b/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.js new file mode 100644 index 00000000..92d0bc0f --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.js @@ -0,0 +1,49 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ChecksumStream = void 0; +const util_base64_1 = require("@smithy/util-base64"); +const stream_1 = require("stream"); +class ChecksumStream extends stream_1.Duplex { + constructor({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) { + var _a, _b; + super(); + if (typeof source.pipe === "function") { + this.source = source; + } + else { + throw new Error(`@smithy/util-stream: unsupported source type ${(_b = (_a = source === null || source === void 0 ? void 0 : source.constructor) === null || _a === void 0 ? void 0 : _a.name) !== null && _b !== void 0 ? _b : source} in ChecksumStream.`); + } + this.base64Encoder = base64Encoder !== null && base64Encoder !== void 0 ? base64Encoder : util_base64_1.toBase64; + this.expectedChecksum = expectedChecksum; + this.checksum = checksum; + this.checksumSourceLocation = checksumSourceLocation; + this.source.pipe(this); + } + _read(size) { } + _write(chunk, encoding, callback) { + try { + this.checksum.update(chunk); + this.push(chunk); + } + catch (e) { + return callback(e); + } + return callback(); + } + async _final(callback) { + try { + const digest = await this.checksum.digest(); + const received = this.base64Encoder(digest); + if (this.expectedChecksum !== received) { + return callback(new Error(`Checksum mismatch: expected "${this.expectedChecksum}" but received "${received}"` + + ` in response header "${this.checksumSourceLocation}".`)); + } + } + catch (e) { + return callback(e); + } + this.push(null); + return callback(); + } +} +exports.ChecksumStream = ChecksumStream; diff --git a/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.browser.js b/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.browser.js new file mode 100644 index 00000000..2f6cf12a --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.browser.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createChecksumStream = void 0; +const util_base64_1 = require("@smithy/util-base64"); +const stream_type_check_1 = require("../stream-type-check"); +const ChecksumStream_browser_1 = require("./ChecksumStream.browser"); +const createChecksumStream = ({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) => { + var _a, _b; + if (!(0, stream_type_check_1.isReadableStream)(source)) { + throw new Error(`@smithy/util-stream: unsupported source type ${(_b = (_a = source === null || source === void 0 ? void 0 : source.constructor) === null || _a === void 0 ? void 0 : _a.name) !== null && _b !== void 0 ? _b : source} in ChecksumStream.`); + } + const encoder = base64Encoder !== null && base64Encoder !== void 0 ? 
base64Encoder : util_base64_1.toBase64; + if (typeof TransformStream !== "function") { + throw new Error("@smithy/util-stream: unable to instantiate ChecksumStream because API unavailable: ReadableStream/TransformStream."); + } + const transform = new TransformStream({ + start() { }, + async transform(chunk, controller) { + checksum.update(chunk); + controller.enqueue(chunk); + }, + async flush(controller) { + const digest = await checksum.digest(); + const received = encoder(digest); + if (expectedChecksum !== received) { + const error = new Error(`Checksum mismatch: expected "${expectedChecksum}" but received "${received}"` + + ` in response header "${checksumSourceLocation}".`); + controller.error(error); + } + else { + controller.terminate(); + } + }, + }); + source.pipeThrough(transform); + const readable = transform.readable; + Object.setPrototypeOf(readable, ChecksumStream_browser_1.ChecksumStream.prototype); + return readable; +}; +exports.createChecksumStream = createChecksumStream; diff --git a/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.js b/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.js new file mode 100644 index 00000000..57e2a2fe --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createChecksumStream = void 0; +const stream_type_check_1 = require("../stream-type-check"); +const ChecksumStream_1 = require("./ChecksumStream"); +const createChecksumStream_browser_1 = require("./createChecksumStream.browser"); +function createChecksumStream(init) { + if (typeof ReadableStream === "function" && (0, stream_type_check_1.isReadableStream)(init.source)) { + return (0, createChecksumStream_browser_1.createChecksumStream)(init); + } + return new ChecksumStream_1.ChecksumStream(init); +} +exports.createChecksumStream = createChecksumStream; diff --git a/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadable.js b/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadable.js new file mode 100644 index 00000000..4c10847a --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadable.js @@ -0,0 +1,61 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createBufferedReadable = void 0; +const node_stream_1 = require("node:stream"); +const ByteArrayCollector_1 = require("./ByteArrayCollector"); +const createBufferedReadableStream_1 = require("./createBufferedReadableStream"); +const stream_type_check_1 = require("./stream-type-check"); +function createBufferedReadable(upstream, size, logger) { + if ((0, stream_type_check_1.isReadableStream)(upstream)) { + return (0, createBufferedReadableStream_1.createBufferedReadableStream)(upstream, size, logger); + } + const downstream = new node_stream_1.Readable({ read() { } }); + let streamBufferingLoggedWarning = false; + let bytesSeen = 0; + const buffers = [ + "", + new ByteArrayCollector_1.ByteArrayCollector((size) => new Uint8Array(size)), + new ByteArrayCollector_1.ByteArrayCollector((size) => Buffer.from(new Uint8Array(size))), + ]; + let mode = -1; + upstream.on("data", (chunk) => { + const chunkMode = (0, createBufferedReadableStream_1.modeOf)(chunk, true); + if (mode !== chunkMode) { + if (mode >= 0) { + downstream.push((0, createBufferedReadableStream_1.flush)(buffers, mode)); + } + mode = chunkMode; + } + if (mode === -1) { + downstream.push(chunk); + return; + } + 
const chunkSize = (0, createBufferedReadableStream_1.sizeOf)(chunk); + bytesSeen += chunkSize; + const bufferSize = (0, createBufferedReadableStream_1.sizeOf)(buffers[mode]); + if (chunkSize >= size && bufferSize === 0) { + downstream.push(chunk); + } + else { + const newSize = (0, createBufferedReadableStream_1.merge)(buffers, mode, chunk); + if (!streamBufferingLoggedWarning && bytesSeen > size * 2) { + streamBufferingLoggedWarning = true; + logger === null || logger === void 0 ? void 0 : logger.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`); + } + if (newSize >= size) { + downstream.push((0, createBufferedReadableStream_1.flush)(buffers, mode)); + } + } + }); + upstream.on("end", () => { + if (mode !== -1) { + const remainder = (0, createBufferedReadableStream_1.flush)(buffers, mode); + if ((0, createBufferedReadableStream_1.sizeOf)(remainder) > 0) { + downstream.push(remainder); + } + } + downstream.push(null); + }); + return downstream; +} +exports.createBufferedReadable = createBufferedReadable; diff --git a/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadableStream.js b/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadableStream.js new file mode 100644 index 00000000..2cd72aa0 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadableStream.js @@ -0,0 +1,104 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.modeOf = exports.sizeOf = exports.flush = exports.merge = exports.createBufferedReadable = exports.createBufferedReadableStream = void 0; +const ByteArrayCollector_1 = require("./ByteArrayCollector"); +function createBufferedReadableStream(upstream, size, logger) { + const reader = upstream.getReader(); + let streamBufferingLoggedWarning = false; + let bytesSeen = 0; + const buffers = ["", new ByteArrayCollector_1.ByteArrayCollector((size) => new Uint8Array(size))]; + let mode = -1; + const pull = async (controller) => { + const { value, done } = await reader.read(); + const chunk = value; + if (done) { + if (mode !== -1) { + const remainder = flush(buffers, mode); + if (sizeOf(remainder) > 0) { + controller.enqueue(remainder); + } + } + controller.close(); + } + else { + const chunkMode = modeOf(chunk, false); + if (mode !== chunkMode) { + if (mode >= 0) { + controller.enqueue(flush(buffers, mode)); + } + mode = chunkMode; + } + if (mode === -1) { + controller.enqueue(chunk); + return; + } + const chunkSize = sizeOf(chunk); + bytesSeen += chunkSize; + const bufferSize = sizeOf(buffers[mode]); + if (chunkSize >= size && bufferSize === 0) { + controller.enqueue(chunk); + } + else { + const newSize = merge(buffers, mode, chunk); + if (!streamBufferingLoggedWarning && bytesSeen > size * 2) { + streamBufferingLoggedWarning = true; + logger === null || logger === void 0 ? 
void 0 : logger.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`); + } + if (newSize >= size) { + controller.enqueue(flush(buffers, mode)); + } + else { + await pull(controller); + } + } + } + }; + return new ReadableStream({ + pull, + }); +} +exports.createBufferedReadableStream = createBufferedReadableStream; +exports.createBufferedReadable = createBufferedReadableStream; +function merge(buffers, mode, chunk) { + switch (mode) { + case 0: + buffers[0] += chunk; + return sizeOf(buffers[0]); + case 1: + case 2: + buffers[mode].push(chunk); + return sizeOf(buffers[mode]); + } +} +exports.merge = merge; +function flush(buffers, mode) { + switch (mode) { + case 0: + const s = buffers[0]; + buffers[0] = ""; + return s; + case 1: + case 2: + return buffers[mode].flush(); + } + throw new Error(`@smithy/util-stream - invalid index ${mode} given to flush()`); +} +exports.flush = flush; +function sizeOf(chunk) { + var _a, _b; + return (_b = (_a = chunk === null || chunk === void 0 ? void 0 : chunk.byteLength) !== null && _a !== void 0 ? _a : chunk === null || chunk === void 0 ? void 0 : chunk.length) !== null && _b !== void 0 ? _b : 0; +} +exports.sizeOf = sizeOf; +function modeOf(chunk, allowBuffer = true) { + if (allowBuffer && typeof Buffer !== "undefined" && chunk instanceof Buffer) { + return 2; + } + if (chunk instanceof Uint8Array) { + return 1; + } + if (typeof chunk === "string") { + return 0; + } + return -1; +} +exports.modeOf = modeOf; diff --git a/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.browser.js b/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.browser.js new file mode 100644 index 00000000..d8e540c2 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.browser.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getAwsChunkedEncodingStream = void 0; +const getAwsChunkedEncodingStream = (readableStream, options) => { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + bodyLengthChecker !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? 
streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const reader = readableStream.getReader(); + return new ReadableStream({ + async pull(controller) { + const { value, done } = await reader.read(); + if (done) { + controller.enqueue(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + controller.enqueue(`${checksumLocationName}:${checksum}\r\n`); + controller.enqueue(`\r\n`); + } + controller.close(); + } + else { + controller.enqueue(`${(bodyLengthChecker(value) || 0).toString(16)}\r\n${value}\r\n`); + } + }, + }); +}; +exports.getAwsChunkedEncodingStream = getAwsChunkedEncodingStream; diff --git a/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.js b/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.js new file mode 100644 index 00000000..4f3f9e73 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getAwsChunkedEncodingStream = void 0; +const stream_1 = require("stream"); +const getAwsChunkedEncodingStream = (readableStream, options) => { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const awsChunkedEncodingStream = new stream_1.Readable({ read: () => { } }); + readableStream.on("data", (data) => { + const length = bodyLengthChecker(data) || 0; + awsChunkedEncodingStream.push(`${length.toString(16)}\r\n`); + awsChunkedEncodingStream.push(data); + awsChunkedEncodingStream.push("\r\n"); + }); + readableStream.on("end", async () => { + awsChunkedEncodingStream.push(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + awsChunkedEncodingStream.push(`${checksumLocationName}:${checksum}\r\n`); + awsChunkedEncodingStream.push(`\r\n`); + } + awsChunkedEncodingStream.push(null); + }); + return awsChunkedEncodingStream; +}; +exports.getAwsChunkedEncodingStream = getAwsChunkedEncodingStream; diff --git a/node_modules/@smithy/util-stream/dist-cjs/headStream.browser.js b/node_modules/@smithy/util-stream/dist-cjs/headStream.browser.js new file mode 100644 index 00000000..38512c11 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-cjs/headStream.browser.js @@ -0,0 +1,36 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.headStream = void 0; +async function headStream(stream, bytes) { + var _a; + let byteLengthCounter = 0; + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + byteLengthCounter += (_a = value === null || value === void 0 ? void 0 : value.byteLength) !== null && _a !== void 0 ? 
_a : 0; + } + if (byteLengthCounter >= bytes) { + break; + } + isDone = done; + } + reader.releaseLock(); + const collected = new Uint8Array(Math.min(bytes, byteLengthCounter)); + let offset = 0; + for (const chunk of chunks) { + if (chunk.byteLength > collected.byteLength - offset) { + collected.set(chunk.subarray(0, collected.byteLength - offset), offset); + break; + } + else { + collected.set(chunk, offset); + } + offset += chunk.length; + } + return collected; +} +exports.headStream = headStream; diff --git a/node_modules/@smithy/util-stream/dist-cjs/headStream.js b/node_modules/@smithy/util-stream/dist-cjs/headStream.js new file mode 100644 index 00000000..86103b3c --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-cjs/headStream.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.headStream = void 0; +const stream_1 = require("stream"); +const headStream_browser_1 = require("./headStream.browser"); +const stream_type_check_1 = require("./stream-type-check"); +const headStream = (stream, bytes) => { + if ((0, stream_type_check_1.isReadableStream)(stream)) { + return (0, headStream_browser_1.headStream)(stream, bytes); + } + return new Promise((resolve, reject) => { + const collector = new Collector(); + collector.limit = bytes; + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function () { + const bytes = new Uint8Array(Buffer.concat(this.buffers)); + resolve(bytes); + }); + }); +}; +exports.headStream = headStream; +class Collector extends stream_1.Writable { + constructor() { + super(...arguments); + this.buffers = []; + this.limit = Infinity; + this.bytesBuffered = 0; + } + _write(chunk, encoding, callback) { + var _a; + this.buffers.push(chunk); + this.bytesBuffered += (_a = chunk.byteLength) !== null && _a !== void 0 ? 
_a : 0; + if (this.bytesBuffered >= this.limit) { + const excess = this.bytesBuffered - this.limit; + const tailBuffer = this.buffers[this.buffers.length - 1]; + this.buffers[this.buffers.length - 1] = tailBuffer.subarray(0, tailBuffer.byteLength - excess); + this.emit("finish"); + } + callback(); + } +} diff --git a/node_modules/@smithy/util-stream/dist-cjs/index.js b/node_modules/@smithy/util-stream/dist-cjs/index.js new file mode 100644 index 00000000..d42fe104 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-cjs/index.js @@ -0,0 +1,103 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Uint8ArrayBlobAdapter: () => Uint8ArrayBlobAdapter +}); +module.exports = __toCommonJS(src_exports); + +// src/blob/transforms.ts +var import_util_base64 = require("@smithy/util-base64"); +var import_util_utf8 = require("@smithy/util-utf8"); +function transformToString(payload, encoding = "utf-8") { + if (encoding === "base64") { + return (0, import_util_base64.toBase64)(payload); + } + return (0, import_util_utf8.toUtf8)(payload); +} +__name(transformToString, "transformToString"); +function transformFromString(str, encoding) { + if (encoding === "base64") { + return Uint8ArrayBlobAdapter.mutate((0, import_util_base64.fromBase64)(str)); + } + return Uint8ArrayBlobAdapter.mutate((0, import_util_utf8.fromUtf8)(str)); +} +__name(transformFromString, "transformFromString"); + +// src/blob/Uint8ArrayBlobAdapter.ts +var Uint8ArrayBlobAdapter = class _Uint8ArrayBlobAdapter extends Uint8Array { + static { + __name(this, "Uint8ArrayBlobAdapter"); + } + /** + * @param source - such as a string or Stream. + * @returns a new Uint8ArrayBlobAdapter extending Uint8Array. + */ + static fromString(source, encoding = "utf-8") { + switch (typeof source) { + case "string": + return transformFromString(source, encoding); + default: + throw new Error(`Unsupported conversion from ${typeof source} to Uint8ArrayBlobAdapter.`); + } + } + /** + * @param source - Uint8Array to be mutated. + * @returns the same Uint8Array but with prototype switched to Uint8ArrayBlobAdapter. + */ + static mutate(source) { + Object.setPrototypeOf(source, _Uint8ArrayBlobAdapter.prototype); + return source; + } + /** + * @param encoding - default 'utf-8'. + * @returns the blob as string. 
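+   *
+   * Note: "base64" base64-encodes the bytes; any other encoding value falls
+   * back to UTF-8 decoding (see transformToString above). For example:
+   *   Uint8ArrayBlobAdapter.fromString("hello").transformToString("base64"); // "aGVsbG8="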
+ */ + transformToString(encoding = "utf-8") { + return transformToString(this, encoding); + } +}; + +// src/index.ts +__reExport(src_exports, require("./checksum/ChecksumStream"), module.exports); +__reExport(src_exports, require("./checksum/createChecksumStream"), module.exports); +__reExport(src_exports, require("././createBufferedReadable"), module.exports); +__reExport(src_exports, require("././getAwsChunkedEncodingStream"), module.exports); +__reExport(src_exports, require("././headStream"), module.exports); +__reExport(src_exports, require("././sdk-stream-mixin"), module.exports); +__reExport(src_exports, require("././splitStream"), module.exports); +__reExport(src_exports, require("././stream-type-check"), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + Uint8ArrayBlobAdapter, + ChecksumStream, + createChecksumStream, + createBufferedReadable, + getAwsChunkedEncodingStream, + headStream, + sdkStreamMixin, + splitStream, + isReadableStream, + isBlob +}); + diff --git a/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.browser.js b/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.browser.js new file mode 100644 index 00000000..9309af1c --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.browser.js @@ -0,0 +1,69 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sdkStreamMixin = void 0; +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const util_base64_1 = require("@smithy/util-base64"); +const util_hex_encoding_1 = require("@smithy/util-hex-encoding"); +const util_utf8_1 = require("@smithy/util-utf8"); +const stream_type_check_1 = require("./stream-type-check"); +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +const sdkStreamMixin = (stream) => { + var _a, _b; + if (!isBlobInstance(stream) && !(0, stream_type_check_1.isReadableStream)(stream)) { + const name = ((_b = (_a = stream === null || stream === void 0 ? void 0 : stream.__proto__) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? void 0 : _b.name) || stream; + throw new Error(`Unexpected stream implementation, expect Blob or ReadableStream, got ${name}`); + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await (0, fetch_http_handler_1.streamCollector)(stream); + }; + const blobToWebStream = (blob) => { + if (typeof blob.stream !== "function") { + throw new Error("Cannot transform payload Blob to web stream. 
Please make sure the Blob.stream() is polyfilled.\n" + + "If you are using React Native, this API is not yet supported, see: https://react-native.canny.io/feature-requests/p/fetch-streaming-body"); + } + return blob.stream(); + }; + return Object.assign(stream, { + transformToByteArray: transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === "base64") { + return (0, util_base64_1.toBase64)(buf); + } + else if (encoding === "hex") { + return (0, util_hex_encoding_1.toHex)(buf); + } + else if (encoding === undefined || encoding === "utf8" || encoding === "utf-8") { + return (0, util_utf8_1.toUtf8)(buf); + } + else if (typeof TextDecoder === "function") { + return new TextDecoder(encoding).decode(buf); + } + else { + throw new Error("TextDecoder is not available, please make sure polyfill is provided."); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + if (isBlobInstance(stream)) { + return blobToWebStream(stream); + } + else if ((0, stream_type_check_1.isReadableStream)(stream)) { + return stream; + } + else { + throw new Error(`Cannot transform payload to web stream, got ${stream}`); + } + }, + }); +}; +exports.sdkStreamMixin = sdkStreamMixin; +const isBlobInstance = (stream) => typeof Blob === "function" && stream instanceof Blob; diff --git a/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.js b/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.js new file mode 100644 index 00000000..0817eac7 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.js @@ -0,0 +1,55 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sdkStreamMixin = void 0; +const node_http_handler_1 = require("@smithy/node-http-handler"); +const util_buffer_from_1 = require("@smithy/util-buffer-from"); +const stream_1 = require("stream"); +const sdk_stream_mixin_browser_1 = require("./sdk-stream-mixin.browser"); +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +const sdkStreamMixin = (stream) => { + var _a, _b; + if (!(stream instanceof stream_1.Readable)) { + try { + return (0, sdk_stream_mixin_browser_1.sdkStreamMixin)(stream); + } + catch (e) { + const name = ((_b = (_a = stream === null || stream === void 0 ? void 0 : stream.__proto__) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? 
void 0 : _b.name) || stream; + throw new Error(`Unexpected stream implementation, expect Stream.Readable instance, got ${name}`); + } + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await (0, node_http_handler_1.streamCollector)(stream); + }; + return Object.assign(stream, { + transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === undefined || Buffer.isEncoding(encoding)) { + return (0, util_buffer_from_1.fromArrayBuffer)(buf.buffer, buf.byteOffset, buf.byteLength).toString(encoding); + } + else { + const decoder = new TextDecoder(encoding); + return decoder.decode(buf); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + if (stream.readableFlowing !== null) { + throw new Error("The stream has been consumed by other callbacks."); + } + if (typeof stream_1.Readable.toWeb !== "function") { + throw new Error("Readable.toWeb() is not supported. Please ensure a polyfill is available."); + } + transformed = true; + return stream_1.Readable.toWeb(stream); + }, + }); +}; +exports.sdkStreamMixin = sdkStreamMixin; diff --git a/node_modules/@smithy/util-stream/dist-cjs/splitStream.browser.js b/node_modules/@smithy/util-stream/dist-cjs/splitStream.browser.js new file mode 100644 index 00000000..eb890ccb --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-cjs/splitStream.browser.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.splitStream = void 0; +async function splitStream(stream) { + if (typeof stream.stream === "function") { + stream = stream.stream(); + } + const readableStream = stream; + return readableStream.tee(); +} +exports.splitStream = splitStream; diff --git a/node_modules/@smithy/util-stream/dist-cjs/splitStream.js b/node_modules/@smithy/util-stream/dist-cjs/splitStream.js new file mode 100644 index 00000000..c55b6284 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-cjs/splitStream.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.splitStream = void 0; +const stream_1 = require("stream"); +const splitStream_browser_1 = require("./splitStream.browser"); +const stream_type_check_1 = require("./stream-type-check"); +async function splitStream(stream) { + if ((0, stream_type_check_1.isReadableStream)(stream) || (0, stream_type_check_1.isBlob)(stream)) { + return (0, splitStream_browser_1.splitStream)(stream); + } + const stream1 = new stream_1.PassThrough(); + const stream2 = new stream_1.PassThrough(); + stream.pipe(stream1); + stream.pipe(stream2); + return [stream1, stream2]; +} +exports.splitStream = splitStream; diff --git a/node_modules/@smithy/util-stream/dist-cjs/stream-type-check.js b/node_modules/@smithy/util-stream/dist-cjs/stream-type-check.js new file mode 100644 index 00000000..a4a61387 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-cjs/stream-type-check.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isBlob = exports.isReadableStream = void 0; +const isReadableStream = (stream) => { + var _a; + return typeof ReadableStream === "function" && + (((_a = stream === null || stream === void 0 ? void 0 : stream.constructor) === null || _a === void 0 ? 
void 0 : _a.name) === ReadableStream.name || stream instanceof ReadableStream); +}; +exports.isReadableStream = isReadableStream; +const isBlob = (blob) => { + var _a; + return typeof Blob === "function" && (((_a = blob === null || blob === void 0 ? void 0 : blob.constructor) === null || _a === void 0 ? void 0 : _a.name) === Blob.name || blob instanceof Blob); +}; +exports.isBlob = isBlob; diff --git a/node_modules/@smithy/util-stream/dist-es/ByteArrayCollector.js b/node_modules/@smithy/util-stream/dist-es/ByteArrayCollector.js new file mode 100644 index 00000000..39af48fa --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-es/ByteArrayCollector.js @@ -0,0 +1,31 @@ +export class ByteArrayCollector { + constructor(allocByteArray) { + this.allocByteArray = allocByteArray; + this.byteLength = 0; + this.byteArrays = []; + } + push(byteArray) { + this.byteArrays.push(byteArray); + this.byteLength += byteArray.byteLength; + } + flush() { + if (this.byteArrays.length === 1) { + const bytes = this.byteArrays[0]; + this.reset(); + return bytes; + } + const aggregation = this.allocByteArray(this.byteLength); + let cursor = 0; + for (let i = 0; i < this.byteArrays.length; ++i) { + const bytes = this.byteArrays[i]; + aggregation.set(bytes, cursor); + cursor += bytes.byteLength; + } + this.reset(); + return aggregation; + } + reset() { + this.byteArrays = []; + this.byteLength = 0; + } +} diff --git a/node_modules/@smithy/util-stream/dist-es/blob/Uint8ArrayBlobAdapter.js b/node_modules/@smithy/util-stream/dist-es/blob/Uint8ArrayBlobAdapter.js new file mode 100644 index 00000000..41746b14 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-es/blob/Uint8ArrayBlobAdapter.js @@ -0,0 +1,18 @@ +import { transformFromString, transformToString } from "./transforms"; +export class Uint8ArrayBlobAdapter extends Uint8Array { + static fromString(source, encoding = "utf-8") { + switch (typeof source) { + case "string": + return transformFromString(source, encoding); + default: + throw new Error(`Unsupported conversion from ${typeof source} to Uint8ArrayBlobAdapter.`); + } + } + static mutate(source) { + Object.setPrototypeOf(source, Uint8ArrayBlobAdapter.prototype); + return source; + } + transformToString(encoding = "utf-8") { + return transformToString(this, encoding); + } +} diff --git a/node_modules/@smithy/util-stream/dist-es/blob/transforms.js b/node_modules/@smithy/util-stream/dist-es/blob/transforms.js new file mode 100644 index 00000000..0d1f74a7 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-es/blob/transforms.js @@ -0,0 +1,15 @@ +import { fromBase64, toBase64 } from "@smithy/util-base64"; +import { fromUtf8, toUtf8 } from "@smithy/util-utf8"; +import { Uint8ArrayBlobAdapter } from "./Uint8ArrayBlobAdapter"; +export function transformToString(payload, encoding = "utf-8") { + if (encoding === "base64") { + return toBase64(payload); + } + return toUtf8(payload); +} +export function transformFromString(str, encoding) { + if (encoding === "base64") { + return Uint8ArrayBlobAdapter.mutate(fromBase64(str)); + } + return Uint8ArrayBlobAdapter.mutate(fromUtf8(str)); +} diff --git a/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.browser.js b/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.browser.js new file mode 100644 index 00000000..afcf529c --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.browser.js @@ -0,0 +1,3 @@ +const ReadableStreamRef = typeof ReadableStream === "function" ? 
ReadableStream : function () { }; +export class ChecksumStream extends ReadableStreamRef { +} diff --git a/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.js b/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.js new file mode 100644 index 00000000..e623a09d --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.js @@ -0,0 +1,44 @@ +import { toBase64 } from "@smithy/util-base64"; +import { Duplex } from "stream"; +export class ChecksumStream extends Duplex { + constructor({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) { + super(); + if (typeof source.pipe === "function") { + this.source = source; + } + else { + throw new Error(`@smithy/util-stream: unsupported source type ${source?.constructor?.name ?? source} in ChecksumStream.`); + } + this.base64Encoder = base64Encoder ?? toBase64; + this.expectedChecksum = expectedChecksum; + this.checksum = checksum; + this.checksumSourceLocation = checksumSourceLocation; + this.source.pipe(this); + } + _read(size) { } + _write(chunk, encoding, callback) { + try { + this.checksum.update(chunk); + this.push(chunk); + } + catch (e) { + return callback(e); + } + return callback(); + } + async _final(callback) { + try { + const digest = await this.checksum.digest(); + const received = this.base64Encoder(digest); + if (this.expectedChecksum !== received) { + return callback(new Error(`Checksum mismatch: expected "${this.expectedChecksum}" but received "${received}"` + + ` in response header "${this.checksumSourceLocation}".`)); + } + } + catch (e) { + return callback(e); + } + this.push(null); + return callback(); + } +} diff --git a/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.browser.js b/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.browser.js new file mode 100644 index 00000000..6a41c121 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.browser.js @@ -0,0 +1,35 @@ +import { toBase64 } from "@smithy/util-base64"; +import { isReadableStream } from "../stream-type-check"; +import { ChecksumStream } from "./ChecksumStream.browser"; +export const createChecksumStream = ({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) => { + if (!isReadableStream(source)) { + throw new Error(`@smithy/util-stream: unsupported source type ${source?.constructor?.name ?? source} in ChecksumStream.`); + } + const encoder = base64Encoder ?? 
toBase64; + if (typeof TransformStream !== "function") { + throw new Error("@smithy/util-stream: unable to instantiate ChecksumStream because API unavailable: ReadableStream/TransformStream."); + } + const transform = new TransformStream({ + start() { }, + async transform(chunk, controller) { + checksum.update(chunk); + controller.enqueue(chunk); + }, + async flush(controller) { + const digest = await checksum.digest(); + const received = encoder(digest); + if (expectedChecksum !== received) { + const error = new Error(`Checksum mismatch: expected "${expectedChecksum}" but received "${received}"` + + ` in response header "${checksumSourceLocation}".`); + controller.error(error); + } + else { + controller.terminate(); + } + }, + }); + source.pipeThrough(transform); + const readable = transform.readable; + Object.setPrototypeOf(readable, ChecksumStream.prototype); + return readable; +}; diff --git a/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.js b/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.js new file mode 100644 index 00000000..d205b82b --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.js @@ -0,0 +1,9 @@ +import { isReadableStream } from "../stream-type-check"; +import { ChecksumStream } from "./ChecksumStream"; +import { createChecksumStream as createChecksumStreamWeb } from "./createChecksumStream.browser"; +export function createChecksumStream(init) { + if (typeof ReadableStream === "function" && isReadableStream(init.source)) { + return createChecksumStreamWeb(init); + } + return new ChecksumStream(init); +} diff --git a/node_modules/@smithy/util-stream/dist-es/createBufferedReadable.js b/node_modules/@smithy/util-stream/dist-es/createBufferedReadable.js new file mode 100644 index 00000000..0e3bbce2 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-es/createBufferedReadable.js @@ -0,0 +1,57 @@ +import { Readable } from "node:stream"; +import { ByteArrayCollector } from "./ByteArrayCollector"; +import { createBufferedReadableStream, flush, merge, modeOf, sizeOf } from "./createBufferedReadableStream"; +import { isReadableStream } from "./stream-type-check"; +export function createBufferedReadable(upstream, size, logger) { + if (isReadableStream(upstream)) { + return createBufferedReadableStream(upstream, size, logger); + } + const downstream = new Readable({ read() { } }); + let streamBufferingLoggedWarning = false; + let bytesSeen = 0; + const buffers = [ + "", + new ByteArrayCollector((size) => new Uint8Array(size)), + new ByteArrayCollector((size) => Buffer.from(new Uint8Array(size))), + ]; + let mode = -1; + upstream.on("data", (chunk) => { + const chunkMode = modeOf(chunk, true); + if (mode !== chunkMode) { + if (mode >= 0) { + downstream.push(flush(buffers, mode)); + } + mode = chunkMode; + } + if (mode === -1) { + downstream.push(chunk); + return; + } + const chunkSize = sizeOf(chunk); + bytesSeen += chunkSize; + const bufferSize = sizeOf(buffers[mode]); + if (chunkSize >= size && bufferSize === 0) { + downstream.push(chunk); + } + else { + const newSize = merge(buffers, mode, chunk); + if (!streamBufferingLoggedWarning && bytesSeen > size * 2) { + streamBufferingLoggedWarning = true; + logger?.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`); + } + if (newSize >= size) { + downstream.push(flush(buffers, mode)); + } + } + }); + upstream.on("end", () => { + if (mode !== -1) { + const remainder = flush(buffers, 
mode); + if (sizeOf(remainder) > 0) { + downstream.push(remainder); + } + } + downstream.push(null); + }); + return downstream; +} diff --git a/node_modules/@smithy/util-stream/dist-es/createBufferedReadableStream.js b/node_modules/@smithy/util-stream/dist-es/createBufferedReadableStream.js new file mode 100644 index 00000000..698a7573 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-es/createBufferedReadableStream.js @@ -0,0 +1,95 @@ +import { ByteArrayCollector } from "./ByteArrayCollector"; +export function createBufferedReadableStream(upstream, size, logger) { + const reader = upstream.getReader(); + let streamBufferingLoggedWarning = false; + let bytesSeen = 0; + const buffers = ["", new ByteArrayCollector((size) => new Uint8Array(size))]; + let mode = -1; + const pull = async (controller) => { + const { value, done } = await reader.read(); + const chunk = value; + if (done) { + if (mode !== -1) { + const remainder = flush(buffers, mode); + if (sizeOf(remainder) > 0) { + controller.enqueue(remainder); + } + } + controller.close(); + } + else { + const chunkMode = modeOf(chunk, false); + if (mode !== chunkMode) { + if (mode >= 0) { + controller.enqueue(flush(buffers, mode)); + } + mode = chunkMode; + } + if (mode === -1) { + controller.enqueue(chunk); + return; + } + const chunkSize = sizeOf(chunk); + bytesSeen += chunkSize; + const bufferSize = sizeOf(buffers[mode]); + if (chunkSize >= size && bufferSize === 0) { + controller.enqueue(chunk); + } + else { + const newSize = merge(buffers, mode, chunk); + if (!streamBufferingLoggedWarning && bytesSeen > size * 2) { + streamBufferingLoggedWarning = true; + logger?.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`); + } + if (newSize >= size) { + controller.enqueue(flush(buffers, mode)); + } + else { + await pull(controller); + } + } + } + }; + return new ReadableStream({ + pull, + }); +} +export const createBufferedReadable = createBufferedReadableStream; +export function merge(buffers, mode, chunk) { + switch (mode) { + case 0: + buffers[0] += chunk; + return sizeOf(buffers[0]); + case 1: + case 2: + buffers[mode].push(chunk); + return sizeOf(buffers[mode]); + } +} +export function flush(buffers, mode) { + switch (mode) { + case 0: + const s = buffers[0]; + buffers[0] = ""; + return s; + case 1: + case 2: + return buffers[mode].flush(); + } + throw new Error(`@smithy/util-stream - invalid index ${mode} given to flush()`); +} +export function sizeOf(chunk) { + return chunk?.byteLength ?? chunk?.length ?? 
0; +} +export function modeOf(chunk, allowBuffer = true) { + if (allowBuffer && typeof Buffer !== "undefined" && chunk instanceof Buffer) { + return 2; + } + if (chunk instanceof Uint8Array) { + return 1; + } + if (typeof chunk === "string") { + return 0; + } + return -1; +} diff --git a/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.browser.js b/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.browser.js new file mode 100644 index 00000000..b5d5fa4c --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.browser.js @@ -0,0 +1,27 @@ +export const getAwsChunkedEncodingStream = (readableStream, options) => { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + bodyLengthChecker !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const reader = readableStream.getReader(); + return new ReadableStream({ + async pull(controller) { + const { value, done } = await reader.read(); + if (done) { + controller.enqueue(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + controller.enqueue(`${checksumLocationName}:${checksum}\r\n`); + controller.enqueue(`\r\n`); + } + controller.close(); + } + else { + controller.enqueue(`${(bodyLengthChecker(value) || 0).toString(16)}\r\n${value}\r\n`); + } + }, + }); +}; diff --git a/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.js b/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.js new file mode 100644 index 00000000..7c55116b --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.js @@ -0,0 +1,26 @@ +import { Readable } from "stream"; +export const getAwsChunkedEncodingStream = (readableStream, options) => { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? 
streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const awsChunkedEncodingStream = new Readable({ read: () => { } }); + readableStream.on("data", (data) => { + const length = bodyLengthChecker(data) || 0; + awsChunkedEncodingStream.push(`${length.toString(16)}\r\n`); + awsChunkedEncodingStream.push(data); + awsChunkedEncodingStream.push("\r\n"); + }); + readableStream.on("end", async () => { + awsChunkedEncodingStream.push(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + awsChunkedEncodingStream.push(`${checksumLocationName}:${checksum}\r\n`); + awsChunkedEncodingStream.push(`\r\n`); + } + awsChunkedEncodingStream.push(null); + }); + return awsChunkedEncodingStream; +}; diff --git a/node_modules/@smithy/util-stream/dist-es/headStream.browser.js b/node_modules/@smithy/util-stream/dist-es/headStream.browser.js new file mode 100644 index 00000000..4e7f864e --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-es/headStream.browser.js @@ -0,0 +1,31 @@ +export async function headStream(stream, bytes) { + let byteLengthCounter = 0; + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + byteLengthCounter += value?.byteLength ?? 0; + } + if (byteLengthCounter >= bytes) { + break; + } + isDone = done; + } + reader.releaseLock(); + const collected = new Uint8Array(Math.min(bytes, byteLengthCounter)); + let offset = 0; + for (const chunk of chunks) { + if (chunk.byteLength > collected.byteLength - offset) { + collected.set(chunk.subarray(0, collected.byteLength - offset), offset); + break; + } + else { + collected.set(chunk, offset); + } + offset += chunk.length; + } + return collected; +} diff --git a/node_modules/@smithy/util-stream/dist-es/headStream.js b/node_modules/@smithy/util-stream/dist-es/headStream.js new file mode 100644 index 00000000..27b28ea0 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-es/headStream.js @@ -0,0 +1,41 @@ +import { Writable } from "stream"; +import { headStream as headWebStream } from "./headStream.browser"; +import { isReadableStream } from "./stream-type-check"; +export const headStream = (stream, bytes) => { + if (isReadableStream(stream)) { + return headWebStream(stream, bytes); + } + return new Promise((resolve, reject) => { + const collector = new Collector(); + collector.limit = bytes; + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function () { + const bytes = new Uint8Array(Buffer.concat(this.buffers)); + resolve(bytes); + }); + }); +}; +class Collector extends Writable { + constructor() { + super(...arguments); + this.buffers = []; + this.limit = Infinity; + this.bytesBuffered = 0; + } + _write(chunk, encoding, callback) { + this.buffers.push(chunk); + this.bytesBuffered += chunk.byteLength ?? 
0; + if (this.bytesBuffered >= this.limit) { + const excess = this.bytesBuffered - this.limit; + const tailBuffer = this.buffers[this.buffers.length - 1]; + this.buffers[this.buffers.length - 1] = tailBuffer.subarray(0, tailBuffer.byteLength - excess); + this.emit("finish"); + } + callback(); + } +} diff --git a/node_modules/@smithy/util-stream/dist-es/index.js b/node_modules/@smithy/util-stream/dist-es/index.js new file mode 100644 index 00000000..1b5b599b --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-es/index.js @@ -0,0 +1,9 @@ +export * from "./blob/Uint8ArrayBlobAdapter"; +export * from "./checksum/ChecksumStream"; +export * from "./checksum/createChecksumStream"; +export * from "./createBufferedReadable"; +export * from "./getAwsChunkedEncodingStream"; +export * from "./headStream"; +export * from "./sdk-stream-mixin"; +export * from "./splitStream"; +export * from "./stream-type-check"; diff --git a/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.browser.js b/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.browser.js new file mode 100644 index 00000000..f21ff669 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.browser.js @@ -0,0 +1,64 @@ +import { streamCollector } from "@smithy/fetch-http-handler"; +import { toBase64 } from "@smithy/util-base64"; +import { toHex } from "@smithy/util-hex-encoding"; +import { toUtf8 } from "@smithy/util-utf8"; +import { isReadableStream } from "./stream-type-check"; +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +export const sdkStreamMixin = (stream) => { + if (!isBlobInstance(stream) && !isReadableStream(stream)) { + const name = stream?.__proto__?.constructor?.name || stream; + throw new Error(`Unexpected stream implementation, expect Blob or ReadableStream, got ${name}`); + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await streamCollector(stream); + }; + const blobToWebStream = (blob) => { + if (typeof blob.stream !== "function") { + throw new Error("Cannot transform payload Blob to web stream. 
Please make sure the Blob.stream() is polyfilled.\n" + + "If you are using React Native, this API is not yet supported, see: https://react-native.canny.io/feature-requests/p/fetch-streaming-body"); + } + return blob.stream(); + }; + return Object.assign(stream, { + transformToByteArray: transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === "base64") { + return toBase64(buf); + } + else if (encoding === "hex") { + return toHex(buf); + } + else if (encoding === undefined || encoding === "utf8" || encoding === "utf-8") { + return toUtf8(buf); + } + else if (typeof TextDecoder === "function") { + return new TextDecoder(encoding).decode(buf); + } + else { + throw new Error("TextDecoder is not available, please make sure polyfill is provided."); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + if (isBlobInstance(stream)) { + return blobToWebStream(stream); + } + else if (isReadableStream(stream)) { + return stream; + } + else { + throw new Error(`Cannot transform payload to web stream, got ${stream}`); + } + }, + }); +}; +const isBlobInstance = (stream) => typeof Blob === "function" && stream instanceof Blob; diff --git a/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.js b/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.js new file mode 100644 index 00000000..47313338 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.js @@ -0,0 +1,50 @@ +import { streamCollector } from "@smithy/node-http-handler"; +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +import { Readable } from "stream"; +import { sdkStreamMixin as sdkStreamMixinReadableStream } from "./sdk-stream-mixin.browser"; +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +export const sdkStreamMixin = (stream) => { + if (!(stream instanceof Readable)) { + try { + return sdkStreamMixinReadableStream(stream); + } + catch (e) { + const name = stream?.__proto__?.constructor?.name || stream; + throw new Error(`Unexpected stream implementation, expect Stream.Readable instance, got ${name}`); + } + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await streamCollector(stream); + }; + return Object.assign(stream, { + transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === undefined || Buffer.isEncoding(encoding)) { + return fromArrayBuffer(buf.buffer, buf.byteOffset, buf.byteLength).toString(encoding); + } + else { + const decoder = new TextDecoder(encoding); + return decoder.decode(buf); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + if (stream.readableFlowing !== null) { + throw new Error("The stream has been consumed by other callbacks."); + } + if (typeof Readable.toWeb !== "function") { + throw new Error("Readable.toWeb() is not supported. 
Please ensure a polyfill is available."); + } + transformed = true; + return Readable.toWeb(stream); + }, + }); +}; diff --git a/node_modules/@smithy/util-stream/dist-es/splitStream.browser.js b/node_modules/@smithy/util-stream/dist-es/splitStream.browser.js new file mode 100644 index 00000000..6f06b0ed --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-es/splitStream.browser.js @@ -0,0 +1,7 @@ +export async function splitStream(stream) { + if (typeof stream.stream === "function") { + stream = stream.stream(); + } + const readableStream = stream; + return readableStream.tee(); +} diff --git a/node_modules/@smithy/util-stream/dist-es/splitStream.js b/node_modules/@smithy/util-stream/dist-es/splitStream.js new file mode 100644 index 00000000..1a8c0320 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-es/splitStream.js @@ -0,0 +1,13 @@ +import { PassThrough } from "stream"; +import { splitStream as splitWebStream } from "./splitStream.browser"; +import { isBlob, isReadableStream } from "./stream-type-check"; +export async function splitStream(stream) { + if (isReadableStream(stream) || isBlob(stream)) { + return splitWebStream(stream); + } + const stream1 = new PassThrough(); + const stream2 = new PassThrough(); + stream.pipe(stream1); + stream.pipe(stream2); + return [stream1, stream2]; +} diff --git a/node_modules/@smithy/util-stream/dist-es/stream-type-check.js b/node_modules/@smithy/util-stream/dist-es/stream-type-check.js new file mode 100644 index 00000000..6ee93a38 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-es/stream-type-check.js @@ -0,0 +1,5 @@ +export const isReadableStream = (stream) => typeof ReadableStream === "function" && + (stream?.constructor?.name === ReadableStream.name || stream instanceof ReadableStream); +export const isBlob = (blob) => { + return typeof Blob === "function" && (blob?.constructor?.name === Blob.name || blob instanceof Blob); +}; diff --git a/node_modules/@smithy/util-stream/dist-types/ByteArrayCollector.d.ts b/node_modules/@smithy/util-stream/dist-types/ByteArrayCollector.d.ts new file mode 100644 index 00000000..a1bbd532 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-types/ByteArrayCollector.d.ts @@ -0,0 +1,13 @@ +/** + * Aggregates byteArrays on demand. + * @internal + */ +export declare class ByteArrayCollector { + readonly allocByteArray: (size: number) => Uint8Array; + byteLength: number; + private byteArrays; + constructor(allocByteArray: (size: number) => Uint8Array); + push(byteArray: Uint8Array): void; + flush(): Uint8Array; + private reset; +} diff --git a/node_modules/@smithy/util-stream/dist-types/blob/Uint8ArrayBlobAdapter.d.ts b/node_modules/@smithy/util-stream/dist-types/blob/Uint8ArrayBlobAdapter.d.ts new file mode 100644 index 00000000..c3d994d7 --- /dev/null +++ b/node_modules/@smithy/util-stream/dist-types/blob/Uint8ArrayBlobAdapter.d.ts @@ -0,0 +1,21 @@ +/** + * Adapter for conversions of the native Uint8Array type. + * @public + */ +export declare class Uint8ArrayBlobAdapter extends Uint8Array { + /** + * @param source - such as a string or Stream. + * @returns a new Uint8ArrayBlobAdapter extending Uint8Array. + */ + static fromString(source: string, encoding?: string): Uint8ArrayBlobAdapter; + /** + * @param source - Uint8Array to be mutated. + * @returns the same Uint8Array but with prototype switched to Uint8ArrayBlobAdapter. + */ + static mutate(source: Uint8Array): Uint8ArrayBlobAdapter; + /** + * @param encoding - default 'utf-8'. + * @returns the blob as string. 
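+     * Encodings other than "base64" are decoded as UTF-8 by the underlying transform.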
+ */
+    transformToString(encoding?: string): string;
+}
diff --git a/node_modules/@smithy/util-stream/dist-types/blob/transforms.d.ts b/node_modules/@smithy/util-stream/dist-types/blob/transforms.d.ts
new file mode 100644
index 00000000..c54a18bf
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/blob/transforms.d.ts
@@ -0,0 +1,9 @@
+import { Uint8ArrayBlobAdapter } from "./Uint8ArrayBlobAdapter";
+/**
+ * @internal
+ */
+export declare function transformToString(payload: Uint8Array, encoding?: string): string;
+/**
+ * @internal
+ */
+export declare function transformFromString(str: string, encoding?: string): Uint8ArrayBlobAdapter;
diff --git a/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.browser.d.ts b/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.browser.d.ts
new file mode 100644
index 00000000..0c5fbd4e
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.browser.d.ts
@@ -0,0 +1,37 @@
+import { Checksum, Encoder } from "@smithy/types";
+/**
+ * @internal
+ */
+export interface ChecksumStreamInit {
+    /**
+     * Base64 value of the expected checksum.
+     */
+    expectedChecksum: string;
+    /**
+     * For error messaging, the location from which the checksum value was read.
+     */
+    checksumSourceLocation: string;
+    /**
+     * The checksum calculator.
+     */
+    checksum: Checksum;
+    /**
+     * The stream to be checked.
+     */
+    source: ReadableStream;
+    /**
+     * Optional base 64 encoder if calling from a request context.
+     */
+    base64Encoder?: Encoder;
+}
+declare const ChecksumStream_base: any;
+/**
+ * This stub exists so that the readable returned by createChecksumStream
+ * identifies as "ChecksumStream" in alignment with the Node.js
+ * implementation.
+ *
+ * @extends ReadableStream
+ */
+export declare class ChecksumStream extends ChecksumStream_base {
+}
+export {};
diff --git a/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.d.ts b/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.d.ts
new file mode 100644
index 00000000..6893e55d
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.d.ts
@@ -0,0 +1,62 @@
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+import { Checksum, Encoder } from "@smithy/types";
+import { Duplex, Readable } from "stream";
+/**
+ * @internal
+ */
+export interface ChecksumStreamInit<T extends Readable | ReadableStream> {
+    /**
+     * Base64 value of the expected checksum.
+     */
+    expectedChecksum: string;
+    /**
+     * For error messaging, the location from which the checksum value was read.
+     */
+    checksumSourceLocation: string;
+    /**
+     * The checksum calculator.
+     */
+    checksum: Checksum;
+    /**
+     * The stream to be checked.
+     */
+    source: T;
+    /**
+     * Optional base 64 encoder if calling from a request context.
+     */
+    base64Encoder?: Encoder;
+}
+/**
+ * @internal
+ *
+ * Wrapper for throwing checksum errors for streams without
+ * buffering the stream.
+ *
+ */
+export declare class ChecksumStream extends Duplex {
+    private expectedChecksum;
+    private checksumSourceLocation;
+    private checksum;
+    private source?;
+    private base64Encoder;
+    constructor({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }: ChecksumStreamInit<Readable>);
+    /**
+     * @internal do not call this directly.
+     */
+    _read(size: number): void;
+    /**
+     * @internal do not call this directly.
+     *
+     * When the upstream source flows data to this stream,
+     * calculate a step update of the checksum.
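Since the checksum declarations above are terse, here is a hedged sketch of how such a stream is typically constructed via createChecksumStream, assuming @aws-crypto/sha256-js as the Checksum implementation (the header name is illustrative):

```ts
import { Readable } from "stream";
import { createChecksumStream } from "@smithy/util-stream";
import { toBase64 } from "@smithy/util-base64";
import { Sha256 } from "@aws-crypto/sha256-js";

async function readVerified(source: Readable, expectedChecksum: string): Promise<string> {
  const checked = createChecksumStream({
    expectedChecksum, // base64 digest the payload must hash to
    checksumSourceLocation: "x-amz-checksum-sha256", // where the value was read from
    checksum: new Sha256(),
    source,
    base64Encoder: toBase64,
  });

  let collected = "";
  for await (const chunk of checked) {
    collected += chunk.toString();
  }
  // Getting here means the digest matched; a mismatch errors the stream
  // in its final callback, rejecting the iteration above.
  return collected;
}
```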
+ */
+    _write(chunk: Buffer, encoding: string, callback: (err?: Error) => void): void;
+    /**
+     * @internal do not call this directly.
+     *
+     * When the upstream source finishes, perform the checksum comparison.
+     */
+    _final(callback: (err?: Error) => void): Promise<void>;
+}
diff --git a/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.browser.d.ts b/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.browser.d.ts
new file mode 100644
index 00000000..1874987b
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.browser.d.ts
@@ -0,0 +1,15 @@
+import { ChecksumStreamInit } from "./ChecksumStream.browser";
+/**
+ * @internal
+ * Alias prevents compiler from turning
+ * ReadableStream into ReadableStream<any>, which is incompatible
+ * with the NodeJS.ReadableStream global type.
+ */
+export type ReadableStreamType = ReadableStream;
+/**
+ * @internal
+ *
+ * Creates a stream adapter for throwing checksum errors for streams without
+ * buffering the stream.
+ */
+export declare const createChecksumStream: ({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }: ChecksumStreamInit) => ReadableStreamType;
diff --git a/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.d.ts b/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.d.ts
new file mode 100644
index 00000000..db09f80e
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.d.ts
@@ -0,0 +1,12 @@
+/// <reference types="node" />
+import { Readable } from "stream";
+import { ChecksumStreamInit } from "./ChecksumStream";
+import { ReadableStreamType } from "./createChecksumStream.browser";
+/**
+ * @internal
+ *
+ * Creates a stream mirroring the input stream's interface, but
+ * performs checksumming when reading to the end of the stream.
+ */
+export declare function createChecksumStream(init: ChecksumStreamInit<ReadableStream>): ReadableStreamType;
+export declare function createChecksumStream(init: ChecksumStreamInit<Readable>): Readable;
diff --git a/node_modules/@smithy/util-stream/dist-types/createBufferedReadable.d.ts b/node_modules/@smithy/util-stream/dist-types/createBufferedReadable.d.ts
new file mode 100644
index 00000000..b173636a
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/createBufferedReadable.d.ts
@@ -0,0 +1,13 @@
+/// <reference types="node" />
+import type { Logger } from "@smithy/types";
+import { Readable } from "node:stream";
+/**
+ * @internal
+ * @param upstream - any Readable or ReadableStream.
+ * @param size - byte or character length minimum. Buffering occurs when a chunk fails to meet this value.
+ * @param logger - for emitting warnings when buffering occurs.
+ * @returns another stream of the same data and stream class, but buffers chunks until
+ * the minimum size is met, except for the last chunk.
+ */
+export declare function createBufferedReadable(upstream: Readable, size: number, logger?: Logger): Readable;
+export declare function createBufferedReadable(upstream: ReadableStream, size: number, logger?: Logger): ReadableStream;
diff --git a/node_modules/@smithy/util-stream/dist-types/createBufferedReadableStream.d.ts b/node_modules/@smithy/util-stream/dist-types/createBufferedReadableStream.d.ts
new file mode 100644
index 00000000..9f6cdbd5
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/createBufferedReadableStream.d.ts
@@ -0,0 +1,50 @@
+import type { Logger } from "@smithy/types";
+import { ByteArrayCollector } from "./ByteArrayCollector";
+export type BufferStore = [string, ByteArrayCollector, ByteArrayCollector?];
+export type BufferUnion = string | Uint8Array;
+export type Modes = 0 | 1 | 2;
+/**
+ * @internal
+ * @param upstream - any ReadableStream.
+ * @param size - byte or character length minimum. Buffering occurs when a chunk fails to meet this value.
+ * @param logger - for emitting warnings when buffering occurs.
+ * @returns another stream of the same data, but buffers chunks until
+ * the minimum size is met, except for the last chunk.
+ */
+export declare function createBufferedReadableStream(upstream: ReadableStream, size: number, logger?: Logger): ReadableStream;
+/**
+ * Replaces R/RS polymorphic implementation in environments with only ReadableStream.
+ * @internal
+ */
+export declare const createBufferedReadable: typeof createBufferedReadableStream;
+/**
+ * @internal
+ * @param buffers
+ * @param mode
+ * @param chunk
+ * @returns the new buffer size after merging the chunk with its appropriate buffer.
+ */
+export declare function merge(buffers: BufferStore, mode: Modes, chunk: string | Uint8Array): number;
+/**
+ * @internal
+ * @param buffers
+ * @param mode
+ * @returns the buffer matching the mode.
+ */
+export declare function flush(buffers: BufferStore, mode: Modes | -1): BufferUnion;
+/**
+ * @internal
+ * @param chunk
+ * @returns size of the chunk in bytes or characters.
+ */
+export declare function sizeOf(chunk?: {
+    byteLength?: number;
+    length?: number;
+}): number;
+/**
+ * @internal
+ * @param chunk - from upstream Readable.
+ * @param allowBuffer - allow mode 2 (Buffer), otherwise Buffer will return mode 1.
+ * @returns type index of the chunk.
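The merge/flush/sizeOf/modeOf helpers above exist to coalesce undersized chunks. A small sketch of the observable behavior (createBufferedReadable is marked @internal, so treat this as illustrative only):

```ts
import { Readable } from "node:stream";
import { createBufferedReadable } from "@smithy/util-stream";

// A source that emits five 1-byte chunks.
const tiny = Readable.from(["a", "b", "c", "d", "e"].map((s) => Buffer.from(s)));

// Chunks are held back until roughly 4 bytes accumulate; the final chunk
// may be smaller. The optional logger is used to warn that buffering occurred.
const coalesced = createBufferedReadable(tiny, 4, console);

coalesced.on("data", (chunk: Buffer) => console.log(chunk.byteLength)); // e.g. 4, then 1
```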
+ */
+export declare function modeOf(chunk: BufferUnion, allowBuffer?: boolean): Modes | -1;
diff --git a/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.browser.d.ts b/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.browser.d.ts
new file mode 100644
index 00000000..f767f771
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.browser.d.ts
@@ -0,0 +1,5 @@
+import { GetAwsChunkedEncodingStream } from "@smithy/types";
+/**
+ * @internal
+ */
+export declare const getAwsChunkedEncodingStream: GetAwsChunkedEncodingStream;
diff --git a/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.d.ts b/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.d.ts
new file mode 100644
index 00000000..d3997d01
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.d.ts
@@ -0,0 +1,7 @@
+/// <reference types="node" />
+import { GetAwsChunkedEncodingStream } from "@smithy/types";
+import { Readable } from "stream";
+/**
+ * @internal
+ */
+export declare const getAwsChunkedEncodingStream: GetAwsChunkedEncodingStream;
diff --git a/node_modules/@smithy/util-stream/dist-types/headStream.browser.d.ts b/node_modules/@smithy/util-stream/dist-types/headStream.browser.d.ts
new file mode 100644
index 00000000..80ad2673
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/headStream.browser.d.ts
@@ -0,0 +1,8 @@
+/**
+ * @internal
+ * @param stream
+ * @param bytes - read head bytes from the stream and discard the rest of it.
+ *
+ * Caution: the input stream must be destroyed separately, this function does not do so.
+ */
+export declare function headStream(stream: ReadableStream, bytes: number): Promise<Uint8Array>;
diff --git a/node_modules/@smithy/util-stream/dist-types/headStream.d.ts b/node_modules/@smithy/util-stream/dist-types/headStream.d.ts
new file mode 100644
index 00000000..7ab9714b
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/headStream.d.ts
@@ -0,0 +1,9 @@
+import { Readable } from "stream";
+/**
+ * @internal
+ * @param stream - to be read.
+ * @param bytes - read head bytes from the stream and discard the rest of it.
+ *
+ * Caution: the input stream must be destroyed separately, this function does not do so.
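A minimal sketch of headStream as declared above; note the caution that the caller, not the helper, must destroy the source:

```ts
import { Readable } from "stream";
import { headStream } from "@smithy/util-stream";

async function sniffMagicBytes(source: Readable): Promise<Uint8Array> {
  const head = await headStream(source, 16); // resolves with at most 16 bytes
  source.destroy(); // per the caution above, cleanup is the caller's job
  return head;
}
```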
+ */
+export declare const headStream: (stream: Readable | ReadableStream, bytes: number) => Promise<Uint8Array>;
diff --git a/node_modules/@smithy/util-stream/dist-types/index.d.ts b/node_modules/@smithy/util-stream/dist-types/index.d.ts
new file mode 100644
index 00000000..1b5b599b
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/index.d.ts
@@ -0,0 +1,9 @@
+export * from "./blob/Uint8ArrayBlobAdapter";
+export * from "./checksum/ChecksumStream";
+export * from "./checksum/createChecksumStream";
+export * from "./createBufferedReadable";
+export * from "./getAwsChunkedEncodingStream";
+export * from "./headStream";
+export * from "./sdk-stream-mixin";
+export * from "./splitStream";
+export * from "./stream-type-check";
diff --git a/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.browser.d.ts b/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.browser.d.ts
new file mode 100644
index 00000000..400c0b21
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.browser.d.ts
@@ -0,0 +1,7 @@
+import { SdkStream } from "@smithy/types";
+/**
+ * The stream handling utility functions for browsers and React Native
+ *
+ * @internal
+ */
+export declare const sdkStreamMixin: (stream: unknown) => SdkStream<ReadableStream | Blob>;
diff --git a/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.d.ts b/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.d.ts
new file mode 100644
index 00000000..34fcb6f3
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.d.ts
@@ -0,0 +1,8 @@
+import { SdkStream } from "@smithy/types";
+import { Readable } from "stream";
+/**
+ * The function that mixes in the utility functions to help consuming runtime-specific payload stream.
+ *
+ * @internal
+ */
+export declare const sdkStreamMixin: (stream: unknown) => SdkStream<Readable> | SdkStream<ReadableStream | Blob>;
diff --git a/node_modules/@smithy/util-stream/dist-types/splitStream.browser.d.ts b/node_modules/@smithy/util-stream/dist-types/splitStream.browser.d.ts
new file mode 100644
index 00000000..506c23ae
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/splitStream.browser.d.ts
@@ -0,0 +1,5 @@
+/**
+ * @param stream
+ * @returns stream split into two identical streams.
+ */
+export declare function splitStream(stream: ReadableStream | Blob): Promise<[ReadableStream, ReadableStream]>;
diff --git a/node_modules/@smithy/util-stream/dist-types/splitStream.d.ts b/node_modules/@smithy/util-stream/dist-types/splitStream.d.ts
new file mode 100644
index 00000000..8a8a48c5
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/splitStream.d.ts
@@ -0,0 +1,9 @@
+/// <reference types="node" />
+import type { Readable } from "stream";
+/**
+ * @internal
+ * @param stream - to be split.
+ * @returns stream split into two identical streams.
+ */
+export declare function splitStream(stream: Readable): Promise<[Readable, Readable]>;
+export declare function splitStream(stream: ReadableStream): Promise<[ReadableStream, ReadableStream]>;
diff --git a/node_modules/@smithy/util-stream/dist-types/stream-type-check.d.ts b/node_modules/@smithy/util-stream/dist-types/stream-type-check.d.ts
new file mode 100644
index 00000000..5607088f
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/stream-type-check.d.ts
@@ -0,0 +1,16 @@
+/**
+ * @internal
+ * Alias prevents compiler from turning
+ * ReadableStream into ReadableStream<any>, which is incompatible
+ * with the NodeJS.ReadableStream global type.
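A hedged sketch of the splitStream declarations above: both branches receive identical bytes, and because the Node.js implementation is a pair of PassThrough pipes, each branch should be consumed, or backpressure will eventually stall its sibling:

```ts
import { Readable } from "stream";
import { splitStream } from "@smithy/util-stream";

async function sizeAndText(payload: Readable): Promise<{ size: number; text: string }> {
  const [left, right] = await splitStream(payload);

  // Consume both branches concurrently.
  const [size, text] = await Promise.all([
    (async () => {
      let n = 0;
      for await (const chunk of left) n += (chunk as Buffer).byteLength;
      return n;
    })(),
    (async () => {
      let s = "";
      for await (const chunk of right) s += chunk.toString();
      return s;
    })(),
  ]);
  return { size, text };
}
```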
+ */
+type ReadableStreamType = ReadableStream;
+/**
+ * @internal
+ */
+export declare const isReadableStream: (stream: unknown) => stream is ReadableStreamType;
+/**
+ * @internal
+ */
+export declare const isBlob: (blob: unknown) => blob is Blob;
+export {};
diff --git a/node_modules/@smithy/util-stream/dist-types/ts3.4/ByteArrayCollector.d.ts b/node_modules/@smithy/util-stream/dist-types/ts3.4/ByteArrayCollector.d.ts
new file mode 100644
index 00000000..c309a6cf
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/ts3.4/ByteArrayCollector.d.ts
@@ -0,0 +1,13 @@
+/**
+ * Aggregates byteArrays on demand.
+ * @internal
+ */
+export declare class ByteArrayCollector {
+    readonly allocByteArray: (size: number) => Uint8Array;
+    byteLength: number;
+    private byteArrays;
+    constructor(allocByteArray: (size: number) => Uint8Array);
+    push(byteArray: Uint8Array): void;
+    flush(): Uint8Array;
+    private reset;
+}
diff --git a/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/Uint8ArrayBlobAdapter.d.ts b/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/Uint8ArrayBlobAdapter.d.ts
new file mode 100644
index 00000000..e0338a20
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/Uint8ArrayBlobAdapter.d.ts
@@ -0,0 +1,21 @@
+/**
+ * Adapter for conversions of the native Uint8Array type.
+ * @public
+ */
+export declare class Uint8ArrayBlobAdapter extends Uint8Array {
+    /**
+     * @param source - such as a string or Stream.
+     * @returns a new Uint8ArrayBlobAdapter extending Uint8Array.
+     */
+    static fromString(source: string, encoding?: string): Uint8ArrayBlobAdapter;
+    /**
+     * @param source - Uint8Array to be mutated.
+     * @returns the same Uint8Array but with prototype switched to Uint8ArrayBlobAdapter.
+     */
+    static mutate(source: Uint8Array): Uint8ArrayBlobAdapter;
+    /**
+     * @param encoding - default 'utf-8'.
+     * @returns the blob as string.
+     */
+    transformToString(encoding?: string): string;
+}
diff --git a/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/transforms.d.ts b/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/transforms.d.ts
new file mode 100644
index 00000000..6e3ee0a7
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/transforms.d.ts
@@ -0,0 +1,9 @@
+import { Uint8ArrayBlobAdapter } from "./Uint8ArrayBlobAdapter";
+/**
+ * @internal
+ */
+export declare function transformToString(payload: Uint8Array, encoding?: string): string;
+/**
+ * @internal
+ */
+export declare function transformFromString(str: string, encoding?: string): Uint8ArrayBlobAdapter;
diff --git a/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.browser.d.ts b/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.browser.d.ts
new file mode 100644
index 00000000..902a9b22
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.browser.d.ts
@@ -0,0 +1,37 @@
+import { Checksum, Encoder } from "@smithy/types";
+/**
+ * @internal
+ */
+export interface ChecksumStreamInit {
+    /**
+     * Base64 value of the expected checksum.
+     */
+    expectedChecksum: string;
+    /**
+     * For error messaging, the location from which the checksum value was read.
+     */
+    checksumSourceLocation: string;
+    /**
+     * The checksum calculator.
+     */
+    checksum: Checksum;
+    /**
+     * The stream to be checked.
+     */
+    source: ReadableStream;
+    /**
+     * Optional base 64 encoder if calling from a request context.
+ */
+    base64Encoder?: Encoder;
+}
+declare const ChecksumStream_base: any;
+/**
+ * This stub exists so that the readable returned by createChecksumStream
+ * identifies as "ChecksumStream" in alignment with the Node.js
+ * implementation.
+ *
+ * @extends ReadableStream
+ */
+export declare class ChecksumStream extends ChecksumStream_base {
+}
+export {};
diff --git a/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.d.ts b/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.d.ts
new file mode 100644
index 00000000..7151034d
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.d.ts
@@ -0,0 +1,60 @@
+/// <reference types="node" />
+import { Checksum, Encoder } from "@smithy/types";
+import { Duplex, Readable } from "stream";
+/**
+ * @internal
+ */
+export interface ChecksumStreamInit<T extends Readable | ReadableStream> {
+    /**
+     * Base64 value of the expected checksum.
+     */
+    expectedChecksum: string;
+    /**
+     * For error messaging, the location from which the checksum value was read.
+     */
+    checksumSourceLocation: string;
+    /**
+     * The checksum calculator.
+     */
+    checksum: Checksum;
+    /**
+     * The stream to be checked.
+     */
+    source: T;
+    /**
+     * Optional base 64 encoder if calling from a request context.
+     */
+    base64Encoder?: Encoder;
+}
+/**
+ * @internal
+ *
+ * Wrapper for throwing checksum errors for streams without
+ * buffering the stream.
+ *
+ */
+export declare class ChecksumStream extends Duplex {
+    private expectedChecksum;
+    private checksumSourceLocation;
+    private checksum;
+    private source?;
+    private base64Encoder;
+    constructor({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }: ChecksumStreamInit<Readable>);
+    /**
+     * @internal do not call this directly.
+     */
+    _read(size: number): void;
+    /**
+     * @internal do not call this directly.
+     *
+     * When the upstream source flows data to this stream,
+     * calculate a step update of the checksum.
+     */
+    _write(chunk: Buffer, encoding: string, callback: (err?: Error) => void): void;
+    /**
+     * @internal do not call this directly.
+     *
+     * When the upstream source finishes, perform the checksum comparison.
+     */
+    _final(callback: (err?: Error) => void): Promise<void>;
+}
diff --git a/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.browser.d.ts b/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.browser.d.ts
new file mode 100644
index 00000000..bd3c0041
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.browser.d.ts
@@ -0,0 +1,15 @@
+import { ChecksumStreamInit } from "./ChecksumStream.browser";
+/**
+ * @internal
+ * Alias prevents compiler from turning
+ * ReadableStream into ReadableStream<any>, which is incompatible
+ * with the NodeJS.ReadableStream global type.
+ */
+export type ReadableStreamType = ReadableStream;
+/**
+ * @internal
+ *
+ * Creates a stream adapter for throwing checksum errors for streams without
+ * buffering the stream.
+ */
+export declare const createChecksumStream: ({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }: ChecksumStreamInit) => ReadableStreamType;
diff --git a/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.d.ts b/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.d.ts
new file mode 100644
index 00000000..dc364182
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.d.ts
@@ -0,0 +1,12 @@
+/// <reference types="node" />
+import { Readable } from "stream";
+import { ChecksumStreamInit } from "./ChecksumStream";
+import { ReadableStreamType } from "./createChecksumStream.browser";
+/**
+ * @internal
+ *
+ * Creates a stream mirroring the input stream's interface, but
+ * performs checksumming when reading to the end of the stream.
+ */
+export declare function createChecksumStream(init: ChecksumStreamInit<ReadableStream>): ReadableStreamType;
+export declare function createChecksumStream(init: ChecksumStreamInit<Readable>): Readable;
diff --git a/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadable.d.ts b/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadable.d.ts
new file mode 100644
index 00000000..f62c7415
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadable.d.ts
@@ -0,0 +1,13 @@
+/// <reference types="node" />
+import { Logger } from "@smithy/types";
+import { Readable } from "node:stream";
+/**
+ * @internal
+ * @param upstream - any Readable or ReadableStream.
+ * @param size - byte or character length minimum. Buffering occurs when a chunk fails to meet this value.
+ * @param logger - for emitting warnings when buffering occurs.
+ * @returns another stream of the same data and stream class, but buffers chunks until
+ * the minimum size is met, except for the last chunk.
+ */
+export declare function createBufferedReadable(upstream: Readable, size: number, logger?: Logger): Readable;
+export declare function createBufferedReadable(upstream: ReadableStream, size: number, logger?: Logger): ReadableStream;
diff --git a/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadableStream.d.ts b/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadableStream.d.ts
new file mode 100644
index 00000000..7b4effd2
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadableStream.d.ts
@@ -0,0 +1,54 @@
+import { Logger } from "@smithy/types";
+import { ByteArrayCollector } from "./ByteArrayCollector";
+export type BufferStore = [
+    string,
+    ByteArrayCollector,
+    ByteArrayCollector?
+];
+export type BufferUnion = string | Uint8Array;
+export type Modes = 0 | 1 | 2;
+/**
+ * @internal
+ * @param upstream - any ReadableStream.
+ * @param size - byte or character length minimum. Buffering occurs when a chunk fails to meet this value.
+ * @param logger - for emitting warnings when buffering occurs.
+ * @returns another stream of the same data, but buffers chunks until
+ * the minimum size is met, except for the last chunk.
+ */
+export declare function createBufferedReadableStream(upstream: ReadableStream, size: number, logger?: Logger): ReadableStream;
+/**
+ * Replaces R/RS polymorphic implementation in environments with only ReadableStream.
+ * @internal
+ */
+export declare const createBufferedReadable: typeof createBufferedReadableStream;
+/**
+ * @internal
+ * @param buffers
+ * @param mode
+ * @param chunk
+ * @returns the new buffer size after merging the chunk with its appropriate buffer.
+ */
+export declare function merge(buffers: BufferStore, mode: Modes, chunk: string | Uint8Array): number;
+/**
+ * @internal
+ * @param buffers
+ * @param mode
+ * @returns the buffer matching the mode.
+ */
+export declare function flush(buffers: BufferStore, mode: Modes | -1): BufferUnion;
+/**
+ * @internal
+ * @param chunk
+ * @returns size of the chunk in bytes or characters.
+ */
+export declare function sizeOf(chunk?: {
+    byteLength?: number;
+    length?: number;
+}): number;
+/**
+ * @internal
+ * @param chunk - from upstream Readable.
+ * @param allowBuffer - allow mode 2 (Buffer), otherwise Buffer will return mode 1.
+ * @returns type index of the chunk.
+ */
+export declare function modeOf(chunk: BufferUnion, allowBuffer?: boolean): Modes | -1;
diff --git a/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.browser.d.ts b/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.browser.d.ts
new file mode 100644
index 00000000..59790788
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.browser.d.ts
@@ -0,0 +1,5 @@
+import { GetAwsChunkedEncodingStream } from "@smithy/types";
+/**
+ * @internal
+ */
+export declare const getAwsChunkedEncodingStream: GetAwsChunkedEncodingStream;
diff --git a/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.d.ts b/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.d.ts
new file mode 100644
index 00000000..a100381e
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.d.ts
@@ -0,0 +1,7 @@
+/// <reference types="node" />
+import { GetAwsChunkedEncodingStream } from "@smithy/types";
+import { Readable } from "stream";
+/**
+ * @internal
+ */
+export declare const getAwsChunkedEncodingStream: GetAwsChunkedEncodingStream;
diff --git a/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.browser.d.ts b/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.browser.d.ts
new file mode 100644
index 00000000..d8654c30
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.browser.d.ts
@@ -0,0 +1,8 @@
+/**
+ * @internal
+ * @param stream
+ * @param bytes - read head bytes from the stream and discard the rest of it.
+ *
+ * Caution: the input stream must be destroyed separately, this function does not do so.
+ */
+export declare function headStream(stream: ReadableStream, bytes: number): Promise<Uint8Array>;
diff --git a/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.d.ts b/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.d.ts
new file mode 100644
index 00000000..70377153
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.d.ts
@@ -0,0 +1,9 @@
+import { Readable } from "stream";
+/**
+ * @internal
+ * @param stream - to be read.
+ * @param bytes - read head bytes from the stream and discard the rest of it.
+ *
+ * Caution: the input stream must be destroyed separately, this function does not do so.
+ */
+export declare const headStream: (stream: Readable | ReadableStream, bytes: number) => Promise<Uint8Array>;
diff --git a/node_modules/@smithy/util-stream/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/util-stream/dist-types/ts3.4/index.d.ts
new file mode 100644
index 00000000..c7c4c3fe
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/ts3.4/index.d.ts
@@ -0,0 +1,9 @@
+export * from "./blob/Uint8ArrayBlobAdapter";
+export * from "./checksum/ChecksumStream";
+export * from "./checksum/createChecksumStream";
+export * from "./createBufferedReadable";
+export * from "./getAwsChunkedEncodingStream";
+export * from "./headStream";
+export * from "./sdk-stream-mixin";
+export * from "./splitStream";
+export * from "./stream-type-check";
diff --git a/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.browser.d.ts b/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.browser.d.ts
new file mode 100644
index 00000000..99dea401
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.browser.d.ts
@@ -0,0 +1,7 @@
+import { SdkStream } from "@smithy/types";
+/**
+ * The stream handling utility functions for browsers and React Native
+ *
+ * @internal
+ */
+export declare const sdkStreamMixin: (stream: unknown) => SdkStream<ReadableStream | Blob>;
diff --git a/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.d.ts b/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.d.ts
new file mode 100644
index 00000000..c05518a0
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.d.ts
@@ -0,0 +1,8 @@
+import { SdkStream } from "@smithy/types";
+import { Readable } from "stream";
+/**
+ * The function that mixes in the utility functions to help consuming runtime-specific payload stream.
+ *
+ * @internal
+ */
+export declare const sdkStreamMixin: (stream: unknown) => SdkStream<Readable> | SdkStream<ReadableStream | Blob>;
diff --git a/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.browser.d.ts b/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.browser.d.ts
new file mode 100644
index 00000000..25c8549e
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.browser.d.ts
@@ -0,0 +1,8 @@
+/**
+ * @param stream
+ * @returns stream split into two identical streams.
+ */
+export declare function splitStream(stream: ReadableStream | Blob): Promise<[
+    ReadableStream,
+    ReadableStream
+]>;
diff --git a/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.d.ts b/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.d.ts
new file mode 100644
index 00000000..61a76202
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.d.ts
@@ -0,0 +1,15 @@
+/// <reference types="node" />
+import { Readable } from "stream";
+/**
+ * @internal
+ * @param stream - to be split.
+ * @returns stream split into two identical streams.
+ */
+export declare function splitStream(stream: Readable): Promise<[
+    Readable,
+    Readable
+]>;
+export declare function splitStream(stream: ReadableStream): Promise<[
+    ReadableStream,
+    ReadableStream
+]>;
diff --git a/node_modules/@smithy/util-stream/dist-types/ts3.4/stream-type-check.d.ts b/node_modules/@smithy/util-stream/dist-types/ts3.4/stream-type-check.d.ts
new file mode 100644
index 00000000..11be8f14
--- /dev/null
+++ b/node_modules/@smithy/util-stream/dist-types/ts3.4/stream-type-check.d.ts
@@ -0,0 +1,16 @@
+/**
+ * @internal
+ * Alias prevents compiler from turning
+ * ReadableStream into ReadableStream<any>, which is incompatible
+ * with the NodeJS.ReadableStream global type.
+ */
+type ReadableStreamType = ReadableStream;
+/**
+ * @internal
+ */
+export declare const isReadableStream: (stream: unknown) => stream is ReadableStreamType;
+/**
+ * @internal
+ */
+export declare const isBlob: (blob: unknown) => blob is Blob;
+export {};
diff --git a/node_modules/@smithy/util-stream/package.json b/node_modules/@smithy/util-stream/package.json new file mode 100644 index 00000000..769bfc1e --- /dev/null +++ b/node_modules/@smithy/util-stream/package.json @@ -0,0 +1,98 @@ +{ + "name": "@smithy/util-stream", + "version": "4.2.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-stream", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run && yarn test:browser", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts", + "test:browser": "yarn g:vitest run -c vitest.config.browser.ts", + "test:browser:watch": "yarn g:vitest watch -c vitest.config.browser.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@smithy/util-test": "^0.2.8", + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/checksum/ChecksumStream": "./dist-es/checksum/ChecksumStream.browser", + "./dist-es/checksum/createChecksumStream": "./dist-es/checksum/createChecksumStream.browser", + 
"./dist-es/createBufferedReadable": "./dist-es/createBufferedReadableStream", + "./dist-es/getAwsChunkedEncodingStream": "./dist-es/getAwsChunkedEncodingStream.browser", + "./dist-es/headStream": "./dist-es/headStream.browser", + "./dist-es/sdk-stream-mixin": "./dist-es/sdk-stream-mixin.browser", + "./dist-es/splitStream": "./dist-es/splitStream.browser" + }, + "react-native": { + "./dist-es/checksum/createChecksumStream": "./dist-es/checksum/createChecksumStream.browser", + "./dist-es/checksum/ChecksumStream": "./dist-es/checksum/ChecksumStream.browser", + "./dist-es/getAwsChunkedEncodingStream": "./dist-es/getAwsChunkedEncodingStream.browser", + "./dist-es/sdk-stream-mixin": "./dist-es/sdk-stream-mixin.browser", + "./dist-es/headStream": "./dist-es/headStream.browser", + "./dist-es/splitStream": "./dist-es/splitStream.browser", + "./dist-es/createBufferedReadable": "./dist-es/createBufferedReadableStream", + "./dist-cjs/checksum/createChecksumStream": "./dist-cjs/checksum/createChecksumStream.browser", + "./dist-cjs/checksum/ChecksumStream": "./dist-cjs/checksum/ChecksumStream.browser", + "./dist-cjs/getAwsChunkedEncodingStream": "./dist-cjs/getAwsChunkedEncodingStream.browser", + "./dist-cjs/sdk-stream-mixin": "./dist-cjs/sdk-stream-mixin.browser", + "./dist-cjs/headStream": "./dist-cjs/headStream.browser", + "./dist-cjs/splitStream": "./dist-cjs/splitStream.browser", + "./dist-cjs/createBufferedReadable": "./dist-cjs/createBufferedReadableStream" + }, + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/util-stream", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-stream" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/util-uri-escape/LICENSE b/node_modules/@smithy/util-uri-escape/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/util-uri-escape/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/@smithy/util-uri-escape/README.md b/node_modules/@smithy/util-uri-escape/README.md new file mode 100644 index 00000000..22e939a5 --- /dev/null +++ b/node_modules/@smithy/util-uri-escape/README.md @@ -0,0 +1,10 @@ +# @smithy/util-uri-escape + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-uri-escape/latest.svg)](https://www.npmjs.com/package/@smithy/util-uri-escape) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-uri-escape.svg)](https://www.npmjs.com/package/@smithy/util-uri-escape) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
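As the README above suggests, these helpers are normally consumed indirectly (they back things like SigV4 canonical URI construction), but their behavior is easy to illustrate:

```ts
import { escapeUri, escapeUriPath } from "@smithy/util-uri-escape";

// encodeURIComponent plus the extra characters AWS percent-encodes:
console.log(escapeUri("it's (quoted)!")); // it%27s%20%28quoted%29%21

// escapeUriPath escapes each segment but preserves "/" separators:
console.log(escapeUriPath("photos/2024/my photo*.jpg")); // photos/2024/my%20photo%2A.jpg
```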
diff --git a/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri-path.js b/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri-path.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri-path.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri.js b/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-uri-escape/dist-cjs/index.js b/node_modules/@smithy/util-uri-escape/dist-cjs/index.js new file mode 100644 index 00000000..51001efe --- /dev/null +++ b/node_modules/@smithy/util-uri-escape/dist-cjs/index.js @@ -0,0 +1,43 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + escapeUri: () => escapeUri, + escapeUriPath: () => escapeUriPath +}); +module.exports = __toCommonJS(src_exports); + +// src/escape-uri.ts +var escapeUri = /* @__PURE__ */ __name((uri) => ( + // AWS percent-encodes some extra non-standard characters in a URI + encodeURIComponent(uri).replace(/[!'()*]/g, hexEncode) +), "escapeUri"); +var hexEncode = /* @__PURE__ */ __name((c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`, "hexEncode"); + +// src/escape-uri-path.ts +var escapeUriPath = /* @__PURE__ */ __name((uri) => uri.split("/").map(escapeUri).join("/"), "escapeUriPath"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + escapeUri, + escapeUriPath +}); + diff --git a/node_modules/@smithy/util-uri-escape/dist-es/escape-uri-path.js b/node_modules/@smithy/util-uri-escape/dist-es/escape-uri-path.js new file mode 100644 index 00000000..81b3fe37 --- /dev/null +++ b/node_modules/@smithy/util-uri-escape/dist-es/escape-uri-path.js @@ -0,0 +1,2 @@ +import { escapeUri } from "./escape-uri"; +export const escapeUriPath = (uri) => uri.split("/").map(escapeUri).join("/"); diff --git a/node_modules/@smithy/util-uri-escape/dist-es/escape-uri.js b/node_modules/@smithy/util-uri-escape/dist-es/escape-uri.js new file mode 100644 index 00000000..8990be13 --- /dev/null +++ b/node_modules/@smithy/util-uri-escape/dist-es/escape-uri.js @@ -0,0 +1,2 @@ +export const escapeUri = (uri) => encodeURIComponent(uri).replace(/[!'()*]/g, hexEncode); +const hexEncode = (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`; diff --git a/node_modules/@smithy/util-uri-escape/dist-es/index.js 
b/node_modules/@smithy/util-uri-escape/dist-es/index.js new file mode 100644 index 00000000..ed402e1c --- /dev/null +++ b/node_modules/@smithy/util-uri-escape/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./escape-uri"; +export * from "./escape-uri-path"; diff --git a/node_modules/@smithy/util-uri-escape/dist-types/escape-uri-path.d.ts b/node_modules/@smithy/util-uri-escape/dist-types/escape-uri-path.d.ts new file mode 100644 index 00000000..b547ff90 --- /dev/null +++ b/node_modules/@smithy/util-uri-escape/dist-types/escape-uri-path.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const escapeUriPath: (uri: string) => string; diff --git a/node_modules/@smithy/util-uri-escape/dist-types/escape-uri.d.ts b/node_modules/@smithy/util-uri-escape/dist-types/escape-uri.d.ts new file mode 100644 index 00000000..3f14d2c9 --- /dev/null +++ b/node_modules/@smithy/util-uri-escape/dist-types/escape-uri.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const escapeUri: (uri: string) => string; diff --git a/node_modules/@smithy/util-uri-escape/dist-types/index.d.ts b/node_modules/@smithy/util-uri-escape/dist-types/index.d.ts new file mode 100644 index 00000000..1913825c --- /dev/null +++ b/node_modules/@smithy/util-uri-escape/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./escape-uri"; +/** + * @internal + */ +export * from "./escape-uri-path"; diff --git a/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri-path.d.ts b/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri-path.d.ts new file mode 100644 index 00000000..a7e19caf --- /dev/null +++ b/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri-path.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const escapeUriPath: (uri: string) => string; diff --git a/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri.d.ts b/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri.d.ts new file mode 100644 index 00000000..13cc3728 --- /dev/null +++ b/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const escapeUri: (uri: string) => string; diff --git a/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..ad719fe3 --- /dev/null +++ b/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./escape-uri"; +/** + * @internal + */ +export * from "./escape-uri-path"; diff --git a/node_modules/@smithy/util-uri-escape/package.json b/node_modules/@smithy/util-uri-escape/package.json new file mode 100644 index 00000000..4ca6fd99 --- /dev/null +++ b/node_modules/@smithy/util-uri-escape/package.json @@ -0,0 +1,59 @@ +{ + "name": "@smithy/util-uri-escape", + "version": "4.0.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-uri-escape", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js 
\"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-uri-escape", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-uri-escape" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/util-utf8/LICENSE b/node_modules/@smithy/util-utf8/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/util-utf8/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/node_modules/@smithy/util-utf8/README.md b/node_modules/@smithy/util-utf8/README.md new file mode 100644 index 00000000..fc5db6d8 --- /dev/null +++ b/node_modules/@smithy/util-utf8/README.md @@ -0,0 +1,4 @@ +# @smithy/util-utf8 + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-utf8/latest.svg)](https://www.npmjs.com/package/@smithy/util-utf8) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-utf8.svg)](https://www.npmjs.com/package/@smithy/util-utf8) diff --git a/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js b/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js b/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-utf8/dist-cjs/index.js b/node_modules/@smithy/util-utf8/dist-cjs/index.js new file mode 100644 index 00000000..0b22680a --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-cjs/index.js @@ -0,0 +1,65 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromUtf8: () => fromUtf8, + toUint8Array: () => toUint8Array, + toUtf8: () => toUtf8 +}); +module.exports = __toCommonJS(src_exports); + +// src/fromUtf8.ts +var import_util_buffer_from = require("@smithy/util-buffer-from"); +var fromUtf8 = /* @__PURE__ */ __name((input) => { + const buf = (0, import_util_buffer_from.fromString)(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}, "fromUtf8"); + +// src/toUint8Array.ts +var toUint8Array = /* @__PURE__ */ __name((data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}, "toUint8Array"); + +// src/toUtf8.ts + +var toUtf8 = /* @__PURE__ */ __name((input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return (0, 
import_util_buffer_from.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}, "toUtf8"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromUtf8, + toUint8Array, + toUtf8 +}); + diff --git a/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js b/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js b/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js b/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js b/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js new file mode 100644 index 00000000..73441900 --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js @@ -0,0 +1 @@ +export const fromUtf8 = (input) => new TextEncoder().encode(input); diff --git a/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js b/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js new file mode 100644 index 00000000..6dc438b3 --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js @@ -0,0 +1,5 @@ +import { fromString } from "@smithy/util-buffer-from"; +export const fromUtf8 = (input) => { + const buf = fromString(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}; diff --git a/node_modules/@smithy/util-utf8/dist-es/index.js b/node_modules/@smithy/util-utf8/dist-es/index.js new file mode 100644 index 00000000..00ba4657 --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js b/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js new file mode 100644 index 00000000..2cd36f75 --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js @@ -0,0 +1,10 @@ +import { fromUtf8 } from "./fromUtf8"; +export const toUint8Array = (data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}; diff --git a/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js b/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js new file mode 100644 index 00000000..c2921278 --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js @@ -0,0 +1,9 @@ +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | 
Uint8Array."); + } + return new TextDecoder("utf-8").decode(input); +}; diff --git a/node_modules/@smithy/util-utf8/dist-es/toUtf8.js b/node_modules/@smithy/util-utf8/dist-es/toUtf8.js new file mode 100644 index 00000000..7be8745a --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-es/toUtf8.js @@ -0,0 +1,10 @@ +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return fromArrayBuffer(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}; diff --git a/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts b/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts new file mode 100644 index 00000000..dd919817 --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts b/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts new file mode 100644 index 00000000..dd919817 --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/node_modules/@smithy/util-utf8/dist-types/index.d.ts b/node_modules/@smithy/util-utf8/dist-types/index.d.ts new file mode 100644 index 00000000..00ba4657 --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts b/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts new file mode 100644 index 00000000..11b6342e --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts b/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts new file mode 100644 index 00000000..8494acd8 --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts b/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts new file mode 100644 index 00000000..8494acd8 --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string;
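Taken together, the three util-utf8 exports round-trip strings and bytes. A minimal usage sketch (illustrative only; the sample values are arbitrary, not from this diff):

```ts
import { fromUtf8, toUtf8, toUint8Array } from "@smithy/util-utf8";

const bytes = fromUtf8("héllo");          // Uint8Array of UTF-8 bytes (6 here, since "é" takes 2)
console.log(toUtf8(bytes));               // decodes back to "héllo"
console.log(toUtf8("already a string"));  // strings pass through unchanged, per the note above

// toUint8Array normalizes any of the three accepted shapes to a Uint8Array;
// for ArrayBuffer/ArrayBufferView inputs it creates a view over the same memory, not a copy.
const view = toUint8Array(new ArrayBuffer(4));
console.log(view.byteLength);             // 4
```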
diff --git a/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts b/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts new file mode 100644 index 00000000..39f3d6dd --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts b/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts new file mode 100644 index 00000000..39f3d6dd --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..ef9761d7 --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts b/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts new file mode 100644 index 00000000..562fe101 --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts b/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts new file mode 100644 index 00000000..33511ad7 --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts b/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts new file mode 100644 index 00000000..33511ad7 --- /dev/null +++ b/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array.
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/node_modules/@smithy/util-utf8/package.json b/node_modules/@smithy/util-utf8/package.json new file mode 100644 index 00000000..e33060de --- /dev/null +++ b/node_modules/@smithy/util-utf8/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-utf8", + "version": "4.0.0", + "description": "A UTF-8 string <-> UInt8Array converter", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-utf8", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/fromUtf8": "./dist-es/fromUtf8.browser", + "./dist-es/toUtf8": "./dist-es/toUtf8.browser" + }, + "react-native": {}, + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-utf8", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-utf8" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/node_modules/@smithy/util-waiter/LICENSE b/node_modules/@smithy/util-waiter/LICENSE new file mode 100644 index 00000000..7b6491ba --- /dev/null +++ b/node_modules/@smithy/util-waiter/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/node_modules/@smithy/util-waiter/README.md b/node_modules/@smithy/util-waiter/README.md new file mode 100644 index 00000000..17169a81 --- /dev/null +++ b/node_modules/@smithy/util-waiter/README.md @@ -0,0 +1,10 @@ +# @smithy/util-waiter + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-waiter/latest.svg)](https://www.npmjs.com/package/@smithy/util-waiter) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-waiter.svg)](https://www.npmjs.com/package/@smithy/util-waiter) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/node_modules/@smithy/util-waiter/dist-cjs/createWaiter.js b/node_modules/@smithy/util-waiter/dist-cjs/createWaiter.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-cjs/createWaiter.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-waiter/dist-cjs/index.js b/node_modules/@smithy/util-waiter/dist-cjs/index.js new file mode 100644 index 00000000..c038e3b9 --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-cjs/index.js @@ -0,0 +1,185 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + WaiterState: () => WaiterState, + checkExceptions: () => checkExceptions, + createWaiter: () => createWaiter, + waiterServiceDefaults: () => waiterServiceDefaults +}); +module.exports = __toCommonJS(src_exports); + +// src/utils/sleep.ts +var sleep = /* @__PURE__ */ __name((seconds) => { + return new Promise((resolve) => setTimeout(resolve, seconds * 1e3)); +}, "sleep"); + +// src/waiter.ts +var waiterServiceDefaults = { + minDelay: 2, + maxDelay: 120 +}; +var WaiterState = /* @__PURE__ */ ((WaiterState2) => { + WaiterState2["ABORTED"] = "ABORTED"; + WaiterState2["FAILURE"] = "FAILURE"; + WaiterState2["SUCCESS"] = "SUCCESS"; + WaiterState2["RETRY"] = "RETRY"; + WaiterState2["TIMEOUT"] = "TIMEOUT"; + return WaiterState2; +})(WaiterState || {}); +var checkExceptions = /* @__PURE__ */ __name((result) => { + if (result.state === "ABORTED" /* ABORTED */) { + const abortError = new Error( + `${JSON.stringify({ + ...result, + reason: "Request was aborted" + })}` + ); + abortError.name = "AbortError"; + throw abortError; + } else if (result.state === "TIMEOUT" /* TIMEOUT */) { + const timeoutError = new Error( + `${JSON.stringify({ + ...result, + reason: "Waiter has timed out" + })}` + ); + timeoutError.name = "TimeoutError"; + throw timeoutError; + } else if (result.state !== "SUCCESS" /* SUCCESS */) { + throw new Error(`${JSON.stringify(result)}`); + } + return result; +}, 
"checkExceptions"); + +// src/poller.ts +var exponentialBackoffWithJitter = /* @__PURE__ */ __name((minDelay, maxDelay, attemptCeiling, attempt) => { + if (attempt > attemptCeiling) + return maxDelay; + const delay = minDelay * 2 ** (attempt - 1); + return randomInRange(minDelay, delay); +}, "exponentialBackoffWithJitter"); +var randomInRange = /* @__PURE__ */ __name((min, max) => min + Math.random() * (max - min), "randomInRange"); +var runPolling = /* @__PURE__ */ __name(async ({ minDelay, maxDelay, maxWaitTime, abortController, client, abortSignal }, input, acceptorChecks) => { + const observedResponses = {}; + const { state, reason } = await acceptorChecks(client, input); + if (reason) { + const message = createMessageFromResponse(reason); + observedResponses[message] |= 0; + observedResponses[message] += 1; + } + if (state !== "RETRY" /* RETRY */) { + return { state, reason, observedResponses }; + } + let currentAttempt = 1; + const waitUntil = Date.now() + maxWaitTime * 1e3; + const attemptCeiling = Math.log(maxDelay / minDelay) / Math.log(2) + 1; + while (true) { + if (abortController?.signal?.aborted || abortSignal?.aborted) { + const message = "AbortController signal aborted."; + observedResponses[message] |= 0; + observedResponses[message] += 1; + return { state: "ABORTED" /* ABORTED */, observedResponses }; + } + const delay = exponentialBackoffWithJitter(minDelay, maxDelay, attemptCeiling, currentAttempt); + if (Date.now() + delay * 1e3 > waitUntil) { + return { state: "TIMEOUT" /* TIMEOUT */, observedResponses }; + } + await sleep(delay); + const { state: state2, reason: reason2 } = await acceptorChecks(client, input); + if (reason2) { + const message = createMessageFromResponse(reason2); + observedResponses[message] |= 0; + observedResponses[message] += 1; + } + if (state2 !== "RETRY" /* RETRY */) { + return { state: state2, reason: reason2, observedResponses }; + } + currentAttempt += 1; + } +}, "runPolling"); +var createMessageFromResponse = /* @__PURE__ */ __name((reason) => { + if (reason?.$responseBodyText) { + return `Deserialization error for body: ${reason.$responseBodyText}`; + } + if (reason?.$metadata?.httpStatusCode) { + if (reason.$response || reason.message) { + return `${reason.$response.statusCode ?? reason.$metadata.httpStatusCode ?? "Unknown"}: ${reason.message}`; + } + return `${reason.$metadata.httpStatusCode}: OK`; + } + return String(reason?.message ?? JSON.stringify(reason) ?? 
"Unknown"); +}, "createMessageFromResponse"); + +// src/utils/validate.ts +var validateWaiterOptions = /* @__PURE__ */ __name((options) => { + if (options.maxWaitTime <= 0) { + throw new Error(`WaiterConfiguration.maxWaitTime must be greater than 0`); + } else if (options.minDelay <= 0) { + throw new Error(`WaiterConfiguration.minDelay must be greater than 0`); + } else if (options.maxDelay <= 0) { + throw new Error(`WaiterConfiguration.maxDelay must be greater than 0`); + } else if (options.maxWaitTime <= options.minDelay) { + throw new Error( + `WaiterConfiguration.maxWaitTime [${options.maxWaitTime}] must be greater than WaiterConfiguration.minDelay [${options.minDelay}] for this waiter` + ); + } else if (options.maxDelay < options.minDelay) { + throw new Error( + `WaiterConfiguration.maxDelay [${options.maxDelay}] must be greater than WaiterConfiguration.minDelay [${options.minDelay}] for this waiter` + ); + } +}, "validateWaiterOptions"); + +// src/createWaiter.ts +var abortTimeout = /* @__PURE__ */ __name(async (abortSignal) => { + return new Promise((resolve) => { + const onAbort = /* @__PURE__ */ __name(() => resolve({ state: "ABORTED" /* ABORTED */ }), "onAbort"); + if (typeof abortSignal.addEventListener === "function") { + abortSignal.addEventListener("abort", onAbort); + } else { + abortSignal.onabort = onAbort; + } + }); +}, "abortTimeout"); +var createWaiter = /* @__PURE__ */ __name(async (options, input, acceptorChecks) => { + const params = { + ...waiterServiceDefaults, + ...options + }; + validateWaiterOptions(params); + const exitConditions = [runPolling(params, input, acceptorChecks)]; + if (options.abortController) { + exitConditions.push(abortTimeout(options.abortController.signal)); + } + if (options.abortSignal) { + exitConditions.push(abortTimeout(options.abortSignal)); + } + return Promise.race(exitConditions); +}, "createWaiter"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + createWaiter, + waiterServiceDefaults, + WaiterState, + checkExceptions +}); + diff --git a/node_modules/@smithy/util-waiter/dist-cjs/poller.js b/node_modules/@smithy/util-waiter/dist-cjs/poller.js new file mode 100644 index 00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-cjs/poller.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-waiter/dist-cjs/utils/index.js b/node_modules/@smithy/util-waiter/dist-cjs/utils/index.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-cjs/utils/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-waiter/dist-cjs/utils/sleep.js b/node_modules/@smithy/util-waiter/dist-cjs/utils/sleep.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-cjs/utils/sleep.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-waiter/dist-cjs/utils/validate.js b/node_modules/@smithy/util-waiter/dist-cjs/utils/validate.js new file mode 100644 index 00000000..04405773 --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-cjs/utils/validate.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-waiter/dist-cjs/waiter.js b/node_modules/@smithy/util-waiter/dist-cjs/waiter.js new file mode 100644 index 
00000000..532e610f --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-cjs/waiter.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/node_modules/@smithy/util-waiter/dist-es/createWaiter.js b/node_modules/@smithy/util-waiter/dist-es/createWaiter.js new file mode 100644 index 00000000..59bfdb9d --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-es/createWaiter.js @@ -0,0 +1,29 @@ +import { runPolling } from "./poller"; +import { validateWaiterOptions } from "./utils"; +import { waiterServiceDefaults, WaiterState } from "./waiter"; +const abortTimeout = async (abortSignal) => { + return new Promise((resolve) => { + const onAbort = () => resolve({ state: WaiterState.ABORTED }); + if (typeof abortSignal.addEventListener === "function") { + abortSignal.addEventListener("abort", onAbort); + } + else { + abortSignal.onabort = onAbort; + } + }); +}; +export const createWaiter = async (options, input, acceptorChecks) => { + const params = { + ...waiterServiceDefaults, + ...options, + }; + validateWaiterOptions(params); + const exitConditions = [runPolling(params, input, acceptorChecks)]; + if (options.abortController) { + exitConditions.push(abortTimeout(options.abortController.signal)); + } + if (options.abortSignal) { + exitConditions.push(abortTimeout(options.abortSignal)); + } + return Promise.race(exitConditions); +}; diff --git a/node_modules/@smithy/util-waiter/dist-es/index.js b/node_modules/@smithy/util-waiter/dist-es/index.js new file mode 100644 index 00000000..d77f139a --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./createWaiter"; +export * from "./waiter"; diff --git a/node_modules/@smithy/util-waiter/dist-es/poller.js b/node_modules/@smithy/util-waiter/dist-es/poller.js new file mode 100644 index 00000000..d1a0ec0b --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-es/poller.js @@ -0,0 +1,59 @@ +import { sleep } from "./utils/sleep"; +import { WaiterState } from "./waiter"; +const exponentialBackoffWithJitter = (minDelay, maxDelay, attemptCeiling, attempt) => { + if (attempt > attemptCeiling) + return maxDelay; + const delay = minDelay * 2 ** (attempt - 1); + return randomInRange(minDelay, delay); +}; +const randomInRange = (min, max) => min + Math.random() * (max - min); +export const runPolling = async ({ minDelay, maxDelay, maxWaitTime, abortController, client, abortSignal }, input, acceptorChecks) => { + const observedResponses = {}; + const { state, reason } = await acceptorChecks(client, input); + if (reason) { + const message = createMessageFromResponse(reason); + observedResponses[message] |= 0; + observedResponses[message] += 1; + } + if (state !== WaiterState.RETRY) { + return { state, reason, observedResponses }; + } + let currentAttempt = 1; + const waitUntil = Date.now() + maxWaitTime * 1000; + const attemptCeiling = Math.log(maxDelay / minDelay) / Math.log(2) + 1; + while (true) { + if (abortController?.signal?.aborted || abortSignal?.aborted) { + const message = "AbortController signal aborted."; + observedResponses[message] |= 0; + observedResponses[message] += 1; + return { state: WaiterState.ABORTED, observedResponses }; + } + const delay = exponentialBackoffWithJitter(minDelay, maxDelay, attemptCeiling, currentAttempt); + if (Date.now() + delay * 1000 > waitUntil) { + return { state: WaiterState.TIMEOUT, observedResponses }; + } + await sleep(delay); + const { state, reason } = await acceptorChecks(client, input); + if (reason) { + const 
message = createMessageFromResponse(reason); + observedResponses[message] |= 0; + observedResponses[message] += 1; + } + if (state !== WaiterState.RETRY) { + return { state, reason, observedResponses }; + } + currentAttempt += 1; + } +}; +const createMessageFromResponse = (reason) => { + if (reason?.$responseBodyText) { + return `Deserialization error for body: ${reason.$responseBodyText}`; + } + if (reason?.$metadata?.httpStatusCode) { + if (reason.$response || reason.message) { + return `${reason.$response.statusCode ?? reason.$metadata.httpStatusCode ?? "Unknown"}: ${reason.message}`; + } + return `${reason.$metadata.httpStatusCode}: OK`; + } + return String(reason?.message ?? JSON.stringify(reason) ?? "Unknown"); +}; diff --git a/node_modules/@smithy/util-waiter/dist-es/utils/index.js b/node_modules/@smithy/util-waiter/dist-es/utils/index.js new file mode 100644 index 00000000..e15a156b --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-es/utils/index.js @@ -0,0 +1,2 @@ +export * from "./sleep"; +export * from "./validate"; diff --git a/node_modules/@smithy/util-waiter/dist-es/utils/sleep.js b/node_modules/@smithy/util-waiter/dist-es/utils/sleep.js new file mode 100644 index 00000000..789205d4 --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-es/utils/sleep.js @@ -0,0 +1,3 @@ +export const sleep = (seconds) => { + return new Promise((resolve) => setTimeout(resolve, seconds * 1000)); +}; diff --git a/node_modules/@smithy/util-waiter/dist-es/utils/validate.js b/node_modules/@smithy/util-waiter/dist-es/utils/validate.js new file mode 100644 index 00000000..e094ea73 --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-es/utils/validate.js @@ -0,0 +1,17 @@ +export const validateWaiterOptions = (options) => { + if (options.maxWaitTime <= 0) { + throw new Error(`WaiterConfiguration.maxWaitTime must be greater than 0`); + } + else if (options.minDelay <= 0) { + throw new Error(`WaiterConfiguration.minDelay must be greater than 0`); + } + else if (options.maxDelay <= 0) { + throw new Error(`WaiterConfiguration.maxDelay must be greater than 0`); + } + else if (options.maxWaitTime <= options.minDelay) { + throw new Error(`WaiterConfiguration.maxWaitTime [${options.maxWaitTime}] must be greater than WaiterConfiguration.minDelay [${options.minDelay}] for this waiter`); + } + else if (options.maxDelay < options.minDelay) { + throw new Error(`WaiterConfiguration.maxDelay [${options.maxDelay}] must be greater than WaiterConfiguration.minDelay [${options.minDelay}] for this waiter`); + } +}; diff --git a/node_modules/@smithy/util-waiter/dist-es/waiter.js b/node_modules/@smithy/util-waiter/dist-es/waiter.js new file mode 100644 index 00000000..158c46a5 --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-es/waiter.js @@ -0,0 +1,34 @@ +export const waiterServiceDefaults = { + minDelay: 2, + maxDelay: 120, +}; +export var WaiterState; +(function (WaiterState) { + WaiterState["ABORTED"] = "ABORTED"; + WaiterState["FAILURE"] = "FAILURE"; + WaiterState["SUCCESS"] = "SUCCESS"; + WaiterState["RETRY"] = "RETRY"; + WaiterState["TIMEOUT"] = "TIMEOUT"; +})(WaiterState || (WaiterState = {})); +export const checkExceptions = (result) => { + if (result.state === WaiterState.ABORTED) { + const abortError = new Error(`${JSON.stringify({ + ...result, + reason: "Request was aborted", + })}`); + abortError.name = "AbortError"; + throw abortError; + } + else if (result.state === WaiterState.TIMEOUT) { + const timeoutError = new Error(`${JSON.stringify({ + ...result, + reason: "Waiter has timed 
out", + })}`); + timeoutError.name = "TimeoutError"; + throw timeoutError; + } + else if (result.state !== WaiterState.SUCCESS) { + throw new Error(`${JSON.stringify(result)}`); + } + return result; +}; diff --git a/node_modules/@smithy/util-waiter/dist-types/createWaiter.d.ts b/node_modules/@smithy/util-waiter/dist-types/createWaiter.d.ts new file mode 100644 index 00000000..16958029 --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-types/createWaiter.d.ts @@ -0,0 +1,11 @@ +import { WaiterOptions, WaiterResult } from "./waiter"; +/** + * Create a waiter promise that only resolves when: + * 1. Abort controller is signaled + * 2. Max wait time is reached + * 3. `acceptorChecks` succeeds, or fails + * Otherwise, it invokes `acceptorChecks` with exponential-backoff delay. + * + * @internal + */ +export declare const createWaiter: (options: WaiterOptions, input: Input, acceptorChecks: (client: Client, input: Input) => Promise) => Promise; diff --git a/node_modules/@smithy/util-waiter/dist-types/index.d.ts b/node_modules/@smithy/util-waiter/dist-types/index.d.ts new file mode 100644 index 00000000..d77f139a --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./createWaiter"; +export * from "./waiter"; diff --git a/node_modules/@smithy/util-waiter/dist-types/poller.d.ts b/node_modules/@smithy/util-waiter/dist-types/poller.d.ts new file mode 100644 index 00000000..40089572 --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-types/poller.d.ts @@ -0,0 +1,10 @@ +import { WaiterOptions, WaiterResult } from "./waiter"; +/** + * Function that runs polling as part of waiters. This will make one inital attempt and then + * subsequent attempts with an increasing delay. + * @param params - options passed to the waiter. + * @param client - AWS SDK Client + * @param input - client input + * @param stateChecker - function that checks the acceptor states on each poll. + */ +export declare const runPolling: ({ minDelay, maxDelay, maxWaitTime, abortController, client, abortSignal }: WaiterOptions, input: Input, acceptorChecks: (client: Client, input: Input) => Promise) => Promise; diff --git a/node_modules/@smithy/util-waiter/dist-types/ts3.4/createWaiter.d.ts b/node_modules/@smithy/util-waiter/dist-types/ts3.4/createWaiter.d.ts new file mode 100644 index 00000000..f9b32425 --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-types/ts3.4/createWaiter.d.ts @@ -0,0 +1,11 @@ +import { WaiterOptions, WaiterResult } from "./waiter"; +/** + * Create a waiter promise that only resolves when: + * 1. Abort controller is signaled + * 2. Max wait time is reached + * 3. `acceptorChecks` succeeds, or fails + * Otherwise, it invokes `acceptorChecks` with exponential-backoff delay. 
diff --git a/node_modules/@smithy/util-waiter/dist-types/ts3.4/createWaiter.d.ts b/node_modules/@smithy/util-waiter/dist-types/ts3.4/createWaiter.d.ts new file mode 100644 index 00000000..f9b32425 --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-types/ts3.4/createWaiter.d.ts @@ -0,0 +1,11 @@ +import { WaiterOptions, WaiterResult } from "./waiter"; +/** + * Create a waiter promise that only resolves when: + * 1. Abort controller is signaled + * 2. Max wait time is reached + * 3. `acceptorChecks` succeeds, or fails + * Otherwise, it invokes `acceptorChecks` with exponential-backoff delay. + * + * @internal + */ +export declare const createWaiter: <Client, Input>(options: WaiterOptions<Client>, input: Input, acceptorChecks: (client: Client, input: Input) => Promise<WaiterResult>) => Promise<WaiterResult>; diff --git a/node_modules/@smithy/util-waiter/dist-types/ts3.4/index.d.ts b/node_modules/@smithy/util-waiter/dist-types/ts3.4/index.d.ts new file mode 100644 index 00000000..be143d54 --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./createWaiter"; +export * from "./waiter"; diff --git a/node_modules/@smithy/util-waiter/dist-types/ts3.4/poller.d.ts b/node_modules/@smithy/util-waiter/dist-types/ts3.4/poller.d.ts new file mode 100644 index 00000000..8b33c940 --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-types/ts3.4/poller.d.ts @@ -0,0 +1,10 @@ +import { WaiterOptions, WaiterResult } from "./waiter"; +/** + * Function that runs polling as part of waiters. This will make one initial attempt and then + * subsequent attempts with an increasing delay. + * @param params - options passed to the waiter. + * @param client - AWS SDK Client + * @param input - client input + * @param stateChecker - function that checks the acceptor states on each poll. + */ +export declare const runPolling: <Client, Input>({ minDelay, maxDelay, maxWaitTime, abortController, client, abortSignal }: WaiterOptions<Client>, input: Input, acceptorChecks: (client: Client, input: Input) => Promise<WaiterResult>) => Promise<WaiterResult>; diff --git a/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/index.d.ts b/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/index.d.ts new file mode 100644 index 00000000..974384c9 --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./sleep"; +/** + * @internal + */ +export * from "./validate"; diff --git a/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/sleep.d.ts b/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/sleep.d.ts new file mode 100644 index 00000000..f53553b0 --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/sleep.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const sleep: (seconds: number) => Promise<void>; diff --git a/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/validate.d.ts b/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/validate.d.ts new file mode 100644 index 00000000..73d79b0c --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/validate.d.ts @@ -0,0 +1,8 @@ +import { WaiterOptions } from "../waiter"; +/** + * @internal + * + * Validates that waiter options are passed correctly + * @param options - a waiter configuration object + */ +export declare const validateWaiterOptions: <Client>(options: WaiterOptions<Client>) => void; diff --git a/node_modules/@smithy/util-waiter/dist-types/ts3.4/waiter.d.ts b/node_modules/@smithy/util-waiter/dist-types/ts3.4/waiter.d.ts new file mode 100644 index 00000000..f685ce4d --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-types/ts3.4/waiter.d.ts @@ -0,0 +1,49 @@ +import { WaiterConfiguration as WaiterConfiguration__ } from "@smithy/types"; +/** + * @internal + */ +export interface WaiterConfiguration<Client> extends WaiterConfiguration__<Client> { +} +/** + * @internal + */ +export declare const waiterServiceDefaults: { + minDelay: number; + maxDelay: number; +}; +/** + * @internal + */ +export type WaiterOptions<Client> = WaiterConfiguration<Client> & Required<Pick<WaiterConfiguration<Client>, "minDelay" | "maxDelay">>; +/** + * @internal + */ +export declare enum WaiterState { + ABORTED = "ABORTED", + FAILURE = "FAILURE", + SUCCESS = "SUCCESS", + RETRY = "RETRY", + TIMEOUT = "TIMEOUT" +} +/** + * @internal + */ +export type WaiterResult = { + state: WaiterState; + /** + * (optional) Indicates a reason for why a waiter has reached its state. + */ + reason?: any; + /** + * Responses observed by the waiter during its polling, where the value + * is the count. + */ + observedResponses?: Record<string, number>; +}; +/** + * @internal + * + * Handles and throws exceptions resulting from the waiterResult + * @param result - WaiterResult + */ +export declare const checkExceptions: (result: WaiterResult) => WaiterResult;
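For reference, here is the delay schedule that src/poller.ts implements (shown compiled in dist-cjs and dist-es above), restated as a standalone sketch. `backoffDelay` is a local name for illustration, not an export of the package:

```ts
// Full-jitter exponential backoff, as implemented by exponentialBackoffWithJitter in poller.ts.
const backoffDelay = (minDelay: number, maxDelay: number, attempt: number): number => {
  // Past this attempt count, minDelay * 2^(attempt - 1) would exceed maxDelay, so cap it.
  const attemptCeiling = Math.log(maxDelay / minDelay) / Math.log(2) + 1;
  if (attempt > attemptCeiling) return maxDelay;
  const upper = minDelay * 2 ** (attempt - 1);
  // Jitter: a uniform draw between minDelay and the exponential upper bound.
  return minDelay + Math.random() * (upper - minDelay);
};

// With waiterServiceDefaults (minDelay = 2, maxDelay = 120), successive attempts draw from
// [2,2]s, [2,4]s, [2,8]s, ... and settle at a flat 120s from attempt 7 onward.
```

Drawing anywhere in the full range rather than adding a small random offset spreads concurrent waiters apart, so many clients polling the same service do not retry in lockstep.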
SUCCESS = "SUCCESS", + RETRY = "RETRY", + TIMEOUT = "TIMEOUT" +} +/** + * @internal + */ +export type WaiterResult = { + state: WaiterState; + /** + * (optional) Indicates a reason for why a waiter has reached its state. + */ + reason?: any; + /** + * Responses observed by the waiter during its polling, where the value + * is the count. + */ + observedResponses?: Record; +}; +/** + * @internal + * + * Handles and throws exceptions resulting from the waiterResult + * @param result - WaiterResult + */ +export declare const checkExceptions: (result: WaiterResult) => WaiterResult; diff --git a/node_modules/@smithy/util-waiter/dist-types/utils/index.d.ts b/node_modules/@smithy/util-waiter/dist-types/utils/index.d.ts new file mode 100644 index 00000000..b9a32052 --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-types/utils/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./sleep"; +/** + * @internal + */ +export * from "./validate"; diff --git a/node_modules/@smithy/util-waiter/dist-types/utils/sleep.d.ts b/node_modules/@smithy/util-waiter/dist-types/utils/sleep.d.ts new file mode 100644 index 00000000..e5d9f73e --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-types/utils/sleep.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const sleep: (seconds: number) => Promise; diff --git a/node_modules/@smithy/util-waiter/dist-types/utils/validate.d.ts b/node_modules/@smithy/util-waiter/dist-types/utils/validate.d.ts new file mode 100644 index 00000000..a847eee6 --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-types/utils/validate.d.ts @@ -0,0 +1,8 @@ +import { WaiterOptions } from "../waiter"; +/** + * @internal + * + * Validates that waiter options are passed correctly + * @param options - a waiter configuration object + */ +export declare const validateWaiterOptions: (options: WaiterOptions) => void; diff --git a/node_modules/@smithy/util-waiter/dist-types/waiter.d.ts b/node_modules/@smithy/util-waiter/dist-types/waiter.d.ts new file mode 100644 index 00000000..e0c690f3 --- /dev/null +++ b/node_modules/@smithy/util-waiter/dist-types/waiter.d.ts @@ -0,0 +1,49 @@ +import { WaiterConfiguration as WaiterConfiguration__ } from "@smithy/types"; +/** + * @internal + */ +export interface WaiterConfiguration extends WaiterConfiguration__ { +} +/** + * @internal + */ +export declare const waiterServiceDefaults: { + minDelay: number; + maxDelay: number; +}; +/** + * @internal + */ +export type WaiterOptions = WaiterConfiguration & Required, "minDelay" | "maxDelay">>; +/** + * @internal + */ +export declare enum WaiterState { + ABORTED = "ABORTED", + FAILURE = "FAILURE", + SUCCESS = "SUCCESS", + RETRY = "RETRY", + TIMEOUT = "TIMEOUT" +} +/** + * @internal + */ +export type WaiterResult = { + state: WaiterState; + /** + * (optional) Indicates a reason for why a waiter has reached its state. + */ + reason?: any; + /** + * Responses observed by the waiter during its polling, where the value + * is the count. 
+ */ + observedResponses?: Record<string, number>; +}; +/** + * @internal + * + * Handles and throws exceptions resulting from the waiterResult + * @param result - WaiterResult + */ +export declare const checkExceptions: (result: WaiterResult) => WaiterResult; diff --git a/node_modules/@smithy/util-waiter/package.json b/node_modules/@smithy/util-waiter/package.json new file mode 100644 index 00000000..2706fd70 --- /dev/null +++ b/node_modules/@smithy/util-waiter/package.json @@ -0,0 +1,62 @@ +{ + "name": "@smithy/util-waiter", + "version": "4.0.3", + "description": "Shared utilities for client waiters for the AWS SDK", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-waiter", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/util-waiter", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-waiter" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file
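Read together, the `@smithy/util-waiter` pieces above form a small polling protocol: `createWaiter` invokes an acceptor function with exponential backoff until it reports `SUCCESS`, `FAILURE`, or `ABORTED`, or until `maxWaitTime` elapses, and `checkExceptions` turns any non-success `WaiterResult` into a thrown error (a `TimeoutError` on timeout, as in the `dist-cjs` source above). A minimal sketch of driving the package directly; the `client` object, `input` shape, and `checkReady` acceptor are hypothetical stand-ins, not part of the package:

```typescript
import { createWaiter, checkExceptions, WaiterState, WaiterResult } from "@smithy/util-waiter";

// Hypothetical client: the waiter is generic over Client and only ever
// hands it to the acceptor function, so any shape works here.
const client = { isReady: async (id: string) => id === "job-42" };
const input = { jobId: "job-42" };

// Acceptor: maps one poll of the client onto a WaiterState.
const checkReady = async (c: typeof client, i: typeof input): Promise<WaiterResult> => {
  const ready = await c.isReady(i.jobId);
  return { state: ready ? WaiterState.SUCCESS : WaiterState.RETRY };
};

const main = async () => {
  // Wait at most 30s overall, with poll delays growing from 2s up to 10s.
  const result = await createWaiter({ client, maxWaitTime: 30, minDelay: 2, maxDelay: 10 }, input, checkReady);
  checkExceptions(result); // throws TimeoutError / Error unless the state is SUCCESS
};

main().catch(console.error);
```

Generated SDK waiters (for example S3's `waitUntilBucketExists`) are thin wrappers around this same `createWaiter`/`checkExceptions` pair, supplying a service-specific acceptor.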
diff --git a/node_modules/bowser/CHANGELOG.md b/node_modules/bowser/CHANGELOG.md new file mode 100644 index 00000000..260a03d9 --- /dev/null +++ b/node_modules/bowser/CHANGELOG.md @@ -0,0 +1,218 @@ +# Bowser Changelog + +### 2.11.0 (Sep 12, 2020) +- [ADD] Added support for aliases in `Parser#is` method (#437) +- [ADD] Added more typings (#438, #427) +- [ADD] Added support for MIUI Browser (#436) + +### 2.10.0 (Jul 9, 2020) +- [FIX] Fix for Firefox detection on iOS 13 [#415] +- [FIX] Fixes for typings.d.ts [#409] +- [FIX] Updated development dependencies + +### 2.9.0 (Jan 28, 2020) +- [ADD] Export more methods and constants via .d.ts [#388], [#390] + +### 2.8.1 (Dec 26, 2019) +- [FIX] Reverted [#382] as it broke build + +### 2.8.0 (Dec 26, 2019) +- [ADD] Add polyfills for Array.find & Object.assign [#383] +- [ADD] Export constants with types.d.ts [#382] +- [FIX] Add support for WeChat on Windows [#381] +- [FIX] Fix detection of Firefox on iPad [#379] +- [FIX] Add detection of Electron [#375] +- [FIX] Updated dev-dependencies + +### 2.7.0 (Oct 2, 2019) +- [FIX] Add support for QQ Browser [#362] +- [FIX] Add support for GSA [#364] +- [FIX] Updated dependencies + +### 2.6.0 (Sep 6, 2019) +- [ADD] Define "module" export in package.json [#354] +- [FIX] Fix Tablet PC detection [#334] + +### 2.5.4 (Sep 2, 2019) +- [FIX] Exclude docs from the npm package [#349] + +### 2.5.3 (Aug 4, 2019) +- [FIX] Add MacOS names support [#338] +- [FIX] Point typings.d.ts from package.json [#341] +- [FIX] Upgrade dependencies + +### 2.5.2 (July 17, 2019) +- [FIX] Fixes the bug undefined method because of failed build (#335) + +### 2.5.1 (July 17, 2019) +- [FIX] Fixes the bug with a custom Error class (#335) +- [FIX] Fixes the settings for Babel to reduce the bundle size (#259) + +### 2.5.0 (July 16, 2019) +- [ADD] Add constant output so that users can quickly get all types (#325) +- [FIX] Add support for Roku OS (#332) +- [FIX] Update devDependencies +- [FIX] Fix docs, README and added funding information + +### 2.4.0 (May 3, 2019) +- [FIX] Update regexp for generic browsers (#310) +- [FIX] Fix issues with module.exports (#318) +- [FIX] Update devDependencies (#316, #321, #322) +- [FIX] Fix docs (#320) + +### 2.3.0 (April 14, 2019) +- [ADD] Add support for Blink-based MS Edge (#311) +- [ADD] Add more types for TS (#289) +- [FIX] Update dev-dependencies +- [FIX] Update docs + +### 2.2.1 (April 12, 2019) +- [ADD] Add an alias for Samsung Internet +- [FIX] Fix browser name detection for browsers without an alias (#313) + +### 2.2.0 (April 7, 2019) +- [ADD] Add short aliases for browser names (#295) +- [FIX] Fix Yandex Browser version detection (#308) + +### 2.1.2 (March 6, 2019) +- [FIX] Fix buggy `getFirstMatch` reference + +### 2.1.1 (March 6, 2019) +- [ADD] Add detection of PlayStation 4 (#291) +- [ADD] Deploy docs on GH Pages (#293) +- [FIX] Fix files extensions for importing (#294) +- [FIX] Fix docs (#295) + +### 2.1.0 (January 24, 2019) +- [ADD] Add new `Parser.getEngineName()` method (#288) +- [ADD] Add detection of ChromeOS (#287) +- [FIX] Fix README + +### 2.0.0 (January 19, 2019) +- [ADD] Support a non strict equality in `Parser.satisfies()` (#275) +- [ADD] Add Android versions names (#276) +- [ADD] Add a typings file (#277) +- [ADD] Added support for Googlebot recognition (#278) +- [FIX] Update building tools, avoid security issues + +### 2.0.0-beta.3 (September 15, 2018) +- [FIX] Fix Chrome Mobile detection (#253) +- [FIX] Use built bowser for CI (#252) +- [FIX] Update babel-plugin-add-module-exports (#251) + +### 2.0.0-beta.2 (September 9, 2018) +- [FIX] Fix failing comparing version through `Parser.satisfies` (#243) +- [FIX] Fix travis testing, include eslint into CI testing +- [FIX] Add support for Maxthon desktop browser (#246) +- [FIX] Add support for Swing browser (#248) +- [DOCS] Regenerate docs + +### 2.0.0-beta.1 (August 18, 2018) +- [ADD] Add loose version comparison to `Parser.compareVersion()` and `Parser.satisfies()` +- [CHORE] Add CONTRIBUTING.md +- [DOCS] Regenerate docs + +### 2.0.0-alpha.4 (August 2, 2018) +- [DOCS] Fix usage docs (#238) +- [CHANGE] Make `./es5.js` the main file of the package (#239) + +### 2.0.0-alpha.3 (July 22, 2018) +- [CHANGE] Rename split and rename `compiled.js` to `es5.js` and `bundled.js` (#231, #236, #237) +- [ADD] Add `Parser.some` (#235) + +### 2.0.0-alpha.2 (July 17, 2018) +- [CHANGE] Make `src/bowser` main file instead of the bundled one +- [CHANGE] Move the bundled file to the root of the package to make it possible to `require('bowser/compiled')` (#231) +- [REMOVE] Remove `typings.d.ts` before stable release
(#232) +- [FIX] Improve Nexus devices detection (#233) + +### 2.0.0-alpha.1 (July 9, 2018) +- [ADD] `Bowser.getParser()` +- [ADD] `Bowser.parse` +- [ADD] `Parser` class which describes parsing process +- [CHANGE] Change bowser's returning object +- [REMOVE] Remove bower support + +### 1.9.4 (June 28, 2018) +- [FIX] Fix NAVER Whale browser detection (#220) +- [FIX] Fix MZ Browser browser detection (#219) +- [FIX] Fix Firefox Focus browser detection (#191) +- [FIX] Fix webOS browser detection (#186) + +### 1.9.3 (March 12, 2018) +- [FIX] Fix `typings.d.ts` — add `ipad`, `iphone`, `ipod` flags to the interface + +### 1.9.2 (February 5, 2018) +- [FIX] Fix `typings.d.ts` — add `osname` flag to the interface + +### 1.9.1 (December 22, 2017) +- [FIX] Fix `typings.d.ts` — add `chromium` flag to the interface + +### 1.9.0 (December 20, 2017) +- [ADD] Add a public method `.detect()` (#205) +- [DOCS] Fix description of `chromium` flag in docs (#206) + +### 1.8.1 (October 7, 2017) +- [FIX] Fix detection of MS Edge on Android and iOS (#201) + +### 1.8.0 (October 7, 2017) +- [ADD] Add `osname` into result object (#200) + +### 1.7.3 (August 30, 2017) +- [FIX] Fix detection of Chrome on Android 8 OPR6 (#193) + +### 1.7.2 (August 17, 2017) +- [FIX] Fix typings.d.ts according to #185 + +### 1.7.1 (July 13, 2017) +- [ADD] Fix detecting of Tablet PC as tablet (#183) + +### 1.7.0 (May 18, 2017) +- [ADD] Add OS version support for Windows and macOS (#178) + +### 1.6.0 (December 5, 2016) +- [ADD] Add some tests for Windows devices (#89) +- [ADD] Add `root` to initialization process (#170) +- [FIX] Upgrade .travis.yml config + +### 1.5.0 (October 31, 2016) +- [ADD] Throw an error when `minVersion` map has not a string as a version and fix readme (#165) +- [FIX] Fix truly detection of Windows Phones (#167) + +### 1.4.6 (September 19, 2016) +- [FIX] Fix mobile Opera's version detection on Android +- [FIX] Fix typescript typings — add `mobile` and `tablet` flags +- [DOC] Fix description of `bowser.check` + +### 1.4.5 (August 30, 2016) + +- [FIX] Add support of Samsung Internet for Android +- [FIX] Fix case when `navigator.userAgent` is `undefined` +- [DOC] Add information about `strictMode` in `check` function +- [DOC] Consistent use of `bowser` variable in the README + +### 1.4.4 (August 10, 2016) + +- [FIX] Fix AMD `define` call — pass name to the function + +### 1.4.3 (July 27, 2016) + +- [FIX] Fix error `Object doesn't support this property or method` on IE8 + +### 1.4.2 (July 26, 2016) + +- [FIX] Fix missing `isUnsupportedBrowser` in typings description +- [DOC] Fix `check`'s declaration in README + +### 1.4.1 (July 7, 2016) + +- [FIX] Fix `strictMode` logic for `isUnsupportedBrowser` + +### 1.4.0 (June 28, 2016) + +- [FEATURE] Add `bowser.compareVersions` method +- [FEATURE] Add `bowser.isUnsupportedBrowser` method +- [FEATURE] Add `bowser.check` method +- [DOC] Changelog started +- [DOC] Add API section to README +- [FIX] Fix detection of browser type (A/C/X) for Chromium diff --git a/node_modules/bowser/LICENSE b/node_modules/bowser/LICENSE new file mode 100644 index 00000000..94085f02 --- /dev/null +++ b/node_modules/bowser/LICENSE @@ -0,0 +1,39 @@ +Copyright 2015, Dustin Diaz (the "Original Author") +All rights reserved. 
+ +MIT License + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +Distributions of all or part of the Software intended to be used +by the recipients as they would use the unmodified Software, +containing modifications that substantially alter, remove, or +disable functionality of the Software, outside of the documented +configuration mechanisms provided by the Software, shall be +modified such that the Original Author's bug reporting email +addresses and urls are either replaced with the contact information +of the parties responsible for the changes, or removed entirely. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + + +Except where noted, this license applies to any and all software +programs and associated documentation files created by the +Original Author, when distributed with the Software. diff --git a/node_modules/bowser/README.md b/node_modules/bowser/README.md new file mode 100644 index 00000000..8f5f915b --- /dev/null +++ b/node_modules/bowser/README.md @@ -0,0 +1,179 @@ +## Bowser +A small, fast and rich-API browser/platform/engine detector for both browser and node. +- **Small.** Use the plain ES5 version, which is ~4.8kB gzipped. +- **Optimized.** Use only those parsers you need — it doesn't do useless work. +- **Multi-platform.** It's browser- and node-ready, so you can use it in any environment. + +Don't hesitate to support the project on Github or [OpenCollective](https://opencollective.com/bowser) if you like it ❤️ Also, contributors are always welcome! + +[![Financial Contributors on Open Collective](https://opencollective.com/bowser/all/badge.svg?label=financial+contributors)](https://opencollective.com/bowser) [![Build Status](https://travis-ci.org/lancedikson/bowser.svg?branch=master)](https://travis-ci.org/lancedikson/bowser/) [![Greenkeeper badge](https://badges.greenkeeper.io/lancedikson/bowser.svg)](https://greenkeeper.io/) [![Coverage Status](https://coveralls.io/repos/github/lancedikson/bowser/badge.svg?branch=master)](https://coveralls.io/github/lancedikson/bowser?branch=master) ![Downloads](https://img.shields.io/npm/dm/bowser) + +# Contents +- [Overview](#overview) +- [Use cases](#use-cases) +- [Advanced usage](#advanced-usage) +- [How can I help?](#contributing) + +# Overview + +The library helps you detect which browser your user has, and gives you a convenient API for filtering users depending on their browser. Check it out on this page: https://bowser-js.github.io/bowser-online/. + +### ⚠️ Version 2.0 breaking changes ⚠️ + +Version 2.0 has drastically changed the API.
All available methods are on the [docs page](https://lancedikson.github.io/bowser/docs). + +_For legacy code, check out the [1.x](https://github.com/lancedikson/bowser/tree/v1.x) branch and install it through `npm install bowser@1.9.4`._ + +# Use cases + +First of all, require the library. This is a UMD module, so it will work for AMD, TypeScript, ES6, and CommonJS module systems. + +```javascript +const Bowser = require("bowser"); // CommonJS + +import * as Bowser from "bowser"; // TypeScript + +import Bowser from "bowser"; // ES6 (and TypeScript with --esModuleInterop enabled) +``` + +By default, the exported version is the *ES5 transpiled version*, which **does not** include any polyfills. + +If you don't use your own `babel-polyfill`, you may need a pre-built bundle with all the needed polyfills. +In that case, require bowser like this: `require('bowser/bundled')`. +As a result, you get an ES5 version of bowser with `babel-polyfill` bundled together. + +The source files are available in the package as well, in case you need to use them. + +## Browser props detection + +Often we need to pick up browser properties such as the name, the version, the rendering engine and so on. Here is an example of how to do it with Bowser: + +```javascript +const browser = Bowser.getParser(window.navigator.userAgent); + +console.log(`The current browser name is "${browser.getBrowserName()}"`); +// The current browser name is "Internet Explorer" +``` + +or + +```javascript +const browser = Bowser.getParser(window.navigator.userAgent); +console.log(browser.getBrowser()); + +// outputs +{ + name: "Internet Explorer", + version: "11.0" +} +``` + +or + +```javascript +console.log(Bowser.parse(window.navigator.userAgent)); + +// outputs +{ + browser: { + name: "Internet Explorer", + version: "11.0" + }, + os: { + name: "Windows", + version: "NT 6.3", + versionName: "8.1" + }, + platform: { + type: "desktop" + }, + engine: { + name: "Trident", + version: "7.0" + } +} +``` + + +## Filtering browsers + +You may want to filter out particular browsers to provide special support or workarounds for them. +It could look like this: + +```javascript +const browser = Bowser.getParser(window.navigator.userAgent); +const isValidBrowser = browser.satisfies({ + // declare browsers per OS + windows: { + "internet explorer": ">10", + }, + macos: { + safari: ">10.1" + }, + + // per platform (mobile, desktop or tablet) + mobile: { + safari: '>=9', + 'android browser': '>3.10' + }, + + // or in general + chrome: "~20.1.1432", + firefox: ">31", + opera: ">=22", + + // also supports equality operator + chrome: "=20.1.1432", // will match particular build only + + // and loose-equality operator + chrome: "~20", // will match any 20.* sub-version + chrome: "~20.1" // will match any 20.1.* sub-version (20.1.19 as well as 20.1.12.42-alpha.1) +}); +``` + +Settings for any particular OS or platform have higher priority and override the settings of standalone browsers. +Thus, you can define OS- or platform-specific rules and they will win in the end. + +You will find more of the API and its possibilities in the `docs` folder. + +### Browser names for `.satisfies()` + +By default, you are supposed to use the full browser name for `.satisfies`. +But there is also a short way to define a browser using aliases, as in the sketch below. The full +list of aliases can be found in [the file](src/constants.js). + +## Similar Projects +* [Kong](https://github.com/BigBadBleuCheese/Kong) - A C# port of Bowser.
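As a quick, illustrative sketch of the alias support described above (the user-agent string below is just an example value, not something the README ships):

```javascript
const Bowser = require("bowser");

// An example Internet Explorer 11 user-agent string (illustrative only).
const ua = "Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko";

const browser = Bowser.getParser(ua);

console.log(browser.getBrowserName()); // "Internet Explorer"

// "ie" is the short alias for "Internet Explorer" from src/constants.js:
console.log(browser.satisfies({ ie: "~11" })); // true
console.log(browser.is("ie", true)); // true (the second argument enables alias lookup)
```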
+ +## Contributors + +### Code Contributors + +This project exists thanks to all the people who contribute. [[Contribute](CONTRIBUTING.md)]. + + +### Financial Contributors + +Become a financial contributor and help us sustain our community. [[Contribute](https://opencollective.com/bowser/contribute)] + +#### Individuals + + + +#### Organizations + +Support this project with your organization. Your logo will show up here with a link to your website. [[Contribute](https://opencollective.com/bowser/contribute)] + + + + + + + + + + + + +## License +Licensed as MIT. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE file for more details. diff --git a/node_modules/bowser/bundled.js b/node_modules/bowser/bundled.js new file mode 100644 index 00000000..066ac409 --- /dev/null +++ b/node_modules/bowser/bundled.js @@ -0,0 +1 @@ +!function(t,n){"object"==typeof exports&&"object"==typeof module?module.exports=n():"function"==typeof define&&define.amd?define([],n):"object"==typeof exports?exports.bowser=n():t.bowser=n()}(this,(function(){return function(t){var n={};function e(r){if(n[r])return n[r].exports;var i=n[r]={i:r,l:!1,exports:{}};return t[r].call(i.exports,i,i.exports,e),i.l=!0,i.exports}return e.m=t,e.c=n,e.d=function(t,n,r){e.o(t,n)||Object.defineProperty(t,n,{enumerable:!0,get:r})},e.r=function(t){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},e.t=function(t,n){if(1&n&&(t=e(t)),8&n)return t;if(4&n&&"object"==typeof t&&t&&t.__esModule)return t;var r=Object.create(null);if(e.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:t}),2&n&&"string"!=typeof t)for(var i in t)e.d(r,i,function(n){return t[n]}.bind(null,i));return r},e.n=function(t){var n=t&&t.__esModule?function(){return t.default}:function(){return t};return e.d(n,"a",n),n},e.o=function(t,n){return Object.prototype.hasOwnProperty.call(t,n)},e.p="",e(e.s=129)}([function(t,n,e){var r=e(1),i=e(7),o=e(14),u=e(11),a=e(19),c=function(t,n,e){var s,f,l,h,d=t&c.F,p=t&c.G,v=t&c.S,g=t&c.P,y=t&c.B,m=p?r:v?r[n]||(r[n]={}):(r[n]||{}).prototype,b=p?i:i[n]||(i[n]={}),S=b.prototype||(b.prototype={});for(s in p&&(e=n),e)l=((f=!d&&m&&void 0!==m[s])?m:e)[s],h=y&&f?a(l,r):g&&"function"==typeof l?a(Function.call,l):l,m&&u(m,s,l,t&c.U),b[s]!=l&&o(b,s,h),g&&S[s]!=l&&(S[s]=l)};r.core=i,c.F=1,c.G=2,c.S=4,c.P=8,c.B=16,c.W=32,c.U=64,c.R=128,t.exports=c},function(t,n){var e=t.exports="undefined"!=typeof window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=e)},function(t,n){t.exports=function(t){try{return!!t()}catch(t){return!0}}},function(t,n,e){var r=e(4);t.exports=function(t){if(!r(t))throw TypeError(t+" is not an object!");return t}},function(t,n){t.exports=function(t){return"object"==typeof t?null!==t:"function"==typeof t}},function(t,n,e){var r=e(50)("wks"),i=e(31),o=e(1).Symbol,u="function"==typeof o;(t.exports=function(t){return r[t]||(r[t]=u&&o[t]||(u?o:i)("Symbol."+t))}).store=r},function(t,n,e){var r=e(21),i=Math.min;t.exports=function(t){return t>0?i(r(t),9007199254740991):0}},function(t,n){var e=t.exports={version:"2.6.9"};"number"==typeof __e&&(__e=e)},function(t,n,e){t.exports=!e(2)((function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a}))},function(t,n,e){var 
r=e(3),i=e(96),o=e(28),u=Object.defineProperty;n.f=e(8)?Object.defineProperty:function(t,n,e){if(r(t),n=o(n,!0),r(e),i)try{return u(t,n,e)}catch(t){}if("get"in e||"set"in e)throw TypeError("Accessors not supported!");return"value"in e&&(t[n]=e.value),t}},function(t,n,e){var r=e(26);t.exports=function(t){return Object(r(t))}},function(t,n,e){var r=e(1),i=e(14),o=e(13),u=e(31)("src"),a=e(134),c=(""+a).split("toString");e(7).inspectSource=function(t){return a.call(t)},(t.exports=function(t,n,e,a){var s="function"==typeof e;s&&(o(e,"name")||i(e,"name",n)),t[n]!==e&&(s&&(o(e,u)||i(e,u,t[n]?""+t[n]:c.join(String(n)))),t===r?t[n]=e:a?t[n]?t[n]=e:i(t,n,e):(delete t[n],i(t,n,e)))})(Function.prototype,"toString",(function(){return"function"==typeof this&&this[u]||a.call(this)}))},function(t,n,e){var r=e(0),i=e(2),o=e(26),u=/"/g,a=function(t,n,e,r){var i=String(o(t)),a="<"+n;return""!==e&&(a+=" "+e+'="'+String(r).replace(u,""")+'"'),a+">"+i+""};t.exports=function(t,n){var e={};e[t]=n(a),r(r.P+r.F*i((function(){var n=""[t]('"');return n!==n.toLowerCase()||n.split('"').length>3})),"String",e)}},function(t,n){var e={}.hasOwnProperty;t.exports=function(t,n){return e.call(t,n)}},function(t,n,e){var r=e(9),i=e(30);t.exports=e(8)?function(t,n,e){return r.f(t,n,i(1,e))}:function(t,n,e){return t[n]=e,t}},function(t,n,e){var r=e(46),i=e(26);t.exports=function(t){return r(i(t))}},function(t,n,e){"use strict";var r=e(2);t.exports=function(t,n){return!!t&&r((function(){n?t.call(null,(function(){}),1):t.call(null)}))}},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r=e(18),i=function(){function t(){}return t.getFirstMatch=function(t,n){var e=n.match(t);return e&&e.length>0&&e[1]||""},t.getSecondMatch=function(t,n){var e=n.match(t);return e&&e.length>1&&e[2]||""},t.matchAndReturnConst=function(t,n,e){if(t.test(n))return e},t.getWindowsVersionName=function(t){switch(t){case"NT":return"NT";case"XP":return"XP";case"NT 5.0":return"2000";case"NT 5.1":return"XP";case"NT 5.2":return"2003";case"NT 6.0":return"Vista";case"NT 6.1":return"7";case"NT 6.2":return"8";case"NT 6.3":return"8.1";case"NT 10.0":return"10";default:return}},t.getMacOSVersionName=function(t){var n=t.split(".").splice(0,2).map((function(t){return parseInt(t,10)||0}));if(n.push(0),10===n[0])switch(n[1]){case 5:return"Leopard";case 6:return"Snow Leopard";case 7:return"Lion";case 8:return"Mountain Lion";case 9:return"Mavericks";case 10:return"Yosemite";case 11:return"El Capitan";case 12:return"Sierra";case 13:return"High Sierra";case 14:return"Mojave";case 15:return"Catalina";default:return}},t.getAndroidVersionName=function(t){var n=t.split(".").splice(0,2).map((function(t){return parseInt(t,10)||0}));if(n.push(0),!(1===n[0]&&n[1]<5))return 1===n[0]&&n[1]<6?"Cupcake":1===n[0]&&n[1]>=6?"Donut":2===n[0]&&n[1]<2?"Eclair":2===n[0]&&2===n[1]?"Froyo":2===n[0]&&n[1]>2?"Gingerbread":3===n[0]?"Honeycomb":4===n[0]&&n[1]<1?"Ice Cream Sandwich":4===n[0]&&n[1]<4?"Jelly Bean":4===n[0]&&n[1]>=4?"KitKat":5===n[0]?"Lollipop":6===n[0]?"Marshmallow":7===n[0]?"Nougat":8===n[0]?"Oreo":9===n[0]?"Pie":void 0},t.getVersionPrecision=function(t){return t.split(".").length},t.compareVersions=function(n,e,r){void 0===r&&(r=!1);var i=t.getVersionPrecision(n),o=t.getVersionPrecision(e),u=Math.max(i,o),a=0,c=t.map([n,e],(function(n){var e=u-t.getVersionPrecision(n),r=n+new Array(e+1).join(".0");return t.map(r.split("."),(function(t){return new Array(20-t.length).join("0")+t})).reverse()}));for(r&&(a=u-Math.min(i,o)),u-=1;u>=a;){if(c[0][u]>c[1][u])return 
1;if(c[0][u]===c[1][u]){if(u===a)return 0;u-=1}else if(c[0][u]1?i-1:0),u=1;u0?r:e)(t)}},function(t,n,e){var r=e(47),i=e(30),o=e(15),u=e(28),a=e(13),c=e(96),s=Object.getOwnPropertyDescriptor;n.f=e(8)?s:function(t,n){if(t=o(t),n=u(n,!0),c)try{return s(t,n)}catch(t){}if(a(t,n))return i(!r.f.call(t,n),t[n])}},function(t,n,e){var r=e(0),i=e(7),o=e(2);t.exports=function(t,n){var e=(i.Object||{})[t]||Object[t],u={};u[t]=n(e),r(r.S+r.F*o((function(){e(1)})),"Object",u)}},function(t,n,e){var r=e(19),i=e(46),o=e(10),u=e(6),a=e(112);t.exports=function(t,n){var e=1==t,c=2==t,s=3==t,f=4==t,l=6==t,h=5==t||l,d=n||a;return function(n,a,p){for(var v,g,y=o(n),m=i(y),b=r(a,p,3),S=u(m.length),w=0,_=e?d(n,S):c?d(n,0):void 0;S>w;w++)if((h||w in m)&&(g=b(v=m[w],w,y),t))if(e)_[w]=g;else if(g)switch(t){case 3:return!0;case 5:return v;case 6:return w;case 2:_.push(v)}else if(f)return!1;return l?-1:s||f?f:_}}},function(t,n){var e={}.toString;t.exports=function(t){return e.call(t).slice(8,-1)}},function(t,n){t.exports=function(t){if(null==t)throw TypeError("Can't call method on "+t);return t}},function(t,n,e){"use strict";if(e(8)){var r=e(32),i=e(1),o=e(2),u=e(0),a=e(61),c=e(86),s=e(19),f=e(44),l=e(30),h=e(14),d=e(45),p=e(21),v=e(6),g=e(123),y=e(34),m=e(28),b=e(13),S=e(48),w=e(4),_=e(10),M=e(78),x=e(35),P=e(37),O=e(36).f,F=e(80),A=e(31),E=e(5),N=e(24),R=e(51),k=e(49),T=e(82),I=e(42),j=e(54),L=e(43),B=e(81),C=e(114),W=e(9),V=e(22),G=W.f,D=V.f,U=i.RangeError,z=i.TypeError,q=i.Uint8Array,K=Array.prototype,Y=c.ArrayBuffer,Q=c.DataView,H=N(0),J=N(2),X=N(3),Z=N(4),$=N(5),tt=N(6),nt=R(!0),et=R(!1),rt=T.values,it=T.keys,ot=T.entries,ut=K.lastIndexOf,at=K.reduce,ct=K.reduceRight,st=K.join,ft=K.sort,lt=K.slice,ht=K.toString,dt=K.toLocaleString,pt=E("iterator"),vt=E("toStringTag"),gt=A("typed_constructor"),yt=A("def_constructor"),mt=a.CONSTR,bt=a.TYPED,St=a.VIEW,wt=N(1,(function(t,n){return Ot(k(t,t[yt]),n)})),_t=o((function(){return 1===new q(new Uint16Array([1]).buffer)[0]})),Mt=!!q&&!!q.prototype.set&&o((function(){new q(1).set({})})),xt=function(t,n){var e=p(t);if(e<0||e%n)throw U("Wrong offset!");return e},Pt=function(t){if(w(t)&&bt in t)return t;throw z(t+" is not a typed array!")},Ot=function(t,n){if(!(w(t)&> in t))throw z("It is not a typed array constructor!");return new t(n)},Ft=function(t,n){return At(k(t,t[yt]),n)},At=function(t,n){for(var e=0,r=n.length,i=Ot(t,r);r>e;)i[e]=n[e++];return i},Et=function(t,n,e){G(t,n,{get:function(){return this._d[e]}})},Nt=function(t){var n,e,r,i,o,u,a=_(t),c=arguments.length,f=c>1?arguments[1]:void 0,l=void 0!==f,h=F(a);if(null!=h&&!M(h)){for(u=h.call(a),r=[],n=0;!(o=u.next()).done;n++)r.push(o.value);a=r}for(l&&c>2&&(f=s(f,arguments[2],2)),n=0,e=v(a.length),i=Ot(this,e);e>n;n++)i[n]=l?f(a[n],n):a[n];return i},Rt=function(){for(var t=0,n=arguments.length,e=Ot(this,n);n>t;)e[t]=arguments[t++];return e},kt=!!q&&o((function(){dt.call(new q(1))})),Tt=function(){return dt.apply(kt?lt.call(Pt(this)):Pt(this),arguments)},It={copyWithin:function(t,n){return C.call(Pt(this),t,n,arguments.length>2?arguments[2]:void 0)},every:function(t){return Z(Pt(this),t,arguments.length>1?arguments[1]:void 0)},fill:function(t){return B.apply(Pt(this),arguments)},filter:function(t){return Ft(this,J(Pt(this),t,arguments.length>1?arguments[1]:void 0))},find:function(t){return $(Pt(this),t,arguments.length>1?arguments[1]:void 0)},findIndex:function(t){return tt(Pt(this),t,arguments.length>1?arguments[1]:void 0)},forEach:function(t){H(Pt(this),t,arguments.length>1?arguments[1]:void 
0)},indexOf:function(t){return et(Pt(this),t,arguments.length>1?arguments[1]:void 0)},includes:function(t){return nt(Pt(this),t,arguments.length>1?arguments[1]:void 0)},join:function(t){return st.apply(Pt(this),arguments)},lastIndexOf:function(t){return ut.apply(Pt(this),arguments)},map:function(t){return wt(Pt(this),t,arguments.length>1?arguments[1]:void 0)},reduce:function(t){return at.apply(Pt(this),arguments)},reduceRight:function(t){return ct.apply(Pt(this),arguments)},reverse:function(){for(var t,n=Pt(this).length,e=Math.floor(n/2),r=0;r1?arguments[1]:void 0)},sort:function(t){return ft.call(Pt(this),t)},subarray:function(t,n){var e=Pt(this),r=e.length,i=y(t,r);return new(k(e,e[yt]))(e.buffer,e.byteOffset+i*e.BYTES_PER_ELEMENT,v((void 0===n?r:y(n,r))-i))}},jt=function(t,n){return Ft(this,lt.call(Pt(this),t,n))},Lt=function(t){Pt(this);var n=xt(arguments[1],1),e=this.length,r=_(t),i=v(r.length),o=0;if(i+n>e)throw U("Wrong length!");for(;o255?255:255&r),i.v[d](e*n+i.o,r,_t)}(this,e,t)},enumerable:!0})};b?(p=e((function(t,e,r,i){f(t,p,s,"_d");var o,u,a,c,l=0,d=0;if(w(e)){if(!(e instanceof Y||"ArrayBuffer"==(c=S(e))||"SharedArrayBuffer"==c))return bt in e?At(p,e):Nt.call(p,e);o=e,d=xt(r,n);var y=e.byteLength;if(void 0===i){if(y%n)throw U("Wrong length!");if((u=y-d)<0)throw U("Wrong length!")}else if((u=v(i)*n)+d>y)throw U("Wrong length!");a=u/n}else a=g(e),o=new Y(u=a*n);for(h(t,"_d",{b:o,o:d,l:u,e:a,v:new Q(o)});ldocument.F=Object<\/script>"),t.close(),c=t.F;r--;)delete c.prototype[o[r]];return c()};t.exports=Object.create||function(t,n){var e;return null!==t?(a.prototype=r(t),e=new a,a.prototype=null,e[u]=t):e=c(),void 0===n?e:i(e,n)}},function(t,n,e){var r=e(98),i=e(65).concat("length","prototype");n.f=Object.getOwnPropertyNames||function(t){return r(t,i)}},function(t,n,e){var r=e(13),i=e(10),o=e(64)("IE_PROTO"),u=Object.prototype;t.exports=Object.getPrototypeOf||function(t){return t=i(t),r(t,o)?t[o]:"function"==typeof t.constructor&&t instanceof t.constructor?t.constructor.prototype:t instanceof Object?u:null}},function(t,n,e){var r=e(5)("unscopables"),i=Array.prototype;null==i[r]&&e(14)(i,r,{}),t.exports=function(t){i[r][t]=!0}},function(t,n,e){var r=e(4);t.exports=function(t,n){if(!r(t)||t._t!==n)throw TypeError("Incompatible receiver, "+n+" required!");return t}},function(t,n,e){var r=e(9).f,i=e(13),o=e(5)("toStringTag");t.exports=function(t,n,e){t&&!i(t=e?t:t.prototype,o)&&r(t,o,{configurable:!0,value:n})}},function(t,n,e){var r=e(0),i=e(26),o=e(2),u=e(68),a="["+u+"]",c=RegExp("^"+a+a+"*"),s=RegExp(a+a+"*$"),f=function(t,n,e){var i={},a=o((function(){return!!u[t]()||"​…"!="​…"[t]()})),c=i[t]=a?n(l):u[t];e&&(i[e]=c),r(r.P+r.F*a,"String",i)},l=f.trim=function(t,n){return t=String(i(t)),1&n&&(t=t.replace(c,"")),2&n&&(t=t.replace(s,"")),t};t.exports=f},function(t,n){t.exports={}},function(t,n,e){"use strict";var r=e(1),i=e(9),o=e(8),u=e(5)("species");t.exports=function(t){var n=r[t];o&&n&&!n[u]&&i.f(n,u,{configurable:!0,get:function(){return this}})}},function(t,n){t.exports=function(t,n,e,r){if(!(t instanceof n)||void 0!==r&&r in t)throw TypeError(e+": incorrect invocation!");return t}},function(t,n,e){var r=e(11);t.exports=function(t,n,e){for(var i in n)r(t,i,n[i],e);return t}},function(t,n,e){var r=e(25);t.exports=Object("z").propertyIsEnumerable(0)?Object:function(t){return"String"==r(t)?t.split(""):Object(t)}},function(t,n){n.f={}.propertyIsEnumerable},function(t,n,e){var r=e(25),i=e(5)("toStringTag"),o="Arguments"==r(function(){return arguments}());t.exports=function(t){var 
n,e,u;return void 0===t?"Undefined":null===t?"Null":"string"==typeof(e=function(t,n){try{return t[n]}catch(t){}}(n=Object(t),i))?e:o?r(n):"Object"==(u=r(n))&&"function"==typeof n.callee?"Arguments":u}},function(t,n,e){var r=e(3),i=e(20),o=e(5)("species");t.exports=function(t,n){var e,u=r(t).constructor;return void 0===u||null==(e=r(u)[o])?n:i(e)}},function(t,n,e){var r=e(7),i=e(1),o=i["__core-js_shared__"]||(i["__core-js_shared__"]={});(t.exports=function(t,n){return o[t]||(o[t]=void 0!==n?n:{})})("versions",[]).push({version:r.version,mode:e(32)?"pure":"global",copyright:"© 2019 Denis Pushkarev (zloirock.ru)"})},function(t,n,e){var r=e(15),i=e(6),o=e(34);t.exports=function(t){return function(n,e,u){var a,c=r(n),s=i(c.length),f=o(u,s);if(t&&e!=e){for(;s>f;)if((a=c[f++])!=a)return!0}else for(;s>f;f++)if((t||f in c)&&c[f]===e)return t||f||0;return!t&&-1}}},function(t,n){n.f=Object.getOwnPropertySymbols},function(t,n,e){var r=e(25);t.exports=Array.isArray||function(t){return"Array"==r(t)}},function(t,n,e){var r=e(5)("iterator"),i=!1;try{var o=[7][r]();o.return=function(){i=!0},Array.from(o,(function(){throw 2}))}catch(t){}t.exports=function(t,n){if(!n&&!i)return!1;var e=!1;try{var o=[7],u=o[r]();u.next=function(){return{done:e=!0}},o[r]=function(){return u},t(o)}catch(t){}return e}},function(t,n,e){"use strict";var r=e(3);t.exports=function(){var t=r(this),n="";return t.global&&(n+="g"),t.ignoreCase&&(n+="i"),t.multiline&&(n+="m"),t.unicode&&(n+="u"),t.sticky&&(n+="y"),n}},function(t,n,e){"use strict";var r=e(48),i=RegExp.prototype.exec;t.exports=function(t,n){var e=t.exec;if("function"==typeof e){var o=e.call(t,n);if("object"!=typeof o)throw new TypeError("RegExp exec method returned something other than an Object or null");return o}if("RegExp"!==r(t))throw new TypeError("RegExp#exec called on incompatible receiver");return i.call(t,n)}},function(t,n,e){"use strict";e(116);var r=e(11),i=e(14),o=e(2),u=e(26),a=e(5),c=e(83),s=a("species"),f=!o((function(){var t=/./;return t.exec=function(){var t=[];return t.groups={a:"7"},t},"7"!=="".replace(t,"$")})),l=function(){var t=/(?:)/,n=t.exec;t.exec=function(){return n.apply(this,arguments)};var e="ab".split(t);return 2===e.length&&"a"===e[0]&&"b"===e[1]}();t.exports=function(t,n,e){var h=a(t),d=!o((function(){var n={};return n[h]=function(){return 7},7!=""[t](n)})),p=d?!o((function(){var n=!1,e=/a/;return e.exec=function(){return n=!0,null},"split"===t&&(e.constructor={},e.constructor[s]=function(){return e}),e[h](""),!n})):void 0;if(!d||!p||"replace"===t&&!f||"split"===t&&!l){var v=/./[h],g=e(u,h,""[t],(function(t,n,e,r,i){return n.exec===c?d&&!i?{done:!0,value:v.call(n,e,r)}:{done:!0,value:t.call(e,n,r)}:{done:!1}})),y=g[0],m=g[1];r(String.prototype,t,y),i(RegExp.prototype,h,2==n?function(t,n){return m.call(t,this,n)}:function(t){return m.call(t,this)})}}},function(t,n,e){var r=e(19),i=e(111),o=e(78),u=e(3),a=e(6),c=e(80),s={},f={};(n=t.exports=function(t,n,e,l,h){var d,p,v,g,y=h?function(){return t}:c(t),m=r(e,l,n?2:1),b=0;if("function"!=typeof y)throw TypeError(t+" is not iterable!");if(o(y)){for(d=a(t.length);d>b;b++)if((g=n?m(u(p=t[b])[0],p[1]):m(t[b]))===s||g===f)return g}else for(v=y.call(t);!(p=v.next()).done;)if((g=i(v,m,p.value,n))===s||g===f)return g}).BREAK=s,n.RETURN=f},function(t,n,e){var r=e(1).navigator;t.exports=r&&r.userAgent||""},function(t,n,e){"use strict";var r=e(1),i=e(0),o=e(11),u=e(45),a=e(29),c=e(58),s=e(44),f=e(4),l=e(2),h=e(54),d=e(40),p=e(69);t.exports=function(t,n,e,v,g,y){var 
m=r[t],b=m,S=g?"set":"add",w=b&&b.prototype,_={},M=function(t){var n=w[t];o(w,t,"delete"==t?function(t){return!(y&&!f(t))&&n.call(this,0===t?0:t)}:"has"==t?function(t){return!(y&&!f(t))&&n.call(this,0===t?0:t)}:"get"==t?function(t){return y&&!f(t)?void 0:n.call(this,0===t?0:t)}:"add"==t?function(t){return n.call(this,0===t?0:t),this}:function(t,e){return n.call(this,0===t?0:t,e),this})};if("function"==typeof b&&(y||w.forEach&&!l((function(){(new b).entries().next()})))){var x=new b,P=x[S](y?{}:-0,1)!=x,O=l((function(){x.has(1)})),F=h((function(t){new b(t)})),A=!y&&l((function(){for(var t=new b,n=5;n--;)t[S](n,n);return!t.has(-0)}));F||((b=n((function(n,e){s(n,b,t);var r=p(new m,n,b);return null!=e&&c(e,g,r[S],r),r}))).prototype=w,w.constructor=b),(O||A)&&(M("delete"),M("has"),g&&M("get")),(A||P)&&M(S),y&&w.clear&&delete w.clear}else b=v.getConstructor(n,t,g,S),u(b.prototype,e),a.NEED=!0;return d(b,t),_[t]=b,i(i.G+i.W+i.F*(b!=m),_),y||v.setStrong(b,t,g),b}},function(t,n,e){for(var r,i=e(1),o=e(14),u=e(31),a=u("typed_array"),c=u("view"),s=!(!i.ArrayBuffer||!i.DataView),f=s,l=0,h="Int8Array,Uint8Array,Uint8ClampedArray,Int16Array,Uint16Array,Int32Array,Uint32Array,Float32Array,Float64Array".split(",");l<9;)(r=i[h[l++]])?(o(r.prototype,a,!0),o(r.prototype,c,!0)):f=!1;t.exports={ABV:s,CONSTR:f,TYPED:a,VIEW:c}},function(t,n,e){var r=e(4),i=e(1).document,o=r(i)&&r(i.createElement);t.exports=function(t){return o?i.createElement(t):{}}},function(t,n,e){n.f=e(5)},function(t,n,e){var r=e(50)("keys"),i=e(31);t.exports=function(t){return r[t]||(r[t]=i(t))}},function(t,n){t.exports="constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf".split(",")},function(t,n,e){var r=e(1).document;t.exports=r&&r.documentElement},function(t,n,e){var r=e(4),i=e(3),o=function(t,n){if(i(t),!r(n)&&null!==n)throw TypeError(n+": can't set as prototype!")};t.exports={set:Object.setPrototypeOf||("__proto__"in{}?function(t,n,r){try{(r=e(19)(Function.call,e(22).f(Object.prototype,"__proto__").set,2))(t,[]),n=!(t instanceof Array)}catch(t){n=!0}return function(t,e){return o(t,e),n?t.__proto__=e:r(t,e),t}}({},!1):void 0),check:o}},function(t,n){t.exports="\t\n\v\f\r   ᠎              \u2028\u2029\ufeff"},function(t,n,e){var r=e(4),i=e(67).set;t.exports=function(t,n,e){var o,u=n.constructor;return u!==e&&"function"==typeof u&&(o=u.prototype)!==e.prototype&&r(o)&&i&&i(t,o),t}},function(t,n,e){"use strict";var r=e(21),i=e(26);t.exports=function(t){var n=String(i(this)),e="",o=r(t);if(o<0||o==1/0)throw RangeError("Count can't be negative");for(;o>0;(o>>>=1)&&(n+=n))1&o&&(e+=n);return e}},function(t,n){t.exports=Math.sign||function(t){return 0==(t=+t)||t!=t?t:t<0?-1:1}},function(t,n){var e=Math.expm1;t.exports=!e||e(10)>22025.465794806718||e(10)<22025.465794806718||-2e-17!=e(-2e-17)?function(t){return 0==(t=+t)?t:t>-1e-6&&t<1e-6?t+t*t/2:Math.exp(t)-1}:e},function(t,n,e){var r=e(21),i=e(26);t.exports=function(t){return function(n,e){var o,u,a=String(i(n)),c=r(e),s=a.length;return c<0||c>=s?t?"":void 0:(o=a.charCodeAt(c))<55296||o>56319||c+1===s||(u=a.charCodeAt(c+1))<56320||u>57343?t?a.charAt(c):o:t?a.slice(c,c+2):u-56320+(o-55296<<10)+65536}}},function(t,n,e){"use strict";var r=e(32),i=e(0),o=e(11),u=e(14),a=e(42),c=e(110),s=e(40),f=e(37),l=e(5)("iterator"),h=!([].keys&&"next"in[].keys()),d=function(){return this};t.exports=function(t,n,e,p,v,g,y){c(e,n,p);var m,b,S,w=function(t){if(!h&&t in P)return P[t];switch(t){case"keys":case"values":return function(){return new e(this,t)}}return 
function(){return new e(this,t)}},_=n+" Iterator",M="values"==v,x=!1,P=t.prototype,O=P[l]||P["@@iterator"]||v&&P[v],F=O||w(v),A=v?M?w("entries"):F:void 0,E="Array"==n&&P.entries||O;if(E&&(S=f(E.call(new t)))!==Object.prototype&&S.next&&(s(S,_,!0),r||"function"==typeof S[l]||u(S,l,d)),M&&O&&"values"!==O.name&&(x=!0,F=function(){return O.call(this)}),r&&!y||!h&&!x&&P[l]||u(P,l,F),a[n]=F,a[_]=d,v)if(m={values:M?F:w("values"),keys:g?F:w("keys"),entries:A},y)for(b in m)b in P||o(P,b,m[b]);else i(i.P+i.F*(h||x),n,m);return m}},function(t,n,e){var r=e(76),i=e(26);t.exports=function(t,n,e){if(r(n))throw TypeError("String#"+e+" doesn't accept regex!");return String(i(t))}},function(t,n,e){var r=e(4),i=e(25),o=e(5)("match");t.exports=function(t){var n;return r(t)&&(void 0!==(n=t[o])?!!n:"RegExp"==i(t))}},function(t,n,e){var r=e(5)("match");t.exports=function(t){var n=/./;try{"/./"[t](n)}catch(e){try{return n[r]=!1,!"/./"[t](n)}catch(t){}}return!0}},function(t,n,e){var r=e(42),i=e(5)("iterator"),o=Array.prototype;t.exports=function(t){return void 0!==t&&(r.Array===t||o[i]===t)}},function(t,n,e){"use strict";var r=e(9),i=e(30);t.exports=function(t,n,e){n in t?r.f(t,n,i(0,e)):t[n]=e}},function(t,n,e){var r=e(48),i=e(5)("iterator"),o=e(42);t.exports=e(7).getIteratorMethod=function(t){if(null!=t)return t[i]||t["@@iterator"]||o[r(t)]}},function(t,n,e){"use strict";var r=e(10),i=e(34),o=e(6);t.exports=function(t){for(var n=r(this),e=o(n.length),u=arguments.length,a=i(u>1?arguments[1]:void 0,e),c=u>2?arguments[2]:void 0,s=void 0===c?e:i(c,e);s>a;)n[a++]=t;return n}},function(t,n,e){"use strict";var r=e(38),i=e(115),o=e(42),u=e(15);t.exports=e(74)(Array,"Array",(function(t,n){this._t=u(t),this._i=0,this._k=n}),(function(){var t=this._t,n=this._k,e=this._i++;return!t||e>=t.length?(this._t=void 0,i(1)):i(0,"keys"==n?e:"values"==n?t[e]:[e,t[e]])}),"values"),o.Arguments=o.Array,r("keys"),r("values"),r("entries")},function(t,n,e){"use strict";var r,i,o=e(55),u=RegExp.prototype.exec,a=String.prototype.replace,c=u,s=(r=/a/,i=/b*/g,u.call(r,"a"),u.call(i,"a"),0!==r.lastIndex||0!==i.lastIndex),f=void 0!==/()??/.exec("")[1];(s||f)&&(c=function(t){var n,e,r,i,c=this;return f&&(e=new RegExp("^"+c.source+"$(?!\\s)",o.call(c))),s&&(n=c.lastIndex),r=u.call(c,t),s&&r&&(c.lastIndex=c.global?r.index+r[0].length:n),f&&r&&r.length>1&&a.call(r[0],e,(function(){for(i=1;ie;)n.push(arguments[e++]);return y[++g]=function(){a("function"==typeof t?t:Function(t),n)},r(g),g},d=function(t){delete y[t]},"process"==e(25)(l)?r=function(t){l.nextTick(u(m,t,1))}:v&&v.now?r=function(t){v.now(u(m,t,1))}:p?(o=(i=new p).port2,i.port1.onmessage=b,r=u(o.postMessage,o,1)):f.addEventListener&&"function"==typeof postMessage&&!f.importScripts?(r=function(t){f.postMessage(t+"","*")},f.addEventListener("message",b,!1)):r="onreadystatechange"in s("script")?function(t){c.appendChild(s("script")).onreadystatechange=function(){c.removeChild(this),m.call(t)}}:function(t){setTimeout(u(m,t,1),0)}),t.exports={set:h,clear:d}},function(t,n,e){"use strict";var r=e(1),i=e(8),o=e(32),u=e(61),a=e(14),c=e(45),s=e(2),f=e(44),l=e(21),h=e(6),d=e(123),p=e(36).f,v=e(9).f,g=e(81),y=e(40),m="prototype",b="Wrong index!",S=r.ArrayBuffer,w=r.DataView,_=r.Math,M=r.RangeError,x=r.Infinity,P=S,O=_.abs,F=_.pow,A=_.floor,E=_.log,N=_.LN2,R=i?"_b":"buffer",k=i?"_l":"byteLength",T=i?"_o":"byteOffset";function I(t,n,e){var r,i,o,u=new 
Array(e),a=8*e-n-1,c=(1<>1,f=23===n?F(2,-24)-F(2,-77):0,l=0,h=t<0||0===t&&1/t<0?1:0;for((t=O(t))!=t||t===x?(i=t!=t?1:0,r=c):(r=A(E(t)/N),t*(o=F(2,-r))<1&&(r--,o*=2),(t+=r+s>=1?f/o:f*F(2,1-s))*o>=2&&(r++,o/=2),r+s>=c?(i=0,r=c):r+s>=1?(i=(t*o-1)*F(2,n),r+=s):(i=t*F(2,s-1)*F(2,n),r=0));n>=8;u[l++]=255&i,i/=256,n-=8);for(r=r<0;u[l++]=255&r,r/=256,a-=8);return u[--l]|=128*h,u}function j(t,n,e){var r,i=8*e-n-1,o=(1<>1,a=i-7,c=e-1,s=t[c--],f=127&s;for(s>>=7;a>0;f=256*f+t[c],c--,a-=8);for(r=f&(1<<-a)-1,f>>=-a,a+=n;a>0;r=256*r+t[c],c--,a-=8);if(0===f)f=1-u;else{if(f===o)return r?NaN:s?-x:x;r+=F(2,n),f-=u}return(s?-1:1)*r*F(2,f-n)}function L(t){return t[3]<<24|t[2]<<16|t[1]<<8|t[0]}function B(t){return[255&t]}function C(t){return[255&t,t>>8&255]}function W(t){return[255&t,t>>8&255,t>>16&255,t>>24&255]}function V(t){return I(t,52,8)}function G(t){return I(t,23,4)}function D(t,n,e){v(t[m],n,{get:function(){return this[e]}})}function U(t,n,e,r){var i=d(+e);if(i+n>t[k])throw M(b);var o=t[R]._b,u=i+t[T],a=o.slice(u,u+n);return r?a:a.reverse()}function z(t,n,e,r,i,o){var u=d(+e);if(u+n>t[k])throw M(b);for(var a=t[R]._b,c=u+t[T],s=r(+i),f=0;fQ;)(q=Y[Q++])in S||a(S,q,P[q]);o||(K.constructor=S)}var H=new w(new S(2)),J=w[m].setInt8;H.setInt8(0,2147483648),H.setInt8(1,2147483649),!H.getInt8(0)&&H.getInt8(1)||c(w[m],{setInt8:function(t,n){J.call(this,t,n<<24>>24)},setUint8:function(t,n){J.call(this,t,n<<24>>24)}},!0)}else S=function(t){f(this,S,"ArrayBuffer");var n=d(t);this._b=g.call(new Array(n),0),this[k]=n},w=function(t,n,e){f(this,w,"DataView"),f(t,S,"DataView");var r=t[k],i=l(n);if(i<0||i>r)throw M("Wrong offset!");if(i+(e=void 0===e?r-i:h(e))>r)throw M("Wrong length!");this[R]=t,this[T]=i,this[k]=e},i&&(D(S,"byteLength","_l"),D(w,"buffer","_b"),D(w,"byteLength","_l"),D(w,"byteOffset","_o")),c(w[m],{getInt8:function(t){return U(this,1,t)[0]<<24>>24},getUint8:function(t){return U(this,1,t)[0]},getInt16:function(t){var n=U(this,2,t,arguments[1]);return(n[1]<<8|n[0])<<16>>16},getUint16:function(t){var n=U(this,2,t,arguments[1]);return n[1]<<8|n[0]},getInt32:function(t){return L(U(this,4,t,arguments[1]))},getUint32:function(t){return L(U(this,4,t,arguments[1]))>>>0},getFloat32:function(t){return j(U(this,4,t,arguments[1]),23,4)},getFloat64:function(t){return j(U(this,8,t,arguments[1]),52,8)},setInt8:function(t,n){z(this,1,t,B,n)},setUint8:function(t,n){z(this,1,t,B,n)},setInt16:function(t,n){z(this,2,t,C,n,arguments[2])},setUint16:function(t,n){z(this,2,t,C,n,arguments[2])},setInt32:function(t,n){z(this,4,t,W,n,arguments[2])},setUint32:function(t,n){z(this,4,t,W,n,arguments[2])},setFloat32:function(t,n){z(this,4,t,G,n,arguments[2])},setFloat64:function(t,n){z(this,8,t,V,n,arguments[2])}});y(S,"ArrayBuffer"),y(w,"DataView"),a(w[m],u.VIEW,!0),n.ArrayBuffer=S,n.DataView=w},function(t,n){var e=t.exports="undefined"!=typeof window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=e)},function(t,n){t.exports=function(t){return"object"==typeof t?null!==t:"function"==typeof t}},function(t,n,e){t.exports=!e(128)((function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a}))},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r,i=(r=e(91))&&r.__esModule?r:{default:r},o=e(18);function u(t,n){for(var e=0;e0){var u=Object.keys(e),c=a.default.find(u,(function(t){return n.isOS(t)}));if(c){var s=this.satisfies(e[c]);if(void 0!==s)return s}var f=a.default.find(u,(function(t){return n.isPlatform(t)}));if(f){var 
l=this.satisfies(e[f]);if(void 0!==l)return l}}if(o>0){var h=Object.keys(i),d=a.default.find(h,(function(t){return n.isBrowser(t,!0)}));if(void 0!==d)return this.compareVersion(i[d])}},n.isBrowser=function(t,n){void 0===n&&(n=!1);var e=this.getBrowserName().toLowerCase(),r=t.toLowerCase(),i=a.default.getBrowserTypeByAlias(r);return n&&i&&(r=i.toLowerCase()),r===e},n.compareVersion=function(t){var n=[0],e=t,r=!1,i=this.getBrowserVersion();if("string"==typeof i)return">"===t[0]||"<"===t[0]?(e=t.substr(1),"="===t[1]?(r=!0,e=t.substr(2)):n=[],">"===t[0]?n.push(1):n.push(-1)):"="===t[0]?e=t.substr(1):"~"===t[0]&&(r=!0,e=t.substr(1)),n.indexOf(a.default.compareVersions(i,e,r))>-1},n.isOS=function(t){return this.getOSName(!0)===String(t).toLowerCase()},n.isPlatform=function(t){return this.getPlatformType(!0)===String(t).toLowerCase()},n.isEngine=function(t){return this.getEngineName(!0)===String(t).toLowerCase()},n.is=function(t,n){return void 0===n&&(n=!1),this.isBrowser(t,n)||this.isOS(t)||this.isPlatform(t)},n.some=function(t){var n=this;return void 0===t&&(t=[]),t.some((function(t){return n.is(t)}))},t}();n.default=s,t.exports=n.default},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r,i=(r=e(17))&&r.__esModule?r:{default:r};var o=/version\/(\d+(\.?_?\d+)+)/i,u=[{test:[/googlebot/i],describe:function(t){var n={name:"Googlebot"},e=i.default.getFirstMatch(/googlebot\/(\d+(\.\d+))/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/opera/i],describe:function(t){var n={name:"Opera"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:opera)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/opr\/|opios/i],describe:function(t){var n={name:"Opera"},e=i.default.getFirstMatch(/(?:opr|opios)[\s/](\S+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/SamsungBrowser/i],describe:function(t){var n={name:"Samsung Internet for Android"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:SamsungBrowser)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/Whale/i],describe:function(t){var n={name:"NAVER Whale Browser"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:whale)[\s/](\d+(?:\.\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/MZBrowser/i],describe:function(t){var n={name:"MZ Browser"},e=i.default.getFirstMatch(/(?:MZBrowser)[\s/](\d+(?:\.\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/focus/i],describe:function(t){var n={name:"Focus"},e=i.default.getFirstMatch(/(?:focus)[\s/](\d+(?:\.\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/swing/i],describe:function(t){var n={name:"Swing"},e=i.default.getFirstMatch(/(?:swing)[\s/](\d+(?:\.\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/coast/i],describe:function(t){var n={name:"Opera Coast"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:coast)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/opt\/\d+(?:.?_?\d+)+/i],describe:function(t){var n={name:"Opera Touch"},e=i.default.getFirstMatch(/(?:opt)[\s/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/yabrowser/i],describe:function(t){var n={name:"Yandex Browser"},e=i.default.getFirstMatch(/(?:yabrowser)[\s/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/ucbrowser/i],describe:function(t){var n={name:"UC 
Browser"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:ucbrowser)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/Maxthon|mxios/i],describe:function(t){var n={name:"Maxthon"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:Maxthon|mxios)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/epiphany/i],describe:function(t){var n={name:"Epiphany"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:epiphany)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/puffin/i],describe:function(t){var n={name:"Puffin"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:puffin)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/sleipnir/i],describe:function(t){var n={name:"Sleipnir"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:sleipnir)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/k-meleon/i],describe:function(t){var n={name:"K-Meleon"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:k-meleon)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/micromessenger/i],describe:function(t){var n={name:"WeChat"},e=i.default.getFirstMatch(/(?:micromessenger)[\s/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/qqbrowser/i],describe:function(t){var n={name:/qqbrowserlite/i.test(t)?"QQ Browser Lite":"QQ Browser"},e=i.default.getFirstMatch(/(?:qqbrowserlite|qqbrowser)[/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/msie|trident/i],describe:function(t){var n={name:"Internet Explorer"},e=i.default.getFirstMatch(/(?:msie |rv:)(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/\sedg\//i],describe:function(t){var n={name:"Microsoft Edge"},e=i.default.getFirstMatch(/\sedg\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/edg([ea]|ios)/i],describe:function(t){var n={name:"Microsoft Edge"},e=i.default.getSecondMatch(/edg([ea]|ios)\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/vivaldi/i],describe:function(t){var n={name:"Vivaldi"},e=i.default.getFirstMatch(/vivaldi\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/seamonkey/i],describe:function(t){var n={name:"SeaMonkey"},e=i.default.getFirstMatch(/seamonkey\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/sailfish/i],describe:function(t){var n={name:"Sailfish"},e=i.default.getFirstMatch(/sailfish\s?browser\/(\d+(\.\d+)?)/i,t);return e&&(n.version=e),n}},{test:[/silk/i],describe:function(t){var n={name:"Amazon Silk"},e=i.default.getFirstMatch(/silk\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/phantom/i],describe:function(t){var n={name:"PhantomJS"},e=i.default.getFirstMatch(/phantomjs\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/slimerjs/i],describe:function(t){var n={name:"SlimerJS"},e=i.default.getFirstMatch(/slimerjs\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/blackberry|\bbb\d+/i,/rim\stablet/i],describe:function(t){var n={name:"BlackBerry"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/blackberry[\d]+\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/(web|hpw)[o0]s/i],describe:function(t){var n={name:"WebOS Browser"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/w(?:eb)?[o0]sbrowser\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/bada/i],describe:function(t){var n={name:"Bada"},e=i.default.getFirstMatch(/dolfin\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/tizen/i],describe:function(t){var 
n={name:"Tizen"},e=i.default.getFirstMatch(/(?:tizen\s?)?browser\/(\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/qupzilla/i],describe:function(t){var n={name:"QupZilla"},e=i.default.getFirstMatch(/(?:qupzilla)[\s/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/firefox|iceweasel|fxios/i],describe:function(t){var n={name:"Firefox"},e=i.default.getFirstMatch(/(?:firefox|iceweasel|fxios)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/electron/i],describe:function(t){var n={name:"Electron"},e=i.default.getFirstMatch(/(?:electron)\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/MiuiBrowser/i],describe:function(t){var n={name:"Miui"},e=i.default.getFirstMatch(/(?:MiuiBrowser)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/chromium/i],describe:function(t){var n={name:"Chromium"},e=i.default.getFirstMatch(/(?:chromium)[\s/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/chrome|crios|crmo/i],describe:function(t){var n={name:"Chrome"},e=i.default.getFirstMatch(/(?:chrome|crios|crmo)\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/GSA/i],describe:function(t){var n={name:"Google Search"},e=i.default.getFirstMatch(/(?:GSA)\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:function(t){var n=!t.test(/like android/i),e=t.test(/android/i);return n&&e},describe:function(t){var n={name:"Android Browser"},e=i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/playstation 4/i],describe:function(t){var n={name:"PlayStation 4"},e=i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/safari|applewebkit/i],describe:function(t){var n={name:"Safari"},e=i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/.*/i],describe:function(t){var n=-1!==t.search("\\(")?/^(.*)\/(.*)[ \t]\((.*)/:/^(.*)\/(.*) /;return{name:i.default.getFirstMatch(n,t),version:i.default.getSecondMatch(n,t)}}}];n.default=u,t.exports=n.default},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r,i=(r=e(17))&&r.__esModule?r:{default:r},o=e(18);var u=[{test:[/Roku\/DVP/],describe:function(t){var n=i.default.getFirstMatch(/Roku\/DVP-(\d+\.\d+)/i,t);return{name:o.OS_MAP.Roku,version:n}}},{test:[/windows phone/i],describe:function(t){var n=i.default.getFirstMatch(/windows phone (?:os)?\s?(\d+(\.\d+)*)/i,t);return{name:o.OS_MAP.WindowsPhone,version:n}}},{test:[/windows /i],describe:function(t){var n=i.default.getFirstMatch(/Windows ((NT|XP)( \d\d?.\d)?)/i,t),e=i.default.getWindowsVersionName(n);return{name:o.OS_MAP.Windows,version:n,versionName:e}}},{test:[/Macintosh(.*?) 
FxiOS(.*?)\//],describe:function(t){var n={name:o.OS_MAP.iOS},e=i.default.getSecondMatch(/(Version\/)(\d[\d.]+)/,t);return e&&(n.version=e),n}},{test:[/macintosh/i],describe:function(t){var n=i.default.getFirstMatch(/mac os x (\d+(\.?_?\d+)+)/i,t).replace(/[_\s]/g,"."),e=i.default.getMacOSVersionName(n),r={name:o.OS_MAP.MacOS,version:n};return e&&(r.versionName=e),r}},{test:[/(ipod|iphone|ipad)/i],describe:function(t){var n=i.default.getFirstMatch(/os (\d+([_\s]\d+)*) like mac os x/i,t).replace(/[_\s]/g,".");return{name:o.OS_MAP.iOS,version:n}}},{test:function(t){var n=!t.test(/like android/i),e=t.test(/android/i);return n&&e},describe:function(t){var n=i.default.getFirstMatch(/android[\s/-](\d+(\.\d+)*)/i,t),e=i.default.getAndroidVersionName(n),r={name:o.OS_MAP.Android,version:n};return e&&(r.versionName=e),r}},{test:[/(web|hpw)[o0]s/i],describe:function(t){var n=i.default.getFirstMatch(/(?:web|hpw)[o0]s\/(\d+(\.\d+)*)/i,t),e={name:o.OS_MAP.WebOS};return n&&n.length&&(e.version=n),e}},{test:[/blackberry|\bbb\d+/i,/rim\stablet/i],describe:function(t){var n=i.default.getFirstMatch(/rim\stablet\sos\s(\d+(\.\d+)*)/i,t)||i.default.getFirstMatch(/blackberry\d+\/(\d+([_\s]\d+)*)/i,t)||i.default.getFirstMatch(/\bbb(\d+)/i,t);return{name:o.OS_MAP.BlackBerry,version:n}}},{test:[/bada/i],describe:function(t){var n=i.default.getFirstMatch(/bada\/(\d+(\.\d+)*)/i,t);return{name:o.OS_MAP.Bada,version:n}}},{test:[/tizen/i],describe:function(t){var n=i.default.getFirstMatch(/tizen[/\s](\d+(\.\d+)*)/i,t);return{name:o.OS_MAP.Tizen,version:n}}},{test:[/linux/i],describe:function(){return{name:o.OS_MAP.Linux}}},{test:[/CrOS/],describe:function(){return{name:o.OS_MAP.ChromeOS}}},{test:[/PlayStation 4/],describe:function(t){var n=i.default.getFirstMatch(/PlayStation 4[/\s](\d+(\.\d+)*)/i,t);return{name:o.OS_MAP.PlayStation4,version:n}}}];n.default=u,t.exports=n.default},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r,i=(r=e(17))&&r.__esModule?r:{default:r},o=e(18);var u=[{test:[/googlebot/i],describe:function(){return{type:"bot",vendor:"Google"}}},{test:[/huawei/i],describe:function(t){var n=i.default.getFirstMatch(/(can-l01)/i,t)&&"Nova",e={type:o.PLATFORMS_MAP.mobile,vendor:"Huawei"};return n&&(e.model=n),e}},{test:[/nexus\s*(?:7|8|9|10).*/i],describe:function(){return{type:o.PLATFORMS_MAP.tablet,vendor:"Nexus"}}},{test:[/ipad/i],describe:function(){return{type:o.PLATFORMS_MAP.tablet,vendor:"Apple",model:"iPad"}}},{test:[/Macintosh(.*?) FxiOS(.*?)\//],describe:function(){return{type:o.PLATFORMS_MAP.tablet,vendor:"Apple",model:"iPad"}}},{test:[/kftt build/i],describe:function(){return{type:o.PLATFORMS_MAP.tablet,vendor:"Amazon",model:"Kindle Fire HD 7"}}},{test:[/silk/i],describe:function(){return{type:o.PLATFORMS_MAP.tablet,vendor:"Amazon"}}},{test:[/tablet(?! 
pc)/i],describe:function(){return{type:o.PLATFORMS_MAP.tablet}}},{test:function(t){var n=t.test(/ipod|iphone/i),e=t.test(/like (ipod|iphone)/i);return n&&!e},describe:function(t){var n=i.default.getFirstMatch(/(ipod|iphone)/i,t);return{type:o.PLATFORMS_MAP.mobile,vendor:"Apple",model:n}}},{test:[/nexus\s*[0-6].*/i,/galaxy nexus/i],describe:function(){return{type:o.PLATFORMS_MAP.mobile,vendor:"Nexus"}}},{test:[/[^-]mobi/i],describe:function(){return{type:o.PLATFORMS_MAP.mobile}}},{test:function(t){return"blackberry"===t.getBrowserName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.mobile,vendor:"BlackBerry"}}},{test:function(t){return"bada"===t.getBrowserName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.mobile}}},{test:function(t){return"windows phone"===t.getBrowserName()},describe:function(){return{type:o.PLATFORMS_MAP.mobile,vendor:"Microsoft"}}},{test:function(t){var n=Number(String(t.getOSVersion()).split(".")[0]);return"android"===t.getOSName(!0)&&n>=3},describe:function(){return{type:o.PLATFORMS_MAP.tablet}}},{test:function(t){return"android"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.mobile}}},{test:function(t){return"macos"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.desktop,vendor:"Apple"}}},{test:function(t){return"windows"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.desktop}}},{test:function(t){return"linux"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.desktop}}},{test:function(t){return"playstation 4"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.tv}}},{test:function(t){return"roku"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.tv}}}];n.default=u,t.exports=n.default},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r,i=(r=e(17))&&r.__esModule?r:{default:r},o=e(18);var u=[{test:function(t){return"microsoft edge"===t.getBrowserName(!0)},describe:function(t){if(/\sedg\//i.test(t))return{name:o.ENGINE_MAP.Blink};var n=i.default.getFirstMatch(/edge\/(\d+(\.?_?\d+)+)/i,t);return{name:o.ENGINE_MAP.EdgeHTML,version:n}}},{test:[/trident/i],describe:function(t){var n={name:o.ENGINE_MAP.Trident},e=i.default.getFirstMatch(/trident\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:function(t){return t.test(/presto/i)},describe:function(t){var n={name:o.ENGINE_MAP.Presto},e=i.default.getFirstMatch(/presto\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:function(t){var n=t.test(/gecko/i),e=t.test(/like gecko/i);return n&&!e},describe:function(t){var n={name:o.ENGINE_MAP.Gecko},e=i.default.getFirstMatch(/gecko\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/(apple)?webkit\/537\.36/i],describe:function(){return{name:o.ENGINE_MAP.Blink}}},{test:[/(apple)?webkit/i],describe:function(t){var n={name:o.ENGINE_MAP.WebKit},e=i.default.getFirstMatch(/webkit\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}}];n.default=u,t.exports=n.default},function(t,n,e){t.exports=!e(8)&&!e(2)((function(){return 7!=Object.defineProperty(e(62)("div"),"a",{get:function(){return 7}}).a}))},function(t,n,e){var r=e(1),i=e(7),o=e(32),u=e(63),a=e(9).f;t.exports=function(t){var n=i.Symbol||(i.Symbol=o?{}:r.Symbol||{});"_"==t.charAt(0)||t in n||a(n,t,{value:u.f(t)})}},function(t,n,e){var r=e(13),i=e(15),o=e(51)(!1),u=e(64)("IE_PROTO");t.exports=function(t,n){var e,a=i(t),c=0,s=[];for(e in a)e!=u&&r(a,e)&&s.push(e);for(;n.length>c;)r(a,e=n[c++])&&(~o(s,e)||s.push(e));return s}},function(t,n,e){var 
r=e(9),i=e(3),o=e(33);t.exports=e(8)?Object.defineProperties:function(t,n){i(t);for(var e,u=o(n),a=u.length,c=0;a>c;)r.f(t,e=u[c++],n[e]);return t}},function(t,n,e){var r=e(15),i=e(36).f,o={}.toString,u="object"==typeof window&&window&&Object.getOwnPropertyNames?Object.getOwnPropertyNames(window):[];t.exports.f=function(t){return u&&"[object Window]"==o.call(t)?function(t){try{return i(t)}catch(t){return u.slice()}}(t):i(r(t))}},function(t,n,e){"use strict";var r=e(8),i=e(33),o=e(52),u=e(47),a=e(10),c=e(46),s=Object.assign;t.exports=!s||e(2)((function(){var t={},n={},e=Symbol(),r="abcdefghijklmnopqrst";return t[e]=7,r.split("").forEach((function(t){n[t]=t})),7!=s({},t)[e]||Object.keys(s({},n)).join("")!=r}))?function(t,n){for(var e=a(t),s=arguments.length,f=1,l=o.f,h=u.f;s>f;)for(var d,p=c(arguments[f++]),v=l?i(p).concat(l(p)):i(p),g=v.length,y=0;g>y;)d=v[y++],r&&!h.call(p,d)||(e[d]=p[d]);return e}:s},function(t,n){t.exports=Object.is||function(t,n){return t===n?0!==t||1/t==1/n:t!=t&&n!=n}},function(t,n,e){"use strict";var r=e(20),i=e(4),o=e(104),u=[].slice,a={},c=function(t,n,e){if(!(n in a)){for(var r=[],i=0;i>>0||(u.test(e)?16:10))}:r},function(t,n,e){var r=e(1).parseFloat,i=e(41).trim;t.exports=1/r(e(68)+"-0")!=-1/0?function(t){var n=i(String(t),3),e=r(n);return 0===e&&"-"==n.charAt(0)?-0:e}:r},function(t,n,e){var r=e(25);t.exports=function(t,n){if("number"!=typeof t&&"Number"!=r(t))throw TypeError(n);return+t}},function(t,n,e){var r=e(4),i=Math.floor;t.exports=function(t){return!r(t)&&isFinite(t)&&i(t)===t}},function(t,n){t.exports=Math.log1p||function(t){return(t=+t)>-1e-8&&t<1e-8?t-t*t/2:Math.log(1+t)}},function(t,n,e){"use strict";var r=e(35),i=e(30),o=e(40),u={};e(14)(u,e(5)("iterator"),(function(){return this})),t.exports=function(t,n,e){t.prototype=r(u,{next:i(1,e)}),o(t,n+" Iterator")}},function(t,n,e){var r=e(3);t.exports=function(t,n,e,i){try{return i?n(r(e)[0],e[1]):n(e)}catch(n){var o=t.return;throw void 0!==o&&r(o.call(t)),n}}},function(t,n,e){var r=e(224);t.exports=function(t,n){return new(r(t))(n)}},function(t,n,e){var r=e(20),i=e(10),o=e(46),u=e(6);t.exports=function(t,n,e,a,c){r(n);var s=i(t),f=o(s),l=u(s.length),h=c?l-1:0,d=c?-1:1;if(e<2)for(;;){if(h in f){a=f[h],h+=d;break}if(h+=d,c?h<0:l<=h)throw TypeError("Reduce of empty array with no initial value")}for(;c?h>=0:l>h;h+=d)h in f&&(a=n(a,f[h],h,s));return a}},function(t,n,e){"use strict";var r=e(10),i=e(34),o=e(6);t.exports=[].copyWithin||function(t,n){var e=r(this),u=o(e.length),a=i(t,u),c=i(n,u),s=arguments.length>2?arguments[2]:void 0,f=Math.min((void 0===s?u:i(s,u))-c,u-a),l=1;for(c0;)c in e?e[a]=e[c]:delete e[a],a+=l,c+=l;return e}},function(t,n){t.exports=function(t,n){return{value:n,done:!!t}}},function(t,n,e){"use strict";var r=e(83);e(0)({target:"RegExp",proto:!0,forced:r!==/./.exec},{exec:r})},function(t,n,e){e(8)&&"g"!=/./g.flags&&e(9).f(RegExp.prototype,"flags",{configurable:!0,get:e(55)})},function(t,n,e){"use strict";var r,i,o,u,a=e(32),c=e(1),s=e(19),f=e(48),l=e(0),h=e(4),d=e(20),p=e(44),v=e(58),g=e(49),y=e(85).set,m=e(244)(),b=e(119),S=e(245),w=e(59),_=e(120),M=c.TypeError,x=c.process,P=x&&x.versions,O=P&&P.v8||"",F=c.Promise,A="process"==f(x),E=function(){},N=i=b.f,R=!!function(){try{var t=F.resolve(1),n=(t.constructor={})[e(5)("species")]=function(t){t(E,E)};return(A||"function"==typeof PromiseRejectionEvent)&&t.then(E)instanceof n&&0!==O.indexOf("6.6")&&-1===w.indexOf("Chrome/66")}catch(t){}}(),k=function(t){var 
n;return!(!h(t)||"function"!=typeof(n=t.then))&&n},T=function(t,n){if(!t._n){t._n=!0;var e=t._c;m((function(){for(var r=t._v,i=1==t._s,o=0,u=function(n){var e,o,u,a=i?n.ok:n.fail,c=n.resolve,s=n.reject,f=n.domain;try{a?(i||(2==t._h&&L(t),t._h=1),!0===a?e=r:(f&&f.enter(),e=a(r),f&&(f.exit(),u=!0)),e===n.promise?s(M("Promise-chain cycle")):(o=k(e))?o.call(e,c,s):c(e)):s(r)}catch(t){f&&!u&&f.exit(),s(t)}};e.length>o;)u(e[o++]);t._c=[],t._n=!1,n&&!t._h&&I(t)}))}},I=function(t){y.call(c,(function(){var n,e,r,i=t._v,o=j(t);if(o&&(n=S((function(){A?x.emit("unhandledRejection",i,t):(e=c.onunhandledrejection)?e({promise:t,reason:i}):(r=c.console)&&r.error&&r.error("Unhandled promise rejection",i)})),t._h=A||j(t)?2:1),t._a=void 0,o&&n.e)throw n.v}))},j=function(t){return 1!==t._h&&0===(t._a||t._c).length},L=function(t){y.call(c,(function(){var n;A?x.emit("rejectionHandled",t):(n=c.onrejectionhandled)&&n({promise:t,reason:t._v})}))},B=function(t){var n=this;n._d||(n._d=!0,(n=n._w||n)._v=t,n._s=2,n._a||(n._a=n._c.slice()),T(n,!0))},C=function(t){var n,e=this;if(!e._d){e._d=!0,e=e._w||e;try{if(e===t)throw M("Promise can't be resolved itself");(n=k(t))?m((function(){var r={_w:e,_d:!1};try{n.call(t,s(C,r,1),s(B,r,1))}catch(t){B.call(r,t)}})):(e._v=t,e._s=1,T(e,!1))}catch(t){B.call({_w:e,_d:!1},t)}}};R||(F=function(t){p(this,F,"Promise","_h"),d(t),r.call(this);try{t(s(C,this,1),s(B,this,1))}catch(t){B.call(this,t)}},(r=function(t){this._c=[],this._a=void 0,this._s=0,this._d=!1,this._v=void 0,this._h=0,this._n=!1}).prototype=e(45)(F.prototype,{then:function(t,n){var e=N(g(this,F));return e.ok="function"!=typeof t||t,e.fail="function"==typeof n&&n,e.domain=A?x.domain:void 0,this._c.push(e),this._a&&this._a.push(e),this._s&&T(this,!1),e.promise},catch:function(t){return this.then(void 0,t)}}),o=function(){var t=new r;this.promise=t,this.resolve=s(C,t,1),this.reject=s(B,t,1)},b.f=N=function(t){return t===F||t===u?new o(t):i(t)}),l(l.G+l.W+l.F*!R,{Promise:F}),e(40)(F,"Promise"),e(43)("Promise"),u=e(7).Promise,l(l.S+l.F*!R,"Promise",{reject:function(t){var n=N(this);return(0,n.reject)(t),n.promise}}),l(l.S+l.F*(a||!R),"Promise",{resolve:function(t){return _(a&&this===u?F:this,t)}}),l(l.S+l.F*!(R&&e(54)((function(t){F.all(t).catch(E)}))),"Promise",{all:function(t){var n=this,e=N(n),r=e.resolve,i=e.reject,o=S((function(){var e=[],o=0,u=1;v(t,!1,(function(t){var a=o++,c=!1;e.push(void 0),u++,n.resolve(t).then((function(t){c||(c=!0,e[a]=t,--u||r(e))}),i)})),--u||r(e)}));return o.e&&i(o.v),e.promise},race:function(t){var n=this,e=N(n),r=e.reject,i=S((function(){v(t,!1,(function(t){n.resolve(t).then(e.resolve,r)}))}));return i.e&&r(i.v),e.promise}})},function(t,n,e){"use strict";var r=e(20);function i(t){var n,e;this.promise=new t((function(t,r){if(void 0!==n||void 0!==e)throw TypeError("Bad Promise constructor");n=t,e=r})),this.resolve=r(n),this.reject=r(e)}t.exports.f=function(t){return new i(t)}},function(t,n,e){var r=e(3),i=e(4),o=e(119);t.exports=function(t,n){if(r(t),i(n)&&n.constructor===t)return n;var e=o.f(t);return(0,e.resolve)(n),e.promise}},function(t,n,e){"use strict";var r=e(9).f,i=e(35),o=e(45),u=e(19),a=e(44),c=e(58),s=e(74),f=e(115),l=e(43),h=e(8),d=e(29).fastKey,p=e(39),v=h?"_s":"size",g=function(t,n){var e,r=d(n);if("F"!==r)return t._i[r];for(e=t._f;e;e=e.n)if(e.k==n)return e};t.exports={getConstructor:function(t,n,e,s){var f=t((function(t,r){a(t,f,n,"_i"),t._t=n,t._i=i(null),t._f=void 0,t._l=void 0,t[v]=0,null!=r&&c(r,e,t[s],t)}));return o(f.prototype,{clear:function(){for(var 
t=p(this,n),e=t._i,r=t._f;r;r=r.n)r.r=!0,r.p&&(r.p=r.p.n=void 0),delete e[r.i];t._f=t._l=void 0,t[v]=0},delete:function(t){var e=p(this,n),r=g(e,t);if(r){var i=r.n,o=r.p;delete e._i[r.i],r.r=!0,o&&(o.n=i),i&&(i.p=o),e._f==r&&(e._f=i),e._l==r&&(e._l=o),e[v]--}return!!r},forEach:function(t){p(this,n);for(var e,r=u(t,arguments.length>1?arguments[1]:void 0,3);e=e?e.n:this._f;)for(r(e.v,e.k,this);e&&e.r;)e=e.p},has:function(t){return!!g(p(this,n),t)}}),h&&r(f.prototype,"size",{get:function(){return p(this,n)[v]}}),f},def:function(t,n,e){var r,i,o=g(t,n);return o?o.v=e:(t._l=o={i:i=d(n,!0),k:n,v:e,p:r=t._l,n:void 0,r:!1},t._f||(t._f=o),r&&(r.n=o),t[v]++,"F"!==i&&(t._i[i]=o)),t},getEntry:g,setStrong:function(t,n,e){s(t,n,(function(t,e){this._t=p(t,n),this._k=e,this._l=void 0}),(function(){for(var t=this._k,n=this._l;n&&n.r;)n=n.p;return this._t&&(this._l=n=n?n.n:this._t._f)?f(0,"keys"==t?n.k:"values"==t?n.v:[n.k,n.v]):(this._t=void 0,f(1))}),e?"entries":"values",!e,!0),l(n)}}},function(t,n,e){"use strict";var r=e(45),i=e(29).getWeak,o=e(3),u=e(4),a=e(44),c=e(58),s=e(24),f=e(13),l=e(39),h=s(5),d=s(6),p=0,v=function(t){return t._l||(t._l=new g)},g=function(){this.a=[]},y=function(t,n){return h(t.a,(function(t){return t[0]===n}))};g.prototype={get:function(t){var n=y(this,t);if(n)return n[1]},has:function(t){return!!y(this,t)},set:function(t,n){var e=y(this,t);e?e[1]=n:this.a.push([t,n])},delete:function(t){var n=d(this.a,(function(n){return n[0]===t}));return~n&&this.a.splice(n,1),!!~n}},t.exports={getConstructor:function(t,n,e,o){var s=t((function(t,r){a(t,s,n,"_i"),t._t=n,t._i=p++,t._l=void 0,null!=r&&c(r,e,t[o],t)}));return r(s.prototype,{delete:function(t){if(!u(t))return!1;var e=i(t);return!0===e?v(l(this,n)).delete(t):e&&f(e,this._i)&&delete e[this._i]},has:function(t){if(!u(t))return!1;var e=i(t);return!0===e?v(l(this,n)).has(t):e&&f(e,this._i)}}),s},def:function(t,n,e){var r=i(o(n),!0);return!0===r?v(t).set(n,e):r[t._i]=e,t},ufstore:v}},function(t,n,e){var r=e(21),i=e(6);t.exports=function(t){if(void 0===t)return 0;var n=r(t),e=i(n);if(n!==e)throw RangeError("Wrong length!");return e}},function(t,n,e){var r=e(36),i=e(52),o=e(3),u=e(1).Reflect;t.exports=u&&u.ownKeys||function(t){var n=r.f(o(t)),e=i.f;return e?n.concat(e(t)):n}},function(t,n,e){var r=e(6),i=e(70),o=e(26);t.exports=function(t,n,e,u){var a=String(o(t)),c=a.length,s=void 0===e?" ":String(e),f=r(n);if(f<=c||""==s)return a;var l=f-c,h=i.call(s,Math.ceil(l/s.length));return h.length>l&&(h=h.slice(0,l)),u?h+a:a+h}},function(t,n,e){var r=e(8),i=e(33),o=e(15),u=e(47).f;t.exports=function(t){return function(n){for(var e,a=o(n),c=i(a),s=c.length,f=0,l=[];s>f;)e=c[f++],r&&!u.call(a,e)||l.push(t?[e,a[e]]:a[e]);return l}}},function(t,n){var e=t.exports={version:"2.6.9"};"number"==typeof __e&&(__e=e)},function(t,n){t.exports=function(t){try{return!!t()}catch(t){return!0}}},function(t,n,e){e(130),t.exports=e(90)},function(t,n,e){"use strict";e(131);var r,i=(r=e(303))&&r.__esModule?r:{default:r};i.default._babelPolyfill&&"undefined"!=typeof console&&console.warn&&console.warn("@babel/polyfill is loaded more than once on this page. This is probably not desirable/intended and may have consequences if different versions of the polyfills are applied sequentially. 
If you do need to load the polyfill more than once, use @babel/polyfill/noConflict instead to bypass the warning."),i.default._babelPolyfill=!0},function(t,n,e){"use strict";e(132),e(275),e(277),e(280),e(282),e(284),e(286),e(288),e(290),e(292),e(294),e(296),e(298),e(302)},function(t,n,e){e(133),e(136),e(137),e(138),e(139),e(140),e(141),e(142),e(143),e(144),e(145),e(146),e(147),e(148),e(149),e(150),e(151),e(152),e(153),e(154),e(155),e(156),e(157),e(158),e(159),e(160),e(161),e(162),e(163),e(164),e(165),e(166),e(167),e(168),e(169),e(170),e(171),e(172),e(173),e(174),e(175),e(176),e(177),e(179),e(180),e(181),e(182),e(183),e(184),e(185),e(186),e(187),e(188),e(189),e(190),e(191),e(192),e(193),e(194),e(195),e(196),e(197),e(198),e(199),e(200),e(201),e(202),e(203),e(204),e(205),e(206),e(207),e(208),e(209),e(210),e(211),e(212),e(214),e(215),e(217),e(218),e(219),e(220),e(221),e(222),e(223),e(225),e(226),e(227),e(228),e(229),e(230),e(231),e(232),e(233),e(234),e(235),e(236),e(237),e(82),e(238),e(116),e(239),e(117),e(240),e(241),e(242),e(243),e(118),e(246),e(247),e(248),e(249),e(250),e(251),e(252),e(253),e(254),e(255),e(256),e(257),e(258),e(259),e(260),e(261),e(262),e(263),e(264),e(265),e(266),e(267),e(268),e(269),e(270),e(271),e(272),e(273),e(274),t.exports=e(7)},function(t,n,e){"use strict";var r=e(1),i=e(13),o=e(8),u=e(0),a=e(11),c=e(29).KEY,s=e(2),f=e(50),l=e(40),h=e(31),d=e(5),p=e(63),v=e(97),g=e(135),y=e(53),m=e(3),b=e(4),S=e(10),w=e(15),_=e(28),M=e(30),x=e(35),P=e(100),O=e(22),F=e(52),A=e(9),E=e(33),N=O.f,R=A.f,k=P.f,T=r.Symbol,I=r.JSON,j=I&&I.stringify,L=d("_hidden"),B=d("toPrimitive"),C={}.propertyIsEnumerable,W=f("symbol-registry"),V=f("symbols"),G=f("op-symbols"),D=Object.prototype,U="function"==typeof T&&!!F.f,z=r.QObject,q=!z||!z.prototype||!z.prototype.findChild,K=o&&s((function(){return 7!=x(R({},"a",{get:function(){return R(this,"a",{value:7}).a}})).a}))?function(t,n,e){var r=N(D,n);r&&delete D[n],R(t,n,e),r&&t!==D&&R(D,n,r)}:R,Y=function(t){var n=V[t]=x(T.prototype);return n._k=t,n},Q=U&&"symbol"==typeof T.iterator?function(t){return"symbol"==typeof t}:function(t){return t instanceof T},H=function(t,n,e){return t===D&&H(G,n,e),m(t),n=_(n,!0),m(e),i(V,n)?(e.enumerable?(i(t,L)&&t[L][n]&&(t[L][n]=!1),e=x(e,{enumerable:M(0,!1)})):(i(t,L)||R(t,L,M(1,{})),t[L][n]=!0),K(t,n,e)):R(t,n,e)},J=function(t,n){m(t);for(var e,r=g(n=w(n)),i=0,o=r.length;o>i;)H(t,e=r[i++],n[e]);return t},X=function(t){var n=C.call(this,t=_(t,!0));return!(this===D&&i(V,t)&&!i(G,t))&&(!(n||!i(this,t)||!i(V,t)||i(this,L)&&this[L][t])||n)},Z=function(t,n){if(t=w(t),n=_(n,!0),t!==D||!i(V,n)||i(G,n)){var e=N(t,n);return!e||!i(V,n)||i(t,L)&&t[L][n]||(e.enumerable=!0),e}},$=function(t){for(var n,e=k(w(t)),r=[],o=0;e.length>o;)i(V,n=e[o++])||n==L||n==c||r.push(n);return r},tt=function(t){for(var n,e=t===D,r=k(e?G:w(t)),o=[],u=0;r.length>u;)!i(V,n=r[u++])||e&&!i(D,n)||o.push(V[n]);return o};U||(a((T=function(){if(this instanceof T)throw TypeError("Symbol is not a constructor!");var t=h(arguments.length>0?arguments[0]:void 0),n=function(e){this===D&&n.call(G,e),i(this,L)&&i(this[L],t)&&(this[L][t]=!1),K(this,t,M(1,e))};return o&&q&&K(D,t,{configurable:!0,set:n}),Y(t)}).prototype,"toString",(function(){return this._k})),O.f=Z,A.f=H,e(36).f=P.f=$,e(47).f=X,F.f=tt,o&&!e(32)&&a(D,"propertyIsEnumerable",X,!0),p.f=function(t){return Y(d(t))}),u(u.G+u.W+u.F*!U,{Symbol:T});for(var 
nt="hasInstance,isConcatSpreadable,iterator,match,replace,search,species,split,toPrimitive,toStringTag,unscopables".split(","),et=0;nt.length>et;)d(nt[et++]);for(var rt=E(d.store),it=0;rt.length>it;)v(rt[it++]);u(u.S+u.F*!U,"Symbol",{for:function(t){return i(W,t+="")?W[t]:W[t]=T(t)},keyFor:function(t){if(!Q(t))throw TypeError(t+" is not a symbol!");for(var n in W)if(W[n]===t)return n},useSetter:function(){q=!0},useSimple:function(){q=!1}}),u(u.S+u.F*!U,"Object",{create:function(t,n){return void 0===n?x(t):J(x(t),n)},defineProperty:H,defineProperties:J,getOwnPropertyDescriptor:Z,getOwnPropertyNames:$,getOwnPropertySymbols:tt});var ot=s((function(){F.f(1)}));u(u.S+u.F*ot,"Object",{getOwnPropertySymbols:function(t){return F.f(S(t))}}),I&&u(u.S+u.F*(!U||s((function(){var t=T();return"[null]"!=j([t])||"{}"!=j({a:t})||"{}"!=j(Object(t))}))),"JSON",{stringify:function(t){for(var n,e,r=[t],i=1;arguments.length>i;)r.push(arguments[i++]);if(e=n=r[1],(b(n)||void 0!==t)&&!Q(t))return y(n)||(n=function(t,n){if("function"==typeof e&&(n=e.call(this,t,n)),!Q(n))return n}),r[1]=n,j.apply(I,r)}}),T.prototype[B]||e(14)(T.prototype,B,T.prototype.valueOf),l(T,"Symbol"),l(Math,"Math",!0),l(r.JSON,"JSON",!0)},function(t,n,e){t.exports=e(50)("native-function-to-string",Function.toString)},function(t,n,e){var r=e(33),i=e(52),o=e(47);t.exports=function(t){var n=r(t),e=i.f;if(e)for(var u,a=e(t),c=o.f,s=0;a.length>s;)c.call(t,u=a[s++])&&n.push(u);return n}},function(t,n,e){var r=e(0);r(r.S,"Object",{create:e(35)})},function(t,n,e){var r=e(0);r(r.S+r.F*!e(8),"Object",{defineProperty:e(9).f})},function(t,n,e){var r=e(0);r(r.S+r.F*!e(8),"Object",{defineProperties:e(99)})},function(t,n,e){var r=e(15),i=e(22).f;e(23)("getOwnPropertyDescriptor",(function(){return function(t,n){return i(r(t),n)}}))},function(t,n,e){var r=e(10),i=e(37);e(23)("getPrototypeOf",(function(){return function(t){return i(r(t))}}))},function(t,n,e){var r=e(10),i=e(33);e(23)("keys",(function(){return function(t){return i(r(t))}}))},function(t,n,e){e(23)("getOwnPropertyNames",(function(){return e(100).f}))},function(t,n,e){var r=e(4),i=e(29).onFreeze;e(23)("freeze",(function(t){return function(n){return t&&r(n)?t(i(n)):n}}))},function(t,n,e){var r=e(4),i=e(29).onFreeze;e(23)("seal",(function(t){return function(n){return t&&r(n)?t(i(n)):n}}))},function(t,n,e){var r=e(4),i=e(29).onFreeze;e(23)("preventExtensions",(function(t){return function(n){return t&&r(n)?t(i(n)):n}}))},function(t,n,e){var r=e(4);e(23)("isFrozen",(function(t){return function(n){return!r(n)||!!t&&t(n)}}))},function(t,n,e){var r=e(4);e(23)("isSealed",(function(t){return function(n){return!r(n)||!!t&&t(n)}}))},function(t,n,e){var r=e(4);e(23)("isExtensible",(function(t){return function(n){return!!r(n)&&(!t||t(n))}}))},function(t,n,e){var r=e(0);r(r.S+r.F,"Object",{assign:e(101)})},function(t,n,e){var r=e(0);r(r.S,"Object",{is:e(102)})},function(t,n,e){var r=e(0);r(r.S,"Object",{setPrototypeOf:e(67).set})},function(t,n,e){"use strict";var r=e(48),i={};i[e(5)("toStringTag")]="z",i+""!="[object z]"&&e(11)(Object.prototype,"toString",(function(){return"[object "+r(this)+"]"}),!0)},function(t,n,e){var r=e(0);r(r.P,"Function",{bind:e(103)})},function(t,n,e){var r=e(9).f,i=Function.prototype,o=/^\s*function ([^ (]*)/;"name"in i||e(8)&&r(i,"name",{configurable:!0,get:function(){try{return(""+this).match(o)[1]}catch(t){return""}}})},function(t,n,e){"use strict";var r=e(4),i=e(37),o=e(5)("hasInstance"),u=Function.prototype;o in u||e(9).f(u,o,{value:function(t){if("function"!=typeof 
this||!r(t))return!1;if(!r(this.prototype))return t instanceof this;for(;t=i(t);)if(this.prototype===t)return!0;return!1}})},function(t,n,e){var r=e(0),i=e(105);r(r.G+r.F*(parseInt!=i),{parseInt:i})},function(t,n,e){var r=e(0),i=e(106);r(r.G+r.F*(parseFloat!=i),{parseFloat:i})},function(t,n,e){"use strict";var r=e(1),i=e(13),o=e(25),u=e(69),a=e(28),c=e(2),s=e(36).f,f=e(22).f,l=e(9).f,h=e(41).trim,d=r.Number,p=d,v=d.prototype,g="Number"==o(e(35)(v)),y="trim"in String.prototype,m=function(t){var n=a(t,!1);if("string"==typeof n&&n.length>2){var e,r,i,o=(n=y?n.trim():h(n,3)).charCodeAt(0);if(43===o||45===o){if(88===(e=n.charCodeAt(2))||120===e)return NaN}else if(48===o){switch(n.charCodeAt(1)){case 66:case 98:r=2,i=49;break;case 79:case 111:r=8,i=55;break;default:return+n}for(var u,c=n.slice(2),s=0,f=c.length;si)return NaN;return parseInt(c,r)}}return+n};if(!d(" 0o1")||!d("0b1")||d("+0x1")){d=function(t){var n=arguments.length<1?0:t,e=this;return e instanceof d&&(g?c((function(){v.valueOf.call(e)})):"Number"!=o(e))?u(new p(m(n)),e,d):m(n)};for(var b,S=e(8)?s(p):"MAX_VALUE,MIN_VALUE,NaN,NEGATIVE_INFINITY,POSITIVE_INFINITY,EPSILON,isFinite,isInteger,isNaN,isSafeInteger,MAX_SAFE_INTEGER,MIN_SAFE_INTEGER,parseFloat,parseInt,isInteger".split(","),w=0;S.length>w;w++)i(p,b=S[w])&&!i(d,b)&&l(d,b,f(p,b));d.prototype=v,v.constructor=d,e(11)(r,"Number",d)}},function(t,n,e){"use strict";var r=e(0),i=e(21),o=e(107),u=e(70),a=1..toFixed,c=Math.floor,s=[0,0,0,0,0,0],f="Number.toFixed: incorrect invocation!",l=function(t,n){for(var e=-1,r=n;++e<6;)r+=t*s[e],s[e]=r%1e7,r=c(r/1e7)},h=function(t){for(var n=6,e=0;--n>=0;)e+=s[n],s[n]=c(e/t),e=e%t*1e7},d=function(){for(var t=6,n="";--t>=0;)if(""!==n||0===t||0!==s[t]){var e=String(s[t]);n=""===n?e:n+u.call("0",7-e.length)+e}return n},p=function(t,n,e){return 0===n?e:n%2==1?p(t,n-1,e*t):p(t*t,n/2,e)};r(r.P+r.F*(!!a&&("0.000"!==8e-5.toFixed(3)||"1"!==.9.toFixed(0)||"1.25"!==1.255.toFixed(2)||"1000000000000000128"!==(0xde0b6b3a7640080).toFixed(0))||!e(2)((function(){a.call({})}))),"Number",{toFixed:function(t){var n,e,r,a,c=o(this,f),s=i(t),v="",g="0";if(s<0||s>20)throw RangeError(f);if(c!=c)return"NaN";if(c<=-1e21||c>=1e21)return String(c);if(c<0&&(v="-",c=-c),c>1e-21)if(e=(n=function(t){for(var n=0,e=t;e>=4096;)n+=12,e/=4096;for(;e>=2;)n+=1,e/=2;return n}(c*p(2,69,1))-69)<0?c*p(2,-n,1):c/p(2,n,1),e*=4503599627370496,(n=52-n)>0){for(l(0,e),r=s;r>=7;)l(1e7,0),r-=7;for(l(p(10,r,1),0),r=n-1;r>=23;)h(1<<23),r-=23;h(1<0?v+((a=g.length)<=s?"0."+u.call("0",s-a)+g:g.slice(0,a-s)+"."+g.slice(a-s)):v+g}})},function(t,n,e){"use strict";var r=e(0),i=e(2),o=e(107),u=1..toPrecision;r(r.P+r.F*(i((function(){return"1"!==u.call(1,void 0)}))||!i((function(){u.call({})}))),"Number",{toPrecision:function(t){var n=o(this,"Number#toPrecision: incorrect invocation!");return void 0===t?u.call(n):u.call(n,t)}})},function(t,n,e){var r=e(0);r(r.S,"Number",{EPSILON:Math.pow(2,-52)})},function(t,n,e){var r=e(0),i=e(1).isFinite;r(r.S,"Number",{isFinite:function(t){return"number"==typeof t&&i(t)}})},function(t,n,e){var r=e(0);r(r.S,"Number",{isInteger:e(108)})},function(t,n,e){var r=e(0);r(r.S,"Number",{isNaN:function(t){return t!=t}})},function(t,n,e){var r=e(0),i=e(108),o=Math.abs;r(r.S,"Number",{isSafeInteger:function(t){return i(t)&&o(t)<=9007199254740991}})},function(t,n,e){var r=e(0);r(r.S,"Number",{MAX_SAFE_INTEGER:9007199254740991})},function(t,n,e){var r=e(0);r(r.S,"Number",{MIN_SAFE_INTEGER:-9007199254740991})},function(t,n,e){var 
r=e(0),i=e(106);r(r.S+r.F*(Number.parseFloat!=i),"Number",{parseFloat:i})},function(t,n,e){var r=e(0),i=e(105);r(r.S+r.F*(Number.parseInt!=i),"Number",{parseInt:i})},function(t,n,e){var r=e(0),i=e(109),o=Math.sqrt,u=Math.acosh;r(r.S+r.F*!(u&&710==Math.floor(u(Number.MAX_VALUE))&&u(1/0)==1/0),"Math",{acosh:function(t){return(t=+t)<1?NaN:t>94906265.62425156?Math.log(t)+Math.LN2:i(t-1+o(t-1)*o(t+1))}})},function(t,n,e){var r=e(0),i=Math.asinh;r(r.S+r.F*!(i&&1/i(0)>0),"Math",{asinh:function t(n){return isFinite(n=+n)&&0!=n?n<0?-t(-n):Math.log(n+Math.sqrt(n*n+1)):n}})},function(t,n,e){var r=e(0),i=Math.atanh;r(r.S+r.F*!(i&&1/i(-0)<0),"Math",{atanh:function(t){return 0==(t=+t)?t:Math.log((1+t)/(1-t))/2}})},function(t,n,e){var r=e(0),i=e(71);r(r.S,"Math",{cbrt:function(t){return i(t=+t)*Math.pow(Math.abs(t),1/3)}})},function(t,n,e){var r=e(0);r(r.S,"Math",{clz32:function(t){return(t>>>=0)?31-Math.floor(Math.log(t+.5)*Math.LOG2E):32}})},function(t,n,e){var r=e(0),i=Math.exp;r(r.S,"Math",{cosh:function(t){return(i(t=+t)+i(-t))/2}})},function(t,n,e){var r=e(0),i=e(72);r(r.S+r.F*(i!=Math.expm1),"Math",{expm1:i})},function(t,n,e){var r=e(0);r(r.S,"Math",{fround:e(178)})},function(t,n,e){var r=e(71),i=Math.pow,o=i(2,-52),u=i(2,-23),a=i(2,127)*(2-u),c=i(2,-126);t.exports=Math.fround||function(t){var n,e,i=Math.abs(t),s=r(t);return ia||e!=e?s*(1/0):s*e}},function(t,n,e){var r=e(0),i=Math.abs;r(r.S,"Math",{hypot:function(t,n){for(var e,r,o=0,u=0,a=arguments.length,c=0;u0?(r=e/c)*r:e;return c===1/0?1/0:c*Math.sqrt(o)}})},function(t,n,e){var r=e(0),i=Math.imul;r(r.S+r.F*e(2)((function(){return-5!=i(4294967295,5)||2!=i.length})),"Math",{imul:function(t,n){var e=+t,r=+n,i=65535&e,o=65535&r;return 0|i*o+((65535&e>>>16)*o+i*(65535&r>>>16)<<16>>>0)}})},function(t,n,e){var r=e(0);r(r.S,"Math",{log10:function(t){return Math.log(t)*Math.LOG10E}})},function(t,n,e){var r=e(0);r(r.S,"Math",{log1p:e(109)})},function(t,n,e){var r=e(0);r(r.S,"Math",{log2:function(t){return Math.log(t)/Math.LN2}})},function(t,n,e){var r=e(0);r(r.S,"Math",{sign:e(71)})},function(t,n,e){var r=e(0),i=e(72),o=Math.exp;r(r.S+r.F*e(2)((function(){return-2e-17!=!Math.sinh(-2e-17)})),"Math",{sinh:function(t){return Math.abs(t=+t)<1?(i(t)-i(-t))/2:(o(t-1)-o(-t-1))*(Math.E/2)}})},function(t,n,e){var r=e(0),i=e(72),o=Math.exp;r(r.S,"Math",{tanh:function(t){var n=i(t=+t),e=i(-t);return n==1/0?1:e==1/0?-1:(n-e)/(o(t)+o(-t))}})},function(t,n,e){var r=e(0);r(r.S,"Math",{trunc:function(t){return(t>0?Math.floor:Math.ceil)(t)}})},function(t,n,e){var r=e(0),i=e(34),o=String.fromCharCode,u=String.fromCodePoint;r(r.S+r.F*(!!u&&1!=u.length),"String",{fromCodePoint:function(t){for(var n,e=[],r=arguments.length,u=0;r>u;){if(n=+arguments[u++],i(n,1114111)!==n)throw RangeError(n+" is not a valid code point");e.push(n<65536?o(n):o(55296+((n-=65536)>>10),n%1024+56320))}return e.join("")}})},function(t,n,e){var r=e(0),i=e(15),o=e(6);r(r.S,"String",{raw:function(t){for(var n=i(t.raw),e=o(n.length),r=arguments.length,u=[],a=0;e>a;)u.push(String(n[a++])),a=n.length?{value:void 0,done:!0}:(t=r(n,e),this._i+=t.length,{value:t,done:!1})}))},function(t,n,e){"use strict";var r=e(0),i=e(73)(!1);r(r.P,"String",{codePointAt:function(t){return i(this,t)}})},function(t,n,e){"use strict";var r=e(0),i=e(6),o=e(75),u="".endsWith;r(r.P+r.F*e(77)("endsWith"),"String",{endsWith:function(t){var n=o(this,t,"endsWith"),e=arguments.length>1?arguments[1]:void 0,r=i(n.length),a=void 0===e?r:Math.min(i(e),r),c=String(t);return u?u.call(n,c,a):n.slice(a-c.length,a)===c}})},function(t,n,e){"use 
strict";var r=e(0),i=e(75);r(r.P+r.F*e(77)("includes"),"String",{includes:function(t){return!!~i(this,t,"includes").indexOf(t,arguments.length>1?arguments[1]:void 0)}})},function(t,n,e){var r=e(0);r(r.P,"String",{repeat:e(70)})},function(t,n,e){"use strict";var r=e(0),i=e(6),o=e(75),u="".startsWith;r(r.P+r.F*e(77)("startsWith"),"String",{startsWith:function(t){var n=o(this,t,"startsWith"),e=i(Math.min(arguments.length>1?arguments[1]:void 0,n.length)),r=String(t);return u?u.call(n,r,e):n.slice(e,e+r.length)===r}})},function(t,n,e){"use strict";e(12)("anchor",(function(t){return function(n){return t(this,"a","name",n)}}))},function(t,n,e){"use strict";e(12)("big",(function(t){return function(){return t(this,"big","","")}}))},function(t,n,e){"use strict";e(12)("blink",(function(t){return function(){return t(this,"blink","","")}}))},function(t,n,e){"use strict";e(12)("bold",(function(t){return function(){return t(this,"b","","")}}))},function(t,n,e){"use strict";e(12)("fixed",(function(t){return function(){return t(this,"tt","","")}}))},function(t,n,e){"use strict";e(12)("fontcolor",(function(t){return function(n){return t(this,"font","color",n)}}))},function(t,n,e){"use strict";e(12)("fontsize",(function(t){return function(n){return t(this,"font","size",n)}}))},function(t,n,e){"use strict";e(12)("italics",(function(t){return function(){return t(this,"i","","")}}))},function(t,n,e){"use strict";e(12)("link",(function(t){return function(n){return t(this,"a","href",n)}}))},function(t,n,e){"use strict";e(12)("small",(function(t){return function(){return t(this,"small","","")}}))},function(t,n,e){"use strict";e(12)("strike",(function(t){return function(){return t(this,"strike","","")}}))},function(t,n,e){"use strict";e(12)("sub",(function(t){return function(){return t(this,"sub","","")}}))},function(t,n,e){"use strict";e(12)("sup",(function(t){return function(){return t(this,"sup","","")}}))},function(t,n,e){var r=e(0);r(r.S,"Date",{now:function(){return(new Date).getTime()}})},function(t,n,e){"use strict";var r=e(0),i=e(10),o=e(28);r(r.P+r.F*e(2)((function(){return null!==new Date(NaN).toJSON()||1!==Date.prototype.toJSON.call({toISOString:function(){return 1}})})),"Date",{toJSON:function(t){var n=i(this),e=o(n);return"number"!=typeof e||isFinite(e)?n.toISOString():null}})},function(t,n,e){var r=e(0),i=e(213);r(r.P+r.F*(Date.prototype.toISOString!==i),"Date",{toISOString:i})},function(t,n,e){"use strict";var r=e(2),i=Date.prototype.getTime,o=Date.prototype.toISOString,u=function(t){return t>9?t:"0"+t};t.exports=r((function(){return"0385-07-25T07:06:39.999Z"!=o.call(new Date(-5e13-1))}))||!r((function(){o.call(new Date(NaN))}))?function(){if(!isFinite(i.call(this)))throw RangeError("Invalid time value");var t=this,n=t.getUTCFullYear(),e=t.getUTCMilliseconds(),r=n<0?"-":n>9999?"+":"";return r+("00000"+Math.abs(n)).slice(r?-6:-4)+"-"+u(t.getUTCMonth()+1)+"-"+u(t.getUTCDate())+"T"+u(t.getUTCHours())+":"+u(t.getUTCMinutes())+":"+u(t.getUTCSeconds())+"."+(e>99?e:"0"+u(e))+"Z"}:o},function(t,n,e){var r=Date.prototype,i=r.toString,o=r.getTime;new Date(NaN)+""!="Invalid Date"&&e(11)(r,"toString",(function(){var t=o.call(this);return t==t?i.call(this):"Invalid Date"}))},function(t,n,e){var r=e(5)("toPrimitive"),i=Date.prototype;r in i||e(14)(i,r,e(216))},function(t,n,e){"use strict";var r=e(3),i=e(28);t.exports=function(t){if("string"!==t&&"number"!==t&&"default"!==t)throw TypeError("Incorrect hint");return i(r(this),"number"!=t)}},function(t,n,e){var 
r=e(0);r(r.S,"Array",{isArray:e(53)})},function(t,n,e){"use strict";var r=e(19),i=e(0),o=e(10),u=e(111),a=e(78),c=e(6),s=e(79),f=e(80);i(i.S+i.F*!e(54)((function(t){Array.from(t)})),"Array",{from:function(t){var n,e,i,l,h=o(t),d="function"==typeof this?this:Array,p=arguments.length,v=p>1?arguments[1]:void 0,g=void 0!==v,y=0,m=f(h);if(g&&(v=r(v,p>2?arguments[2]:void 0,2)),null==m||d==Array&&a(m))for(e=new d(n=c(h.length));n>y;y++)s(e,y,g?v(h[y],y):h[y]);else for(l=m.call(h),e=new d;!(i=l.next()).done;y++)s(e,y,g?u(l,v,[i.value,y],!0):i.value);return e.length=y,e}})},function(t,n,e){"use strict";var r=e(0),i=e(79);r(r.S+r.F*e(2)((function(){function t(){}return!(Array.of.call(t)instanceof t)})),"Array",{of:function(){for(var t=0,n=arguments.length,e=new("function"==typeof this?this:Array)(n);n>t;)i(e,t,arguments[t++]);return e.length=n,e}})},function(t,n,e){"use strict";var r=e(0),i=e(15),o=[].join;r(r.P+r.F*(e(46)!=Object||!e(16)(o)),"Array",{join:function(t){return o.call(i(this),void 0===t?",":t)}})},function(t,n,e){"use strict";var r=e(0),i=e(66),o=e(25),u=e(34),a=e(6),c=[].slice;r(r.P+r.F*e(2)((function(){i&&c.call(i)})),"Array",{slice:function(t,n){var e=a(this.length),r=o(this);if(n=void 0===n?e:n,"Array"==r)return c.call(this,t,n);for(var i=u(t,e),s=u(n,e),f=a(s-i),l=new Array(f),h=0;h1&&(r=Math.min(r,o(arguments[1]))),r<0&&(r=e+r);r>=0;r--)if(r in n&&n[r]===t)return r||0;return-1}})},function(t,n,e){var r=e(0);r(r.P,"Array",{copyWithin:e(114)}),e(38)("copyWithin")},function(t,n,e){var r=e(0);r(r.P,"Array",{fill:e(81)}),e(38)("fill")},function(t,n,e){"use strict";var r=e(0),i=e(24)(5),o=!0;"find"in[]&&Array(1).find((function(){o=!1})),r(r.P+r.F*o,"Array",{find:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0)}}),e(38)("find")},function(t,n,e){"use strict";var r=e(0),i=e(24)(6),o="findIndex",u=!0;o in[]&&Array(1)[o]((function(){u=!1})),r(r.P+r.F*u,"Array",{findIndex:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0)}}),e(38)(o)},function(t,n,e){e(43)("Array")},function(t,n,e){var r=e(1),i=e(69),o=e(9).f,u=e(36).f,a=e(76),c=e(55),s=r.RegExp,f=s,l=s.prototype,h=/a/g,d=/a/g,p=new s(h)!==h;if(e(8)&&(!p||e(2)((function(){return d[e(5)("match")]=!1,s(h)!=h||s(d)==d||"/a/i"!=s(h,"i")})))){s=function(t,n){var e=this instanceof s,r=a(t),o=void 0===n;return!e&&r&&t.constructor===s&&o?t:i(p?new f(r&&!o?t.source:t,n):f((r=t instanceof s)?t.source:t,r&&o?c.call(t):n),e?this:l,s)};for(var v=function(t){t in s||o(s,t,{configurable:!0,get:function(){return f[t]},set:function(n){f[t]=n}})},g=u(f),y=0;g.length>y;)v(g[y++]);l.constructor=s,s.prototype=l,e(11)(r,"RegExp",s)}e(43)("RegExp")},function(t,n,e){"use strict";e(117);var r=e(3),i=e(55),o=e(8),u=/./.toString,a=function(t){e(11)(RegExp.prototype,"toString",t,!0)};e(2)((function(){return"/a/b"!=u.call({source:"a",flags:"b"})}))?a((function(){var t=r(this);return"/".concat(t.source,"/","flags"in t?t.flags:!o&&t instanceof RegExp?i.call(t):void 0)})):"toString"!=u.name&&a((function(){return u.call(this)}))},function(t,n,e){"use strict";var r=e(3),i=e(6),o=e(84),u=e(56);e(57)("match",1,(function(t,n,e,a){return[function(e){var r=t(this),i=null==e?void 0:e[n];return void 0!==i?i.call(e,r):new RegExp(e)[n](String(r))},function(t){var n=a(e,t,this);if(n.done)return n.value;var c=r(t),s=String(this);if(!c.global)return u(c,s);var f=c.unicode;c.lastIndex=0;for(var l,h=[],d=0;null!==(l=u(c,s));){var p=String(l[0]);h[d]=p,""===p&&(c.lastIndex=o(s,i(c.lastIndex),f)),d++}return 0===d?null:h}]}))},function(t,n,e){"use 
strict";var r=e(3),i=e(10),o=e(6),u=e(21),a=e(84),c=e(56),s=Math.max,f=Math.min,l=Math.floor,h=/\$([$&`']|\d\d?|<[^>]*>)/g,d=/\$([$&`']|\d\d?)/g;e(57)("replace",2,(function(t,n,e,p){return[function(r,i){var o=t(this),u=null==r?void 0:r[n];return void 0!==u?u.call(r,o,i):e.call(String(o),r,i)},function(t,n){var i=p(e,t,this,n);if(i.done)return i.value;var l=r(t),h=String(this),d="function"==typeof n;d||(n=String(n));var g=l.global;if(g){var y=l.unicode;l.lastIndex=0}for(var m=[];;){var b=c(l,h);if(null===b)break;if(m.push(b),!g)break;""===String(b[0])&&(l.lastIndex=a(h,o(l.lastIndex),y))}for(var S,w="",_=0,M=0;M=_&&(w+=h.slice(_,P)+N,_=P+x.length)}return w+h.slice(_)}];function v(t,n,r,o,u,a){var c=r+t.length,s=o.length,f=d;return void 0!==u&&(u=i(u),f=h),e.call(a,f,(function(e,i){var a;switch(i.charAt(0)){case"$":return"$";case"&":return t;case"`":return n.slice(0,r);case"'":return n.slice(c);case"<":a=u[i.slice(1,-1)];break;default:var f=+i;if(0===f)return e;if(f>s){var h=l(f/10);return 0===h?e:h<=s?void 0===o[h-1]?i.charAt(1):o[h-1]+i.charAt(1):e}a=o[f-1]}return void 0===a?"":a}))}}))},function(t,n,e){"use strict";var r=e(3),i=e(102),o=e(56);e(57)("search",1,(function(t,n,e,u){return[function(e){var r=t(this),i=null==e?void 0:e[n];return void 0!==i?i.call(e,r):new RegExp(e)[n](String(r))},function(t){var n=u(e,t,this);if(n.done)return n.value;var a=r(t),c=String(this),s=a.lastIndex;i(s,0)||(a.lastIndex=0);var f=o(a,c);return i(a.lastIndex,s)||(a.lastIndex=s),null===f?-1:f.index}]}))},function(t,n,e){"use strict";var r=e(76),i=e(3),o=e(49),u=e(84),a=e(6),c=e(56),s=e(83),f=e(2),l=Math.min,h=[].push,d=!f((function(){RegExp(4294967295,"y")}));e(57)("split",2,(function(t,n,e,f){var p;return p="c"=="abbc".split(/(b)*/)[1]||4!="test".split(/(?:)/,-1).length||2!="ab".split(/(?:ab)*/).length||4!=".".split(/(.?)(.?)/).length||".".split(/()()/).length>1||"".split(/.?/).length?function(t,n){var i=String(this);if(void 0===t&&0===n)return[];if(!r(t))return e.call(i,t,n);for(var o,u,a,c=[],f=(t.ignoreCase?"i":"")+(t.multiline?"m":"")+(t.unicode?"u":"")+(t.sticky?"y":""),l=0,d=void 0===n?4294967295:n>>>0,p=new RegExp(t.source,f+"g");(o=s.call(p,i))&&!((u=p.lastIndex)>l&&(c.push(i.slice(l,o.index)),o.length>1&&o.index=d));)p.lastIndex===o.index&&p.lastIndex++;return l===i.length?!a&&p.test("")||c.push(""):c.push(i.slice(l)),c.length>d?c.slice(0,d):c}:"0".split(void 0,0).length?function(t,n){return void 0===t&&0===n?[]:e.call(this,t,n)}:e,[function(e,r){var i=t(this),o=null==e?void 0:e[n];return void 0!==o?o.call(e,i,r):p.call(String(i),e,r)},function(t,n){var r=f(p,t,this,n,p!==e);if(r.done)return r.value;var s=i(t),h=String(this),v=o(s,RegExp),g=s.unicode,y=(s.ignoreCase?"i":"")+(s.multiline?"m":"")+(s.unicode?"u":"")+(d?"y":"g"),m=new v(d?s:"^(?:"+s.source+")",y),b=void 0===n?4294967295:n>>>0;if(0===b)return[];if(0===h.length)return null===c(m,h)?[h]:[];for(var S=0,w=0,_=[];w0?arguments[0]:void 0)}}),{get:function(t){var n=r.getEntry(i(this,"Map"),t);return n&&n.v},set:function(t,n){return r.def(i(this,"Map"),0===t?0:t,n)}},r,!0)},function(t,n,e){"use strict";var r=e(121),i=e(39);t.exports=e(60)("Set",(function(t){return function(){return t(this,arguments.length>0?arguments[0]:void 0)}}),{add:function(t){return r.def(i(this,"Set"),t=0===t?0:t,t)}},r)},function(t,n,e){"use strict";var r,i=e(1),o=e(24)(0),u=e(11),a=e(29),c=e(101),s=e(122),f=e(4),l=e(39),h=e(39),d=!i.ActiveXObject&&"ActiveXObject"in i,p=a.getWeak,v=Object.isExtensible,g=s.ufstore,y=function(t){return function(){return 
t(this,arguments.length>0?arguments[0]:void 0)}},m={get:function(t){if(f(t)){var n=p(t);return!0===n?g(l(this,"WeakMap")).get(t):n?n[this._i]:void 0}},set:function(t,n){return s.def(l(this,"WeakMap"),t,n)}},b=t.exports=e(60)("WeakMap",y,m,s,!0,!0);h&&d&&(c((r=s.getConstructor(y,"WeakMap")).prototype,m),a.NEED=!0,o(["delete","has","get","set"],(function(t){var n=b.prototype,e=n[t];u(n,t,(function(n,i){if(f(n)&&!v(n)){this._f||(this._f=new r);var o=this._f[t](n,i);return"set"==t?this:o}return e.call(this,n,i)}))})))},function(t,n,e){"use strict";var r=e(122),i=e(39);e(60)("WeakSet",(function(t){return function(){return t(this,arguments.length>0?arguments[0]:void 0)}}),{add:function(t){return r.def(i(this,"WeakSet"),t,!0)}},r,!1,!0)},function(t,n,e){"use strict";var r=e(0),i=e(61),o=e(86),u=e(3),a=e(34),c=e(6),s=e(4),f=e(1).ArrayBuffer,l=e(49),h=o.ArrayBuffer,d=o.DataView,p=i.ABV&&f.isView,v=h.prototype.slice,g=i.VIEW;r(r.G+r.W+r.F*(f!==h),{ArrayBuffer:h}),r(r.S+r.F*!i.CONSTR,"ArrayBuffer",{isView:function(t){return p&&p(t)||s(t)&&g in t}}),r(r.P+r.U+r.F*e(2)((function(){return!new h(2).slice(1,void 0).byteLength})),"ArrayBuffer",{slice:function(t,n){if(void 0!==v&&void 0===n)return v.call(u(this),t);for(var e=u(this).byteLength,r=a(t,e),i=a(void 0===n?e:n,e),o=new(l(this,h))(c(i-r)),s=new d(this),f=new d(o),p=0;r=n.length)return{value:void 0,done:!0}}while(!((t=n[this._i++])in this._t));return{value:t,done:!1}})),r(r.S,"Reflect",{enumerate:function(t){return new o(t)}})},function(t,n,e){var r=e(22),i=e(37),o=e(13),u=e(0),a=e(4),c=e(3);u(u.S,"Reflect",{get:function t(n,e){var u,s,f=arguments.length<3?n:arguments[2];return c(n)===f?n[e]:(u=r.f(n,e))?o(u,"value")?u.value:void 0!==u.get?u.get.call(f):void 0:a(s=i(n))?t(s,e,f):void 0}})},function(t,n,e){var r=e(22),i=e(0),o=e(3);i(i.S,"Reflect",{getOwnPropertyDescriptor:function(t,n){return r.f(o(t),n)}})},function(t,n,e){var r=e(0),i=e(37),o=e(3);r(r.S,"Reflect",{getPrototypeOf:function(t){return i(o(t))}})},function(t,n,e){var r=e(0);r(r.S,"Reflect",{has:function(t,n){return n in t}})},function(t,n,e){var r=e(0),i=e(3),o=Object.isExtensible;r(r.S,"Reflect",{isExtensible:function(t){return i(t),!o||o(t)}})},function(t,n,e){var r=e(0);r(r.S,"Reflect",{ownKeys:e(124)})},function(t,n,e){var r=e(0),i=e(3),o=Object.preventExtensions;r(r.S,"Reflect",{preventExtensions:function(t){i(t);try{return o&&o(t),!0}catch(t){return!1}}})},function(t,n,e){var r=e(9),i=e(22),o=e(37),u=e(13),a=e(0),c=e(30),s=e(3),f=e(4);a(a.S,"Reflect",{set:function t(n,e,a){var l,h,d=arguments.length<4?n:arguments[3],p=i.f(s(n),e);if(!p){if(f(h=o(n)))return t(h,e,a,d);p=c(0)}if(u(p,"value")){if(!1===p.writable||!f(d))return!1;if(l=i.f(d,e)){if(l.get||l.set||!1===l.writable)return!1;l.value=a,r.f(d,e,l)}else r.f(d,e,c(0,a));return!0}return void 0!==p.set&&(p.set.call(d,a),!0)}})},function(t,n,e){var r=e(0),i=e(67);i&&r(r.S,"Reflect",{setPrototypeOf:function(t,n){i.check(t,n);try{return i.set(t,n),!0}catch(t){return!1}}})},function(t,n,e){e(276),t.exports=e(7).Array.includes},function(t,n,e){"use strict";var r=e(0),i=e(51)(!0);r(r.P,"Array",{includes:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0)}}),e(38)("includes")},function(t,n,e){e(278),t.exports=e(7).Array.flatMap},function(t,n,e){"use strict";var r=e(0),i=e(279),o=e(10),u=e(6),a=e(20),c=e(112);r(r.P,"Array",{flatMap:function(t){var n,e,r=o(this);return a(t),n=u(r.length),e=c(r,0),i(e,r,r,n,0,1,t,arguments[1]),e}}),e(38)("flatMap")},function(t,n,e){"use strict";var 
r=e(53),i=e(4),o=e(6),u=e(19),a=e(5)("isConcatSpreadable");t.exports=function t(n,e,c,s,f,l,h,d){for(var p,v,g=f,y=0,m=!!h&&u(h,d,3);y0)g=t(n,e,p,o(p.length),g,l-1)-1;else{if(g>=9007199254740991)throw TypeError();n[g]=p}g++}y++}return g}},function(t,n,e){e(281),t.exports=e(7).String.padStart},function(t,n,e){"use strict";var r=e(0),i=e(125),o=e(59),u=/Version\/10\.\d+(\.\d+)?( Mobile\/\w+)? Safari\//.test(o);r(r.P+r.F*u,"String",{padStart:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0,!0)}})},function(t,n,e){e(283),t.exports=e(7).String.padEnd},function(t,n,e){"use strict";var r=e(0),i=e(125),o=e(59),u=/Version\/10\.\d+(\.\d+)?( Mobile\/\w+)? Safari\//.test(o);r(r.P+r.F*u,"String",{padEnd:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0,!1)}})},function(t,n,e){e(285),t.exports=e(7).String.trimLeft},function(t,n,e){"use strict";e(41)("trimLeft",(function(t){return function(){return t(this,1)}}),"trimStart")},function(t,n,e){e(287),t.exports=e(7).String.trimRight},function(t,n,e){"use strict";e(41)("trimRight",(function(t){return function(){return t(this,2)}}),"trimEnd")},function(t,n,e){e(289),t.exports=e(63).f("asyncIterator")},function(t,n,e){e(97)("asyncIterator")},function(t,n,e){e(291),t.exports=e(7).Object.getOwnPropertyDescriptors},function(t,n,e){var r=e(0),i=e(124),o=e(15),u=e(22),a=e(79);r(r.S,"Object",{getOwnPropertyDescriptors:function(t){for(var n,e,r=o(t),c=u.f,s=i(r),f={},l=0;s.length>l;)void 0!==(e=c(r,n=s[l++]))&&a(f,n,e);return f}})},function(t,n,e){e(293),t.exports=e(7).Object.values},function(t,n,e){var r=e(0),i=e(126)(!1);r(r.S,"Object",{values:function(t){return i(t)}})},function(t,n,e){e(295),t.exports=e(7).Object.entries},function(t,n,e){var r=e(0),i=e(126)(!0);r(r.S,"Object",{entries:function(t){return i(t)}})},function(t,n,e){"use strict";e(118),e(297),t.exports=e(7).Promise.finally},function(t,n,e){"use strict";var r=e(0),i=e(7),o=e(1),u=e(49),a=e(120);r(r.P+r.R,"Promise",{finally:function(t){var n=u(this,i.Promise||o.Promise),e="function"==typeof t;return this.then(e?function(e){return a(n,t()).then((function(){return e}))}:t,e?function(e){return a(n,t()).then((function(){throw e}))}:t)}})},function(t,n,e){e(299),e(300),e(301),t.exports=e(7)},function(t,n,e){var r=e(1),i=e(0),o=e(59),u=[].slice,a=/MSIE .\./.test(o),c=function(t){return function(n,e){var r=arguments.length>2,i=!!r&&u.call(arguments,2);return t(r?function(){("function"==typeof n?n:Function(n)).apply(this,i)}:n,e)}};i(i.G+i.B+i.F*a,{setTimeout:c(r.setTimeout),setInterval:c(r.setInterval)})},function(t,n,e){var r=e(0),i=e(85);r(r.G+r.B,{setImmediate:i.set,clearImmediate:i.clear})},function(t,n,e){for(var r=e(82),i=e(33),o=e(11),u=e(1),a=e(14),c=e(42),s=e(5),f=s("iterator"),l=s("toStringTag"),h=c.Array,d={CSSRuleList:!0,CSSStyleDeclaration:!1,CSSValueList:!1,ClientRectList:!1,DOMRectList:!1,DOMStringList:!1,DOMTokenList:!0,DataTransferItemList:!1,FileList:!1,HTMLAllCollection:!1,HTMLCollection:!1,HTMLFormElement:!1,HTMLSelectElement:!1,MediaList:!0,MimeTypeArray:!1,NamedNodeMap:!1,NodeList:!0,PaintRequestList:!1,Plugin:!1,PluginArray:!1,SVGLengthList:!1,SVGNumberList:!1,SVGPathSegList:!1,SVGPointList:!1,SVGStringList:!1,SVGTransformList:!1,SourceBufferList:!1,StyleSheetList:!0,TextTrackCueList:!1,TextTrackList:!1,TouchList:!1},p=i(d),v=0;v=0;--o){var u=this.tryEntries[o],a=u.completion;if("root"===u.tryLoc)return i("end");if(u.tryLoc<=this.prev){var c=r.call(u,"catchLoc"),s=r.call(u,"finallyLoc");if(c&&s){if(this.prev=0;--e){var 
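The file added below (node_modules/bowser/es5.js) is the stock minified UMD build of the bowser user-agent parser, so nothing in it is project-specific. For orientation, a minimal usage sketch; it assumes only bowser's documented 2.x API and nothing introduced by this diff:

// Sketch only: assumes bowser 2.x's published API surface.
import Bowser from 'bowser';

const ua =
  'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 ' +
  '(KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36';

const parser = Bowser.getParser(ua);

parser.getBrowserName();  // "Chrome"  -- matched by the browser parser table
parser.getOSName();       // "Windows" -- matched by the OS parser table
parser.getPlatformType(); // "desktop" -- matched by the platform parser table

// satisfies() compares versions with the compareVersions helper visible in
// the bundle; it returns true/false, or undefined when no rule applies.
parser.satisfies({ chrome: '>=100', 'internet explorer': '>10' });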
diff --git a/node_modules/bowser/es5.js b/node_modules/bowser/es5.js
new file mode 100644
index 00000000..bb8ec3dd
--- /dev/null
+++ b/node_modules/bowser/es5.js
@@ -0,0 +1 @@
+!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof
exports?exports.bowser=t():e.bowser=t()}(this,(function(){return function(e){var t={};function r(n){if(t[n])return t[n].exports;var i=t[n]={i:n,l:!1,exports:{}};return e[n].call(i.exports,i,i.exports,r),i.l=!0,i.exports}return r.m=e,r.c=t,r.d=function(e,t,n){r.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:n})},r.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},r.t=function(e,t){if(1&t&&(e=r(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(r.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var i in e)r.d(n,i,function(t){return e[t]}.bind(null,i));return n},r.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return r.d(t,"a",t),t},r.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},r.p="",r(r.s=90)}({17:function(e,t,r){"use strict";t.__esModule=!0,t.default=void 0;var n=r(18),i=function(){function e(){}return e.getFirstMatch=function(e,t){var r=t.match(e);return r&&r.length>0&&r[1]||""},e.getSecondMatch=function(e,t){var r=t.match(e);return r&&r.length>1&&r[2]||""},e.matchAndReturnConst=function(e,t,r){if(e.test(t))return r},e.getWindowsVersionName=function(e){switch(e){case"NT":return"NT";case"XP":return"XP";case"NT 5.0":return"2000";case"NT 5.1":return"XP";case"NT 5.2":return"2003";case"NT 6.0":return"Vista";case"NT 6.1":return"7";case"NT 6.2":return"8";case"NT 6.3":return"8.1";case"NT 10.0":return"10";default:return}},e.getMacOSVersionName=function(e){var t=e.split(".").splice(0,2).map((function(e){return parseInt(e,10)||0}));if(t.push(0),10===t[0])switch(t[1]){case 5:return"Leopard";case 6:return"Snow Leopard";case 7:return"Lion";case 8:return"Mountain Lion";case 9:return"Mavericks";case 10:return"Yosemite";case 11:return"El Capitan";case 12:return"Sierra";case 13:return"High Sierra";case 14:return"Mojave";case 15:return"Catalina";default:return}},e.getAndroidVersionName=function(e){var t=e.split(".").splice(0,2).map((function(e){return parseInt(e,10)||0}));if(t.push(0),!(1===t[0]&&t[1]<5))return 1===t[0]&&t[1]<6?"Cupcake":1===t[0]&&t[1]>=6?"Donut":2===t[0]&&t[1]<2?"Eclair":2===t[0]&&2===t[1]?"Froyo":2===t[0]&&t[1]>2?"Gingerbread":3===t[0]?"Honeycomb":4===t[0]&&t[1]<1?"Ice Cream Sandwich":4===t[0]&&t[1]<4?"Jelly Bean":4===t[0]&&t[1]>=4?"KitKat":5===t[0]?"Lollipop":6===t[0]?"Marshmallow":7===t[0]?"Nougat":8===t[0]?"Oreo":9===t[0]?"Pie":void 0},e.getVersionPrecision=function(e){return e.split(".").length},e.compareVersions=function(t,r,n){void 0===n&&(n=!1);var i=e.getVersionPrecision(t),s=e.getVersionPrecision(r),a=Math.max(i,s),o=0,u=e.map([t,r],(function(t){var r=a-e.getVersionPrecision(t),n=t+new Array(r+1).join(".0");return e.map(n.split("."),(function(e){return new Array(20-e.length).join("0")+e})).reverse()}));for(n&&(o=a-Math.min(i,s)),a-=1;a>=o;){if(u[0][a]>u[1][a])return 1;if(u[0][a]===u[1][a]){if(a===o)return 0;a-=1}else if(u[0][a]1?i-1:0),a=1;a0){var a=Object.keys(r),u=o.default.find(a,(function(e){return t.isOS(e)}));if(u){var d=this.satisfies(r[u]);if(void 0!==d)return d}var c=o.default.find(a,(function(e){return t.isPlatform(e)}));if(c){var f=this.satisfies(r[c]);if(void 0!==f)return f}}if(s>0){var l=Object.keys(i),h=o.default.find(l,(function(e){return t.isBrowser(e,!0)}));if(void 0!==h)return this.compareVersion(i[h])}},t.isBrowser=function(e,t){void 0===t&&(t=!1);var 
r=this.getBrowserName().toLowerCase(),n=e.toLowerCase(),i=o.default.getBrowserTypeByAlias(n);return t&&i&&(n=i.toLowerCase()),n===r},t.compareVersion=function(e){var t=[0],r=e,n=!1,i=this.getBrowserVersion();if("string"==typeof i)return">"===e[0]||"<"===e[0]?(r=e.substr(1),"="===e[1]?(n=!0,r=e.substr(2)):t=[],">"===e[0]?t.push(1):t.push(-1)):"="===e[0]?r=e.substr(1):"~"===e[0]&&(n=!0,r=e.substr(1)),t.indexOf(o.default.compareVersions(i,r,n))>-1},t.isOS=function(e){return this.getOSName(!0)===String(e).toLowerCase()},t.isPlatform=function(e){return this.getPlatformType(!0)===String(e).toLowerCase()},t.isEngine=function(e){return this.getEngineName(!0)===String(e).toLowerCase()},t.is=function(e,t){return void 0===t&&(t=!1),this.isBrowser(e,t)||this.isOS(e)||this.isPlatform(e)},t.some=function(e){var t=this;return void 0===e&&(e=[]),e.some((function(e){return t.is(e)}))},e}();t.default=d,e.exports=t.default},92:function(e,t,r){"use strict";t.__esModule=!0,t.default=void 0;var n,i=(n=r(17))&&n.__esModule?n:{default:n};var s=/version\/(\d+(\.?_?\d+)+)/i,a=[{test:[/googlebot/i],describe:function(e){var t={name:"Googlebot"},r=i.default.getFirstMatch(/googlebot\/(\d+(\.\d+))/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/opera/i],describe:function(e){var t={name:"Opera"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:opera)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/opr\/|opios/i],describe:function(e){var t={name:"Opera"},r=i.default.getFirstMatch(/(?:opr|opios)[\s/](\S+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/SamsungBrowser/i],describe:function(e){var t={name:"Samsung Internet for Android"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:SamsungBrowser)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/Whale/i],describe:function(e){var t={name:"NAVER Whale Browser"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:whale)[\s/](\d+(?:\.\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/MZBrowser/i],describe:function(e){var t={name:"MZ Browser"},r=i.default.getFirstMatch(/(?:MZBrowser)[\s/](\d+(?:\.\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/focus/i],describe:function(e){var t={name:"Focus"},r=i.default.getFirstMatch(/(?:focus)[\s/](\d+(?:\.\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/swing/i],describe:function(e){var t={name:"Swing"},r=i.default.getFirstMatch(/(?:swing)[\s/](\d+(?:\.\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/coast/i],describe:function(e){var t={name:"Opera Coast"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:coast)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/opt\/\d+(?:.?_?\d+)+/i],describe:function(e){var t={name:"Opera Touch"},r=i.default.getFirstMatch(/(?:opt)[\s/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/yabrowser/i],describe:function(e){var t={name:"Yandex Browser"},r=i.default.getFirstMatch(/(?:yabrowser)[\s/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/ucbrowser/i],describe:function(e){var t={name:"UC Browser"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:ucbrowser)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/Maxthon|mxios/i],describe:function(e){var t={name:"Maxthon"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:Maxthon|mxios)[\s/](\d+(\.?_?\d+)+)/i,e);return 
r&&(t.version=r),t}},{test:[/epiphany/i],describe:function(e){var t={name:"Epiphany"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:epiphany)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/puffin/i],describe:function(e){var t={name:"Puffin"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:puffin)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/sleipnir/i],describe:function(e){var t={name:"Sleipnir"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:sleipnir)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/k-meleon/i],describe:function(e){var t={name:"K-Meleon"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:k-meleon)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/micromessenger/i],describe:function(e){var t={name:"WeChat"},r=i.default.getFirstMatch(/(?:micromessenger)[\s/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/qqbrowser/i],describe:function(e){var t={name:/qqbrowserlite/i.test(e)?"QQ Browser Lite":"QQ Browser"},r=i.default.getFirstMatch(/(?:qqbrowserlite|qqbrowser)[/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/msie|trident/i],describe:function(e){var t={name:"Internet Explorer"},r=i.default.getFirstMatch(/(?:msie |rv:)(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/\sedg\//i],describe:function(e){var t={name:"Microsoft Edge"},r=i.default.getFirstMatch(/\sedg\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/edg([ea]|ios)/i],describe:function(e){var t={name:"Microsoft Edge"},r=i.default.getSecondMatch(/edg([ea]|ios)\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/vivaldi/i],describe:function(e){var t={name:"Vivaldi"},r=i.default.getFirstMatch(/vivaldi\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/seamonkey/i],describe:function(e){var t={name:"SeaMonkey"},r=i.default.getFirstMatch(/seamonkey\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/sailfish/i],describe:function(e){var t={name:"Sailfish"},r=i.default.getFirstMatch(/sailfish\s?browser\/(\d+(\.\d+)?)/i,e);return r&&(t.version=r),t}},{test:[/silk/i],describe:function(e){var t={name:"Amazon Silk"},r=i.default.getFirstMatch(/silk\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/phantom/i],describe:function(e){var t={name:"PhantomJS"},r=i.default.getFirstMatch(/phantomjs\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/slimerjs/i],describe:function(e){var t={name:"SlimerJS"},r=i.default.getFirstMatch(/slimerjs\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/blackberry|\bbb\d+/i,/rim\stablet/i],describe:function(e){var t={name:"BlackBerry"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/blackberry[\d]+\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/(web|hpw)[o0]s/i],describe:function(e){var t={name:"WebOS Browser"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/w(?:eb)?[o0]sbrowser\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/bada/i],describe:function(e){var t={name:"Bada"},r=i.default.getFirstMatch(/dolfin\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/tizen/i],describe:function(e){var t={name:"Tizen"},r=i.default.getFirstMatch(/(?:tizen\s?)?browser\/(\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/qupzilla/i],describe:function(e){var t={name:"QupZilla"},r=i.default.getFirstMatch(/(?:qupzilla)[\s/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return 
r&&(t.version=r),t}},{test:[/firefox|iceweasel|fxios/i],describe:function(e){var t={name:"Firefox"},r=i.default.getFirstMatch(/(?:firefox|iceweasel|fxios)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/electron/i],describe:function(e){var t={name:"Electron"},r=i.default.getFirstMatch(/(?:electron)\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/MiuiBrowser/i],describe:function(e){var t={name:"Miui"},r=i.default.getFirstMatch(/(?:MiuiBrowser)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/chromium/i],describe:function(e){var t={name:"Chromium"},r=i.default.getFirstMatch(/(?:chromium)[\s/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/chrome|crios|crmo/i],describe:function(e){var t={name:"Chrome"},r=i.default.getFirstMatch(/(?:chrome|crios|crmo)\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/GSA/i],describe:function(e){var t={name:"Google Search"},r=i.default.getFirstMatch(/(?:GSA)\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:function(e){var t=!e.test(/like android/i),r=e.test(/android/i);return t&&r},describe:function(e){var t={name:"Android Browser"},r=i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/playstation 4/i],describe:function(e){var t={name:"PlayStation 4"},r=i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/safari|applewebkit/i],describe:function(e){var t={name:"Safari"},r=i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/.*/i],describe:function(e){var t=-1!==e.search("\\(")?/^(.*)\/(.*)[ \t]\((.*)/:/^(.*)\/(.*) /;return{name:i.default.getFirstMatch(t,e),version:i.default.getSecondMatch(t,e)}}}];t.default=a,e.exports=t.default},93:function(e,t,r){"use strict";t.__esModule=!0,t.default=void 0;var n,i=(n=r(17))&&n.__esModule?n:{default:n},s=r(18);var a=[{test:[/Roku\/DVP/],describe:function(e){var t=i.default.getFirstMatch(/Roku\/DVP-(\d+\.\d+)/i,e);return{name:s.OS_MAP.Roku,version:t}}},{test:[/windows phone/i],describe:function(e){var t=i.default.getFirstMatch(/windows phone (?:os)?\s?(\d+(\.\d+)*)/i,e);return{name:s.OS_MAP.WindowsPhone,version:t}}},{test:[/windows /i],describe:function(e){var t=i.default.getFirstMatch(/Windows ((NT|XP)( \d\d?.\d)?)/i,e),r=i.default.getWindowsVersionName(t);return{name:s.OS_MAP.Windows,version:t,versionName:r}}},{test:[/Macintosh(.*?) 
FxiOS(.*?)\//],describe:function(e){var t={name:s.OS_MAP.iOS},r=i.default.getSecondMatch(/(Version\/)(\d[\d.]+)/,e);return r&&(t.version=r),t}},{test:[/macintosh/i],describe:function(e){var t=i.default.getFirstMatch(/mac os x (\d+(\.?_?\d+)+)/i,e).replace(/[_\s]/g,"."),r=i.default.getMacOSVersionName(t),n={name:s.OS_MAP.MacOS,version:t};return r&&(n.versionName=r),n}},{test:[/(ipod|iphone|ipad)/i],describe:function(e){var t=i.default.getFirstMatch(/os (\d+([_\s]\d+)*) like mac os x/i,e).replace(/[_\s]/g,".");return{name:s.OS_MAP.iOS,version:t}}},{test:function(e){var t=!e.test(/like android/i),r=e.test(/android/i);return t&&r},describe:function(e){var t=i.default.getFirstMatch(/android[\s/-](\d+(\.\d+)*)/i,e),r=i.default.getAndroidVersionName(t),n={name:s.OS_MAP.Android,version:t};return r&&(n.versionName=r),n}},{test:[/(web|hpw)[o0]s/i],describe:function(e){var t=i.default.getFirstMatch(/(?:web|hpw)[o0]s\/(\d+(\.\d+)*)/i,e),r={name:s.OS_MAP.WebOS};return t&&t.length&&(r.version=t),r}},{test:[/blackberry|\bbb\d+/i,/rim\stablet/i],describe:function(e){var t=i.default.getFirstMatch(/rim\stablet\sos\s(\d+(\.\d+)*)/i,e)||i.default.getFirstMatch(/blackberry\d+\/(\d+([_\s]\d+)*)/i,e)||i.default.getFirstMatch(/\bbb(\d+)/i,e);return{name:s.OS_MAP.BlackBerry,version:t}}},{test:[/bada/i],describe:function(e){var t=i.default.getFirstMatch(/bada\/(\d+(\.\d+)*)/i,e);return{name:s.OS_MAP.Bada,version:t}}},{test:[/tizen/i],describe:function(e){var t=i.default.getFirstMatch(/tizen[/\s](\d+(\.\d+)*)/i,e);return{name:s.OS_MAP.Tizen,version:t}}},{test:[/linux/i],describe:function(){return{name:s.OS_MAP.Linux}}},{test:[/CrOS/],describe:function(){return{name:s.OS_MAP.ChromeOS}}},{test:[/PlayStation 4/],describe:function(e){var t=i.default.getFirstMatch(/PlayStation 4[/\s](\d+(\.\d+)*)/i,e);return{name:s.OS_MAP.PlayStation4,version:t}}}];t.default=a,e.exports=t.default},94:function(e,t,r){"use strict";t.__esModule=!0,t.default=void 0;var n,i=(n=r(17))&&n.__esModule?n:{default:n},s=r(18);var a=[{test:[/googlebot/i],describe:function(){return{type:"bot",vendor:"Google"}}},{test:[/huawei/i],describe:function(e){var t=i.default.getFirstMatch(/(can-l01)/i,e)&&"Nova",r={type:s.PLATFORMS_MAP.mobile,vendor:"Huawei"};return t&&(r.model=t),r}},{test:[/nexus\s*(?:7|8|9|10).*/i],describe:function(){return{type:s.PLATFORMS_MAP.tablet,vendor:"Nexus"}}},{test:[/ipad/i],describe:function(){return{type:s.PLATFORMS_MAP.tablet,vendor:"Apple",model:"iPad"}}},{test:[/Macintosh(.*?) FxiOS(.*?)\//],describe:function(){return{type:s.PLATFORMS_MAP.tablet,vendor:"Apple",model:"iPad"}}},{test:[/kftt build/i],describe:function(){return{type:s.PLATFORMS_MAP.tablet,vendor:"Amazon",model:"Kindle Fire HD 7"}}},{test:[/silk/i],describe:function(){return{type:s.PLATFORMS_MAP.tablet,vendor:"Amazon"}}},{test:[/tablet(?! 
pc)/i],describe:function(){return{type:s.PLATFORMS_MAP.tablet}}},{test:function(e){var t=e.test(/ipod|iphone/i),r=e.test(/like (ipod|iphone)/i);return t&&!r},describe:function(e){var t=i.default.getFirstMatch(/(ipod|iphone)/i,e);return{type:s.PLATFORMS_MAP.mobile,vendor:"Apple",model:t}}},{test:[/nexus\s*[0-6].*/i,/galaxy nexus/i],describe:function(){return{type:s.PLATFORMS_MAP.mobile,vendor:"Nexus"}}},{test:[/[^-]mobi/i],describe:function(){return{type:s.PLATFORMS_MAP.mobile}}},{test:function(e){return"blackberry"===e.getBrowserName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.mobile,vendor:"BlackBerry"}}},{test:function(e){return"bada"===e.getBrowserName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.mobile}}},{test:function(e){return"windows phone"===e.getBrowserName()},describe:function(){return{type:s.PLATFORMS_MAP.mobile,vendor:"Microsoft"}}},{test:function(e){var t=Number(String(e.getOSVersion()).split(".")[0]);return"android"===e.getOSName(!0)&&t>=3},describe:function(){return{type:s.PLATFORMS_MAP.tablet}}},{test:function(e){return"android"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.mobile}}},{test:function(e){return"macos"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.desktop,vendor:"Apple"}}},{test:function(e){return"windows"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.desktop}}},{test:function(e){return"linux"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.desktop}}},{test:function(e){return"playstation 4"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.tv}}},{test:function(e){return"roku"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.tv}}}];t.default=a,e.exports=t.default},95:function(e,t,r){"use strict";t.__esModule=!0,t.default=void 0;var n,i=(n=r(17))&&n.__esModule?n:{default:n},s=r(18);var a=[{test:function(e){return"microsoft edge"===e.getBrowserName(!0)},describe:function(e){if(/\sedg\//i.test(e))return{name:s.ENGINE_MAP.Blink};var t=i.default.getFirstMatch(/edge\/(\d+(\.?_?\d+)+)/i,e);return{name:s.ENGINE_MAP.EdgeHTML,version:t}}},{test:[/trident/i],describe:function(e){var t={name:s.ENGINE_MAP.Trident},r=i.default.getFirstMatch(/trident\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:function(e){return e.test(/presto/i)},describe:function(e){var t={name:s.ENGINE_MAP.Presto},r=i.default.getFirstMatch(/presto\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:function(e){var t=e.test(/gecko/i),r=e.test(/like gecko/i);return t&&!r},describe:function(e){var t={name:s.ENGINE_MAP.Gecko},r=i.default.getFirstMatch(/gecko\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/(apple)?webkit\/537\.36/i],describe:function(){return{name:s.ENGINE_MAP.Blink}}},{test:[/(apple)?webkit/i],describe:function(e){var t={name:s.ENGINE_MAP.WebKit},r=i.default.getFirstMatch(/webkit\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}}];t.default=a,e.exports=t.default}})})); \ No newline at end of file diff --git a/node_modules/bowser/index.d.ts b/node_modules/bowser/index.d.ts new file mode 100644 index 00000000..d95656a4 --- /dev/null +++ b/node_modules/bowser/index.d.ts @@ -0,0 +1,250 @@ +// Type definitions for Bowser v2 +// Project: https://github.com/lancedikson/bowser +// Definitions by: Alexander P. 
Cerutti , + +export = Bowser; +export as namespace Bowser; + +declare namespace Bowser { + /** + * Creates a Parser instance + * @param {string} UA - User agent string + * @param {boolean} skipParsing + */ + + function getParser(UA: string, skipParsing?: boolean): Parser.Parser; + + /** + * Creates a Parser instance and runs Parser.getResult immediately + * @param UA - User agent string + * @returns {Parser.ParsedResult} + */ + + function parse(UA: string): Parser.ParsedResult; + + /** + * Constants exposed via bowser getters + */ + const BROWSER_MAP: Record<string, string>; + const ENGINE_MAP: Record<string, string>; + const OS_MAP: Record<string, string>; + const PLATFORMS_MAP: Record<string, string>; + + namespace Parser { + interface Parser { + constructor(UA: string, skipParsing?: boolean): Parser.Parser; + + /** + * Get parsed browser object + * @return {BrowserDetails} Browser's details + */ + + getBrowser(): BrowserDetails; + + /** + * Get browser's name + * @param {Boolean} [toLowerCase] return lower-cased value + * @return {String} Browser's name or an empty string + */ + + getBrowserName(toLowerCase?: boolean): string; + + /** + * Get browser's version + * @return {String} version of browser + */ + + getBrowserVersion(): string; + + /** + * Get OS + * @return {OSDetails} - OS Details + * + * @example + * this.getOS(); // { + * // name: 'macOS', + * // version: '10.11.12', + * // } + */ + + getOS(): OSDetails; + + /** + * Get OS name + * @param {Boolean} [toLowerCase] return lower-cased value + * @return {String} name of the OS — macOS, Windows, Linux, etc. + */ + + getOSName(toLowerCase?: boolean): string; + + /** + * Get OS version + * @return {String} full version with dots ('10.11.12', '5.6', etc) + */ + + getOSVersion(): string; + + /** + * Get parsed platform + * @returns {PlatformDetails} + */ + + getPlatform(): PlatformDetails; + + /** + * Get platform name + * @param {boolean} toLowerCase + */ + + getPlatformType(toLowerCase?: boolean): string; + + /** + * Get parsed engine + * @returns {EngineDetails} + */ + + getEngine(): EngineDetails; + + /** + * Get parsed engine's name + * @returns {String} Engine's name or an empty string + */ + + getEngineName(): string; + + /** + * Get parsed result + * @return {ParsedResult} + */ + + getResult(): ParsedResult; + + /** + * Get UserAgent string of current Parser instance + * @return {String} User-Agent String of the current object + */ + + getUA(): string; + + /** + * Is anything? Check if the browser is called "anything", + * the OS called "anything" or the platform called "anything" + * @param {String} anything + * @returns {Boolean} + */ + + is(anything: any): boolean; + + /** + * Parse full information about the browser + * @returns {Parser.Parser} + */ + + parse(): Parser.Parser; + + /** + * Get parsed browser object + * @returns {BrowserDetails} + */ + + parseBrowser(): BrowserDetails; + + /** + * Get parsed engine + * @returns {EngineDetails} + */ + + parseEngine(): EngineDetails; + + /** + * Parse OS and save it to this.parsedResult.os + * @returns {OSDetails} + */ + + parseOS(): OSDetails; + + /** + * Get parsed platform + * @returns {PlatformDetails} + */ + + parsePlatform(): PlatformDetails; + + /** + * Check if parsed browser matches certain conditions + * + * @param {checkTree} checkTree It's a one- or two-layered object, + * which can include a platform or an OS on the first layer + * and should have browser specs on the bottom layer + * + * @returns {Boolean|undefined} Whether the browser satisfies the set conditions or not.
+ * Returns `undefined` when the browser is not described in the checkTree object. + * + * @example + * const browser = Bowser.getParser(UA); + * if (browser.satisfies({chrome: '>118.01.1322' })) + * // or with os + * if (browser.satisfies({windows: { chrome: '>118.01.1322' } })) + * // or with platforms + * if (browser.satisfies({desktop: { chrome: '>118.01.1322' } })) + */ + + satisfies(checkTree: checkTree): boolean | undefined; + + /** + * Check if the browser name equals the passed string + * @param browserName The string to compare with the browser name + * @param [includingAlias=false] The flag showing whether alias will be included into comparison + * @returns {boolean} + */ + + + isBrowser(browserName: string, includingAlias?: boolean): boolean; + + /** + * Check if any of the given values satisfies `.is(anything)` + * @param {string[]} anythings + * @returns {boolean} true if at least one condition is satisfied, false otherwise. + */ + + some(anythings: string[]): boolean | undefined; + + /** + * Test a UA string for a regexp + * @param regex + * @returns {boolean} true if the regex matches the UA, false otherwise. + */ + + test(regex: RegExp): boolean; + } + + interface ParsedResult { + browser: BrowserDetails; + os: OSDetails; + platform: PlatformDetails; + engine: EngineDetails; + } + + interface Details { + name?: string; + version?: string; + } + + interface OSDetails extends Details { + versionName?: string; + } + + interface PlatformDetails { + type?: string; + vendor?: string; + model?: string; + } + + type BrowserDetails = Details; + type EngineDetails = Details; + + interface checkTree { + [key: string]: any; + } + } +}
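These typings cover Bowser's whole public surface: `Bowser.getParser` returns a `Parser.Parser`, `Bowser.parse` returns a `Parser.ParsedResult`, and `satisfies` walks a `checkTree` whose first layer may name an OS or platform and whose leaves hold browser version constraints. A minimal usage sketch (the user-agent string is illustrative, not taken from this changeset):

```js
const Bowser = require('bowser');

// Illustrative UA string for desktop Chrome 78 on macOS.
const ua = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36';

const parser = Bowser.getParser(ua);
console.log(parser.getBrowserName()); // 'Chrome'

// First layer: OS or platform; leaves: browser version constraints.
const ok = parser.satisfies({
  desktop: { chrome: '>=78' }, // checked only on desktop platforms
  mobile: { safari: '>=12' },  // checked only on mobile platforms
});
console.log(ok); // true here; undefined when the browser is not described
```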
diff --git a/node_modules/bowser/package.json b/node_modules/bowser/package.json new file mode 100644 index 00000000..3fb7c83f --- /dev/null +++ b/node_modules/bowser/package.json @@ -0,0 +1,83 @@ +{ + "name": "bowser", + "version": "2.11.0", + "description": "Lightweight browser detector", + "keywords": [ + "browser", + "useragent", + "user-agent", + "parser", + "ua", + "detection", + "ender", + "sniff" + ], + "homepage": "https://github.com/lancedikson/bowser", + "author": "Dustin Diaz (http://dustindiaz.com)", + "contributors": [ + { + "name": "Denis Demchenko", + "url": "http://twitter.com/lancedikson" + } + ], + "main": "es5.js", + "browser": "es5.js", + "module": "src/bowser.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git+https://github.com/lancedikson/bowser.git" + }, + "devDependencies": { + "@babel/cli": "^7.11.6", + "@babel/core": "^7.8.0", + "@babel/polyfill": "^7.8.3", + "@babel/preset-env": "^7.8.2", + "@babel/register": "^7.8.3", + "ava": "^3.0.0", + "babel-eslint": "^10.0.3", + "babel-loader": "^8.0.6", + "babel-plugin-add-module-exports": "^1.0.2", + "babel-plugin-istanbul": "^6.0.0", + "compression-webpack-plugin": "^4.0.0", + "coveralls": "^3.0.6", + "docdash": "^1.1.1", + "eslint": "^6.5.1", + "eslint-config-airbnb-base": "^13.2.0", + "eslint-plugin-ava": "^10.0.0", + "eslint-plugin-import": "^2.18.2", + "gh-pages": "^3.0.0", + "jsdoc": "^3.6.3", + "nyc": "^15.0.0", + "sinon": "^9.0.0", + "testem": "^3.0.0", + "webpack": "^4.41.0", + "webpack-bundle-analyzer": "^3.5.2", + "webpack-cli": "^3.3.9", + "yamljs": "^0.3.0" + }, + "ava": { + "require": [ + "@babel/register" + ] + }, + "bugs": { + "url": "https://github.com/lancedikson/bowser/issues" + }, + "directories": { + "test": "test" + }, + "scripts": { + "build": "webpack --config webpack.config.js", + "generate-and-deploy-docs": "npm run generate-docs && gh-pages --dist docs --dest docs", + "watch": "webpack --watch --config webpack.config.js", + "prepublishOnly": "npm run build", + "lint": "eslint ./src", + "testem": "testem", + "test": "nyc --reporter=html --reporter=text ava", + "test:watch": "ava --watch", + "coverage": "nyc report --reporter=text-lcov | coveralls", + "generate-docs": "jsdoc -c jsdoc.json" + }, + "license": "MIT" +} diff --git a/node_modules/bowser/src/bowser.js b/node_modules/bowser/src/bowser.js new file mode 100644 index 00000000..f79e6e0e --- /dev/null +++ b/node_modules/bowser/src/bowser.js @@ -0,0 +1,77 @@ +/*! + * Bowser - a browser detector + * https://github.com/lancedikson/bowser + * MIT License | (c) Dustin Diaz 2012-2015 + * MIT License | (c) Denis Demchenko 2015-2019 + */ +import Parser from './parser.js'; +import { + BROWSER_MAP, + ENGINE_MAP, + OS_MAP, + PLATFORMS_MAP, +} from './constants.js'; + +/** + * Bowser class. + * Keep it as simple as it can be. + * It's supposed to work with collections of {@link Parser} instances + * rather than solve one-instance problems. + * All the one-instance stuff is located in the Parser class. + * + * @class + * @classdesc Bowser is a static object that provides an API to the Parsers + * @hideconstructor + */ +class Bowser { + /** + * Creates a {@link Parser} instance + * + * @param {String} UA UserAgent string + * @param {Boolean} [skipParsing=false] Will make the Parser postpone parsing until you ask it + * explicitly. Same as `skipParsing` for {@link Parser}. + * @returns {Parser} + * @throws {Error} when UA is not a String + * + * @example + * const parser = Bowser.getParser(window.navigator.userAgent); + * const result = parser.getResult(); + */ + static getParser(UA, skipParsing = false) { + if (typeof UA !== 'string') { + throw new Error('UserAgent should be a string'); + } + return new Parser(UA, skipParsing); + } + + /** + * Creates a {@link Parser} instance and runs {@link Parser.getResult} immediately + * + * @param UA + * @return {ParsedResult} + * + * @example + * const result = Bowser.parse(window.navigator.userAgent); + */ + static parse(UA) { + return (new Parser(UA)).getResult(); + } + + static get BROWSER_MAP() { + return BROWSER_MAP; + } + + static get ENGINE_MAP() { + return ENGINE_MAP; + } + + static get OS_MAP() { + return OS_MAP; + } + + static get PLATFORMS_MAP() { + return PLATFORMS_MAP; + } +} + +export default Bowser;
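The facade is deliberately thin: `getParser` only validates the UA string and hands off to `Parser`, and `skipParsing` lets callers that need a single slice of the result avoid the cost of a full parse. A small sketch of both entry points (UA string illustrative):

```js
import Bowser from 'bowser';

// Illustrative UA string for Firefox 115 on Linux.
const ua = 'Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/115.0';

// Deferred mode: construct without parsing, then run only the browser step.
const parser = Bowser.getParser(ua, true);
console.log(parser.parseBrowser()); // { name: 'Firefox', version: '115.0' }

// One-shot mode when the whole result is wanted anyway.
console.log(Bowser.parse(ua).os.name); // 'Linux'
```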
diff --git a/node_modules/bowser/src/constants.js b/node_modules/bowser/src/constants.js new file mode 100644 index 00000000..f3350325 --- /dev/null +++ b/node_modules/bowser/src/constants.js @@ -0,0 +1,116 @@ +// NOTE: this list must be up-to-date with browsers listed in +// test/acceptance/useragentstrings.yml +export const BROWSER_ALIASES_MAP = { + 'Amazon Silk': 'amazon_silk', + 'Android Browser': 'android', + Bada: 'bada', + BlackBerry: 'blackberry', + Chrome: 'chrome', + Chromium: 'chromium', + Electron: 'electron', + Epiphany: 'epiphany', + Firefox: 'firefox', + Focus: 'focus', + Generic: 'generic', + 'Google Search': 'google_search', + Googlebot: 'googlebot', + 'Internet Explorer': 'ie', + 'K-Meleon': 'k_meleon', + Maxthon: 'maxthon', + 'Microsoft Edge': 'edge', + 'MZ Browser': 'mz', + 'NAVER Whale Browser': 'naver', + Opera: 'opera', + 'Opera Coast': 'opera_coast', + PhantomJS: 'phantomjs', + Puffin: 'puffin', + QupZilla: 'qupzilla', + QQ: 'qq', + QQLite: 'qqlite', + Safari: 'safari', + Sailfish: 'sailfish', + 'Samsung Internet for Android': 'samsung_internet', + SeaMonkey: 'seamonkey', + Sleipnir: 'sleipnir', + Swing: 'swing', + Tizen: 'tizen', + 'UC Browser': 'uc', + Vivaldi: 'vivaldi', + 'WebOS Browser': 'webos', + WeChat: 'wechat', + 'Yandex Browser': 'yandex', + Roku: 'roku', +}; + +export const BROWSER_MAP = { + amazon_silk: 'Amazon Silk', + android: 'Android Browser', + bada: 'Bada', + blackberry: 'BlackBerry', + chrome: 'Chrome', + chromium: 'Chromium', + electron: 'Electron', + epiphany: 'Epiphany', + firefox: 'Firefox', + focus: 'Focus', + generic: 'Generic', + googlebot: 'Googlebot', + google_search: 'Google Search', + ie: 'Internet Explorer', + k_meleon: 'K-Meleon', + maxthon: 'Maxthon', + edge: 'Microsoft Edge', + mz: 'MZ Browser', + naver: 'NAVER Whale Browser', + opera: 'Opera', + opera_coast: 'Opera Coast', + phantomjs: 'PhantomJS', + puffin: 'Puffin', + qupzilla: 'QupZilla', + qq: 'QQ Browser', + qqlite: 'QQ Browser Lite', + safari: 'Safari', + sailfish: 'Sailfish', + samsung_internet: 'Samsung Internet for Android', + seamonkey: 'SeaMonkey', + sleipnir: 'Sleipnir', + swing: 'Swing', + tizen: 'Tizen', + uc: 'UC Browser', + vivaldi: 'Vivaldi', + webos: 'WebOS Browser', + wechat: 'WeChat', + yandex: 'Yandex Browser', +}; + +export const PLATFORMS_MAP = { + tablet: 'tablet', + mobile: 'mobile', + desktop: 'desktop', + tv: 'tv', +}; + +export const OS_MAP = { + WindowsPhone: 'Windows Phone', + Windows: 'Windows', + MacOS: 'macOS', + iOS: 'iOS', + Android: 'Android', + WebOS: 'WebOS', + BlackBerry: 'BlackBerry', + Bada: 'Bada', + Tizen: 'Tizen', + Linux: 'Linux', + ChromeOS: 'Chrome OS', + PlayStation4: 'PlayStation 4', + Roku: 'Roku', +}; + +export const ENGINE_MAP = { + EdgeHTML: 'EdgeHTML', + Blink: 'Blink', + Trident: 'Trident', + Presto: 'Presto', + Gecko: 'Gecko', + WebKit: 'WebKit', +}; diff --git a/node_modules/bowser/src/parser-browsers.js b/node_modules/bowser/src/parser-browsers.js new file mode 100644 index 00000000..ee7840c5 --- /dev/null +++ b/node_modules/bowser/src/parser-browsers.js @@ -0,0 +1,700 @@ +/** + * Browsers' descriptors + * + * The idea of descriptors is simple. You should know two simple things about them: + * 1. Every descriptor has a method or property called `test` and a `describe` method. + * 2. The order of descriptors is important. + * + * More details: + * 1. The method or property `test` serves as a way to detect whether the UA string + * matches a certain browser or not. The `describe` method helps to make a result + * object with params that show some browser-specific things: name, version, etc. + * 2. The order of descriptors is important because a Parser goes through them one by one + * in order. For example, if you insert Chrome's descriptor as the first one, + * more than half of the browsers will be described as Chrome, because they will pass + * the Chrome descriptor's test. + * + * A descriptor's `test` can be a property with an array of RegExps, where every RegExp + * will be applied to the UA string to test whether it matches or not. + * If a descriptor has two or more regexps in the `test` array, it tests them one by one + * as a logical OR. The Parser stops as soon as it finds any RegExp that matches the UA. + * + * Or `test` can be a method. In that case it gets a Parser instance and should + * return true/false to let the Parser know whether this browser descriptor matches the UA or not.
+ */ + +import Utils from './utils.js'; + +const commonVersionIdentifier = /version\/(\d+(\.?_?\d+)+)/i; + +const browsersList = [ + /* Googlebot */ + { + test: [/googlebot/i], + describe(ua) { + const browser = { + name: 'Googlebot', + }; + const version = Utils.getFirstMatch(/googlebot\/(\d+(\.\d+))/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Opera < 13.0 */ + { + test: [/opera/i], + describe(ua) { + const browser = { + name: 'Opera', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:opera)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Opera > 13.0 */ + { + test: [/opr\/|opios/i], + describe(ua) { + const browser = { + name: 'Opera', + }; + const version = Utils.getFirstMatch(/(?:opr|opios)[\s/](\S+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/SamsungBrowser/i], + describe(ua) { + const browser = { + name: 'Samsung Internet for Android', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:SamsungBrowser)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/Whale/i], + describe(ua) { + const browser = { + name: 'NAVER Whale Browser', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:whale)[\s/](\d+(?:\.\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/MZBrowser/i], + describe(ua) { + const browser = { + name: 'MZ Browser', + }; + const version = Utils.getFirstMatch(/(?:MZBrowser)[\s/](\d+(?:\.\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/focus/i], + describe(ua) { + const browser = { + name: 'Focus', + }; + const version = Utils.getFirstMatch(/(?:focus)[\s/](\d+(?:\.\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/swing/i], + describe(ua) { + const browser = { + name: 'Swing', + }; + const version = Utils.getFirstMatch(/(?:swing)[\s/](\d+(?:\.\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/coast/i], + describe(ua) { + const browser = { + name: 'Opera Coast', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:coast)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/opt\/\d+(?:.?_?\d+)+/i], + describe(ua) { + const browser = { + name: 'Opera Touch', + }; + const version = Utils.getFirstMatch(/(?:opt)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/yabrowser/i], + describe(ua) { + const browser = { + name: 'Yandex Browser', + }; + const version = Utils.getFirstMatch(/(?:yabrowser)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + 
test: [/ucbrowser/i], + describe(ua) { + const browser = { + name: 'UC Browser', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:ucbrowser)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/Maxthon|mxios/i], + describe(ua) { + const browser = { + name: 'Maxthon', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:Maxthon|mxios)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/epiphany/i], + describe(ua) { + const browser = { + name: 'Epiphany', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:epiphany)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/puffin/i], + describe(ua) { + const browser = { + name: 'Puffin', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:puffin)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/sleipnir/i], + describe(ua) { + const browser = { + name: 'Sleipnir', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:sleipnir)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/k-meleon/i], + describe(ua) { + const browser = { + name: 'K-Meleon', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:k-meleon)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/micromessenger/i], + describe(ua) { + const browser = { + name: 'WeChat', + }; + const version = Utils.getFirstMatch(/(?:micromessenger)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/qqbrowser/i], + describe(ua) { + const browser = { + name: (/qqbrowserlite/i).test(ua) ? 
'QQ Browser Lite' : 'QQ Browser', + }; + const version = Utils.getFirstMatch(/(?:qqbrowserlite|qqbrowser)[/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/msie|trident/i], + describe(ua) { + const browser = { + name: 'Internet Explorer', + }; + const version = Utils.getFirstMatch(/(?:msie |rv:)(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/\sedg\//i], + describe(ua) { + const browser = { + name: 'Microsoft Edge', + }; + + const version = Utils.getFirstMatch(/\sedg\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/edg([ea]|ios)/i], + describe(ua) { + const browser = { + name: 'Microsoft Edge', + }; + + const version = Utils.getSecondMatch(/edg([ea]|ios)\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/vivaldi/i], + describe(ua) { + const browser = { + name: 'Vivaldi', + }; + const version = Utils.getFirstMatch(/vivaldi\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/seamonkey/i], + describe(ua) { + const browser = { + name: 'SeaMonkey', + }; + const version = Utils.getFirstMatch(/seamonkey\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/sailfish/i], + describe(ua) { + const browser = { + name: 'Sailfish', + }; + + const version = Utils.getFirstMatch(/sailfish\s?browser\/(\d+(\.\d+)?)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/silk/i], + describe(ua) { + const browser = { + name: 'Amazon Silk', + }; + const version = Utils.getFirstMatch(/silk\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/phantom/i], + describe(ua) { + const browser = { + name: 'PhantomJS', + }; + const version = Utils.getFirstMatch(/phantomjs\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/slimerjs/i], + describe(ua) { + const browser = { + name: 'SlimerJS', + }; + const version = Utils.getFirstMatch(/slimerjs\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/blackberry|\bbb\d+/i, /rim\stablet/i], + describe(ua) { + const browser = { + name: 'BlackBerry', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/blackberry[\d]+\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/(web|hpw)[o0]s/i], + describe(ua) { + const browser = { + name: 'WebOS Browser', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/w(?:eb)?[o0]sbrowser\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/bada/i], + describe(ua) { + const browser = { + name: 'Bada', + }; + const version = Utils.getFirstMatch(/dolfin\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/tizen/i], + describe(ua) { + const browser = { + name: 'Tizen', + }; + const version = 
Utils.getFirstMatch(/(?:tizen\s?)?browser\/(\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/qupzilla/i], + describe(ua) { + const browser = { + name: 'QupZilla', + }; + const version = Utils.getFirstMatch(/(?:qupzilla)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/firefox|iceweasel|fxios/i], + describe(ua) { + const browser = { + name: 'Firefox', + }; + const version = Utils.getFirstMatch(/(?:firefox|iceweasel|fxios)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/electron/i], + describe(ua) { + const browser = { + name: 'Electron', + }; + const version = Utils.getFirstMatch(/(?:electron)\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/MiuiBrowser/i], + describe(ua) { + const browser = { + name: 'Miui', + }; + const version = Utils.getFirstMatch(/(?:MiuiBrowser)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/chromium/i], + describe(ua) { + const browser = { + name: 'Chromium', + }; + const version = Utils.getFirstMatch(/(?:chromium)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/chrome|crios|crmo/i], + describe(ua) { + const browser = { + name: 'Chrome', + }; + const version = Utils.getFirstMatch(/(?:chrome|crios|crmo)\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/GSA/i], + describe(ua) { + const browser = { + name: 'Google Search', + }; + const version = Utils.getFirstMatch(/(?:GSA)\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Android Browser */ + { + test(parser) { + const notLikeAndroid = !parser.test(/like android/i); + const butAndroid = parser.test(/android/i); + return notLikeAndroid && butAndroid; + }, + describe(ua) { + const browser = { + name: 'Android Browser', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* PlayStation 4 */ + { + test: [/playstation 4/i], + describe(ua) { + const browser = { + name: 'PlayStation 4', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Safari */ + { + test: [/safari|applewebkit/i], + describe(ua) { + const browser = { + name: 'Safari', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Something else */ + { + test: [/.*/i], + describe(ua) { + /* Here we try to make sure that there are explicit details about the device + * in order to decide what regexp exactly we want to apply + * (as there is a specific decision based on that conclusion) + */ + const regexpWithoutDeviceSpec = /^(.*)\/(.*) /; + const regexpWithDeviceSpec = /^(.*)\/(.*)[ \t]\((.*)/; + const hasDeviceSpec = ua.search('\\(') !== -1; + const regexp = hasDeviceSpec ? 
regexpWithDeviceSpec : regexpWithoutDeviceSpec; + return { + name: Utils.getFirstMatch(regexp, ua), + version: Utils.getSecondMatch(regexp, ua), + }; + }, + }, +]; + +export default browsersList; diff --git a/node_modules/bowser/src/parser-engines.js b/node_modules/bowser/src/parser-engines.js new file mode 100644 index 00000000..d46d0e51 --- /dev/null +++ b/node_modules/bowser/src/parser-engines.js @@ -0,0 +1,120 @@ +import Utils from './utils.js'; +import { ENGINE_MAP } from './constants.js'; + +/* + * More specific goes first + */ +export default [ + /* EdgeHTML */ + { + test(parser) { + return parser.getBrowserName(true) === 'microsoft edge'; + }, + describe(ua) { + const isBlinkBased = /\sedg\//i.test(ua); + + // return blink if it's blink-based one + if (isBlinkBased) { + return { + name: ENGINE_MAP.Blink, + }; + } + + // otherwise match the version and return EdgeHTML + const version = Utils.getFirstMatch(/edge\/(\d+(\.?_?\d+)+)/i, ua); + + return { + name: ENGINE_MAP.EdgeHTML, + version, + }; + }, + }, + + /* Trident */ + { + test: [/trident/i], + describe(ua) { + const engine = { + name: ENGINE_MAP.Trident, + }; + + const version = Utils.getFirstMatch(/trident\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + engine.version = version; + } + + return engine; + }, + }, + + /* Presto */ + { + test(parser) { + return parser.test(/presto/i); + }, + describe(ua) { + const engine = { + name: ENGINE_MAP.Presto, + }; + + const version = Utils.getFirstMatch(/presto\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + engine.version = version; + } + + return engine; + }, + }, + + /* Gecko */ + { + test(parser) { + const isGecko = parser.test(/gecko/i); + const likeGecko = parser.test(/like gecko/i); + return isGecko && !likeGecko; + }, + describe(ua) { + const engine = { + name: ENGINE_MAP.Gecko, + }; + + const version = Utils.getFirstMatch(/gecko\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + engine.version = version; + } + + return engine; + }, + }, + + /* Blink */ + { + test: [/(apple)?webkit\/537\.36/i], + describe() { + return { + name: ENGINE_MAP.Blink, + }; + }, + }, + + /* WebKit */ + { + test: [/(apple)?webkit/i], + describe(ua) { + const engine = { + name: ENGINE_MAP.WebKit, + }; + + const version = Utils.getFirstMatch(/webkit\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + engine.version = version; + } + + return engine; + }, + }, +]; diff --git a/node_modules/bowser/src/parser-os.js b/node_modules/bowser/src/parser-os.js new file mode 100644 index 00000000..4c516dd6 --- /dev/null +++ b/node_modules/bowser/src/parser-os.js @@ -0,0 +1,199 @@ +import Utils from './utils.js'; +import { OS_MAP } from './constants.js'; + +export default [ + /* Roku */ + { + test: [/Roku\/DVP/], + describe(ua) { + const version = Utils.getFirstMatch(/Roku\/DVP-(\d+\.\d+)/i, ua); + return { + name: OS_MAP.Roku, + version, + }; + }, + }, + + /* Windows Phone */ + { + test: [/windows phone/i], + describe(ua) { + const version = Utils.getFirstMatch(/windows phone (?:os)?\s?(\d+(\.\d+)*)/i, ua); + return { + name: OS_MAP.WindowsPhone, + version, + }; + }, + }, + + /* Windows */ + { + test: [/windows /i], + describe(ua) { + const version = Utils.getFirstMatch(/Windows ((NT|XP)( \d\d?.\d)?)/i, ua); + const versionName = Utils.getWindowsVersionName(version); + + return { + name: OS_MAP.Windows, + version, + versionName, + }; + }, + }, + + /* Firefox on iPad */ + { + test: [/Macintosh(.*?) 
FxiOS(.*?)\//], + describe(ua) { + const result = { + name: OS_MAP.iOS, + }; + const version = Utils.getSecondMatch(/(Version\/)(\d[\d.]+)/, ua); + if (version) { + result.version = version; + } + return result; + }, + }, + + /* macOS */ + { + test: [/macintosh/i], + describe(ua) { + const version = Utils.getFirstMatch(/mac os x (\d+(\.?_?\d+)+)/i, ua).replace(/[_\s]/g, '.'); + const versionName = Utils.getMacOSVersionName(version); + + const os = { + name: OS_MAP.MacOS, + version, + }; + if (versionName) { + os.versionName = versionName; + } + return os; + }, + }, + + /* iOS */ + { + test: [/(ipod|iphone|ipad)/i], + describe(ua) { + const version = Utils.getFirstMatch(/os (\d+([_\s]\d+)*) like mac os x/i, ua).replace(/[_\s]/g, '.'); + + return { + name: OS_MAP.iOS, + version, + }; + }, + }, + + /* Android */ + { + test(parser) { + const notLikeAndroid = !parser.test(/like android/i); + const butAndroid = parser.test(/android/i); + return notLikeAndroid && butAndroid; + }, + describe(ua) { + const version = Utils.getFirstMatch(/android[\s/-](\d+(\.\d+)*)/i, ua); + const versionName = Utils.getAndroidVersionName(version); + const os = { + name: OS_MAP.Android, + version, + }; + if (versionName) { + os.versionName = versionName; + } + return os; + }, + }, + + /* WebOS */ + { + test: [/(web|hpw)[o0]s/i], + describe(ua) { + const version = Utils.getFirstMatch(/(?:web|hpw)[o0]s\/(\d+(\.\d+)*)/i, ua); + const os = { + name: OS_MAP.WebOS, + }; + + if (version && version.length) { + os.version = version; + } + return os; + }, + }, + + /* BlackBerry */ + { + test: [/blackberry|\bbb\d+/i, /rim\stablet/i], + describe(ua) { + const version = Utils.getFirstMatch(/rim\stablet\sos\s(\d+(\.\d+)*)/i, ua) + || Utils.getFirstMatch(/blackberry\d+\/(\d+([_\s]\d+)*)/i, ua) + || Utils.getFirstMatch(/\bbb(\d+)/i, ua); + + return { + name: OS_MAP.BlackBerry, + version, + }; + }, + }, + + /* Bada */ + { + test: [/bada/i], + describe(ua) { + const version = Utils.getFirstMatch(/bada\/(\d+(\.\d+)*)/i, ua); + + return { + name: OS_MAP.Bada, + version, + }; + }, + }, + + /* Tizen */ + { + test: [/tizen/i], + describe(ua) { + const version = Utils.getFirstMatch(/tizen[/\s](\d+(\.\d+)*)/i, ua); + + return { + name: OS_MAP.Tizen, + version, + }; + }, + }, + + /* Linux */ + { + test: [/linux/i], + describe() { + return { + name: OS_MAP.Linux, + }; + }, + }, + + /* Chrome OS */ + { + test: [/CrOS/], + describe() { + return { + name: OS_MAP.ChromeOS, + }; + }, + }, + + /* Playstation 4 */ + { + test: [/PlayStation 4/], + describe(ua) { + const version = Utils.getFirstMatch(/PlayStation 4[/\s](\d+(\.\d+)*)/i, ua); + return { + name: OS_MAP.PlayStation4, + version, + }; + }, + }, +]; diff --git a/node_modules/bowser/src/parser-platforms.js b/node_modules/bowser/src/parser-platforms.js new file mode 100644 index 00000000..48b1eb10 --- /dev/null +++ b/node_modules/bowser/src/parser-platforms.js @@ -0,0 +1,266 @@ +import Utils from './utils.js'; +import { PLATFORMS_MAP } from './constants.js'; + +/* + * Tablets go first since usually they have more specific + * signs to detect. 
+ */ + +export default [ + /* Googlebot */ + { + test: [/googlebot/i], + describe() { + return { + type: 'bot', + vendor: 'Google', + }; + }, + }, + + /* Huawei */ + { + test: [/huawei/i], + describe(ua) { + const model = Utils.getFirstMatch(/(can-l01)/i, ua) && 'Nova'; + const platform = { + type: PLATFORMS_MAP.mobile, + vendor: 'Huawei', + }; + if (model) { + platform.model = model; + } + return platform; + }, + }, + + /* Nexus Tablet */ + { + test: [/nexus\s*(?:7|8|9|10).*/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Nexus', + }; + }, + }, + + /* iPad */ + { + test: [/ipad/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Apple', + model: 'iPad', + }; + }, + }, + + /* Firefox on iPad */ + { + test: [/Macintosh(.*?) FxiOS(.*?)\//], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Apple', + model: 'iPad', + }; + }, + }, + + /* Amazon Kindle Fire */ + { + test: [/kftt build/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Amazon', + model: 'Kindle Fire HD 7', + }; + }, + }, + + /* Another Amazon Tablet with Silk */ + { + test: [/silk/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Amazon', + }; + }, + }, + + /* Tablet */ + { + test: [/tablet(?! pc)/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + }; + }, + }, + + /* iPod/iPhone */ + { + test(parser) { + const iDevice = parser.test(/ipod|iphone/i); + const likeIDevice = parser.test(/like (ipod|iphone)/i); + return iDevice && !likeIDevice; + }, + describe(ua) { + const model = Utils.getFirstMatch(/(ipod|iphone)/i, ua); + return { + type: PLATFORMS_MAP.mobile, + vendor: 'Apple', + model, + }; + }, + }, + + /* Nexus Mobile */ + { + test: [/nexus\s*[0-6].*/i, /galaxy nexus/i], + describe() { + return { + type: PLATFORMS_MAP.mobile, + vendor: 'Nexus', + }; + }, + }, + + /* Mobile */ + { + test: [/[^-]mobi/i], + describe() { + return { + type: PLATFORMS_MAP.mobile, + }; + }, + }, + + /* BlackBerry */ + { + test(parser) { + return parser.getBrowserName(true) === 'blackberry'; + }, + describe() { + return { + type: PLATFORMS_MAP.mobile, + vendor: 'BlackBerry', + }; + }, + }, + + /* Bada */ + { + test(parser) { + return parser.getBrowserName(true) === 'bada'; + }, + describe() { + return { + type: PLATFORMS_MAP.mobile, + }; + }, + }, + + /* Windows Phone */ + { + test(parser) { + return parser.getBrowserName() === 'windows phone'; + }, + describe() { + return { + type: PLATFORMS_MAP.mobile, + vendor: 'Microsoft', + }; + }, + }, + + /* Android Tablet */ + { + test(parser) { + const osMajorVersion = Number(String(parser.getOSVersion()).split('.')[0]); + return parser.getOSName(true) === 'android' && (osMajorVersion >= 3); + }, + describe() { + return { + type: PLATFORMS_MAP.tablet, + }; + }, + }, + + /* Android Mobile */ + { + test(parser) { + return parser.getOSName(true) === 'android'; + }, + describe() { + return { + type: PLATFORMS_MAP.mobile, + }; + }, + }, + + /* desktop */ + { + test(parser) { + return parser.getOSName(true) === 'macos'; + }, + describe() { + return { + type: PLATFORMS_MAP.desktop, + vendor: 'Apple', + }; + }, + }, + + /* Windows */ + { + test(parser) { + return parser.getOSName(true) === 'windows'; + }, + describe() { + return { + type: PLATFORMS_MAP.desktop, + }; + }, + }, + + /* Linux */ + { + test(parser) { + return parser.getOSName(true) === 'linux'; + }, + describe() { + return { + type: PLATFORMS_MAP.desktop, + }; + }, + }, + + /* PlayStation 4 */ + { + test(parser) { + return parser.getOSName(true) 
=== 'playstation 4'; + }, + describe() { + return { + type: PLATFORMS_MAP.tv, + }; + }, + }, + + /* Roku */ + { + test(parser) { + return parser.getOSName(true) === 'roku'; + }, + describe() { + return { + type: PLATFORMS_MAP.tv, + }; + }, + }, +]; diff --git a/node_modules/bowser/src/parser.js b/node_modules/bowser/src/parser.js new file mode 100644 index 00000000..2f9f39f2 --- /dev/null +++ b/node_modules/bowser/src/parser.js @@ -0,0 +1,496 @@ +import browserParsersList from './parser-browsers.js'; +import osParsersList from './parser-os.js'; +import platformParsersList from './parser-platforms.js'; +import enginesParsersList from './parser-engines.js'; +import Utils from './utils.js'; + +/** + * The main class that arranges the whole parsing process. + */ +class Parser { + /** + * Create an instance of Parser + * + * @param {String} UA User-Agent string + * @param {Boolean} [skipParsing=false] parser can skip parsing for performance + * reasons, when you only need a particular part of the result and will call + * {@link Parser#parseBrowser} or {@link Parser#parsePlatform} yourself + * + * @throws {Error} in case of empty UA String + * + * @constructor + */ + constructor(UA, skipParsing = false) { + if (UA === void (0) || UA === null || UA === '') { + throw new Error("UserAgent parameter can't be empty"); + } + + this._ua = UA; + + /** + * @typedef ParsedResult + * @property {Object} browser + * @property {String|undefined} [browser.name] + * Browser name, like `"Chrome"` or `"Internet Explorer"` + * @property {String|undefined} [browser.version] Browser version as a String `"12.01.45334.10"` + * @property {Object} os + * @property {String|undefined} [os.name] OS name, like `"Windows"` or `"macOS"` + * @property {String|undefined} [os.version] OS version, like `"NT 5.1"` or `"10.11.1"` + * @property {String|undefined} [os.versionName] OS name, like `"XP"` or `"High Sierra"` + * @property {Object} platform + * @property {String|undefined} [platform.type] + * platform type, can be either `"desktop"`, `"tablet"` or `"mobile"` + * @property {String|undefined} [platform.vendor] Vendor of the device, + * like `"Apple"` or `"Samsung"` + * @property {String|undefined} [platform.model] Device model, + * like `"iPhone"` or `"Kindle Fire HD 7"` + * @property {Object} engine + * @property {String|undefined} [engine.name] + * Can be any of these: `WebKit`, `Blink`, `Gecko`, `Trident`, `Presto`, `EdgeHTML` + * @property {String|undefined} [engine.version] String version of the engine + */ + this.parsedResult = {}; + + if (skipParsing !== true) { + this.parse(); + } + } + + /** + * Get UserAgent string of current Parser instance + * @return {String} User-Agent String of the current object + * + * @public + */ + getUA() { + return this._ua; + } + + /** + * Test a UA string for a regexp + * @param {RegExp} regex + * @return {Boolean} + */ + test(regex) { + return regex.test(this._ua); + } + + /** + * Get parsed browser object + * @return {Object} + */ + parseBrowser() { + this.parsedResult.browser = {}; + + const browserDescriptor = Utils.find(browserParsersList, (_browser) => { + if (typeof _browser.test === 'function') { + return _browser.test(this); + } + + if (_browser.test instanceof Array) { + return _browser.test.some(condition => this.test(condition)); + } + + throw new Error("Browser's test function is not valid"); + }); + + if (browserDescriptor) { + this.parsedResult.browser = browserDescriptor.describe(this.getUA()); + } + + return this.parsedResult.browser; + }
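`parseBrowser` shows the dispatch pattern all four `parse*` methods share: walk the ordered descriptor list, treat an array-valued `test` as a logical OR over regexps and a function-valued `test` as a predicate, and let the first match `describe` the UA. A standalone sketch of that contract with toy descriptors (not the real list; note the real function-valued `test` receives the Parser instance rather than the raw UA string):

```js
// Toy descriptors mirroring the test/describe contract used above.
const descriptors = [
  // More specific entries must come first, as the descriptor header comment warns.
  { test: [/specialbot/i], describe: () => ({ name: 'SpecialBot' }) },
  { test: ua => /bot/i.test(ua), describe: () => ({ name: 'Generic bot' }) },
];

function matchDescriptor(ua) {
  const found = descriptors.find(d => (typeof d.test === 'function'
    ? d.test(ua)
    : d.test.some(re => re.test(ua))));
  return found ? found.describe(ua) : {};
}

console.log(matchDescriptor('SpecialBot/1.0')); // { name: 'SpecialBot' } (order decides)
```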
+ + /** + * Get parsed browser object + * @return {Object} + * + * @public + */ + getBrowser() { + if (this.parsedResult.browser) { + return this.parsedResult.browser; + } + + return this.parseBrowser(); + } + + /** + * Get browser's name + * @return {String} Browser's name or an empty string + * + * @public + */ + getBrowserName(toLowerCase) { + if (toLowerCase) { + return String(this.getBrowser().name).toLowerCase() || ''; + } + return this.getBrowser().name || ''; + } + + + /** + * Get browser's version + * @return {String} version of browser + * + * @public + */ + getBrowserVersion() { + return this.getBrowser().version; + } + + /** + * Get OS + * @return {Object} + * + * @example + * this.getOS(); + * { + * name: 'macOS', + * version: '10.11.12' + * } + */ + getOS() { + if (this.parsedResult.os) { + return this.parsedResult.os; + } + + return this.parseOS(); + } + + /** + * Parse OS and save it to this.parsedResult.os + * @return {*|{}} + */ + parseOS() { + this.parsedResult.os = {}; + + const os = Utils.find(osParsersList, (_os) => { + if (typeof _os.test === 'function') { + return _os.test(this); + } + + if (_os.test instanceof Array) { + return _os.test.some(condition => this.test(condition)); + } + + throw new Error("Browser's test function is not valid"); + }); + + if (os) { + this.parsedResult.os = os.describe(this.getUA()); + } + + return this.parsedResult.os; + } + + /** + * Get OS name + * @param {Boolean} [toLowerCase] return lower-cased value + * @return {String} name of the OS — macOS, Windows, Linux, etc. + */ + getOSName(toLowerCase) { + const { name } = this.getOS(); + + if (toLowerCase) { + return String(name).toLowerCase() || ''; + } + + return name || ''; + } + + /** + * Get OS version + * @return {String} full version with dots ('10.11.12', '5.6', etc) + */ + getOSVersion() { + return this.getOS().version; + } + + /** + * Get parsed platform + * @return {{}} + */ + getPlatform() { + if (this.parsedResult.platform) { + return this.parsedResult.platform; + } + + return this.parsePlatform(); + } + + /** + * Get platform name + * @param {Boolean} [toLowerCase=false] + * @return {*} + */ + getPlatformType(toLowerCase = false) { + const { type } = this.getPlatform(); + + if (toLowerCase) { + return String(type).toLowerCase() || ''; + } + + return type || ''; + } + + /** + * Parse platform and save it to this.parsedResult.platform + * @return {{}} + */ + parsePlatform() { + this.parsedResult.platform = {}; + + const platform = Utils.find(platformParsersList, (_platform) => { + if (typeof _platform.test === 'function') { + return _platform.test(this); + } + + if (_platform.test instanceof Array) { + return _platform.test.some(condition => this.test(condition)); + } + + throw new Error("Browser's test function is not valid"); + }); + + if (platform) { + this.parsedResult.platform = platform.describe(this.getUA()); + } + + return this.parsedResult.platform; + } + + /** + * Get parsed engine + * @return {{}} + */ + getEngine() { + if (this.parsedResult.engine) { + return this.parsedResult.engine; + } + + return this.parseEngine(); + } + + /** + * Get engine's name + * @return {String} Engine's name or an empty string + * + * @public + */ + getEngineName(toLowerCase) { + if (toLowerCase) { + return String(this.getEngine().name).toLowerCase() || ''; + } + return this.getEngine().name || ''; + }
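Every getter above follows the same memoization: return the cached slice of `parsedResult` when present, otherwise fall back to the corresponding `parse*` method. Combined with `skipParsing`, this means a parser fills in only what is actually asked for. A short sketch (UA string illustrative):

```js
import Bowser from 'bowser';

const parser = Bowser.getParser(
  'Mozilla/5.0 (iPhone; CPU iPhone OS 13_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.1.1 Mobile/15E148 Safari/604.1', // illustrative
  true, // skip the eager full parse
);

console.log(parser.getOSName(true));       // 'ios' (first call triggers parseOS)
console.log(parser.getPlatformType(true)); // 'mobile' (first call triggers parsePlatform)
console.log(parser.getResult());           // contains only the os and platform slices parsed so far
```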
+ + /** + * Parse engine and save it to this.parsedResult.engine + * @return {{}} + */ + parseEngine() { + this.parsedResult.engine = {}; + + const engine = Utils.find(enginesParsersList, (_engine) => { + if (typeof _engine.test === 'function') { + return _engine.test(this); + } + + if (_engine.test instanceof Array) { + return _engine.test.some(condition => this.test(condition)); + } + + throw new Error("Browser's test function is not valid"); + }); + + if (engine) { + this.parsedResult.engine = engine.describe(this.getUA()); + } + + return this.parsedResult.engine; + } + + /** + * Parse full information about the browser + * @returns {Parser} + */ + parse() { + this.parseBrowser(); + this.parseOS(); + this.parsePlatform(); + this.parseEngine(); + + return this; + } + + /** + * Get parsed result + * @return {ParsedResult} + */ + getResult() { + return Utils.assign({}, this.parsedResult); + } + + /** + * Check if parsed browser matches certain conditions + * + * @param {Object} checkTree It's a one- or two-layered object, + * which can include a platform or an OS on the first layer + * and should have browser specs on the bottom layer + * + * @returns {Boolean|undefined} Whether the browser satisfies the set conditions or not. + * Returns `undefined` when the browser is not described in the checkTree object. + * + * @example + * const browser = Bowser.getParser(window.navigator.userAgent); + * if (browser.satisfies({chrome: '>118.01.1322' })) + * // or with os + * if (browser.satisfies({windows: { chrome: '>118.01.1322' } })) + * // or with platforms + * if (browser.satisfies({desktop: { chrome: '>118.01.1322' } })) + */ + satisfies(checkTree) { + const platformsAndOSes = {}; + let platformsAndOSCounter = 0; + const browsers = {}; + let browsersCounter = 0; + + const allDefinitions = Object.keys(checkTree); + + allDefinitions.forEach((key) => { + const currentDefinition = checkTree[key]; + if (typeof currentDefinition === 'string') { + browsers[key] = currentDefinition; + browsersCounter += 1; + } else if (typeof currentDefinition === 'object') { + platformsAndOSes[key] = currentDefinition; + platformsAndOSCounter += 1; + } + }); + + if (platformsAndOSCounter > 0) { + const platformsAndOSNames = Object.keys(platformsAndOSes); + const OSMatchingDefinition = Utils.find(platformsAndOSNames, name => (this.isOS(name))); + + if (OSMatchingDefinition) { + const osResult = this.satisfies(platformsAndOSes[OSMatchingDefinition]); + + if (osResult !== void 0) { + return osResult; + } + } + + const platformMatchingDefinition = Utils.find( + platformsAndOSNames, + name => (this.isPlatform(name)), + ); + if (platformMatchingDefinition) { + const platformResult = this.satisfies(platformsAndOSes[platformMatchingDefinition]); + + if (platformResult !== void 0) { + return platformResult; + } + } + } + + if (browsersCounter > 0) { + const browserNames = Object.keys(browsers); + const matchingDefinition = Utils.find(browserNames, name => (this.isBrowser(name, true))); + + if (matchingDefinition !== void 0) { + return this.compareVersion(browsers[matchingDefinition]); + } + } + + return undefined; + } + + /** + * Check if the browser name equals the passed string + * @param browserName The string to compare with the browser name + * @param [includingAlias=false] The flag showing whether alias will be included into comparison + * @returns {boolean} + */ + isBrowser(browserName, includingAlias = false) { + const defaultBrowserName = this.getBrowserName().toLowerCase(); + let browserNameLower = browserName.toLowerCase(); + const alias = Utils.getBrowserTypeByAlias(browserNameLower); + + if (includingAlias && alias) { + browserNameLower = alias.toLowerCase(); + } + return browserNameLower === defaultBrowserName; + } + + compareVersion(version) { + let expectedResults = [0]; + let
comparableVersion = version; + let isLoose = false; + + const currentBrowserVersion = this.getBrowserVersion(); + + if (typeof currentBrowserVersion !== 'string') { + return void 0; + } + + if (version[0] === '>' || version[0] === '<') { + comparableVersion = version.substr(1); + if (version[1] === '=') { + isLoose = true; + comparableVersion = version.substr(2); + } else { + expectedResults = []; + } + if (version[0] === '>') { + expectedResults.push(1); + } else { + expectedResults.push(-1); + } + } else if (version[0] === '=') { + comparableVersion = version.substr(1); + } else if (version[0] === '~') { + isLoose = true; + comparableVersion = version.substr(1); + } + + return expectedResults.indexOf( + Utils.compareVersions(currentBrowserVersion, comparableVersion, isLoose), + ) > -1; + } + + isOS(osName) { + return this.getOSName(true) === String(osName).toLowerCase(); + } + + isPlatform(platformType) { + return this.getPlatformType(true) === String(platformType).toLowerCase(); + } + + isEngine(engineName) { + return this.getEngineName(true) === String(engineName).toLowerCase(); + } + + /** + * Check if the browser name, the OS name or the platform type equals the passed string + * @param {String} anything + * @param [includingAlias=false] Whether the alias should be included in the comparison + * @returns {Boolean} + */ + is(anything, includingAlias = false) { + return this.isBrowser(anything, includingAlias) || this.isOS(anything) + || this.isPlatform(anything); + } + + /** + * Check if any of the given values satisfies this.is(anything) + * @param {String[]} anythings + * @returns {Boolean} + */ + some(anythings = []) { + return anythings.some(anything => this.is(anything)); + } +} + +export default Parser; diff --git a/node_modules/bowser/src/utils.js b/node_modules/bowser/src/utils.js new file mode 100644 index 00000000..d1174bf0 --- /dev/null +++ b/node_modules/bowser/src/utils.js @@ -0,0 +1,309 @@ +import { BROWSER_MAP, BROWSER_ALIASES_MAP } from './constants.js'; + +export default class Utils { + /** + * Get first matched item for a string + * @param {RegExp} regexp + * @param {String} ua + * @return {Array|{index: number, input: string}|*|boolean|string} + */ + static getFirstMatch(regexp, ua) { + const match = ua.match(regexp); + return (match && match.length > 0 && match[1]) || ''; + } + + /** + * Get second matched item for a string + * @param {RegExp} regexp + * @param {String} ua + * @return {Array|{index: number, input: string}|*|boolean|string} + */ + static getSecondMatch(regexp, ua) { + const match = ua.match(regexp); + return (match && match.length > 1 && match[2]) || ''; + } + + /** + * Match a regexp and return a constant or undefined + * @param {RegExp} regexp + * @param {String} ua + * @param {*} _const Any const that will be returned if regexp matches the string + * @return {*} + */ + static matchAndReturnConst(regexp, ua, _const) { + if (regexp.test(ua)) { + return _const; + } + return void (0); + } + + /** + * Get Windows version name + * + * @example + * getWindowsVersionName('NT 10.0') // '10' + * + * @param {string} version + * @return {string} versionName + */ + static getWindowsVersionName(version) { + switch (version) { + case 'NT': return 'NT'; + case 'XP': return 'XP'; + case 'NT 5.0': return '2000'; + case 'NT 5.1': return 'XP'; + case 'NT 5.2': return '2003'; + case 'NT 6.0': return 'Vista'; + case 'NT 6.1': return '7'; + case 'NT 6.2': return '8'; + case 'NT 6.3': return '8.1'; + case 'NT 10.0': return '10'; + default: return undefined; + } + } + + /** + * Get macOS version name + * 10.5 - Leopard + * 10.6 - Snow Leopard + * 10.7 - Lion + * 10.8 - Mountain Lion + * 10.9 -
Mavericks + * 10.10 - Yosemite + * 10.11 - El Capitan + * 10.12 - Sierra + * 10.13 - High Sierra + * 10.14 - Mojave + * 10.15 - Catalina + * + * @example + * getMacOSVersionName("10.14") // 'Mojave' + * + * @param {string} version + * @return {string} versionName + */ + static getMacOSVersionName(version) { + const v = version.split('.').splice(0, 2).map(s => parseInt(s, 10) || 0); + v.push(0); + if (v[0] !== 10) return undefined; + switch (v[1]) { + case 5: return 'Leopard'; + case 6: return 'Snow Leopard'; + case 7: return 'Lion'; + case 8: return 'Mountain Lion'; + case 9: return 'Mavericks'; + case 10: return 'Yosemite'; + case 11: return 'El Capitan'; + case 12: return 'Sierra'; + case 13: return 'High Sierra'; + case 14: return 'Mojave'; + case 15: return 'Catalina'; + default: return undefined; + } + } + + /** + * Get Android version name + * 1.5 - Cupcake + * 1.6 - Donut + * 2.0 - Eclair + * 2.1 - Eclair + * 2.2 - Froyo + * 2.x - Gingerbread + * 3.x - Honeycomb + * 4.0 - Ice Cream Sandwich + * 4.1 - Jelly Bean + * 4.4 - KitKat + * 5.x - Lollipop + * 6.x - Marshmallow + * 7.x - Nougat + * 8.x - Oreo + * 9.x - Pie + * + * @example + * getAndroidVersionName("7.0") // 'Nougat' + * + * @param {string} version + * @return {string} versionName + */ + static getAndroidVersionName(version) { + const v = version.split('.').splice(0, 2).map(s => parseInt(s, 10) || 0); + v.push(0); + if (v[0] === 1 && v[1] < 5) return undefined; + if (v[0] === 1 && v[1] < 6) return 'Cupcake'; + if (v[0] === 1 && v[1] >= 6) return 'Donut'; + if (v[0] === 2 && v[1] < 2) return 'Eclair'; + if (v[0] === 2 && v[1] === 2) return 'Froyo'; + if (v[0] === 2 && v[1] > 2) return 'Gingerbread'; + if (v[0] === 3) return 'Honeycomb'; + if (v[0] === 4 && v[1] < 1) return 'Ice Cream Sandwich'; + if (v[0] === 4 && v[1] < 4) return 'Jelly Bean'; + if (v[0] === 4 && v[1] >= 4) return 'KitKat'; + if (v[0] === 5) return 'Lollipop'; + if (v[0] === 6) return 'Marshmallow'; + if (v[0] === 7) return 'Nougat'; + if (v[0] === 8) return 'Oreo'; + if (v[0] === 9) return 'Pie'; + return undefined; + } + + /** + * Get version precision count + * + * @example + * getVersionPrecision("1.10.3") // 3 + * + * @param {string} version + * @return {number} + */ + static getVersionPrecision(version) { + return version.split('.').length; + } + + /** + * Compare two versions + * + * @example + * compareVersions('1.10.2.1', '1.8.2.1.90') // 1 + * compareVersions('1.010.2.1', '1.09.2.1.90'); // 1 + * compareVersions('1.10.2.1', '1.10.2.1'); // 0 + * compareVersions('1.10.2.1', '1.0800.2'); // -1 + * compareVersions('1.10.2.1', '1.10', true); // 0 + * + * @param {String} versionA version to compare + * @param {String} versionB version to compare + * @param {boolean} [isLoose] enable loose comparison + * @return {Number} comparison result: -1 when versionA is lower, + * 1 when versionA is higher, 0 when both are equal + */ + /* eslint consistent-return: 1 */ + static compareVersions(versionA, versionB, isLoose = false) { + // 1) get common precision for both versions, for example for "10.0" and "9" it should be 2 + const versionAPrecision = Utils.getVersionPrecision(versionA); + const versionBPrecision = Utils.getVersionPrecision(versionB); + + let precision = Math.max(versionAPrecision, versionBPrecision); + let lastPrecision = 0; + + const chunks = Utils.map([versionA, versionB], (version) => { + const delta = precision - Utils.getVersionPrecision(version); + + // 2) "9" -> "9.0" (for precision = 2) + const _version =
version + new Array(delta + 1).join('.0'); + + // 3) "9.0" -> ["000000000", "000000009"] + return Utils.map(_version.split('.'), chunk => new Array(20 - chunk.length).join('0') + chunk).reverse(); + }); + + // adjust precision for loose comparison + if (isLoose) { + lastPrecision = precision - Math.min(versionAPrecision, versionBPrecision); + } + + // iterate in reverse order by reversed chunks array + precision -= 1; + while (precision >= lastPrecision) { + // 4) compare: "000000009" > "000000010" = false (but "9" > "10" = true) + if (chunks[0][precision] > chunks[1][precision]) { + return 1; + } + + if (chunks[0][precision] === chunks[1][precision]) { + if (precision === lastPrecision) { + // all version chunks are same + return 0; + } + + precision -= 1; + } else if (chunks[0][precision] < chunks[1][precision]) { + return -1; + } + } + + return undefined; + } + + /** + * Array::map polyfill + * + * @param {Array} arr + * @param {Function} iterator + * @return {Array} + */ + static map(arr, iterator) { + const result = []; + let i; + if (Array.prototype.map) { + return Array.prototype.map.call(arr, iterator); + } + for (i = 0; i < arr.length; i += 1) { + result.push(iterator(arr[i])); + } + return result; + } + + /** + * Array::find polyfill + * + * @param {Array} arr + * @param {Function} predicate + * @return {*} + */ + static find(arr, predicate) { + let i; + let l; + if (Array.prototype.find) { + return Array.prototype.find.call(arr, predicate); + } + for (i = 0, l = arr.length; i < l; i += 1) { + const value = arr[i]; + if (predicate(value, i)) { + return value; + } + } + return undefined; + } + + /** + * Object::assign polyfill + * + * @param {Object} obj + * @param {Object} ...assigners + * @return {Object} + */ + static assign(obj, ...assigners) { + const result = obj; + let i; + let l; + if (Object.assign) { + return Object.assign(obj, ...assigners); + } + for (i = 0, l = assigners.length; i < l; i += 1) { + const assigner = assigners[i]; + if (typeof assigner === 'object' && assigner !== null) { + const keys = Object.keys(assigner); + keys.forEach((key) => { + result[key] = assigner[key]; + }); + } + } + return obj; + } + + /** + * Get short version/alias for a browser name + * + * @example + * getBrowserAlias('Microsoft Edge') // edge + * + * @param {string} browserName + * @return {string} + */ + static getBrowserAlias(browserName) { + return BROWSER_ALIASES_MAP[browserName]; + } + + /** + * Get full browser name by its alias + * + * @example + * getBrowserTypeByAlias('edge') // Microsoft Edge + * + * @param {string} browserAlias + * @return {string} + */ + static getBrowserTypeByAlias(browserAlias) { + return BROWSER_MAP[browserAlias] || ''; + } +} diff --git a/package-lock.json b/package-lock.json index 612ac2fa..c79bcc75 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8,6 +8,8 @@ "name": "neah", "version": "0.1.0", "dependencies": { + "@aws-sdk/client-s3": "^3.802.0", + "@aws-sdk/s3-request-presigner": "^3.802.0", "@fullcalendar/react": "^6.1.15", "@hookform/resolvers": "^3.9.1", "@keycloak/keycloak-admin-client": "^26.2.2", @@ -137,6 +139,912 @@ "lru-cache": "^10.4.3" } }, + "node_modules/@aws-crypto/crc32": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/crc32/-/crc32-5.2.0.tgz", + "integrity": "sha512-nLbCWqQNgUiwwtFsen1AdzAtvuLRsQS8rYgMuxCrdKf9kOssamGLuPwyTY9wyYblNr9+1XM8v6zoDTPPSIeANg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + }, +
"engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-crypto/crc32c": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/crc32c/-/crc32c-5.2.0.tgz", + "integrity": "sha512-+iWb8qaHLYKrNvGRbiYRHSdKRWhto5XlZUEBwDjYNf+ly5SVYG6zEoYIdxvf5R3zyeP16w4PLBn3rH1xc74Rag==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/sha1-browser": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha1-browser/-/sha1-browser-5.2.0.tgz", + "integrity": "sha512-OH6lveCFfcDjX4dbAvCFSYUjJZjDr/3XJ3xHtjn3Oj5b9RjojQo8npoLeA/bNwkOkrSQ0wgrHzXk4tDRxGKJeg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/supports-web-crypto": "^5.2.0", + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-locate-window": "^3.0.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha1-browser/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz", + "integrity": "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-js": "^5.2.0", + "@aws-crypto/supports-web-crypto": "^5.2.0", + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-locate-window": "^3.0.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", 
+ "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz", + "integrity": "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-crypto/supports-web-crypto": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz", + "integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/util": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz", + "integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.222.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/client-s3": { + "version": "3.802.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.802.0.tgz", + "integrity": "sha512-YIwLLiqRZArEmRI94X8MOpWuXlmxI3NnxYv+3kk6HIc2YWPaOAf0YN7vWlnQFWo6Yi1gBRtP0HM8WzK4Bn5ORQ==", + "license": 
"Apache-2.0", + "dependencies": { + "@aws-crypto/sha1-browser": "5.2.0", + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-node": "3.799.0", + "@aws-sdk/middleware-bucket-endpoint": "3.775.0", + "@aws-sdk/middleware-expect-continue": "3.775.0", + "@aws-sdk/middleware-flexible-checksums": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-location-constraint": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-sdk-s3": "3.799.0", + "@aws-sdk/middleware-ssec": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/signature-v4-multi-region": "3.800.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@aws-sdk/xml-builder": "3.775.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/eventstream-serde-browser": "^4.0.2", + "@smithy/eventstream-serde-config-resolver": "^4.1.0", + "@smithy/eventstream-serde-node": "^4.0.2", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-blob-browser": "^4.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/hash-stream-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/md5-js": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.1", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "@smithy/util-waiter": "^4.0.3", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-sso": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.799.0.tgz", + "integrity": "sha512-/i/LG7AiWPmPxKCA2jnR2zaf7B3HYSTbxaZI21ElIz9wASlNAsKr8CnLY7qb50kOyXiNfQ834S5Q3Gl8dX9o3Q==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": 
"^4.1.1", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.2", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/core": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.799.0.tgz", + "integrity": "sha512-hkKF3Zpc6+H8GI1rlttYVRh9uEE77cqAzLmLpY3iu7sql8cZgPERRBfaFct8p1SaDyrksLNiboD1vKW58mbsYg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/core": "^3.3.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "fast-xml-parser": "4.4.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/core/node_modules/fast-xml-parser": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.4.1.tgz", + "integrity": "sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + }, + { + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + } + ], + "license": "MIT", + "dependencies": { + "strnum": "^1.0.5" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, + "node_modules/@aws-sdk/credential-provider-env": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.799.0.tgz", + "integrity": "sha512-vT/SSWtbUIOW/U21qgEySmmO44SFWIA7WeQPX1OrI8WJ5n7OEI23JWLHjLvHTkYmuZK6z1rPcv7HzRgmuGRibA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-http": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.799.0.tgz", + "integrity": "sha512-2CjBpOWmhaPAExOgHnIB5nOkS5ef+mfRlJ1JC4nsnjAx0nrK4tk0XRE0LYz11P3+ue+a86cU8WTmBo+qjnGxPQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.799.0.tgz", + 
"integrity": "sha512-M9ubILFxerqw4QJwk83MnjtZyoA2eNCiea5V+PzZeHlwk2PON/EnawKqy65x9/hMHGoSvvNuby7iMAmPptu7yw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.799.0", + "@aws-sdk/credential-provider-web-identity": "3.799.0", + "@aws-sdk/nested-clients": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-node": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.799.0.tgz", + "integrity": "sha512-nd9fSJc0wUlgKUkIr2ldJhcIIrzJFS29AGZoyY22J3xih63nNDv61eTGVMsDZzHlV21XzMlPEljTR7axiimckg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-ini": "3.799.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.799.0", + "@aws-sdk/credential-provider-web-identity": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-process": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.799.0.tgz", + "integrity": "sha512-g8jmNs2k98WNHMYcea1YKA+7ao2Ma4w0P42Dz4YpcI155pQHxHx25RwbOG+rsAKuo3bKwkW53HVE/ZTKhcWFgw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.799.0.tgz", + "integrity": "sha512-lQv27QkNU9FJFZqEf5DIEN3uXEN409Iaym9WJzhOouGtxvTIAWiD23OYh1u8PvBdrordJGS2YddfQvhcmq9akw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/client-sso": "3.799.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/token-providers": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-web-identity": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.799.0.tgz", + "integrity": "sha512-8k1i9ut+BEg0QZ+I6UQMxGNR1T8paLmAOAZXU+nLQR0lcxS6lr8v+dqofgzQPuHLBkWNCr1Av1IKeL3bJjgU7g==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/nested-clients": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + 
"engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-bucket-endpoint": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.775.0.tgz", + "integrity": "sha512-qogMIpVChDYr4xiUNC19/RDSw/sKoHkAhouS6Skxiy6s27HBhow1L3Z1qVYXuBmOZGSWPU0xiyZCvOyWrv9s+Q==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-arn-parser": "3.723.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-expect-continue": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.775.0.tgz", + "integrity": "sha512-Apd3owkIeUW5dnk3au9np2IdW2N0zc9NjTjHiH+Mx3zqwSrc+m+ANgJVgk9mnQjMzU/vb7VuxJ0eqdEbp5gYsg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-flexible-checksums": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.799.0.tgz", + "integrity": "sha512-vBIAdDl2neaFiUMxyr7dAtX7m9Iw5c0bz7OirD0JGW0nYn0mBcqKpFZEU75ewA5p2+Cm7RQDdt6099ne3gj0WA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/crc32": "5.2.0", + "@aws-crypto/crc32c": "5.2.0", + "@aws-crypto/util": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/is-array-buffer": "^4.0.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-host-header": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.775.0.tgz", + "integrity": "sha512-tkSegM0Z6WMXpLB8oPys/d+umYIocvO298mGvcMCncpRl77L9XkvSLJIFzaHes+o7djAgIduYw8wKIMStFss2w==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-location-constraint": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.775.0.tgz", + "integrity": "sha512-8TMXEHZXZTFTckQLyBT5aEI8fX11HZcwZseRifvBKKpj0RZDk4F0EEYGxeNSPpUQ7n+PRWyfAEnnZNRdAj/1NQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-logger": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.775.0.tgz", + "integrity": "sha512-FaxO1xom4MAoUJsldmR92nT1G6uZxTdNYOFYtdHfd6N2wcNaTuxgjIvqzg5y7QIH9kn58XX/dzf1iTjgqUStZw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": 
">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-recursion-detection": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.775.0.tgz", + "integrity": "sha512-GLCzC8D0A0YDG5u3F5U03Vb9j5tcOEFhr8oc6PDk0k0vm5VwtZOE6LvK7hcCSoAB4HXyOUM0sQuXrbaAh9OwXA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-sdk-s3": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.799.0.tgz", + "integrity": "sha512-Zwdge5NArgcJwPuGZwgfXY6XXkWEBmMS9dqu5g3DcfHmZUuSjQUqmOsDdSZlE3RFHrDAEbuGQlrFUE8zuwdKQA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-arn-parser": "3.723.0", + "@smithy/core": "^3.3.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-ssec": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.775.0.tgz", + "integrity": "sha512-Iw1RHD8vfAWWPzBBIKaojO4GAvQkHOYIpKdAfis/EUSUmSa79QsnXnRqsdcE0mCB0Ylj23yi+ah4/0wh9FsekA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-user-agent": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.799.0.tgz", + "integrity": "sha512-TropQZanbOTxa+p+Nl4fWkzlRhgFwDfW+Wb6TR3jZN7IXHNlPpgGFpdrgvBExhW/RBhqr+94OsR8Ou58lp3hhA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@smithy/core": "^3.3.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/nested-clients": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.799.0.tgz", + "integrity": "sha512-zILlWh7asrcQG9JYMYgnvEQBfwmWKfED0yWCf3UNAmQcfS9wkCAWCgicNy/y5KvNvEYnHidsU117STtyuUNG5g==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + 
"@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.1", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.2", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/region-config-resolver": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.775.0.tgz", + "integrity": "sha512-40iH3LJjrQS3LKUJAl7Wj0bln7RFPEvUYKFxtP8a+oKFDO0F65F52xZxIJbPn6sHkxWDAnZlGgdjZXM3p2g5wQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/s3-request-presigner": { + "version": "3.802.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/s3-request-presigner/-/s3-request-presigner-3.802.0.tgz", + "integrity": "sha512-gXavOjkIIbd89ooAKziPWPnvwA3ZuJvpLMWr2xB/RyDZIgnjSwvRGFpRJ5EZPZjkhBFk0QQcEVp2FIrPI9wHlA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/signature-v4-multi-region": "3.800.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-format-url": "3.775.0", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/signature-v4-multi-region": { + "version": "3.800.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.800.0.tgz", + "integrity": "sha512-c71wZuiSUHNFCvcuqOv3jbqP+NquB2YKN4qX90OwYXEqUKn8F8fKJPpjjHjz1eK6qWKtECR4V/NTno2P70Yz/Q==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/middleware-sdk-s3": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/token-providers": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.799.0.tgz", + "integrity": "sha512-/8iDjnsJs/D8AhGbDAmdF5oSHzE4jsDsM2RIIxmBAKTZXkaaclQBNX9CmAqLKQmO3IUMZsDH2KENHLVAk/N/mw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/nested-clients": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/types": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.775.0.tgz", + "integrity": 
"sha512-ZoGKwa4C9fC9Av6bdfqcW6Ix5ot05F/S4VxWR2nHuMv7hzfmAjTOcUiWT7UR4hM/U0whf84VhDtXN/DWAk52KA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-arn-parser": { + "version": "3.723.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.723.0.tgz", + "integrity": "sha512-ZhEfvUwNliOQROcAk34WJWVYTlTa4694kSVhDSjW6lE1bMataPnIN8A0ycukEzBXmd8ZSoBcQLn6lKGl7XIJ5w==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-endpoints": { + "version": "3.787.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.787.0.tgz", + "integrity": "sha512-fd3zkiOkwnbdbN0Xp9TsP5SWrmv0SpT70YEdbb8wAj2DWQwiCmFszaSs+YCvhoCdmlR3Wl9Spu0pGpSAGKeYvQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "@smithy/util-endpoints": "^3.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-format-url": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-format-url/-/util-format-url-3.775.0.tgz", + "integrity": "sha512-Nw4nBeyCbWixoGh8NcVpa/i8McMA6RXJIjQFyloJLaPr7CPquz7ZbSl0MUWMFVwP/VHaJ7B+lNN3Qz1iFCEP/Q==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-locate-window": { + "version": "3.723.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.723.0.tgz", + "integrity": "sha512-Yf2CS10BqK688DRsrKI/EO6B8ff5J86NXe4C+VCysK7UOgN0l1zOTeTukZ3H8Q9tYYX3oaF1961o8vRkFm7Nmw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-user-agent-browser": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.775.0.tgz", + "integrity": "sha512-txw2wkiJmZKVdDbscK7VBK+u+TJnRtlUjRTLei+elZg2ADhpQxfVAQl436FUeIv6AhB/oRHW6/K/EAGXUSWi0A==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-sdk/util-user-agent-node": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.799.0.tgz", + "integrity": "sha512-iXBk38RbIWPF5Nq9O4AnktORAzXovSVqWYClvS1qbE7ILsnTLJbagU9HlU25O2iV5COVh1qZkwuP5NHQ2yTEyw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } + } + }, + "node_modules/@aws-sdk/xml-builder": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.775.0.tgz", + "integrity": "sha512-b9NGO6FKJeLGYnV7Z1yvcP1TNU4dkD5jNsLWOF1/sygZoASaQhNOlaiJ/1OH331YQ1R1oWk38nBb0frsYkDsOQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + 
"tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, "node_modules/@babel/runtime": { "version": "7.26.0", "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.26.0.tgz", @@ -3314,6 +4222,736 @@ "url": "https://ko-fi.com/killymxi" } }, + "node_modules/@smithy/abort-controller": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.0.2.tgz", + "integrity": "sha512-Sl/78VDtgqKxN2+1qduaVE140XF+Xg+TafkncspwM4jFP/LHr76ZHmIY/y3V1M0mMLNk+Je6IGbzxy23RSToMw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/chunked-blob-reader": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@smithy/chunked-blob-reader/-/chunked-blob-reader-5.0.0.tgz", + "integrity": "sha512-+sKqDBQqb036hh4NPaUiEkYFkTUGYzRsn3EuFhyfQfMy6oGHEUJDurLP9Ufb5dasr/XiAmPNMr6wa9afjQB+Gw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/chunked-blob-reader-native": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/chunked-blob-reader-native/-/chunked-blob-reader-native-4.0.0.tgz", + "integrity": "sha512-R9wM2yPmfEMsUmlMlIgSzOyICs0x9uu7UTHoccMyt7BWw8shcGM8HqB355+BZCPBcySvbTYMs62EgEQkNxz2ig==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-base64": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/config-resolver": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.1.0.tgz", + "integrity": "sha512-8smPlwhga22pwl23fM5ew4T9vfLUCeFXlcqNOCD5M5h8VmNPNUE9j6bQSuRXpDSV11L/E/SwEBQuW8hr6+nS1A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/core": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.3.0.tgz", + "integrity": "sha512-r6gvs5OfRq/w+9unPm7B3po4rmWaGh0CIL/OwHntGGux7+RhOOZLGuurbeMgWV6W55ZuyMTypJLeH0vn/ZRaWQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/middleware-serde": "^4.0.3", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/credential-provider-imds": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.0.2.tgz", + "integrity": "sha512-32lVig6jCaWBHnY+OEQ6e6Vnt5vDHaLiydGrwYMW9tPqO688hPGTYRamYJ1EptxEC2rAwJrHWmPoKRBl4iTa8w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/eventstream-codec": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-4.0.2.tgz", + "integrity": 
"sha512-p+f2kLSK7ZrXVfskU/f5dzksKTewZk8pJLPvER3aFHPt76C2MxD9vNatSfLzzQSQB4FNO96RK4PSXfhD1TTeMQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/crc32": "5.2.0", + "@smithy/types": "^4.2.0", + "@smithy/util-hex-encoding": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/eventstream-serde-browser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-4.0.2.tgz", + "integrity": "sha512-CepZCDs2xgVUtH7ZZ7oDdZFH8e6Y2zOv8iiX6RhndH69nlojCALSKK+OXwZUgOtUZEUaZ5e1hULVCHYbCn7pug==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/eventstream-serde-universal": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/eventstream-serde-config-resolver": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-4.1.0.tgz", + "integrity": "sha512-1PI+WPZ5TWXrfj3CIoKyUycYynYJgZjuQo8U+sphneOtjsgrttYybdqESFReQrdWJ+LKt6NEdbYzmmfDBmjX2A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/eventstream-serde-node": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-node/-/eventstream-serde-node-4.0.2.tgz", + "integrity": "sha512-C5bJ/C6x9ENPMx2cFOirspnF9ZsBVnBMtP6BdPl/qYSuUawdGQ34Lq0dMcf42QTjUZgWGbUIZnz6+zLxJlb9aw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/eventstream-serde-universal": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/eventstream-serde-universal": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-4.0.2.tgz", + "integrity": "sha512-St8h9JqzvnbB52FtckiHPN4U/cnXcarMniXRXTKn0r4b4XesZOGiAyUdj1aXbqqn1icSqBlzzUsCl6nPB018ng==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/eventstream-codec": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/fetch-http-handler": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.0.2.tgz", + "integrity": "sha512-+9Dz8sakS9pe7f2cBocpJXdeVjMopUDLgZs1yWeu7h++WqSbjUYv/JAJwKwXw1HV6gq1jyWjxuyn24E2GhoEcQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/hash-blob-browser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/hash-blob-browser/-/hash-blob-browser-4.0.2.tgz", + "integrity": "sha512-3g188Z3DyhtzfBRxpZjU8R9PpOQuYsbNnyStc/ZVS+9nVX1f6XeNOa9IrAh35HwwIZg+XWk8bFVtNINVscBP+g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/chunked-blob-reader": "^5.0.0", + "@smithy/chunked-blob-reader-native": "^4.0.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/hash-node": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.0.2.tgz", + "integrity": 
"sha512-VnTpYPnRUE7yVhWozFdlxcYknv9UN7CeOqSrMH+V877v4oqtVYuoqhIhtSjmGPvYrYnAkaM61sLMKHvxL138yg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/hash-stream-node": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/hash-stream-node/-/hash-stream-node-4.0.2.tgz", + "integrity": "sha512-POWDuTznzbIwlEXEvvXoPMS10y0WKXK790soe57tFRfvf4zBHyzE529HpZMqmDdwG9MfFflnyzndUQ8j78ZdSg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/invalid-dependency": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.0.2.tgz", + "integrity": "sha512-GatB4+2DTpgWPday+mnUkoumP54u/MDM/5u44KF9hIu8jF0uafZtQLcdfIKkIcUNuF/fBojpLEHZS/56JqPeXQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/is-array-buffer": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-4.0.0.tgz", + "integrity": "sha512-saYhF8ZZNoJDTvJBEWgeBccCg+yvp1CX+ed12yORU3NilJScfc6gfch2oVb4QgxZrGUx3/ZJlb+c/dJbyupxlw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/md5-js": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-4.0.2.tgz", + "integrity": "sha512-Hc0R8EiuVunUewCse2syVgA2AfSRco3LyAv07B/zCOMa+jpXI9ll+Q21Nc6FAlYPcpNcAXqBzMhNs1CD/pP2bA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-content-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.0.2.tgz", + "integrity": "sha512-hAfEXm1zU+ELvucxqQ7I8SszwQ4znWMbNv6PLMndN83JJN41EPuS93AIyh2N+gJ6x8QFhzSO6b7q2e6oClDI8A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-endpoint": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.1.1.tgz", + "integrity": "sha512-z5RmcHxjvScL+LwEDU2mTNCOhgUs4lu5PGdF1K36IPRmUHhNFxNxgenSB7smyDiYD4vdKQ7CAZtG5cUErqib9w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.0", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-retry": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.1.2.tgz", + "integrity": "sha512-qN/Mmxm8JWtFAjozJ8VSTM83KOX4cIks8UjDqqNkKIegzPrE5ZKPNCQ/DqUSIF90pue5a/NycNXnBod2NwvZZQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + 
"@smithy/protocol-http": "^5.1.0", + "@smithy/service-error-classification": "^4.0.3", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-retry/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@smithy/middleware-serde": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.0.3.tgz", + "integrity": "sha512-rfgDVrgLEVMmMn0BI8O+8OVr6vXzjV7HZj57l0QxslhzbvVfikZbVfBVthjLHqib4BW44QhcIgJpvebHlRaC9A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-stack": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.0.2.tgz", + "integrity": "sha512-eSPVcuJJGVYrFYu2hEq8g8WWdJav3sdrI4o2c6z/rjnYDd3xH9j9E7deZQCzFn4QvGPouLngH3dQ+QVTxv5bOQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/node-config-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.0.2.tgz", + "integrity": "sha512-WgCkILRZfJwJ4Da92a6t3ozN/zcvYyJGUTmfGbgS/FkCcoCjl7G4FJaCDN1ySdvLvemnQeo25FdkyMSTSwulsw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/node-http-handler": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.0.4.tgz", + "integrity": "sha512-/mdqabuAT3o/ihBGjL94PUbTSPSRJ0eeVTdgADzow0wRJ0rN4A27EOrtlK56MYiO1fDvlO3jVTCxQtQmK9dZ1g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/property-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.0.2.tgz", + "integrity": "sha512-wNRoQC1uISOuNc2s4hkOYwYllmiyrvVXWMtq+TysNRVQaHm4yoafYQyjN/goYZS+QbYlPIbb/QRjaUZMuzwQ7A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/protocol-http": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.1.0.tgz", + "integrity": "sha512-KxAOL1nUNw2JTYrtviRRjEnykIDhxc84qMBzxvu1MUfQfHTuBlCG7PA6EdVwqpJjH7glw7FqQoFxUJSyBQgu7g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + 
"node_modules/@smithy/querystring-builder": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.0.2.tgz", + "integrity": "sha512-NTOs0FwHw1vimmQM4ebh+wFQvOwkEf/kQL6bSM1Lock+Bv4I89B3hGYoUEPkmvYPkDKyp5UdXJYu+PoTQ3T31Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-uri-escape": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/querystring-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.0.2.tgz", + "integrity": "sha512-v6w8wnmZcVXjfVLjxw8qF7OwESD9wnpjp0Dqry/Pod0/5vcEA3qxCr+BhbOHlxS8O+29eLpT3aagxXGwIoEk7Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/service-error-classification": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.0.3.tgz", + "integrity": "sha512-FTbcajmltovWMjj3tksDQdD23b2w6gH+A0DYA1Yz3iSpjDj8fmkwy62UnXcWMy4d5YoMoSyLFHMfkEVEzbiN8Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/shared-ini-file-loader": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.0.2.tgz", + "integrity": "sha512-J9/gTWBGVuFZ01oVA6vdb4DAjf1XbDhK6sLsu3OS9qmLrS6KB5ygpeHiM3miIbj1qgSJ96GYszXFWv6ErJ8QEw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/signature-v4": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.1.0.tgz", + "integrity": "sha512-4t5WX60sL3zGJF/CtZsUQTs3UrZEDO2P7pEaElrekbLqkWPYkgqNW1oeiNYC6xXifBnT9dVBOnNQRvOE9riU9w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-uri-escape": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/smithy-client": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.2.1.tgz", + "integrity": "sha512-fbniZef60QdsBc4ZY0iyI8xbFHIiC/QRtPi66iE4ufjiE/aaz7AfUXzcWMkpO8r+QhLeNRIfmPchIG+3/QDZ6g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.0", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/types": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.2.0.tgz", + "integrity": "sha512-7eMk09zQKCO+E/ivsjQv+fDlOupcFUCSC/L2YUPgwhvowVGWbPQHjEFcmjt7QQ4ra5lyowS92SV53Zc6XD4+fg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/url-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.0.2.tgz", + "integrity": 
"sha512-Bm8n3j2ScqnT+kJaClSVCMeiSenK6jVAzZCNewsYWuZtnBehEz4r2qP0riZySZVfzB+03XZHJeqfmJDkeeSLiQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/querystring-parser": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-base64": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-4.0.0.tgz", + "integrity": "sha512-CvHfCmO2mchox9kjrtzoHkWHxjHZzaFojLc8quxXY7WAAMAg43nuxwv95tATVgQFNDwd4M9S1qFzj40Ul41Kmg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-body-length-browser": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-browser/-/util-body-length-browser-4.0.0.tgz", + "integrity": "sha512-sNi3DL0/k64/LO3A256M+m3CDdG6V7WKWHdAiBBMUN8S3hK3aMPhwnPik2A/a2ONN+9doY9UxaLfgqsIRg69QA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-body-length-node": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-node/-/util-body-length-node-4.0.0.tgz", + "integrity": "sha512-q0iDP3VsZzqJyje8xJWEJCNIu3lktUGVoSy1KB0UWym2CL1siV3artm+u1DFYTLejpsrdGyCSWBdGNjJzfDPjg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-buffer-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-4.0.0.tgz", + "integrity": "sha512-9TOQ7781sZvddgO8nxueKi3+yGvkY35kotA0Y6BWRajAv8jjmigQ1sBwz0UX47pQMYXJPahSKEKYFgt+rXdcug==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-config-provider": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-4.0.0.tgz", + "integrity": "sha512-L1RBVzLyfE8OXH+1hsJ8p+acNUSirQnWQ6/EgpchV88G6zGBTDPdXiiExei6Z1wR2RxYvxY/XLw6AMNCCt8H3w==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-defaults-mode-browser": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.0.9.tgz", + "integrity": "sha512-B8j0XsElvyhv6+5hlFf6vFV/uCSyLKcInpeXOGnOImX2mGXshE01RvPoGipTlRpIk53e6UfYj7WdDdgbVfXDZw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-defaults-mode-node": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.0.9.tgz", + "integrity": "sha512-wTDU8P/zdIf9DOpV5qm64HVgGRXvqjqB/fJZTEQbrz3s79JHM/E7XkMm/876Oq+ZLHJQgnXM9QHDo29dlM62eA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/config-resolver": "^4.1.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": 
"^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-endpoints": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.0.2.tgz", + "integrity": "sha512-6QSutU5ZyrpNbnd51zRTL7goojlcnuOB55+F9VBD+j8JpRY50IGamsjlycrmpn8PQkmJucFW8A0LSfXj7jjtLQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-hex-encoding": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-4.0.0.tgz", + "integrity": "sha512-Yk5mLhHtfIgW2W2WQZWSg5kuMZCVbvhFmC7rV4IO2QqnZdbEFPmQnCcGMAX2z/8Qj3B9hYYNjZOhWym+RwhePw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-middleware": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.0.2.tgz", + "integrity": "sha512-6GDamTGLuBQVAEuQ4yDQ+ti/YINf/MEmIegrEeg7DdB/sld8BX1lqt9RRuIcABOhAGTA50bRbPzErez7SlDtDQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-retry": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.0.3.tgz", + "integrity": "sha512-DPuYjZQDXmKr/sNvy9Spu8R/ESa2e22wXZzSAY6NkjOLj6spbIje/Aq8rT97iUMdDj0qHMRIe+bTxvlU74d9Ng==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/service-error-classification": "^4.0.3", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-stream": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.2.0.tgz", + "integrity": "sha512-Vj1TtwWnuWqdgQI6YTUF5hQ/0jmFiOYsc51CSMgj7QfyO+RF4EnT2HNjoviNlOOmgzgvf3f5yno+EiC4vrnaWQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-uri-escape": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-4.0.0.tgz", + "integrity": "sha512-77yfbCbQMtgtTylO9itEAdpPXSog3ZxMe09AEhm0dU0NLTalV70ghDZFR+Nfi1C60jnJoh/Re4090/DuZh2Omg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-utf8": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-4.0.0.tgz", + "integrity": "sha512-b+zebfKCfRdgNJDknHCob3O7FpeYQN6ZG6YLExMcasDHsCXlsXCEuiPZeLnJLpwa5dvPetGlnGCiMHuLwGvFow==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-waiter": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.0.3.tgz", + "integrity": "sha512-JtaY3FxmD+te+KSI2FJuEcfNC9T/DGGVf551babM7fAaXhjJUt7oSYurH1Devxd2+BOSUACCgt3buinx4UnmEA==", + "license": 
"Apache-2.0", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, "node_modules/@swc/counter": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz", @@ -3735,6 +5373,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/bowser": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz", + "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==", + "license": "MIT" + }, "node_modules/brace-expansion": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", diff --git a/package.json b/package.json index 69423f27..18ddac72 100644 --- a/package.json +++ b/package.json @@ -9,6 +9,8 @@ "lint": "next lint" }, "dependencies": { + "@aws-sdk/client-s3": "^3.802.0", + "@aws-sdk/s3-request-presigner": "^3.802.0", "@fullcalendar/react": "^6.1.15", "@hookform/resolvers": "^3.9.1", "@keycloak/keycloak-admin-client": "^26.2.2", diff --git a/yarn.lock b/yarn.lock index 5c9b248f..4cb50a85 100644 --- a/yarn.lock +++ b/yarn.lock @@ -18,6 +18,586 @@ "@csstools/css-tokenizer" "^3.0.3" lru-cache "^10.4.3" +"@aws-crypto/crc32@5.2.0": + version "5.2.0" + resolved "https://registry.npmjs.org/@aws-crypto/crc32/-/crc32-5.2.0.tgz" + integrity sha512-nLbCWqQNgUiwwtFsen1AdzAtvuLRsQS8rYgMuxCrdKf9kOssamGLuPwyTY9wyYblNr9+1XM8v6zoDTPPSIeANg== + dependencies: + "@aws-crypto/util" "^5.2.0" + "@aws-sdk/types" "^3.222.0" + tslib "^2.6.2" + +"@aws-crypto/crc32c@5.2.0": + version "5.2.0" + resolved "https://registry.npmjs.org/@aws-crypto/crc32c/-/crc32c-5.2.0.tgz" + integrity sha512-+iWb8qaHLYKrNvGRbiYRHSdKRWhto5XlZUEBwDjYNf+ly5SVYG6zEoYIdxvf5R3zyeP16w4PLBn3rH1xc74Rag== + dependencies: + "@aws-crypto/util" "^5.2.0" + "@aws-sdk/types" "^3.222.0" + tslib "^2.6.2" + +"@aws-crypto/sha1-browser@5.2.0": + version "5.2.0" + resolved "https://registry.npmjs.org/@aws-crypto/sha1-browser/-/sha1-browser-5.2.0.tgz" + integrity sha512-OH6lveCFfcDjX4dbAvCFSYUjJZjDr/3XJ3xHtjn3Oj5b9RjojQo8npoLeA/bNwkOkrSQ0wgrHzXk4tDRxGKJeg== + dependencies: + "@aws-crypto/supports-web-crypto" "^5.2.0" + "@aws-crypto/util" "^5.2.0" + "@aws-sdk/types" "^3.222.0" + "@aws-sdk/util-locate-window" "^3.0.0" + "@smithy/util-utf8" "^2.0.0" + tslib "^2.6.2" + +"@aws-crypto/sha256-browser@5.2.0": + version "5.2.0" + resolved "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz" + integrity sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw== + dependencies: + "@aws-crypto/sha256-js" "^5.2.0" + "@aws-crypto/supports-web-crypto" "^5.2.0" + "@aws-crypto/util" "^5.2.0" + "@aws-sdk/types" "^3.222.0" + "@aws-sdk/util-locate-window" "^3.0.0" + "@smithy/util-utf8" "^2.0.0" + tslib "^2.6.2" + +"@aws-crypto/sha256-js@^5.2.0", "@aws-crypto/sha256-js@5.2.0": + version "5.2.0" + resolved "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz" + integrity sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA== + dependencies: + "@aws-crypto/util" "^5.2.0" + "@aws-sdk/types" "^3.222.0" + tslib "^2.6.2" + +"@aws-crypto/supports-web-crypto@^5.2.0": + version "5.2.0" + resolved "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz" + integrity 
sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg== + dependencies: + tslib "^2.6.2" + +"@aws-crypto/util@^5.2.0", "@aws-crypto/util@5.2.0": + version "5.2.0" + resolved "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz" + integrity sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ== + dependencies: + "@aws-sdk/types" "^3.222.0" + "@smithy/util-utf8" "^2.0.0" + tslib "^2.6.2" + +"@aws-sdk/client-s3@^3.802.0": + version "3.802.0" + resolved "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.802.0.tgz" + integrity sha512-YIwLLiqRZArEmRI94X8MOpWuXlmxI3NnxYv+3kk6HIc2YWPaOAf0YN7vWlnQFWo6Yi1gBRtP0HM8WzK4Bn5ORQ== + dependencies: + "@aws-crypto/sha1-browser" "5.2.0" + "@aws-crypto/sha256-browser" "5.2.0" + "@aws-crypto/sha256-js" "5.2.0" + "@aws-sdk/core" "3.799.0" + "@aws-sdk/credential-provider-node" "3.799.0" + "@aws-sdk/middleware-bucket-endpoint" "3.775.0" + "@aws-sdk/middleware-expect-continue" "3.775.0" + "@aws-sdk/middleware-flexible-checksums" "3.799.0" + "@aws-sdk/middleware-host-header" "3.775.0" + "@aws-sdk/middleware-location-constraint" "3.775.0" + "@aws-sdk/middleware-logger" "3.775.0" + "@aws-sdk/middleware-recursion-detection" "3.775.0" + "@aws-sdk/middleware-sdk-s3" "3.799.0" + "@aws-sdk/middleware-ssec" "3.775.0" + "@aws-sdk/middleware-user-agent" "3.799.0" + "@aws-sdk/region-config-resolver" "3.775.0" + "@aws-sdk/signature-v4-multi-region" "3.800.0" + "@aws-sdk/types" "3.775.0" + "@aws-sdk/util-endpoints" "3.787.0" + "@aws-sdk/util-user-agent-browser" "3.775.0" + "@aws-sdk/util-user-agent-node" "3.799.0" + "@aws-sdk/xml-builder" "3.775.0" + "@smithy/config-resolver" "^4.1.0" + "@smithy/core" "^3.3.0" + "@smithy/eventstream-serde-browser" "^4.0.2" + "@smithy/eventstream-serde-config-resolver" "^4.1.0" + "@smithy/eventstream-serde-node" "^4.0.2" + "@smithy/fetch-http-handler" "^5.0.2" + "@smithy/hash-blob-browser" "^4.0.2" + "@smithy/hash-node" "^4.0.2" + "@smithy/hash-stream-node" "^4.0.2" + "@smithy/invalid-dependency" "^4.0.2" + "@smithy/md5-js" "^4.0.2" + "@smithy/middleware-content-length" "^4.0.2" + "@smithy/middleware-endpoint" "^4.1.1" + "@smithy/middleware-retry" "^4.1.1" + "@smithy/middleware-serde" "^4.0.3" + "@smithy/middleware-stack" "^4.0.2" + "@smithy/node-config-provider" "^4.0.2" + "@smithy/node-http-handler" "^4.0.4" + "@smithy/protocol-http" "^5.1.0" + "@smithy/smithy-client" "^4.2.1" + "@smithy/types" "^4.2.0" + "@smithy/url-parser" "^4.0.2" + "@smithy/util-base64" "^4.0.0" + "@smithy/util-body-length-browser" "^4.0.0" + "@smithy/util-body-length-node" "^4.0.0" + "@smithy/util-defaults-mode-browser" "^4.0.9" + "@smithy/util-defaults-mode-node" "^4.0.9" + "@smithy/util-endpoints" "^3.0.2" + "@smithy/util-middleware" "^4.0.2" + "@smithy/util-retry" "^4.0.2" + "@smithy/util-stream" "^4.2.0" + "@smithy/util-utf8" "^4.0.0" + "@smithy/util-waiter" "^4.0.3" + tslib "^2.6.2" + +"@aws-sdk/client-sso@3.799.0": + version "3.799.0" + resolved "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.799.0.tgz" + integrity sha512-/i/LG7AiWPmPxKCA2jnR2zaf7B3HYSTbxaZI21ElIz9wASlNAsKr8CnLY7qb50kOyXiNfQ834S5Q3Gl8dX9o3Q== + dependencies: + "@aws-crypto/sha256-browser" "5.2.0" + "@aws-crypto/sha256-js" "5.2.0" + "@aws-sdk/core" "3.799.0" + "@aws-sdk/middleware-host-header" "3.775.0" + "@aws-sdk/middleware-logger" "3.775.0" + "@aws-sdk/middleware-recursion-detection" "3.775.0" + "@aws-sdk/middleware-user-agent" "3.799.0" + "@aws-sdk/region-config-resolver" "3.775.0" 
+ "@aws-sdk/types" "3.775.0" + "@aws-sdk/util-endpoints" "3.787.0" + "@aws-sdk/util-user-agent-browser" "3.775.0" + "@aws-sdk/util-user-agent-node" "3.799.0" + "@smithy/config-resolver" "^4.1.0" + "@smithy/core" "^3.3.0" + "@smithy/fetch-http-handler" "^5.0.2" + "@smithy/hash-node" "^4.0.2" + "@smithy/invalid-dependency" "^4.0.2" + "@smithy/middleware-content-length" "^4.0.2" + "@smithy/middleware-endpoint" "^4.1.1" + "@smithy/middleware-retry" "^4.1.1" + "@smithy/middleware-serde" "^4.0.3" + "@smithy/middleware-stack" "^4.0.2" + "@smithy/node-config-provider" "^4.0.2" + "@smithy/node-http-handler" "^4.0.4" + "@smithy/protocol-http" "^5.1.0" + "@smithy/smithy-client" "^4.2.1" + "@smithy/types" "^4.2.0" + "@smithy/url-parser" "^4.0.2" + "@smithy/util-base64" "^4.0.0" + "@smithy/util-body-length-browser" "^4.0.0" + "@smithy/util-body-length-node" "^4.0.0" + "@smithy/util-defaults-mode-browser" "^4.0.9" + "@smithy/util-defaults-mode-node" "^4.0.9" + "@smithy/util-endpoints" "^3.0.2" + "@smithy/util-middleware" "^4.0.2" + "@smithy/util-retry" "^4.0.2" + "@smithy/util-utf8" "^4.0.0" + tslib "^2.6.2" + +"@aws-sdk/core@3.799.0": + version "3.799.0" + resolved "https://registry.npmjs.org/@aws-sdk/core/-/core-3.799.0.tgz" + integrity sha512-hkKF3Zpc6+H8GI1rlttYVRh9uEE77cqAzLmLpY3iu7sql8cZgPERRBfaFct8p1SaDyrksLNiboD1vKW58mbsYg== + dependencies: + "@aws-sdk/types" "3.775.0" + "@smithy/core" "^3.3.0" + "@smithy/node-config-provider" "^4.0.2" + "@smithy/property-provider" "^4.0.2" + "@smithy/protocol-http" "^5.1.0" + "@smithy/signature-v4" "^5.1.0" + "@smithy/smithy-client" "^4.2.1" + "@smithy/types" "^4.2.0" + "@smithy/util-middleware" "^4.0.2" + fast-xml-parser "4.4.1" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-env@3.799.0": + version "3.799.0" + resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.799.0.tgz" + integrity sha512-vT/SSWtbUIOW/U21qgEySmmO44SFWIA7WeQPX1OrI8WJ5n7OEI23JWLHjLvHTkYmuZK6z1rPcv7HzRgmuGRibA== + dependencies: + "@aws-sdk/core" "3.799.0" + "@aws-sdk/types" "3.775.0" + "@smithy/property-provider" "^4.0.2" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-http@3.799.0": + version "3.799.0" + resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.799.0.tgz" + integrity sha512-2CjBpOWmhaPAExOgHnIB5nOkS5ef+mfRlJ1JC4nsnjAx0nrK4tk0XRE0LYz11P3+ue+a86cU8WTmBo+qjnGxPQ== + dependencies: + "@aws-sdk/core" "3.799.0" + "@aws-sdk/types" "3.775.0" + "@smithy/fetch-http-handler" "^5.0.2" + "@smithy/node-http-handler" "^4.0.4" + "@smithy/property-provider" "^4.0.2" + "@smithy/protocol-http" "^5.1.0" + "@smithy/smithy-client" "^4.2.1" + "@smithy/types" "^4.2.0" + "@smithy/util-stream" "^4.2.0" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-ini@3.799.0": + version "3.799.0" + resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.799.0.tgz" + integrity sha512-M9ubILFxerqw4QJwk83MnjtZyoA2eNCiea5V+PzZeHlwk2PON/EnawKqy65x9/hMHGoSvvNuby7iMAmPptu7yw== + dependencies: + "@aws-sdk/core" "3.799.0" + "@aws-sdk/credential-provider-env" "3.799.0" + "@aws-sdk/credential-provider-http" "3.799.0" + "@aws-sdk/credential-provider-process" "3.799.0" + "@aws-sdk/credential-provider-sso" "3.799.0" + "@aws-sdk/credential-provider-web-identity" "3.799.0" + "@aws-sdk/nested-clients" "3.799.0" + "@aws-sdk/types" "3.775.0" + "@smithy/credential-provider-imds" "^4.0.2" + "@smithy/property-provider" "^4.0.2" + "@smithy/shared-ini-file-loader" "^4.0.2" + 
"@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-node@3.799.0": + version "3.799.0" + resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.799.0.tgz" + integrity sha512-nd9fSJc0wUlgKUkIr2ldJhcIIrzJFS29AGZoyY22J3xih63nNDv61eTGVMsDZzHlV21XzMlPEljTR7axiimckg== + dependencies: + "@aws-sdk/credential-provider-env" "3.799.0" + "@aws-sdk/credential-provider-http" "3.799.0" + "@aws-sdk/credential-provider-ini" "3.799.0" + "@aws-sdk/credential-provider-process" "3.799.0" + "@aws-sdk/credential-provider-sso" "3.799.0" + "@aws-sdk/credential-provider-web-identity" "3.799.0" + "@aws-sdk/types" "3.775.0" + "@smithy/credential-provider-imds" "^4.0.2" + "@smithy/property-provider" "^4.0.2" + "@smithy/shared-ini-file-loader" "^4.0.2" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-process@3.799.0": + version "3.799.0" + resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.799.0.tgz" + integrity sha512-g8jmNs2k98WNHMYcea1YKA+7ao2Ma4w0P42Dz4YpcI155pQHxHx25RwbOG+rsAKuo3bKwkW53HVE/ZTKhcWFgw== + dependencies: + "@aws-sdk/core" "3.799.0" + "@aws-sdk/types" "3.775.0" + "@smithy/property-provider" "^4.0.2" + "@smithy/shared-ini-file-loader" "^4.0.2" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-sso@3.799.0": + version "3.799.0" + resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.799.0.tgz" + integrity sha512-lQv27QkNU9FJFZqEf5DIEN3uXEN409Iaym9WJzhOouGtxvTIAWiD23OYh1u8PvBdrordJGS2YddfQvhcmq9akw== + dependencies: + "@aws-sdk/client-sso" "3.799.0" + "@aws-sdk/core" "3.799.0" + "@aws-sdk/token-providers" "3.799.0" + "@aws-sdk/types" "3.775.0" + "@smithy/property-provider" "^4.0.2" + "@smithy/shared-ini-file-loader" "^4.0.2" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-web-identity@3.799.0": + version "3.799.0" + resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.799.0.tgz" + integrity sha512-8k1i9ut+BEg0QZ+I6UQMxGNR1T8paLmAOAZXU+nLQR0lcxS6lr8v+dqofgzQPuHLBkWNCr1Av1IKeL3bJjgU7g== + dependencies: + "@aws-sdk/core" "3.799.0" + "@aws-sdk/nested-clients" "3.799.0" + "@aws-sdk/types" "3.775.0" + "@smithy/property-provider" "^4.0.2" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-bucket-endpoint@3.775.0": + version "3.775.0" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.775.0.tgz" + integrity sha512-qogMIpVChDYr4xiUNC19/RDSw/sKoHkAhouS6Skxiy6s27HBhow1L3Z1qVYXuBmOZGSWPU0xiyZCvOyWrv9s+Q== + dependencies: + "@aws-sdk/types" "3.775.0" + "@aws-sdk/util-arn-parser" "3.723.0" + "@smithy/node-config-provider" "^4.0.2" + "@smithy/protocol-http" "^5.1.0" + "@smithy/types" "^4.2.0" + "@smithy/util-config-provider" "^4.0.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-expect-continue@3.775.0": + version "3.775.0" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.775.0.tgz" + integrity sha512-Apd3owkIeUW5dnk3au9np2IdW2N0zc9NjTjHiH+Mx3zqwSrc+m+ANgJVgk9mnQjMzU/vb7VuxJ0eqdEbp5gYsg== + dependencies: + "@aws-sdk/types" "3.775.0" + "@smithy/protocol-http" "^5.1.0" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-flexible-checksums@3.799.0": + version "3.799.0" + resolved 
"https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.799.0.tgz" + integrity sha512-vBIAdDl2neaFiUMxyr7dAtX7m9Iw5c0bz7OirD0JGW0nYn0mBcqKpFZEU75ewA5p2+Cm7RQDdt6099ne3gj0WA== + dependencies: + "@aws-crypto/crc32" "5.2.0" + "@aws-crypto/crc32c" "5.2.0" + "@aws-crypto/util" "5.2.0" + "@aws-sdk/core" "3.799.0" + "@aws-sdk/types" "3.775.0" + "@smithy/is-array-buffer" "^4.0.0" + "@smithy/node-config-provider" "^4.0.2" + "@smithy/protocol-http" "^5.1.0" + "@smithy/types" "^4.2.0" + "@smithy/util-middleware" "^4.0.2" + "@smithy/util-stream" "^4.2.0" + "@smithy/util-utf8" "^4.0.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-host-header@3.775.0": + version "3.775.0" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.775.0.tgz" + integrity sha512-tkSegM0Z6WMXpLB8oPys/d+umYIocvO298mGvcMCncpRl77L9XkvSLJIFzaHes+o7djAgIduYw8wKIMStFss2w== + dependencies: + "@aws-sdk/types" "3.775.0" + "@smithy/protocol-http" "^5.1.0" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-location-constraint@3.775.0": + version "3.775.0" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.775.0.tgz" + integrity sha512-8TMXEHZXZTFTckQLyBT5aEI8fX11HZcwZseRifvBKKpj0RZDk4F0EEYGxeNSPpUQ7n+PRWyfAEnnZNRdAj/1NQ== + dependencies: + "@aws-sdk/types" "3.775.0" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-logger@3.775.0": + version "3.775.0" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.775.0.tgz" + integrity sha512-FaxO1xom4MAoUJsldmR92nT1G6uZxTdNYOFYtdHfd6N2wcNaTuxgjIvqzg5y7QIH9kn58XX/dzf1iTjgqUStZw== + dependencies: + "@aws-sdk/types" "3.775.0" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-recursion-detection@3.775.0": + version "3.775.0" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.775.0.tgz" + integrity sha512-GLCzC8D0A0YDG5u3F5U03Vb9j5tcOEFhr8oc6PDk0k0vm5VwtZOE6LvK7hcCSoAB4HXyOUM0sQuXrbaAh9OwXA== + dependencies: + "@aws-sdk/types" "3.775.0" + "@smithy/protocol-http" "^5.1.0" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-sdk-s3@3.799.0": + version "3.799.0" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.799.0.tgz" + integrity sha512-Zwdge5NArgcJwPuGZwgfXY6XXkWEBmMS9dqu5g3DcfHmZUuSjQUqmOsDdSZlE3RFHrDAEbuGQlrFUE8zuwdKQA== + dependencies: + "@aws-sdk/core" "3.799.0" + "@aws-sdk/types" "3.775.0" + "@aws-sdk/util-arn-parser" "3.723.0" + "@smithy/core" "^3.3.0" + "@smithy/node-config-provider" "^4.0.2" + "@smithy/protocol-http" "^5.1.0" + "@smithy/signature-v4" "^5.1.0" + "@smithy/smithy-client" "^4.2.1" + "@smithy/types" "^4.2.0" + "@smithy/util-config-provider" "^4.0.0" + "@smithy/util-middleware" "^4.0.2" + "@smithy/util-stream" "^4.2.0" + "@smithy/util-utf8" "^4.0.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-ssec@3.775.0": + version "3.775.0" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.775.0.tgz" + integrity sha512-Iw1RHD8vfAWWPzBBIKaojO4GAvQkHOYIpKdAfis/EUSUmSa79QsnXnRqsdcE0mCB0Ylj23yi+ah4/0wh9FsekA== + dependencies: + "@aws-sdk/types" "3.775.0" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-user-agent@3.799.0": + version "3.799.0" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.799.0.tgz" + integrity 
sha512-TropQZanbOTxa+p+Nl4fWkzlRhgFwDfW+Wb6TR3jZN7IXHNlPpgGFpdrgvBExhW/RBhqr+94OsR8Ou58lp3hhA== + dependencies: + "@aws-sdk/core" "3.799.0" + "@aws-sdk/types" "3.775.0" + "@aws-sdk/util-endpoints" "3.787.0" + "@smithy/core" "^3.3.0" + "@smithy/protocol-http" "^5.1.0" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@aws-sdk/nested-clients@3.799.0": + version "3.799.0" + resolved "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.799.0.tgz" + integrity sha512-zILlWh7asrcQG9JYMYgnvEQBfwmWKfED0yWCf3UNAmQcfS9wkCAWCgicNy/y5KvNvEYnHidsU117STtyuUNG5g== + dependencies: + "@aws-crypto/sha256-browser" "5.2.0" + "@aws-crypto/sha256-js" "5.2.0" + "@aws-sdk/core" "3.799.0" + "@aws-sdk/middleware-host-header" "3.775.0" + "@aws-sdk/middleware-logger" "3.775.0" + "@aws-sdk/middleware-recursion-detection" "3.775.0" + "@aws-sdk/middleware-user-agent" "3.799.0" + "@aws-sdk/region-config-resolver" "3.775.0" + "@aws-sdk/types" "3.775.0" + "@aws-sdk/util-endpoints" "3.787.0" + "@aws-sdk/util-user-agent-browser" "3.775.0" + "@aws-sdk/util-user-agent-node" "3.799.0" + "@smithy/config-resolver" "^4.1.0" + "@smithy/core" "^3.3.0" + "@smithy/fetch-http-handler" "^5.0.2" + "@smithy/hash-node" "^4.0.2" + "@smithy/invalid-dependency" "^4.0.2" + "@smithy/middleware-content-length" "^4.0.2" + "@smithy/middleware-endpoint" "^4.1.1" + "@smithy/middleware-retry" "^4.1.1" + "@smithy/middleware-serde" "^4.0.3" + "@smithy/middleware-stack" "^4.0.2" + "@smithy/node-config-provider" "^4.0.2" + "@smithy/node-http-handler" "^4.0.4" + "@smithy/protocol-http" "^5.1.0" + "@smithy/smithy-client" "^4.2.1" + "@smithy/types" "^4.2.0" + "@smithy/url-parser" "^4.0.2" + "@smithy/util-base64" "^4.0.0" + "@smithy/util-body-length-browser" "^4.0.0" + "@smithy/util-body-length-node" "^4.0.0" + "@smithy/util-defaults-mode-browser" "^4.0.9" + "@smithy/util-defaults-mode-node" "^4.0.9" + "@smithy/util-endpoints" "^3.0.2" + "@smithy/util-middleware" "^4.0.2" + "@smithy/util-retry" "^4.0.2" + "@smithy/util-utf8" "^4.0.0" + tslib "^2.6.2" + +"@aws-sdk/region-config-resolver@3.775.0": + version "3.775.0" + resolved "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.775.0.tgz" + integrity sha512-40iH3LJjrQS3LKUJAl7Wj0bln7RFPEvUYKFxtP8a+oKFDO0F65F52xZxIJbPn6sHkxWDAnZlGgdjZXM3p2g5wQ== + dependencies: + "@aws-sdk/types" "3.775.0" + "@smithy/node-config-provider" "^4.0.2" + "@smithy/types" "^4.2.0" + "@smithy/util-config-provider" "^4.0.0" + "@smithy/util-middleware" "^4.0.2" + tslib "^2.6.2" + +"@aws-sdk/s3-request-presigner@^3.802.0": + version "3.802.0" + resolved "https://registry.npmjs.org/@aws-sdk/s3-request-presigner/-/s3-request-presigner-3.802.0.tgz" + integrity sha512-gXavOjkIIbd89ooAKziPWPnvwA3ZuJvpLMWr2xB/RyDZIgnjSwvRGFpRJ5EZPZjkhBFk0QQcEVp2FIrPI9wHlA== + dependencies: + "@aws-sdk/signature-v4-multi-region" "3.800.0" + "@aws-sdk/types" "3.775.0" + "@aws-sdk/util-format-url" "3.775.0" + "@smithy/middleware-endpoint" "^4.1.1" + "@smithy/protocol-http" "^5.1.0" + "@smithy/smithy-client" "^4.2.1" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@aws-sdk/signature-v4-multi-region@3.800.0": + version "3.800.0" + resolved "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.800.0.tgz" + integrity sha512-c71wZuiSUHNFCvcuqOv3jbqP+NquB2YKN4qX90OwYXEqUKn8F8fKJPpjjHjz1eK6qWKtECR4V/NTno2P70Yz/Q== + dependencies: + "@aws-sdk/middleware-sdk-s3" "3.799.0" + "@aws-sdk/types" "3.775.0" + "@smithy/protocol-http" "^5.1.0" + "@smithy/signature-v4" "^5.1.0" + 
"@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@aws-sdk/token-providers@3.799.0": + version "3.799.0" + resolved "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.799.0.tgz" + integrity sha512-/8iDjnsJs/D8AhGbDAmdF5oSHzE4jsDsM2RIIxmBAKTZXkaaclQBNX9CmAqLKQmO3IUMZsDH2KENHLVAk/N/mw== + dependencies: + "@aws-sdk/nested-clients" "3.799.0" + "@aws-sdk/types" "3.775.0" + "@smithy/property-provider" "^4.0.2" + "@smithy/shared-ini-file-loader" "^4.0.2" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@aws-sdk/types@^3.222.0", "@aws-sdk/types@3.775.0": + version "3.775.0" + resolved "https://registry.npmjs.org/@aws-sdk/types/-/types-3.775.0.tgz" + integrity sha512-ZoGKwa4C9fC9Av6bdfqcW6Ix5ot05F/S4VxWR2nHuMv7hzfmAjTOcUiWT7UR4hM/U0whf84VhDtXN/DWAk52KA== + dependencies: + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@aws-sdk/util-arn-parser@3.723.0": + version "3.723.0" + resolved "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.723.0.tgz" + integrity sha512-ZhEfvUwNliOQROcAk34WJWVYTlTa4694kSVhDSjW6lE1bMataPnIN8A0ycukEzBXmd8ZSoBcQLn6lKGl7XIJ5w== + dependencies: + tslib "^2.6.2" + +"@aws-sdk/util-endpoints@3.787.0": + version "3.787.0" + resolved "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.787.0.tgz" + integrity sha512-fd3zkiOkwnbdbN0Xp9TsP5SWrmv0SpT70YEdbb8wAj2DWQwiCmFszaSs+YCvhoCdmlR3Wl9Spu0pGpSAGKeYvQ== + dependencies: + "@aws-sdk/types" "3.775.0" + "@smithy/types" "^4.2.0" + "@smithy/util-endpoints" "^3.0.2" + tslib "^2.6.2" + +"@aws-sdk/util-format-url@3.775.0": + version "3.775.0" + resolved "https://registry.npmjs.org/@aws-sdk/util-format-url/-/util-format-url-3.775.0.tgz" + integrity sha512-Nw4nBeyCbWixoGh8NcVpa/i8McMA6RXJIjQFyloJLaPr7CPquz7ZbSl0MUWMFVwP/VHaJ7B+lNN3Qz1iFCEP/Q== + dependencies: + "@aws-sdk/types" "3.775.0" + "@smithy/querystring-builder" "^4.0.2" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@aws-sdk/util-locate-window@^3.0.0": + version "3.723.0" + resolved "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.723.0.tgz" + integrity sha512-Yf2CS10BqK688DRsrKI/EO6B8ff5J86NXe4C+VCysK7UOgN0l1zOTeTukZ3H8Q9tYYX3oaF1961o8vRkFm7Nmw== + dependencies: + tslib "^2.6.2" + +"@aws-sdk/util-user-agent-browser@3.775.0": + version "3.775.0" + resolved "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.775.0.tgz" + integrity sha512-txw2wkiJmZKVdDbscK7VBK+u+TJnRtlUjRTLei+elZg2ADhpQxfVAQl436FUeIv6AhB/oRHW6/K/EAGXUSWi0A== + dependencies: + "@aws-sdk/types" "3.775.0" + "@smithy/types" "^4.2.0" + bowser "^2.11.0" + tslib "^2.6.2" + +"@aws-sdk/util-user-agent-node@3.799.0": + version "3.799.0" + resolved "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.799.0.tgz" + integrity sha512-iXBk38RbIWPF5Nq9O4AnktORAzXovSVqWYClvS1qbE7ILsnTLJbagU9HlU25O2iV5COVh1qZkwuP5NHQ2yTEyw== + dependencies: + "@aws-sdk/middleware-user-agent" "3.799.0" + "@aws-sdk/types" "3.775.0" + "@smithy/node-config-provider" "^4.0.2" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@aws-sdk/xml-builder@3.775.0": + version "3.775.0" + resolved "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.775.0.tgz" + integrity sha512-b9NGO6FKJeLGYnV7Z1yvcP1TNU4dkD5jNsLWOF1/sygZoASaQhNOlaiJ/1OH331YQ1R1oWk38nBb0frsYkDsOQ== + dependencies: + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + "@babel/runtime@^7.20.13", "@babel/runtime@^7.5.5", "@babel/runtime@^7.8.7": version "7.26.0" resolved "https://registry.npmjs.org/@babel/runtime/-/runtime-7.26.0.tgz" @@ -1070,6 +1650,496 
@@ domhandler "^5.0.3" selderee "^0.11.0" +"@smithy/abort-controller@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.0.2.tgz" + integrity sha512-Sl/78VDtgqKxN2+1qduaVE140XF+Xg+TafkncspwM4jFP/LHr76ZHmIY/y3V1M0mMLNk+Je6IGbzxy23RSToMw== + dependencies: + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@smithy/chunked-blob-reader-native@^4.0.0": + version "4.0.0" + resolved "https://registry.npmjs.org/@smithy/chunked-blob-reader-native/-/chunked-blob-reader-native-4.0.0.tgz" + integrity sha512-R9wM2yPmfEMsUmlMlIgSzOyICs0x9uu7UTHoccMyt7BWw8shcGM8HqB355+BZCPBcySvbTYMs62EgEQkNxz2ig== + dependencies: + "@smithy/util-base64" "^4.0.0" + tslib "^2.6.2" + +"@smithy/chunked-blob-reader@^5.0.0": + version "5.0.0" + resolved "https://registry.npmjs.org/@smithy/chunked-blob-reader/-/chunked-blob-reader-5.0.0.tgz" + integrity sha512-+sKqDBQqb036hh4NPaUiEkYFkTUGYzRsn3EuFhyfQfMy6oGHEUJDurLP9Ufb5dasr/XiAmPNMr6wa9afjQB+Gw== + dependencies: + tslib "^2.6.2" + +"@smithy/config-resolver@^4.1.0": + version "4.1.0" + resolved "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.1.0.tgz" + integrity sha512-8smPlwhga22pwl23fM5ew4T9vfLUCeFXlcqNOCD5M5h8VmNPNUE9j6bQSuRXpDSV11L/E/SwEBQuW8hr6+nS1A== + dependencies: + "@smithy/node-config-provider" "^4.0.2" + "@smithy/types" "^4.2.0" + "@smithy/util-config-provider" "^4.0.0" + "@smithy/util-middleware" "^4.0.2" + tslib "^2.6.2" + +"@smithy/core@^3.3.0": + version "3.3.0" + resolved "https://registry.npmjs.org/@smithy/core/-/core-3.3.0.tgz" + integrity sha512-r6gvs5OfRq/w+9unPm7B3po4rmWaGh0CIL/OwHntGGux7+RhOOZLGuurbeMgWV6W55ZuyMTypJLeH0vn/ZRaWQ== + dependencies: + "@smithy/middleware-serde" "^4.0.3" + "@smithy/protocol-http" "^5.1.0" + "@smithy/types" "^4.2.0" + "@smithy/util-body-length-browser" "^4.0.0" + "@smithy/util-middleware" "^4.0.2" + "@smithy/util-stream" "^4.2.0" + "@smithy/util-utf8" "^4.0.0" + tslib "^2.6.2" + +"@smithy/credential-provider-imds@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.0.2.tgz" + integrity sha512-32lVig6jCaWBHnY+OEQ6e6Vnt5vDHaLiydGrwYMW9tPqO688hPGTYRamYJ1EptxEC2rAwJrHWmPoKRBl4iTa8w== + dependencies: + "@smithy/node-config-provider" "^4.0.2" + "@smithy/property-provider" "^4.0.2" + "@smithy/types" "^4.2.0" + "@smithy/url-parser" "^4.0.2" + tslib "^2.6.2" + +"@smithy/eventstream-codec@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-4.0.2.tgz" + integrity sha512-p+f2kLSK7ZrXVfskU/f5dzksKTewZk8pJLPvER3aFHPt76C2MxD9vNatSfLzzQSQB4FNO96RK4PSXfhD1TTeMQ== + dependencies: + "@aws-crypto/crc32" "5.2.0" + "@smithy/types" "^4.2.0" + "@smithy/util-hex-encoding" "^4.0.0" + tslib "^2.6.2" + +"@smithy/eventstream-serde-browser@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-4.0.2.tgz" + integrity sha512-CepZCDs2xgVUtH7ZZ7oDdZFH8e6Y2zOv8iiX6RhndH69nlojCALSKK+OXwZUgOtUZEUaZ5e1hULVCHYbCn7pug== + dependencies: + "@smithy/eventstream-serde-universal" "^4.0.2" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@smithy/eventstream-serde-config-resolver@^4.1.0": + version "4.1.0" + resolved "https://registry.npmjs.org/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-4.1.0.tgz" + integrity sha512-1PI+WPZ5TWXrfj3CIoKyUycYynYJgZjuQo8U+sphneOtjsgrttYybdqESFReQrdWJ+LKt6NEdbYzmmfDBmjX2A== + dependencies: + "@smithy/types" "^4.2.0" + tslib 
"^2.6.2" + +"@smithy/eventstream-serde-node@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@smithy/eventstream-serde-node/-/eventstream-serde-node-4.0.2.tgz" + integrity sha512-C5bJ/C6x9ENPMx2cFOirspnF9ZsBVnBMtP6BdPl/qYSuUawdGQ34Lq0dMcf42QTjUZgWGbUIZnz6+zLxJlb9aw== + dependencies: + "@smithy/eventstream-serde-universal" "^4.0.2" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@smithy/eventstream-serde-universal@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-4.0.2.tgz" + integrity sha512-St8h9JqzvnbB52FtckiHPN4U/cnXcarMniXRXTKn0r4b4XesZOGiAyUdj1aXbqqn1icSqBlzzUsCl6nPB018ng== + dependencies: + "@smithy/eventstream-codec" "^4.0.2" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@smithy/fetch-http-handler@^5.0.2": + version "5.0.2" + resolved "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.0.2.tgz" + integrity sha512-+9Dz8sakS9pe7f2cBocpJXdeVjMopUDLgZs1yWeu7h++WqSbjUYv/JAJwKwXw1HV6gq1jyWjxuyn24E2GhoEcQ== + dependencies: + "@smithy/protocol-http" "^5.1.0" + "@smithy/querystring-builder" "^4.0.2" + "@smithy/types" "^4.2.0" + "@smithy/util-base64" "^4.0.0" + tslib "^2.6.2" + +"@smithy/hash-blob-browser@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@smithy/hash-blob-browser/-/hash-blob-browser-4.0.2.tgz" + integrity sha512-3g188Z3DyhtzfBRxpZjU8R9PpOQuYsbNnyStc/ZVS+9nVX1f6XeNOa9IrAh35HwwIZg+XWk8bFVtNINVscBP+g== + dependencies: + "@smithy/chunked-blob-reader" "^5.0.0" + "@smithy/chunked-blob-reader-native" "^4.0.0" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@smithy/hash-node@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.0.2.tgz" + integrity sha512-VnTpYPnRUE7yVhWozFdlxcYknv9UN7CeOqSrMH+V877v4oqtVYuoqhIhtSjmGPvYrYnAkaM61sLMKHvxL138yg== + dependencies: + "@smithy/types" "^4.2.0" + "@smithy/util-buffer-from" "^4.0.0" + "@smithy/util-utf8" "^4.0.0" + tslib "^2.6.2" + +"@smithy/hash-stream-node@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@smithy/hash-stream-node/-/hash-stream-node-4.0.2.tgz" + integrity sha512-POWDuTznzbIwlEXEvvXoPMS10y0WKXK790soe57tFRfvf4zBHyzE529HpZMqmDdwG9MfFflnyzndUQ8j78ZdSg== + dependencies: + "@smithy/types" "^4.2.0" + "@smithy/util-utf8" "^4.0.0" + tslib "^2.6.2" + +"@smithy/invalid-dependency@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.0.2.tgz" + integrity sha512-GatB4+2DTpgWPday+mnUkoumP54u/MDM/5u44KF9hIu8jF0uafZtQLcdfIKkIcUNuF/fBojpLEHZS/56JqPeXQ== + dependencies: + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@smithy/is-array-buffer@^2.2.0": + version "2.2.0" + resolved "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz" + integrity sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA== + dependencies: + tslib "^2.6.2" + +"@smithy/is-array-buffer@^4.0.0": + version "4.0.0" + resolved "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-4.0.0.tgz" + integrity sha512-saYhF8ZZNoJDTvJBEWgeBccCg+yvp1CX+ed12yORU3NilJScfc6gfch2oVb4QgxZrGUx3/ZJlb+c/dJbyupxlw== + dependencies: + tslib "^2.6.2" + +"@smithy/md5-js@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-4.0.2.tgz" + integrity sha512-Hc0R8EiuVunUewCse2syVgA2AfSRco3LyAv07B/zCOMa+jpXI9ll+Q21Nc6FAlYPcpNcAXqBzMhNs1CD/pP2bA== + dependencies: + "@smithy/types" "^4.2.0" + "@smithy/util-utf8" "^4.0.0" + tslib 
"^2.6.2" + +"@smithy/middleware-content-length@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.0.2.tgz" + integrity sha512-hAfEXm1zU+ELvucxqQ7I8SszwQ4znWMbNv6PLMndN83JJN41EPuS93AIyh2N+gJ6x8QFhzSO6b7q2e6oClDI8A== + dependencies: + "@smithy/protocol-http" "^5.1.0" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@smithy/middleware-endpoint@^4.1.1": + version "4.1.1" + resolved "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.1.1.tgz" + integrity sha512-z5RmcHxjvScL+LwEDU2mTNCOhgUs4lu5PGdF1K36IPRmUHhNFxNxgenSB7smyDiYD4vdKQ7CAZtG5cUErqib9w== + dependencies: + "@smithy/core" "^3.3.0" + "@smithy/middleware-serde" "^4.0.3" + "@smithy/node-config-provider" "^4.0.2" + "@smithy/shared-ini-file-loader" "^4.0.2" + "@smithy/types" "^4.2.0" + "@smithy/url-parser" "^4.0.2" + "@smithy/util-middleware" "^4.0.2" + tslib "^2.6.2" + +"@smithy/middleware-retry@^4.1.1": + version "4.1.2" + resolved "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.1.2.tgz" + integrity sha512-qN/Mmxm8JWtFAjozJ8VSTM83KOX4cIks8UjDqqNkKIegzPrE5ZKPNCQ/DqUSIF90pue5a/NycNXnBod2NwvZZQ== + dependencies: + "@smithy/node-config-provider" "^4.0.2" + "@smithy/protocol-http" "^5.1.0" + "@smithy/service-error-classification" "^4.0.3" + "@smithy/smithy-client" "^4.2.1" + "@smithy/types" "^4.2.0" + "@smithy/util-middleware" "^4.0.2" + "@smithy/util-retry" "^4.0.3" + tslib "^2.6.2" + uuid "^9.0.1" + +"@smithy/middleware-serde@^4.0.3": + version "4.0.3" + resolved "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.0.3.tgz" + integrity sha512-rfgDVrgLEVMmMn0BI8O+8OVr6vXzjV7HZj57l0QxslhzbvVfikZbVfBVthjLHqib4BW44QhcIgJpvebHlRaC9A== + dependencies: + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@smithy/middleware-stack@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.0.2.tgz" + integrity sha512-eSPVcuJJGVYrFYu2hEq8g8WWdJav3sdrI4o2c6z/rjnYDd3xH9j9E7deZQCzFn4QvGPouLngH3dQ+QVTxv5bOQ== + dependencies: + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@smithy/node-config-provider@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.0.2.tgz" + integrity sha512-WgCkILRZfJwJ4Da92a6t3ozN/zcvYyJGUTmfGbgS/FkCcoCjl7G4FJaCDN1ySdvLvemnQeo25FdkyMSTSwulsw== + dependencies: + "@smithy/property-provider" "^4.0.2" + "@smithy/shared-ini-file-loader" "^4.0.2" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@smithy/node-http-handler@^4.0.4": + version "4.0.4" + resolved "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.0.4.tgz" + integrity sha512-/mdqabuAT3o/ihBGjL94PUbTSPSRJ0eeVTdgADzow0wRJ0rN4A27EOrtlK56MYiO1fDvlO3jVTCxQtQmK9dZ1g== + dependencies: + "@smithy/abort-controller" "^4.0.2" + "@smithy/protocol-http" "^5.1.0" + "@smithy/querystring-builder" "^4.0.2" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@smithy/property-provider@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.0.2.tgz" + integrity sha512-wNRoQC1uISOuNc2s4hkOYwYllmiyrvVXWMtq+TysNRVQaHm4yoafYQyjN/goYZS+QbYlPIbb/QRjaUZMuzwQ7A== + dependencies: + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@smithy/protocol-http@^5.1.0": + version "5.1.0" + resolved "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.1.0.tgz" + integrity sha512-KxAOL1nUNw2JTYrtviRRjEnykIDhxc84qMBzxvu1MUfQfHTuBlCG7PA6EdVwqpJjH7glw7FqQoFxUJSyBQgu7g== 
+ dependencies: + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@smithy/querystring-builder@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.0.2.tgz" + integrity sha512-NTOs0FwHw1vimmQM4ebh+wFQvOwkEf/kQL6bSM1Lock+Bv4I89B3hGYoUEPkmvYPkDKyp5UdXJYu+PoTQ3T31Q== + dependencies: + "@smithy/types" "^4.2.0" + "@smithy/util-uri-escape" "^4.0.0" + tslib "^2.6.2" + +"@smithy/querystring-parser@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.0.2.tgz" + integrity sha512-v6w8wnmZcVXjfVLjxw8qF7OwESD9wnpjp0Dqry/Pod0/5vcEA3qxCr+BhbOHlxS8O+29eLpT3aagxXGwIoEk7Q== + dependencies: + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@smithy/service-error-classification@^4.0.3": + version "4.0.3" + resolved "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.0.3.tgz" + integrity sha512-FTbcajmltovWMjj3tksDQdD23b2w6gH+A0DYA1Yz3iSpjDj8fmkwy62UnXcWMy4d5YoMoSyLFHMfkEVEzbiN8Q== + dependencies: + "@smithy/types" "^4.2.0" + +"@smithy/shared-ini-file-loader@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.0.2.tgz" + integrity sha512-J9/gTWBGVuFZ01oVA6vdb4DAjf1XbDhK6sLsu3OS9qmLrS6KB5ygpeHiM3miIbj1qgSJ96GYszXFWv6ErJ8QEw== + dependencies: + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@smithy/signature-v4@^5.1.0": + version "5.1.0" + resolved "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.1.0.tgz" + integrity sha512-4t5WX60sL3zGJF/CtZsUQTs3UrZEDO2P7pEaElrekbLqkWPYkgqNW1oeiNYC6xXifBnT9dVBOnNQRvOE9riU9w== + dependencies: + "@smithy/is-array-buffer" "^4.0.0" + "@smithy/protocol-http" "^5.1.0" + "@smithy/types" "^4.2.0" + "@smithy/util-hex-encoding" "^4.0.0" + "@smithy/util-middleware" "^4.0.2" + "@smithy/util-uri-escape" "^4.0.0" + "@smithy/util-utf8" "^4.0.0" + tslib "^2.6.2" + +"@smithy/smithy-client@^4.2.1": + version "4.2.1" + resolved "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.2.1.tgz" + integrity sha512-fbniZef60QdsBc4ZY0iyI8xbFHIiC/QRtPi66iE4ufjiE/aaz7AfUXzcWMkpO8r+QhLeNRIfmPchIG+3/QDZ6g== + dependencies: + "@smithy/core" "^3.3.0" + "@smithy/middleware-endpoint" "^4.1.1" + "@smithy/middleware-stack" "^4.0.2" + "@smithy/protocol-http" "^5.1.0" + "@smithy/types" "^4.2.0" + "@smithy/util-stream" "^4.2.0" + tslib "^2.6.2" + +"@smithy/types@^4.2.0": + version "4.2.0" + resolved "https://registry.npmjs.org/@smithy/types/-/types-4.2.0.tgz" + integrity sha512-7eMk09zQKCO+E/ivsjQv+fDlOupcFUCSC/L2YUPgwhvowVGWbPQHjEFcmjt7QQ4ra5lyowS92SV53Zc6XD4+fg== + dependencies: + tslib "^2.6.2" + +"@smithy/url-parser@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.0.2.tgz" + integrity sha512-Bm8n3j2ScqnT+kJaClSVCMeiSenK6jVAzZCNewsYWuZtnBehEz4r2qP0riZySZVfzB+03XZHJeqfmJDkeeSLiQ== + dependencies: + "@smithy/querystring-parser" "^4.0.2" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@smithy/util-base64@^4.0.0": + version "4.0.0" + resolved "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-4.0.0.tgz" + integrity sha512-CvHfCmO2mchox9kjrtzoHkWHxjHZzaFojLc8quxXY7WAAMAg43nuxwv95tATVgQFNDwd4M9S1qFzj40Ul41Kmg== + dependencies: + "@smithy/util-buffer-from" "^4.0.0" + "@smithy/util-utf8" "^4.0.0" + tslib "^2.6.2" + +"@smithy/util-body-length-browser@^4.0.0": + version "4.0.0" + resolved "https://registry.npmjs.org/@smithy/util-body-length-browser/-/util-body-length-browser-4.0.0.tgz" + integrity 
sha512-sNi3DL0/k64/LO3A256M+m3CDdG6V7WKWHdAiBBMUN8S3hK3aMPhwnPik2A/a2ONN+9doY9UxaLfgqsIRg69QA== + dependencies: + tslib "^2.6.2" + +"@smithy/util-body-length-node@^4.0.0": + version "4.0.0" + resolved "https://registry.npmjs.org/@smithy/util-body-length-node/-/util-body-length-node-4.0.0.tgz" + integrity sha512-q0iDP3VsZzqJyje8xJWEJCNIu3lktUGVoSy1KB0UWym2CL1siV3artm+u1DFYTLejpsrdGyCSWBdGNjJzfDPjg== + dependencies: + tslib "^2.6.2" + +"@smithy/util-buffer-from@^2.2.0": + version "2.2.0" + resolved "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz" + integrity sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA== + dependencies: + "@smithy/is-array-buffer" "^2.2.0" + tslib "^2.6.2" + +"@smithy/util-buffer-from@^4.0.0": + version "4.0.0" + resolved "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-4.0.0.tgz" + integrity sha512-9TOQ7781sZvddgO8nxueKi3+yGvkY35kotA0Y6BWRajAv8jjmigQ1sBwz0UX47pQMYXJPahSKEKYFgt+rXdcug== + dependencies: + "@smithy/is-array-buffer" "^4.0.0" + tslib "^2.6.2" + +"@smithy/util-config-provider@^4.0.0": + version "4.0.0" + resolved "https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-4.0.0.tgz" + integrity sha512-L1RBVzLyfE8OXH+1hsJ8p+acNUSirQnWQ6/EgpchV88G6zGBTDPdXiiExei6Z1wR2RxYvxY/XLw6AMNCCt8H3w== + dependencies: + tslib "^2.6.2" + +"@smithy/util-defaults-mode-browser@^4.0.9": + version "4.0.9" + resolved "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.0.9.tgz" + integrity sha512-B8j0XsElvyhv6+5hlFf6vFV/uCSyLKcInpeXOGnOImX2mGXshE01RvPoGipTlRpIk53e6UfYj7WdDdgbVfXDZw== + dependencies: + "@smithy/property-provider" "^4.0.2" + "@smithy/smithy-client" "^4.2.1" + "@smithy/types" "^4.2.0" + bowser "^2.11.0" + tslib "^2.6.2" + +"@smithy/util-defaults-mode-node@^4.0.9": + version "4.0.9" + resolved "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.0.9.tgz" + integrity sha512-wTDU8P/zdIf9DOpV5qm64HVgGRXvqjqB/fJZTEQbrz3s79JHM/E7XkMm/876Oq+ZLHJQgnXM9QHDo29dlM62eA== + dependencies: + "@smithy/config-resolver" "^4.1.0" + "@smithy/credential-provider-imds" "^4.0.2" + "@smithy/node-config-provider" "^4.0.2" + "@smithy/property-provider" "^4.0.2" + "@smithy/smithy-client" "^4.2.1" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@smithy/util-endpoints@^3.0.2": + version "3.0.2" + resolved "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.0.2.tgz" + integrity sha512-6QSutU5ZyrpNbnd51zRTL7goojlcnuOB55+F9VBD+j8JpRY50IGamsjlycrmpn8PQkmJucFW8A0LSfXj7jjtLQ== + dependencies: + "@smithy/node-config-provider" "^4.0.2" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@smithy/util-hex-encoding@^4.0.0": + version "4.0.0" + resolved "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-4.0.0.tgz" + integrity sha512-Yk5mLhHtfIgW2W2WQZWSg5kuMZCVbvhFmC7rV4IO2QqnZdbEFPmQnCcGMAX2z/8Qj3B9hYYNjZOhWym+RwhePw== + dependencies: + tslib "^2.6.2" + +"@smithy/util-middleware@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.0.2.tgz" + integrity sha512-6GDamTGLuBQVAEuQ4yDQ+ti/YINf/MEmIegrEeg7DdB/sld8BX1lqt9RRuIcABOhAGTA50bRbPzErez7SlDtDQ== + dependencies: + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@smithy/util-retry@^4.0.2", "@smithy/util-retry@^4.0.3": + version "4.0.3" + resolved "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.0.3.tgz" + integrity 
sha512-DPuYjZQDXmKr/sNvy9Spu8R/ESa2e22wXZzSAY6NkjOLj6spbIje/Aq8rT97iUMdDj0qHMRIe+bTxvlU74d9Ng== + dependencies: + "@smithy/service-error-classification" "^4.0.3" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + +"@smithy/util-stream@^4.2.0": + version "4.2.0" + resolved "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.2.0.tgz" + integrity sha512-Vj1TtwWnuWqdgQI6YTUF5hQ/0jmFiOYsc51CSMgj7QfyO+RF4EnT2HNjoviNlOOmgzgvf3f5yno+EiC4vrnaWQ== + dependencies: + "@smithy/fetch-http-handler" "^5.0.2" + "@smithy/node-http-handler" "^4.0.4" + "@smithy/types" "^4.2.0" + "@smithy/util-base64" "^4.0.0" + "@smithy/util-buffer-from" "^4.0.0" + "@smithy/util-hex-encoding" "^4.0.0" + "@smithy/util-utf8" "^4.0.0" + tslib "^2.6.2" + +"@smithy/util-uri-escape@^4.0.0": + version "4.0.0" + resolved "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-4.0.0.tgz" + integrity sha512-77yfbCbQMtgtTylO9itEAdpPXSog3ZxMe09AEhm0dU0NLTalV70ghDZFR+Nfi1C60jnJoh/Re4090/DuZh2Omg== + dependencies: + tslib "^2.6.2" + +"@smithy/util-utf8@^2.0.0": + version "2.3.0" + resolved "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz" + integrity sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A== + dependencies: + "@smithy/util-buffer-from" "^2.2.0" + tslib "^2.6.2" + +"@smithy/util-utf8@^4.0.0": + version "4.0.0" + resolved "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-4.0.0.tgz" + integrity sha512-b+zebfKCfRdgNJDknHCob3O7FpeYQN6ZG6YLExMcasDHsCXlsXCEuiPZeLnJLpwa5dvPetGlnGCiMHuLwGvFow== + dependencies: + "@smithy/util-buffer-from" "^4.0.0" + tslib "^2.6.2" + +"@smithy/util-waiter@^4.0.3": + version "4.0.3" + resolved "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.0.3.tgz" + integrity sha512-JtaY3FxmD+te+KSI2FJuEcfNC9T/DGGVf551babM7fAaXhjJUt7oSYurH1Devxd2+BOSUACCgt3buinx4UnmEA== + dependencies: + "@smithy/abort-controller" "^4.0.2" + "@smithy/types" "^4.2.0" + tslib "^2.6.2" + "@swc/counter@0.1.3": version "0.1.3" resolved "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz" @@ -1371,6 +2441,11 @@ binary-extensions@^2.0.0: resolved "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz" integrity sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw== +bowser@^2.11.0: + version "2.11.0" + resolved "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz" + integrity sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA== + brace-expansion@^2.0.1: version "2.0.1" resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz" @@ -1992,6 +3067,13 @@ fast-xml-parser@^4.5.1: dependencies: strnum "^1.1.1" +fast-xml-parser@4.4.1: + version "4.4.1" + resolved "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.4.1.tgz" + integrity sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw== + dependencies: + strnum "^1.0.5" + fastq@^1.6.0: version "1.18.0" resolved "https://registry.npmjs.org/fastq/-/fastq-1.18.0.tgz" @@ -3684,7 +4766,7 @@ strip-ansi@^7.0.1: dependencies: ansi-regex "^6.0.1" -strnum@^1.1.1: +strnum@^1.0.5, strnum@^1.1.1: version "1.1.2" resolved "https://registry.npmjs.org/strnum/-/strnum-1.1.2.tgz" integrity sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA== @@ -3831,7 +4913,7 @@ ts-interface-checker@^0.1.9: resolved "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz" 
integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA== -tslib@^2.0.0, tslib@^2.1.0, tslib@^2.8.0: +tslib@^2.0.0, tslib@^2.1.0, tslib@^2.6.2, tslib@^2.8.0: version "2.8.1" resolved "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz" integrity sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w== @@ -3930,6 +5012,11 @@ uuid@^8.3.2: resolved "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz" integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== +uuid@^9.0.1: + version "9.0.1" + resolved "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz" + integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA== + vaul@^0.9.6: version "0.9.9" resolved "https://registry.npmjs.org/vaul/-/vaul-0.9.9.tgz"