/* global nsha256, nhashFile */
/**
 * @module @unitybase/aws
 * @example
 const s3Client = new aws.S3Client({
   URL: 'http://localhost:9000',
   credentials: {
     accessKeyId: '....',
     secretAccessKey: '...'
   }
 })
 s3Client.listBuckets() //  [{"Name":"ubbs","CreationDate":"2024-01-03T09:11:57.815Z"},{"Name":"ubbs-ver","CreationDate":"2024-04-25T12:43:39.789Z"}]
 // upload buffer to bucket. Content type will be calculated based on `bucketKey` extension
 s3Client.upload({
   bucket: 'ubbs',
   bucketKey: '2024/01/01/test.txt',
   buffer: Buffer.from('Hello, світ!')
 })
 */

const url = require('url')
const http = require('http')
const crypto = require('crypto')
const fs = require('fs')
const path = require('path')
const mime = require('mime-types')

// Note - fast-xml-parser is fast only for small XMLs; for XMLs that contain BASE64 data it is slower than xmldom
const { XMLParser /*, XMLBuilder, XMLValidator */ } = require('fast-xml-parser')
const xmlParser = new XMLParser({
  removeNSPrefix: true,
  ignoreAttributes: false
})
if (typeof String.prototype.trimStart !== 'function') { // UB polyfill - fast-xml-parser uses trimStart
  // eslint-disable-next-line no-extend-native
  String.prototype.trimStart = String.prototype.trimLeft
}

/**
 * Non `x-amz-*` header names that are included in the signature (all `x-amz-*` headers are signed as well)
 * @type {Set}
 */
const NON_X_AMZ_HEADERS2SIGN = new Set(['host', 'content-type', 'range'])
const AWS_ALGORITHM = 'AWS4-HMAC-SHA256'
const AWS_S3_SERVICE = 's3'
const AWS4_REQUEST = 'aws4_request'
const S3_URI_SCHEMA = 's3://'

/**
 * Constant defining the x-amz-content-sha256 header name
 * @type {string}
 */
// eslint-disable-next-line no-unused-vars
const SHA_HEADER = 'x-amz-content-sha256'
/**
 * SHA256 of empty string
 * @type {string}
 */
const EMPTY_SHA = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'

/**
 * If true, the bucket name is appended to the URL path; otherwise it is prepended to the host (virtual-hosted style) -
 * see https://aws.amazon.com/blogs/aws/amazon-s3-path-deprecation-plan-the-rest-of-the-story/
 * Currently only the path style is implemented
 * @type {boolean}
 */
// eslint-disable-next-line no-unused-vars
const IS_S3_PATH = true

/**
 * S3 compatible API for UnityBase
 * @example
const s3Client = new aws.S3Client({
  URL: 'http://localhost:9000',
  credentials: {
    accessKeyId: '....',
    secretAccessKey: '...'
  }
})
s3Client.listBuckets() //  [{"Name":"ubbs","CreationDate":"2024-01-03T09:11:57.815Z"},{"Name":"ubbs-ver","CreationDate":"2024-04-25T12:43:39.789Z"}]
// upload buffer to bucket. Content type will be calculated based on `bucketKey` extension
s3Client.upload({
  bucket: 'ubbs',
  bucketKey: '2024/01/01/test.txt',
  buffer: Buffer.from('Hello, світ!')
})
 */
class S3Client {
  /**
   * Create an S3 client
   * @param {object} opt
   * @param {string} opt.URL S3 URL. Example: AWS - https://s3.us-east-1.amazonaws.com, local min.io - http://localhost:9000
   * @param {string} [opt.region='us-east-1'] AWS region
   * @param {object} opt.credentials Credentials
   * @param {string} opt.credentials.accessKeyId Access key ID
   * @param {string} opt.credentials.secretAccessKey Secret Access Key
   */
  constructor (opt) {
    this._opt = opt
    if (!this._opt.region) this._opt.region = 'us-east-1'
    // eslint-disable-next-line n/no-deprecated-api
    this.URLParts = url.parse(opt.URL)
    this._NOW = new Date()
    this.s3Host = this.URLParts.host
  }

  /**
   * Compute parameters (signed headers string and path) for an AWS request. See https://docs.aws.amazon.com/IAM/latest/UserGuide/create-signed-request.html
   *
   * @param {object} options
   * @param {string} options.HTTPMethod
   * @param {object} [options.additionalHeaders]
   * @param {string} [options.bucket]
   * @param {string} [options.s3Path='/']
   * @param {object} [options.urlParams]
   * @param {string} [options.HashedPayload]
   * @returns {{URL: string, HTTPMethod: string, headersStr: string, path: string}}
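   * @example
   * // A minimal signing sketch (bucket and key below are illustrative); `prepareReq` does the same internally
   * const reqParams = s3Client.computeAwsHttpRequestParams({
   *   HTTPMethod: 'GET',
   *   bucket: 'ubbs',
   *   s3Path: '2024/01/01/test.txt'
   * })
   * // reqParams is { URL, HTTPMethod, headersStr, path } - ready to be passed into an HTTP client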
   */
  computeAwsHttpRequestParams ({ HTTPMethod, additionalHeaders, bucket, s3Path, urlParams, HashedPayload }) {
    // Step 1: Create a canonical request
    if (HTTPMethod === 'GET' || HTTPMethod === 'HEAD' || HTTPMethod === 'DELETE') {
      HashedPayload = EMPTY_SHA
    }
    if (!s3Path) {
      s3Path = '/'
    }
    // TODO - path must be .split('/').map(encodeURIComponent).join('/') but for UB path parts do not require encoding
    if (bucket) {
      s3Path = path.join(bucket, s3Path)
    }
    if (!s3Path.startsWith('/')) {
      s3Path = '/' + s3Path
    }

    let CanonicalQueryString = ''
    if (urlParams) {
      // query parameter names must be sorted and URI-encoded - as required by SigV4
      CanonicalQueryString = Object.keys(urlParams).sort()
        .map(prm => encodeURIComponent(prm) + '=' + encodeURIComponent(urlParams[prm]))
        .join('&')
    }
    const RequestDateTime = getAmzDate(new Date())
    const headers = Object.assign({}, additionalHeaders, {
      host: this.s3Host,
      'x-amz-content-sha256': HashedPayload,
      'x-amz-date': RequestDateTime
    })
    if (HTTPMethod === 'PUT') {
      if (!headers['content-type']) {
        headers['content-type'] = mime.contentType(path.extname(s3Path)) || 'application/octet-stream'
      }
    }
    // signed header names must be lower-cased and sorted, values trimmed - as required by AWS (callers are expected to pass them this way)
    const sortedHdrNames = Object.keys(headers).filter(h => h.startsWith('x-amz-') || NON_X_AMZ_HEADERS2SIGN.has(h)).sort()
    const CanonicalHeaders = sortedHdrNames.map(h => h + ':' + headers[h]).join('\n') + '\n'
    const SignedHeaders = sortedHdrNames.join(';')

    const CanonicalRequest = [
      HTTPMethod, s3Path, CanonicalQueryString, CanonicalHeaders, SignedHeaders, HashedPayload
    ].join('\n')

    // console.debug('CanonicalRequest=', '\n' + CanonicalRequest)

    // Step 2: Create a hash of the canonical request
    const HashedCanonicalRequest = nsha256(CanonicalRequest)

    // Step 3: Create a string to sign
    const RequestDate = RequestDateTime.substring(0, 8) // YYYYMMDD
    const CredentialScope = [
      RequestDate,
      this._opt.region,
      AWS_S3_SERVICE,
      AWS4_REQUEST
    ].join('/')
    const stringToSign = [AWS_ALGORITHM, RequestDateTime, CredentialScope, HashedCanonicalRequest].join('\n')
    // console.debug('stringToSign=', stringToSign)
    // Step 4: Calculate the signature
    // const DateKey = Buffer.from(nhmac_sha256('AWS4' + this._opt.credentials.secretAccessKey, RequestDate), 'hex')
    // const DateRegionKey = Buffer.from(nhmac_sha256(DateKey, this._opt.region), 'hex')
    // const DateRegionServiceKey = Buffer.from(nhmac_sha256(DateRegionKey, AWS_S3_SERVICE), 'hex')
    // const SigningKey = Buffer.from(nhmac_sha256(DateRegionServiceKey, AWS4_REQUEST), 'hex')
    // const signature = nhmac_sha256(SigningKey, stringToSign)
    const DateKey = crypto.createHmac('sha256', 'AWS4' + this._opt.credentials.secretAccessKey)
      .update(RequestDate).digest()
    const DateRegionKey = crypto.createHmac('sha256', Buffer.from(DateKey))
      .update(this._opt.region).digest()
    const DateRegionServiceKey = crypto.createHmac('sha256', Buffer.from(DateRegionKey))
      .update(AWS_S3_SERVICE).digest()
    const SigningKey = crypto.createHmac('sha256', Buffer.from(DateRegionServiceKey))
      .update(AWS4_REQUEST).digest()
    const signature = crypto.createHmac('sha256', Buffer.from(SigningKey))
      .update(stringToSign).digest('hex')
    const authorizationHeaderVal = AWS_ALGORITHM + ' ' + 'Credential=' + this._opt.credentials.accessKeyId + '/' +
      CredentialScope + ',SignedHeaders=' + SignedHeaders + ',Signature=' + signature
    // console.debug('authorizationHeaderVal=', authorizationHeaderVal)

    headers.Authorization = authorizationHeaderVal
    // make headers string
    const arr = []
    for (const prop in headers) {
      arr.push(prop + ': ' + headers[prop])
    }
    const headersStr = arr.join('\r\n')
    // console.debug('Send HTTP req to', this._opt.URL, 'using', HTTPMethod, 'with headers', headersStr)
    return {
      URL: this._opt.URL,
      HTTPMethod,
      headersStr,
      // the query string (if any) must be part of the request path, otherwise the signature won't match
      path: CanonicalQueryString ? s3Path + '?' + CanonicalQueryString : s3Path
    }
  }

  /**
   * Prepare a signed HTTP request for AWS. See https://docs.aws.amazon.com/IAM/latest/UserGuide/create-signed-request.html
   *
   * @private
   * @param {object} options
   * @param {string} options.HTTPMethod
   * @param {object} [options.additionalHeaders]
   * @param {string} [options.bucket]
   * @param {string} [options.s3Path='/']
   * @param {object} [options.urlParams]
   * @param {string} [options.HashedPayload]
   * @returns {ClientRequest}
   */
  prepareReq (options) {
    const reqParams = this.computeAwsHttpRequestParams(options)
    const req = http.request({
      URL: reqParams.URL
    })
    req.setMethod(reqParams.HTTPMethod)
    req.setHeadersAsString(reqParams.headersStr)
    req.setPath(reqParams.path)
    return req
  }

  /**
   * Put data into specified `s3url` or `bucket+bucketKey`. Either binary data in `params.buffer` or path to file in `params.filePath` must be specified
   *
   * If `additionalHeaders['content-type']` is not specified, the content type is derived from the `bucketKey` extension.
   *
   * Will throw on error or return `true` on success
   *
   * @param {object} params
   * @param {string} [params.s3url] Url in format `s3://bucket/bucketKey`. If passed - preferred over `params.bucket` and `params.bucketKey`
   * @param {string} [params.bucket] Bucket to put data
   * @param {string} [params.bucketKey] A key to put data into, for example `2024/01/01/12211221.pdf`
   * @param {string} [params.filePath] Path to file to put into bucket
   * @param {ArrayBufferLike} [params.buffer] Binary data to put (if filePath is not specified)
   * @param {object} [params.urlParams] URL params for the request - keys are param names
   * @param {object} [params.additionalHeaders]
   * @returns {boolean}
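   * @example
   * // minimal sketches; bucket, key and file path below are illustrative
   * // upload a local file
   * s3Client.upload({ bucket: 'ubbs', bucketKey: '2024/01/01/report.pdf', filePath: '/tmp/report.pdf' })
   * // upload a buffer using the s3://bucket/key form
   * s3Client.upload({ s3url: 's3://ubbs/2024/01/01/test.txt', buffer: Buffer.from('Hello!') })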
   */
  upload (params) {
    const HashedPayload = params.filePath
      ? nhashFile(params.filePath, 'SHA256')
      : nsha256(params.buffer)
    const p = params.s3url ? parseS3Url(params.s3url) : params
    const req = this.prepareReq({
      HTTPMethod: 'PUT',
      bucket: p.bucket,
      s3Path: p.bucketKey,
      additionalHeaders: params.additionalHeaders,
      HashedPayload
    })
    const buf = params.filePath
      ? fs.readFileSync(params.filePath, { encoding: 'bin' })
      : params.buffer
    const resp = req.end(buf)
    this.checkRespError(resp)
    return true
  }

  /**
   * Retrieve object metadata. Returns `false` if the object does not exist
   *
   * @param {object} params
   * @param {string} params.bucket Bucket
   * @param {string} params.bucketKey A key to get metadata, for example `2024/01/01/12211221.pdf`
   * @param {object} [params.additionalHeaders]
   * @returns {boolean|object}
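   * @example
   * // a minimal sketch; bucket and key below are illustrative
   * const meta = s3Client.head({ bucket: 'ubbs', bucketKey: '2024/01/01/test.txt' })
   * if (meta === false) console.log('object does not exist')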
   */
  head (params) {
    const req = this.prepareReq({
      HTTPMethod: 'HEAD',
      bucket: params.bucket,
      s3Path: params.bucketKey,
      additionalHeaders: params.additionalHeaders
    })
    const resp = req.end()
    if (resp && resp.statusCode === 200) {
      return resp.headers
    }
    if (resp && resp.statusCode === 404) {
      return false // not exists
    }
    this.checkRespError(resp) // throws on error
  }

  /**
   * List bucket content (object keys under the given prefix). See https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjectsV2.html
   *
   * @param {object} params
   * @param {string} params.bucket Bucket to list
   * @param {string} [params.pathInBucket='/'] Key prefix to list (usually ends with /) or '/' for the whole bucket
   * @returns {Array<object>} Array of ListObjectsV2 `Contents` entries ({Key, LastModified, Size, ...})
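   * @example
   * // a minimal sketch; bucket and prefix below are illustrative
   * const objects = s3Client.list({ bucket: 'ubbs', pathInBucket: '2024/01/' })
   * objects.forEach(o => console.log(o.Key, o.Size))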
   */
  list (params) {
    let prefix = params.pathInBucket || '/'
    if (prefix.startsWith('/')) prefix = prefix.slice(1) // S3 key prefixes do not start with '/'
    const req = this.prepareReq({
      HTTPMethod: 'GET',
      bucket: params.bucket,
      s3Path: '/',
      urlParams: { 'list-type': '2', prefix }
    })
    const resp = req.end()
    this.checkRespError(resp)
    const res = xmlParser.parse(resp.read('utf-8'))
    // ListBucketResult.Contents is an object for a single key and an Array for many - normalize to Array
    let result = []
    if (res.ListBucketResult && res.ListBucketResult.Contents) {
      result = Array.isArray(res.ListBucketResult.Contents) ? res.ListBucketResult.Contents : [res.ListBucketResult.Contents]
    }
    return result
  }

  /**
   * Download an object from S3 by either `s3url` or `bucket+bucketKey`. See https://docs.aws.amazon.com/AmazonS3/latest/API/API_GetObject.html
   *
   * @param {object} params
   * @param {string} [params.s3url] Url in format `s3://bucket/bucketKey`. If passed - preferred over `params.bucket` and `params.bucketKey`
   * @param {string} [params.bucket] Bucket to download from
   * @param {string} [params.bucketKey] A key to download, for example `2024/01/01/12211221.pdf`
   * @param {object} [params.additionalHeaders]
   * @param {string} [params.encoding='bin'] Encoding for the returned data
   * @returns {string|Buffer|ArrayBuffer}
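   * @example
   * // minimal sketches; bucket and key below are illustrative
   * const content = s3Client.download({ bucket: 'ubbs', bucketKey: '2024/01/01/test.txt' })
   * const asText = s3Client.download({ s3url: 's3://ubbs/2024/01/01/test.txt', encoding: 'utf-8' })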
   */
  download (params) {
    const p = params.s3url ? parseS3Url(params.s3url) : params
    const req = this.prepareReq({
      HTTPMethod: 'GET',
      bucket: p.bucket,
      s3Path: p.bucketKey,
      additionalHeaders: params.additionalHeaders
    })
    const resp = req.end()
    this.checkRespError(resp)
    return resp.read(params.encoding || 'bin')
  }

  /**
   * Delete an object from S3 by either `s3url` or `bucket+bucketKey`, or throw. See https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteObject.html
   *
   * @param {object} params
   * @param {string} [params.s3url] Url in format `s3://bucket/bucketKey`. If passed - preferred over `params.bucket` and `params.bucketKey`
   * @param {string} [params.bucket] Bucket to delete from
   * @param {string} [params.bucketKey] A key to delete, for example `2024/01/01/12211221.pdf`
   * @param {object} [params.additionalHeaders]
   * @returns {boolean}
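   * @example
   * // a minimal sketch; bucket and key below are illustrative
   * s3Client.delete({ s3url: 's3://ubbs/2024/01/01/test.txt' })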
   */
  delete (params) {
    const p = params.s3url ? parseS3Url(params.s3url) : params
    const req = this.prepareReq({
      HTTPMethod: 'DELETE',
      bucket: p.bucket,
      s3Path: p.bucketKey,
      additionalHeaders: params.additionalHeaders
    })
    const resp = req.end()
    this.checkRespError(resp)
    return true // DELETE returns 204 No Content, so there is no body to read
  }

  /**
   * List buckets. https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListBuckets.html
   *
   * @param {object} [params]
   * @param {object} [params.additionalHeaders]
   * @returns {Array<{Name: string, CreationDate: string}>}
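   * @example
   * // names of all buckets visible to the configured credentials
   * const bucketNames = s3Client.listBuckets().map(b => b.Name) // ['ubbs', 'ubbs-ver']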
   */
  listBuckets (params) {
    const req = this.prepareReq({
      HTTPMethod: 'GET',
      s3Path: '/',
      additionalHeaders: params ? params.additionalHeaders : null
    })
    const resp = req.end()
    this.checkRespError(resp)
    const resXML = resp.read('utf-8')
    const res = xmlParser.parse(resXML)
    // "ListAllMyBucketsResult": {
    //   "Owner": {
    //     "ID": "02d6176db174dc93cb1b899f7c6078f08654445fe8cf1b6ce98d8855f66bdbf4",
    //       "DisplayName": "minio"
    //   },
    //   "Buckets": {
    //     "Bucket": { // OR Array if many
    //       "Name": "ubbs",
    //         "CreationDate": "2024-01-03T09:11:57.815Z"
    //     }
    //   }
    // }
    let result = []
    if (res.ListAllMyBucketsResult && res.ListAllMyBucketsResult.Buckets && res.ListAllMyBucketsResult.Buckets.Bucket) {
      if (Array.isArray(res.ListAllMyBucketsResult.Buckets.Bucket)) {
        result = res.ListAllMyBucketsResult.Buckets.Bucket
      } else {
        result = [res.ListAllMyBucketsResult.Buckets.Bucket]
      }
    }
    // console.debug(JSON.stringify(res, null, ' '))
    return result
  }

  /**
   * Check response is valid or throw
   * @param {IncomingMessage} resp
   */
  checkRespError (resp) {
    const errRe = /<Error>(.*)<\/Error>/
    if (!resp || (resp.statusCode !== 200 && resp.statusCode !== 204)) { // DELETE returns 204 No Content
      let msg = resp ? resp.read('utf-8') : 'no response'
      const parsed = errRe.exec(msg)
      if (parsed && parsed[0]) {
        msg = parsed[0]
      }
      throw new Error('S3Client error: ' + msg)
    }
  }
}
/**
 * Convert date into AMZ Date YYYYMMDDTHHMMSSZ
 * @param {Date} d
 * @returns {string}
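 * @example
 * // e.g. for 2013-05-24 00:00:00 UTC
 * getAmzDate(new Date(Date.UTC(2013, 4, 24))) // '20130524T000000Z'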
 */
function getAmzDate (d) {
  // mock
  // return '20130524T000000Z'
  return d.toISOString().replace(/[-:]/g, '').substring(0, 15) + 'Z'
}

/**
 * Parse URL in S3 format `s3://bucket/path/to/object` into bucket and bucketKey
 * @param {string} url
 * @return {{bucket: string, bucketKey: string}}
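 * @example
 * // split an s3:// URL into its bucket and key parts
 * parseS3Url('s3://ubbs/2024/01/01/test.txt') // {bucket: 'ubbs', bucketKey: '2024/01/01/test.txt'}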
 */
function parseS3Url (url) {
  if (!url.startsWith(S3_URI_SCHEMA)) throw new Error('Expect s3 URL to start with s3:// but got: ' + url)
  const p = url.slice(S3_URI_SCHEMA.length)
  const bEnd = p.indexOf('/')
  if (bEnd === -1) throw new Error('Expect s3 URL to be s3://bucket/path/to/object but got: ' + url)
  return {
    bucket: p.slice(0, bEnd),
    bucketKey: p.slice(bEnd + 1)
  }
}

module.exports = {
  S3Client,
  getAmzDate,
  parseS3Url,
  S3_URI_SCHEMA
}