/* global nhashFile */
/**
 * @module @unitybase/s3-blob-store
 */

const fs = require('fs')
const BlobStores = require('@unitybase/blob-stores')
const FileSystemBlobStore = BlobStores.classes.FileSystemBlobStore
const aws = require('@unitybase/aws')
const UB = require('@unitybase/ub')

/**
 * @type S3Client
 * @private
 */
let threadS3Client
/**
 *  @classdesc
 *  BLOB store implementation which stores content in an S3-compatible storage.
 *  Key concepts:
 *
 *    - the relative path of a permanent item is created in the `s3://bucket/relativePath` format to hide the real storage location from the client
 *    - temporary (dirty) content is stored in the local file system temp folder, as for FileSystemBlobStore
 *    - deletion of a permanent item removes the corresponding object from the S3 bucket (see `doDeletion`)
 *
 *  Singleton
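 *
 *  @example
 *  // A minimal sketch of the configuration this store reads. The `s3` section keys are those
 *  // checked by the constructor; the store entry location under `blobStores` is an assumption
 *  // based on other UB BLOB stores; all values are illustrative:
 *  //   "application": {
 *  //     "blobStores": [{ "name": "mainStore", "s3enabled": true }],
 *  //     "s3": {
 *  //       "URL": "https://s3.example.com",
 *  //       "defaultBucket": "ubbs",
 *  //       "anonymousReadPolicyEnabled": true,
 *  //       "credentials": { "accessKeyId": "<key>", "secretAccessKey": "<secret>" }
 *  //     }
 *  //   }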
 */
class S3BlobStore extends FileSystemBlobStore {
  /**
   * @param {object} storeConfig
   * @param {ServerApp} appInstance
   * @param {UBSession} sessionInstance
   * @param {object} options
   */
  constructor (storeConfig, appInstance, sessionInstance, options) {
    super(storeConfig, appInstance, sessionInstance, { checkStorePath: storeConfig.s3enabled })
    this.s3enabled = storeConfig.s3enabled || false
    if (!this.s3enabled) {
      return
    }
    const s3Cfg = appInstance.serverConfig.application.s3
    if (!s3Cfg) {
      throw new Error(`BLOB store "${this.name}" is configured to use s3, but the serverConfig.application.s3 section does not exist`)
    }
    if (!s3Cfg.URL || !s3Cfg.credentials || !s3Cfg.credentials.accessKeyId || !s3Cfg.credentials.secretAccessKey) {
      throw new Error(`BLOB store "${this.name}" is configured to use s3, but URL, credentials.accessKeyId or credentials.secretAccessKey is empty in the serverConfig.application.s3 section`)
    }
    this.respondUsingProxy = this.PROXY_SEND_FILE_HEADER && (s3Cfg.anonymousReadPolicyEnabled === true)
    this.defaultBucket = s3Cfg.defaultBucket || 'ubbs'
    if (!threadS3Client) {
      threadS3Client = new aws.S3Client({
        URL: s3Cfg.URL,
        credentials: s3Cfg.credentials
      })
      if (!this.respondUsingProxy && !process.isDebug) {
        console.warn('S3 BLOB store: for production usage it is strongly recommended to enable a reverse proxy and set \'serverConfig.application.s3.anonymousReadPolicyEnabled\' to true')
      }
    }
  }

  /**
   * Returns true if the item is stored in S3 storage
   *
   * @param {BlobStoreRequest} [request]
   * @param {BlobStoreItem} blobInfo
   * @returns {boolean}
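   *
   * @example
   * // Illustrative sketch: `store` is an S3BlobStore instance; a relPath starting with the
   * // s3 URI schema (assumed to be 's3://') means the content lives in S3
   * store.isStoredInS3(null, { relPath: 's3://ubbs/rel/path', fName: 'a.pdf' }) // true
   * store.isStoredInS3({ isDirty: true }, { relPath: 's3://ubbs/rel/path' }) // false - dirty content is a local temp file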
   */
  isStoredInS3 (request, blobInfo) {
    if (request && request.isDirty) {
      return false // temp files are stored in the BLOB store temp folder
    } else if (blobInfo.relPath) {
      return blobInfo.relPath.startsWith(aws.S3_URI_SCHEMA)
    } else {
      return false
    }
  }

  /**
   * Returns full path to the file with BLOB content. For s3 objects returns s3://bucket/path
   *
   * @param {BlobStoreRequest} request
   * @param {BlobStoreItem} blobInfo JSON retrieved from a DB
   * @returns {string}
   */
  getContentFilePath (request, blobInfo) {
    return request.isDirty ? this.getTempFileName(request) : this.getPermanentFileName(blobInfo, request)
  }

  /**
   * Retrieve BLOB content from blob store
   *
   * @param {BlobStoreRequest} request
   * @param {BlobStoreItem} blobInfo JSON retrieved from a DB.
   * @param {object} [options]
   * @param {string|null} [options.encoding] Possible values:
   *   'bin', 'ascii', 'binary', 'hex', 'ucs2'/'ucs-2'/'utf16le'/'utf-16le', 'utf8'/'utf-8', 'base64';
   *   if `null` - a {@link Buffer} is returned, if `'bin'` - an ArrayBuffer
   * @returns {string|Buffer|ArrayBuffer|null}
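   *
   * @example
   * // Illustrative sketch: `store`, `request` and `blobInfo` are assumed to be obtained elsewhere
   * const buf = store.getContent(request, blobInfo, { encoding: null }) // Buffer
   * const b64 = store.getContent(request, blobInfo, { encoding: 'base64' }) // base64 string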
   */
  getContent (request, blobInfo, options) {
    if (!this.isStoredInS3(request, blobInfo)) {
      return super.getContent(request, blobInfo, options)
    }
    const filePath = this.getContentFilePath(request, blobInfo)
    if (!filePath) return undefined
    if (!threadS3Client) {
      throw new Error(`File '${filePath}' is stored in s3, but s3 is not enabled`)
    }
    return threadS3Client.download({ s3url: filePath, encoding: options ? options.encoding : 'bin' })
  }

  /**
   * Fill HTTP response for getDocument request
   *
   * @param {BlobStoreRequest} request
   * @param {BlobStoreItem} blobInfo Document metadata. Not used for dirty requests
   * @param {THTTPRequest} req
   * @param {THTTPResponse} resp
   * @param {boolean} [preventChangeRespOnError=false] If `true` - does not set the resp status code on error, just returns `false`
   * @returns {boolean}
   */
  fillResponse (request, blobInfo, req, resp, preventChangeRespOnError) {
    if (!this.isStoredInS3(request, blobInfo)) {
      return super.fillResponse(request, blobInfo, req, resp, preventChangeRespOnError)
    }
    // s3 storage request
    const s3url = this.getPermanentFileName(blobInfo, request)
    if (!threadS3Client) {
      throw new Error(`File '${s3url}' is stored in s3, but s3 is not enabled`)
    }
    let ct = blobInfo.ct
    if (!ct) ct = 'application/octet-stream'
    if (this.respondUsingProxy) { // send via nginx using proxy pass to s3
      const s3Path = s3url.slice(aws.S3_URI_SCHEMA.length)
      // non-anonymous request (TODO - nginx does not pass new headers to s3)
      // const parsedUri = aws.parseS3Url(s3url)
      // const reqParams = threadS3Client.computeAwsHttpRequestParams({
      //   HTTPMethod: 'GET',
      //   bucket: parsedUri.bucket,
      //   s3Path: parsedUri.bucketKey
      // })
      // const proxyPassHead = `${this.PROXY_SEND_FILE_HEADER}: /${this.PROXY_SEND_FILE_LOCATION_ROOT}/s3${reqParams.path}`
      // const headersForNginx = reqParams.headersStr + '\r\n' + proxyPassHead
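      // Assumed reverse proxy setup (a sketch, not a verified config): PROXY_SEND_FILE_HEADER
      // (e.g. X-Accel-Redirect for nginx) points to an internal location
      // /<PROXY_SEND_FILE_LOCATION_ROOT>/s3/ which proxies the request to the S3 endpoint;
      // anonymous read access to the bucket is expected (anonymousReadPolicyEnabled === true)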
      const headersForNginx = `${this.PROXY_SEND_FILE_HEADER}: /${this.PROXY_SEND_FILE_LOCATION_ROOT}/s3/${s3Path}`
      console.debug('<- ', headersForNginx)
      resp.writeHead(headersForNginx)
      resp.statusCode = 200
    } else { // dev mode - send from memory
      console.debug('S3 store: downloading from ', s3url)
      const binContent = threadS3Client.download({ s3url, encoding: 'bin' })
      resp.writeHead(`Content-Type: ${ct}`)
      resp.writeEnd(binContent)
      resp.statusCode = 200
    }
    return true
  }

  /**
   * Move content defined by `dirtyItem` from the temporary to the permanent store.
   * TIP: in v0 (UB<5), if a file was updated, the implementation took the store from the old item.
   *   This raised a problem - the old store may be in an archive (read-only) state,
   *   so in UB5 the implementation was changed to use the store defined in the attribute for new items.
   *
   * Returns new attribute content which describes the place of the BLOB in the permanent store
   *
   * @param {UBEntityAttribute} attribute
   * @param {number} ID
   * @param {BlobStoreItem} dirtyItem
   * @param {number} newRevision
   * @returns {BlobStoreItem|null}
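   *
   * @example
   * // Shape of the returned item for an S3-stored BLOB (all values are illustrative):
   * // {
   * //   v: 2, store: 'mainStore', fName: 'someFile.pdf', origName: 'contract.pdf',
   * //   relPath: 's3://ubbs/rel/path', ct: 'application/pdf',
   * //   size: 12345, md5: '<md5 of the content>', revision: 2
   * // }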
   */
  persist (attribute, ID, dirtyItem, newRevision) {
    if (!this.s3enabled) {
      return super.persist(attribute, ID, dirtyItem, newRevision)
    }
    // new files are stored in s3
    const tempPath = this.checkTempFileBeforePersist(attribute, ID, dirtyItem)
    if (tempPath === null) { // deleted
      return null
    }
    const newPlacement = this.genNewPlacement(attribute, dirtyItem, ID) // newPlacement.relPath is s3 URL
    const newMD5 = nhashFile(tempPath, 'MD5')
    const ct = this.getMimeType(newPlacement.ext, true)
    const stat = fs.statSync(tempPath)
    const resp = {
      v: 2,
      store: this.name,
      fName: newPlacement.fn,
      origName: dirtyItem.origName,
      relPath: newPlacement.relPath,
      ct,
      size: stat.size,
      md5: newMD5,
      revision: newRevision
    }
    const s3url = this.getPermanentFileName(resp)
    console.debug('S3 store: upload to ', s3url)
    threadS3Client.upload({
      s3url,
      filePath: tempPath,
      additionalHeaders: {
        'content-type': ct
      }
    })
    fs.unlinkSync(tempPath)
    if (dirtyItem.isPermanent) resp.isPermanent = true
    return resp
  }

  /**
   * @override
   * @param {UBEntityAttribute} attribute
   * @param {number} ID
   * @param {BlobStoreItem} blobInfo
   */
  doDeletion (attribute, ID, blobInfo) {
    if (!this.isStoredInS3(null, blobInfo)) {
      return super.doDeletion(attribute, ID, blobInfo)
    }
    const s3url = this.getPermanentFileName(blobInfo)
    if (!threadS3Client) {
      throw new Error(`File '${s3url}' is stored in s3, but s3 is not enabled`)
    }
    if (s3url) {
      try {
        console.info(`Removing blob data from S3 store for ${attribute.entity.code}.${attribute.name}, row ID ${ID}`)
        return threadS3Client.delete({
          s3url
        })
      } catch (e) {
        console.error(`BLOB store "${this.name}" - can't delete object "${s3url}":`, e)
      }
    }
  }

  /**
   * Calculate a relative path, file name and bucket for a new BLOB item
   *
   * @protected
   * @param {UBEntityAttribute} attribute
   * @param {BlobStoreItem} dirtyItem
   * @param {number} ID
   * @returns {{fn: string, ext: string, relPath: string, fullFn: string}}
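   *
   * @example
   * // An application can route BLOBs to a bucket other than defaultBucket by subscribing to the
   * // 'getBucketName' App event (a sketch; the entity code and bucket name are illustrative)
   * UB.App.on('getBucketName', ({ attribute, blobStoreItem }, bucketCfg) => {
   *   if (attribute.entity.code === 'doc_attachment') bucketCfg.name = 'documents-bucket'
   * })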
   */
  genNewPlacement (attribute, dirtyItem, ID) {
    // generate file name && relPath
    const placement = super.genNewPlacement(attribute, dirtyItem, ID, { forceFolder: !this.s3enabled })
    if (this.s3enabled) {
      const bucketCfg = { name: this.defaultBucket }
      UB.App.emit('getBucketName', { attribute, blobStoreItem: dirtyItem }, bucketCfg)
      placement.relPath = `${aws.S3_URI_SCHEMA}${bucketCfg.name}/${placement.relPath}`
    }
    return placement
  }

  /**
   * If the item is stored in the file system - returns the full path to the file; if in S3 - the s3 URL
   *
   * @protected
   * @param {BlobStoreItem} blobItem
   * @param {BlobStoreRequest} [request] Optional request to get a revision
   * @returns {string} If the item does not exist - returns an empty string ''
   */
  getPermanentFileName (blobItem, request) {
    if (!this.isStoredInS3(null, blobItem) || !this.s3enabled) {
      return super.getPermanentFileName(blobItem, request)
    }
    return blobItem.relPath + '/' + blobItem.fName
  }
}

module.exports = S3BlobStore