src/handlers/merge/merge-ignore.ts

Statements: 93.75% (60/64)
Branches: 66.67% (16/24)
Functions: 100% (17/17)
Lines: 92.59% (50/54)

import { FileGenerator } from '@/class'
import { unnest } from 'ramda'
 
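/**
 * An ignore file is modelled as a list of buckets: every `#` comment line
 * opens a named bucket that collects the entries following it, and entries
 * that appear before any comment live in a default bucket keyed by a symbol.
 */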
const defaultBucketName = Symbol()
interface Item {
  name: string | symbol
  values: string[]
}
 
type Bucket = Item[]
 
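/**
 * Splits the lines of an ignore file into buckets: consecutive comment lines
 * are folded into one multi-line bucket name, a repeated comment name reuses
 * its existing bucket, duplicate entries within a bucket are skipped, and
 * blank lines are ignored.
 */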
const classifyIgnore = (list: string[]): Bucket => {
  let last: Item = { name: defaultBucketName, values: [] }
  const bucket = [last]
 
  list.forEach(item => {
    if (/^#/.test(item)) {
      const name = item.substring(1)
      /** fold consecutive comment lines into a single bucket name */
      if (!last.values.length && typeof last.name !== 'symbol') {
        last.name = `${last.name}\n${name}`
        return
      }
 
      const pair = bucket.find(item => item.name === name)
      if (pair) {
        last = pair
      } else {
        last = { name, values: [] }
        bucket.push(last)
      }
    } else if (item) {
      if (!last.values.includes(item)) last.values.push(item)
    }
  })
 
  return bucket
}
 
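/**
 * Merges b2 into a shallow copy of b1: buckets are matched by name, values the
 * matching bucket is missing are appended, and buckets that only exist in b2
 * are added as-is.
 */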
const mergeBucket = (b1: Bucket, b2: Bucket): Bucket => {
  const bucket = b1.map(item => ({ ...item }))
 
  b2.forEach(pair => {
    const sameOne = bucket.find(item => item.name === pair.name)
    if (!sameOne) {
      bucket.push(pair)
    } else {
      pair.values.forEach(value => {
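        // concat (rather than push) keeps the values array shared with b1 untouched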
        if (!sameOne.values.includes(value)) sameOne.values = sameOne.values.concat(value)
      })
    }
  })
 
  return bucket
}
 
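/**
 * Returns b1 with every value that already appears anywhere in b2 filtered
 * out, leaving only the entries unique to b1.
 */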
const uniqBucket = (b1: Bucket, b2: Bucket): Bucket => {
  const allValues = unnest(b2.map(item => item.values))
 
  return b1.map(pair => {
    const values = pair.values.filter(item => !allValues.includes(item))
    return { name: pair.name, values }
  })
}
 
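/**
 * Serializes a bucket list back to ignore-file text: each line of a bucket
 * name is prefixed with `#`, values follow one per line, groups are separated
 * by a blank line, and an empty default bucket is omitted.
 */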
const renderBucket = (bucket: Bucket): string => bucket
  .filter(pair => (pair.name !== defaultBucketName || pair.values.length))
  .map(({ name, values }) => {
    let str = ''
    if (typeof name === 'string') str += name.replace(/^(.*)(\n|$)/mg, '#$1\n')
    str += values.join('\n')
 
    return str
  })
  .join('\n\n')
 
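/**
 * Merges the template ignore file with the project's existing one: project
 * entries the template does not already cover are kept, grouped under their
 * original comments, and the template content's leading and trailing
 * whitespace is reapplied to the result. For example, merging a template of
 * "# deps\nnode_modules" with a project file of "node_modules\n.env" yields
 * ".env\n\n# deps\nnode_modules".
 */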
const mergeIgnore: FileGenerator = async file => {
  const templateIgnoreList = file.content.split('\n')
 
  const projectContent = await file.getProjectContent()
  const projectIgnoreList = projectContent.split('\n')
 
  const templateIgnoreBucket = classifyIgnore(templateIgnoreList)
  const projectIgnoreBucket = classifyIgnore(projectIgnoreList)
 
  const uniquedBucket = uniqBucket(projectIgnoreBucket, templateIgnoreBucket)
 
  const bucket = mergeBucket(templateIgnoreBucket, uniquedBucket)
  const result = renderBucket(bucket)
 
  const beginMatched = file.content.match(/^\s*/g)
  const beginBlank = beginMatched ? beginMatched[0] : ''
 
  const endMatched = file.content.match(/\s*$/g)
  const endBlank = endMatched ? endMatched[0] : ''
 
  file.content = `${beginBlank}${result}${endBlank}`
}
 
export default mergeIgnore