/**
 * lunr - http://lunrjs.com - A bit like Solr, but much smaller and not as bright - 2.1.5
 * Copyright (C) 2017 Oliver Nightingale
 * @license MIT
 */
!function () { var e = function (t) { var r = new e.Builder; return r.pipeline.add(e.trimmer, e.stopWordFilter, e.stemmer), r.searchPipeline.add(e.stemmer), t.call(r, r), r.build() }; e.version = "2.1.5", e.utils = {}, e.utils.warn = function (e) { return function (t) { e.console && console.warn && console.warn(t) } }(this), e.utils.asString = function (e) { return void 0 === e || null === e ? "" : e.toString() }, e.FieldRef = function (e, t, r) { this.docRef = e, this.fieldName = t, this._stringValue = r }, e.FieldRef.joiner = "/", e.FieldRef.fromString = function (t) { var r = t.indexOf(e.FieldRef.joiner); if (-1 === r) throw "malformed field ref string"; var i = t.slice(0, r), n = t.slice(r + 1); return new e.FieldRef(n, i, t) }, e.FieldRef.prototype.toString = function () { return void 0 == this._stringValue && (this._stringValue = this.fieldName + e.FieldRef.joiner + this.docRef), this._stringValue }, e.idf = function (e, t) { var r = 0; for (var i in e) "_index" != i && (r += Object.keys(e[i]).length); var n = (t - r + .5) / (r + .5); return Math.log(1 + Math.abs(n)) }, e.Token = function (e, t) { this.str = e || "", this.metadata = t || {} }, e.Token.prototype.toString = function () { return this.str }, e.Token.prototype.update = function (e) { return this.str = e(this.str, this.metadata), this }, e.Token.prototype.clone = function (t) { return t = t || function (e) { return e }, new e.Token(t(this.str, this.metadata), this.metadata) }, e.tokenizer = function (t) { if (null == t || void 0 == t) return []; if (Array.isArray(t)) return t.map(function (t) { return new e.Token(e.utils.asString(t).toLowerCase()) }); for (var r = t.toString().trim().toLowerCase(), i = r.length, n = [], s = 0, o = 0; i >= s; s++) { var a = r.charAt(s), u = s - o; (a.match(e.tokenizer.separator) || s == i) && (u > 0 && n.push(new e.Token(r.slice(o, s), { position: [o, u], index: n.length })), o = s + 1) } return n }, e.tokenizer.separator = /[\s\-]+/, e.Pipeline = function () { this._stack = [] }, e.Pipeline.registeredFunctions = Object.create(null), e.Pipeline.registerFunction = function (t, r) { r in this.registeredFunctions && e.utils.warn("Overwriting existing registered function: " + r), t.label = r, e.Pipeline.registeredFunctions[t.label] = t }, e.Pipeline.warnIfFunctionNotRegistered = function (t) { var r = t.label && t.label in this.registeredFunctions; r || e.utils.warn("Function is not registered with pipeline. \
This may cause problems when serialising the index.\n", t) }, e.Pipeline.load = function (t) { var r = new e.Pipeline; return t.forEach(function (t) { var i = e.Pipeline.registeredFunctions[t]; if (!i) throw new Error("Cannot load unregistered function: " + t); r.add(i) }), r }, e.Pipeline.prototype.add = function () { var t = Array.prototype.slice.call(arguments); t.forEach(function (t) { e.Pipeline.warnIfFunctionNotRegistered(t), this._stack.push(t) }, this) }, e.Pipeline.prototype.after = function (t, r) { e.Pipeline.warnIfFunctionNotRegistered(r); var i = this._stack.indexOf(t); if (-1 == i) throw new Error("Cannot find existingFn"); i += 1, this._stack.splice(i, 0, r) }, e.Pipeline.prototype.before = function (t, r) { e.Pipeline.warnIfFunctionNotRegistered(r); var i = this._stack.indexOf(t); if (-1 == i) throw new Error("Cannot find existingFn"); this._stack.splice(i, 0, r) }, e.Pipeline.prototype.remove = function (e) { var t = this._stack.indexOf(e); -1 != t && this._stack.splice(t, 1) }, e.Pipeline.prototype.run = function (e) { for (var t = this._stack.length, r = 0; t > r; r++) { var i = this._stack[r]; e = e.reduce(function (t, r, n) { var s = i(r, n, e); return void 0 === s || "" === s ? t : t.concat(s) }, []) } return e }, e.Pipeline.prototype.runString = function (t) { var r = new e.Token(t); return this.run([r]).map(function (e) { return e.toString() }) }, e.Pipeline.prototype.reset = function () { this._stack = [] }, e.Pipeline.prototype.toJSON = function () { return this._stack.map(function (t) { return e.Pipeline.warnIfFunctionNotRegistered(t), t.label }) }, e.Vector = function (e) { this._magnitude = 0, this.elements = e || [] }, e.Vector.prototype.positionForIndex = function (e) { if (0 == this.elements.length) return 0; for (var t = 0, r = this.elements.length / 2, i = r - t, n = Math.floor(i / 2), s = this.elements[2 * n]; i > 1 && (e > s && (t = n), s > e && (r = n), s != e);)i = r - t, n = t + Math.floor(i / 2), s = this.elements[2 * n]; return s == e ? 2 * n : s > e ? 2 * n : e > s ? 2 * (n + 1) : void 0 }, e.Vector.prototype.insert = function (e, t) { this.upsert(e, t, function () { throw "duplicate index" }) }, e.Vector.prototype.upsert = function (e, t, r) { this._magnitude = 0; var i = this.positionForIndex(e); this.elements[i] == e ? this.elements[i + 1] = r(this.elements[i + 1], t) : this.elements.splice(i, 0, e, t) }, e.Vector.prototype.magnitude = function () { if (this._magnitude) return this._magnitude; for (var e = 0, t = this.elements.length, r = 1; t > r; r += 2) { var i = this.elements[r]; e += i * i } return this._magnitude = Math.sqrt(e) }, e.Vector.prototype.dot = function (e) { for (var t = 0, r = this.elements, i = e.elements, n = r.length, s = i.length, o = 0, a = 0, u = 0, l = 0; n > u && s > l;)o = r[u], a = i[l], a > o ? u += 2 : o > a ? 
l += 2 : o == a && (t += r[u + 1] * i[l + 1], u += 2, l += 2); return t }, e.Vector.prototype.similarity = function (e) { return this.dot(e) / (this.magnitude() * e.magnitude()) }, e.Vector.prototype.toArray = function () { for (var e = new Array(this.elements.length / 2), t = 1, r = 0; t < this.elements.length; t += 2, r++)e[r] = this.elements[t]; return e }, e.Vector.prototype.toJSON = function () { return this.elements }, e.stemmer = function () { var e = { ational: "ate", tional: "tion", enci: "ence", anci: "ance", izer: "ize", bli: "ble", alli: "al", entli: "ent", eli: "e", ousli: "ous", ization: "ize", ation: "ate", ator: "ate", alism: "al", iveness: "ive", fulness: "ful", ousness: "ous", aliti: "al", iviti: "ive", biliti: "ble", logi: "log" }, t = { icate: "ic", ative: "", alize: "al", iciti: "ic", ical: "ic", ful: "", ness: "" }, r = "[^aeiou]", i = "[aeiouy]", n = r + "[^aeiouy]*", s = i + "[aeiou]*", o = "^(" + n + ")?" + s + n, a = "^(" + n + ")?" + s + n + "(" + s + ")?$", u = "^(" + n + ")?" + s + n + s + n, l = "^(" + n + ")?" + i, d = new RegExp(o), h = new RegExp(u), c = new RegExp(a), f = new RegExp(l), p = /^(.+?)(ss|i)es$/, y = /^(.+?)([^s])s$/, m = /^(.+?)eed$/, v = /^(.+?)(ed|ing)$/, g = /.$/, x = /(at|bl|iz)$/, w = new RegExp("([^aeiouylsz])\\1$"), k = new RegExp("^" + n + i + "[^aeiouwxy]$"), Q = /^(.+?[^aeiou])y$/, L = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/, T = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/, S = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/, b = /^(.+?)(s|t)(ion)$/, P = /^(.+?)e$/, E = /ll$/, I = new RegExp("^" + n + i + "[^aeiouwxy]$"), O = function (r) { var i, n, s, o, a, u, l; if (r.length < 3) return r; if (s = r.substr(0, 1), "y" == s && (r = s.toUpperCase() + r.substr(1)), o = p, a = y, o.test(r) ? r = r.replace(o, "$1$2") : a.test(r) && (r = r.replace(a, "$1$2")), o = m, a = v, o.test(r)) { var O = o.exec(r); o = d, o.test(O[1]) && (o = g, r = r.replace(o, "")) } else if (a.test(r)) { var O = a.exec(r); i = O[1], a = f, a.test(i) && (r = i, a = x, u = w, l = k, a.test(r) ? r += "e" : u.test(r) ? (o = g, r = r.replace(o, "")) : l.test(r) && (r += "e")) } if (o = Q, o.test(r)) { var O = o.exec(r); i = O[1], r = i + "i" } if (o = L, o.test(r)) { var O = o.exec(r); i = O[1], n = O[2], o = d, o.test(i) && (r = i + e[n]) } if (o = T, o.test(r)) { var O = o.exec(r); i = O[1], n = O[2], o = d, o.test(i) && (r = i + t[n]) } if (o = S, a = b, o.test(r)) { var O = o.exec(r); i = O[1], o = h, o.test(i) && (r = i) } else if (a.test(r)) { var O = a.exec(r); i = O[1] + O[2], a = h, a.test(i) && (r = i) } if (o = P, o.test(r)) { var O = o.exec(r); i = O[1], o = h, a = c, u = I, (o.test(i) || a.test(i) && !u.test(i)) && (r = i) } return o = E, a = h, o.test(r) && a.test(r) && (o = g, r = r.replace(o, "")), "y" == s && (r = s.toLowerCase() + r.substr(1)), r }; return function (e) { return e.update(O) } }(), e.Pipeline.registerFunction(e.stemmer, "stemmer"), e.generateStopWordFilter = function (e) { var t = e.reduce(function (e, t) { return e[t] = t, e }, {}); return function (e) { return e && t[e.toString()] !== e.toString() ? 
e : void 0 } }, e.stopWordFilter = e.generateStopWordFilter(["a", "able", "about", "across", "after", "all", "almost", "also", "am", "among", "an", "and", "any", "are", "as", "at", "be", "because", "been", "but", "by", "can", "cannot", "could", "dear", "did", "do", "does", "either", "else", "ever", "every", "for", "from", "get", "got", "had", "has", "have", "he", "her", "hers", "him", "his", "how", "however", "i", "if", "in", "into", "is", "it", "its", "just", "least", "let", "like", "likely", "may", "me", "might", "most", "must", "my", "neither", "no", "nor", "not", "of", "off", "often", "on", "only", "or", "other", "our", "own", "rather", "said", "say", "says", "she", "should", "since", "so", "some", "than", "that", "the", "their", "them", "then", "there", "these", "they", "this", "tis", "to", "too", "twas", "us", "wants", "was", "we", "were", "what", "when", "where", "which", "while", "who", "whom", "why", "will", "with", "would", "yet", "you", "your"]), e.Pipeline.registerFunction(e.stopWordFilter, "stopWordFilter"), e.trimmer = function (e) { return e.update(function (e) { return e.replace(/^\W+/, "").replace(/\W+$/, "") }) }, e.Pipeline.registerFunction(e.trimmer, "trimmer"), e.TokenSet = function () { this["final"] = !1, this.edges = {}, this.id = e.TokenSet._nextId, e.TokenSet._nextId += 1 }, e.TokenSet._nextId = 1, e.TokenSet.fromArray = function (t) { for (var r = new e.TokenSet.Builder, i = 0, n = t.length; n > i; i++)r.insert(t[i]); return r.finish(), r.root }, e.TokenSet.fromClause = function (t) { return "editDistance" in t ? e.TokenSet.fromFuzzyString(t.term, t.editDistance) : e.TokenSet.fromString(t.term) }, e.TokenSet.fromFuzzyString = function (t, r) { for (var i = new e.TokenSet, n = [{ node: i, editsRemaining: r, str: t }]; n.length;) { var s = n.pop(); if (s.str.length > 0) { var o, a = s.str.charAt(0); a in s.node.edges ? o = s.node.edges[a] : (o = new e.TokenSet, s.node.edges[a] = o), 1 == s.str.length ? o["final"] = !0 : n.push({ node: o, editsRemaining: s.editsRemaining, str: s.str.slice(1) }) } if (s.editsRemaining > 0 && s.str.length > 1) { var u, a = s.str.charAt(1); a in s.node.edges ? u = s.node.edges[a] : (u = new e.TokenSet, s.node.edges[a] = u), s.str.length <= 2 ? u["final"] = !0 : n.push({ node: u, editsRemaining: s.editsRemaining - 1, str: s.str.slice(2) }) } if (s.editsRemaining > 0 && 1 == s.str.length && (s.node["final"] = !0), s.editsRemaining > 0 && s.str.length >= 1) { if ("*" in s.node.edges) var l = s.node.edges["*"]; else { var l = new e.TokenSet; s.node.edges["*"] = l } 1 == s.str.length ? l["final"] = !0 : n.push({ node: l, editsRemaining: s.editsRemaining - 1, str: s.str.slice(1) }) } if (s.editsRemaining > 0) { if ("*" in s.node.edges) var d = s.node.edges["*"]; else { var d = new e.TokenSet; s.node.edges["*"] = d } 0 == s.str.length ? d["final"] = !0 : n.push({ node: d, editsRemaining: s.editsRemaining - 1, str: s.str }) } if (s.editsRemaining > 0 && s.str.length > 1) { var h, c = s.str.charAt(0), f = s.str.charAt(1); f in s.node.edges ? h = s.node.edges[f] : (h = new e.TokenSet, s.node.edges[f] = h), 1 == s.str.length ? 
h["final"] = !0 : n.push({ node: h, editsRemaining: s.editsRemaining - 1, str: c + s.str.slice(2) }) } } return i }, e.TokenSet.fromString = function (t) { for (var r = new e.TokenSet, i = r, n = !1, s = 0, o = t.length; o > s; s++) { var a = t[s], u = s == o - 1; if ("*" == a) n = !0, r.edges[a] = r, r["final"] = u; else { var l = new e.TokenSet; l["final"] = u, r.edges[a] = l, r = l, n && (r.edges["*"] = i) } } return i }, e.TokenSet.prototype.toArray = function () { for (var e = [], t = [{ prefix: "", node: this }]; t.length;) { var r = t.pop(), i = Object.keys(r.node.edges), n = i.length; r.node["final"] && e.push(r.prefix); for (var s = 0; n > s; s++) { var o = i[s]; t.push({ prefix: r.prefix.concat(o), node: r.node.edges[o] }) } } return e }, e.TokenSet.prototype.toString = function () { if (this._str) return this._str; for (var e = this["final"] ? "1" : "0", t = Object.keys(this.edges).sort(), r = t.length, i = 0; r > i; i++) { var n = t[i], s = this.edges[n]; e = e + n + s.id } return e }, e.TokenSet.prototype.intersect = function (t) { for (var r = new e.TokenSet, i = void 0, n = [{ qNode: t, output: r, node: this }]; n.length;) { i = n.pop(); for (var s = Object.keys(i.qNode.edges), o = s.length, a = Object.keys(i.node.edges), u = a.length, l = 0; o > l; l++)for (var d = s[l], h = 0; u > h; h++) { var c = a[h]; if (c == d || "*" == d) { var f = i.node.edges[c], p = i.qNode.edges[d], y = f["final"] && p["final"], m = void 0; c in i.output.edges ? (m = i.output.edges[c], m["final"] = m["final"] || y) : (m = new e.TokenSet, m["final"] = y, i.output.edges[c] = m), n.push({ qNode: p, output: m, node: f }) } } } return r }, e.TokenSet.Builder = function () { this.previousWord = "", this.root = new e.TokenSet, this.uncheckedNodes = [], this.minimizedNodes = {} }, e.TokenSet.Builder.prototype.insert = function (t) { var r, i = 0; if (t < this.previousWord) throw new Error("Out of order word insertion"); for (var n = 0; n < t.length && n < this.previousWord.length && t[n] == this.previousWord[n]; n++)i++; this.minimize(i), r = 0 == this.uncheckedNodes.length ? this.root : this.uncheckedNodes[this.uncheckedNodes.length - 1].child; for (var n = i; n < t.length; n++) { var s = new e.TokenSet, o = t[n]; r.edges[o] = s, this.uncheckedNodes.push({ parent: r, "char": o, child: s }), r = s } r["final"] = !0, this.previousWord = t }, e.TokenSet.Builder.prototype.finish = function () { this.minimize(0) }, e.TokenSet.Builder.prototype.minimize = function (e) { for (var t = this.uncheckedNodes.length - 1; t >= e; t--) { var r = this.uncheckedNodes[t], i = r.child.toString(); i in this.minimizedNodes ? r.parent.edges[r["char"]] = this.minimizedNodes[i] : (r.child._str = i, this.minimizedNodes[i] = r.child), this.uncheckedNodes.pop() } }, e.Index = function (e) { this.invertedIndex = e.invertedIndex, this.fieldVectors = e.fieldVectors, this.tokenSet = e.tokenSet, this.fields = e.fields, this.pipeline = e.pipeline }, e.Index.prototype.search = function (t) { return this.query(function (r) { var i = new e.QueryParser(t, r); i.parse() }) }, e.Index.prototype.query = function (t) { var r = new e.Query(this.fields), i = Object.create(null), n = Object.create(null), s = Object.create(null); t.call(r, r); for (var o = 0; o < r.clauses.length; o++) { var a = r.clauses[o], u = null; u = a.usePipeline ? 
this.pipeline.runString(a.term) : [a.term]; for (var l = 0; l < u.length; l++) { var d = u[l]; a.term = d; for (var h = e.TokenSet.fromClause(a), c = this.tokenSet.intersect(h).toArray(), f = 0; f < c.length; f++)for (var p = c[f], y = this.invertedIndex[p], m = y._index, v = 0; v < a.fields.length; v++) { var g = a.fields[v], x = y[g], w = Object.keys(x), k = p + "/" + g; if (void 0 === n[g] && (n[g] = new e.Vector), n[g].upsert(m, 1 * a.boost, function (e, t) { return e + t }), !s[k]) { for (var Q = 0; Q < w.length; Q++) { var L, T = w[Q], S = new e.FieldRef(T, g), b = x[T]; void 0 === (L = i[S]) ? i[S] = new e.MatchData(p, g, b) : L.add(p, g, b) } s[k] = !0 } } } } for (var P = Object.keys(i), E = [], I = Object.create(null), o = 0; o < P.length; o++) { var O, F = e.FieldRef.fromString(P[o]), R = F.docRef, _ = this.fieldVectors[F], N = n[F.fieldName].similarity(_); if (void 0 !== (O = I[R])) O.score += N, O.matchData.combine(i[F]); else { var C = { ref: R, score: N, matchData: i[F] }; I[R] = C, E.push(C) } } return E.sort(function (e, t) { return t.score - e.score }) }, e.Index.prototype.toJSON = function () { var t = Object.keys(this.invertedIndex).sort().map(function (e) { return [e, this.invertedIndex[e]] }, this), r = Object.keys(this.fieldVectors).map(function (e) { return [e, this.fieldVectors[e].toJSON()] }, this); return { version: e.version, fields: this.fields, fieldVectors: r, invertedIndex: t, pipeline: this.pipeline.toJSON() } }, e.Index.load = function (t) { var r = {}, i = {}, n = t.fieldVectors, s = {}, o = t.invertedIndex, a = new e.TokenSet.Builder, u = e.Pipeline.load(t.pipeline); t.version != e.version && e.utils.warn("Version mismatch when loading serialised index. Current version of lunr '" + e.version + "' does not match serialized index '" + t.version + "'"); for (var l = 0; l < n.length; l++) { var d = n[l], h = d[0], c = d[1]; i[h] = new e.Vector(c) } for (var l = 0; l < o.length; l++) { var d = o[l], f = d[0], p = d[1]; a.insert(f), s[f] = p } return a.finish(), r.fields = t.fields, r.fieldVectors = i, r.invertedIndex = s, r.tokenSet = a.root, r.pipeline = u, new e.Index(r) }, e.Builder = function () { this._ref = "id", this._fields = [], this.invertedIndex = Object.create(null), this.fieldTermFrequencies = {}, this.fieldLengths = {}, this.tokenizer = e.tokenizer, this.pipeline = new e.Pipeline, this.searchPipeline = new e.Pipeline, this.documentCount = 0, this._b = .75, this._k1 = 1.2, this.termIndex = 0, this.metadataWhitelist = [] }, e.Builder.prototype.ref = function (e) { this._ref = e }, e.Builder.prototype.field = function (e) { this._fields.push(e) }, e.Builder.prototype.b = function (e) { 0 > e ? this._b = 0 : e > 1 ? 
this._b = 1 : this._b = e }, e.Builder.prototype.k1 = function (e) { this._k1 = e }, e.Builder.prototype.add = function (t) { var r = t[this._ref]; this.documentCount += 1; for (var i = 0; i < this._fields.length; i++) { var n = this._fields[i], s = t[n], o = this.tokenizer(s), a = this.pipeline.run(o), u = new e.FieldRef(r, n), l = Object.create(null); this.fieldTermFrequencies[u] = l, this.fieldLengths[u] = 0, this.fieldLengths[u] += a.length; for (var d = 0; d < a.length; d++) { var h = a[d]; if (void 0 == l[h] && (l[h] = 0), l[h] += 1, void 0 == this.invertedIndex[h]) { var c = Object.create(null); c._index = this.termIndex, this.termIndex += 1; for (var f = 0; f < this._fields.length; f++)c[this._fields[f]] = Object.create(null); this.invertedIndex[h] = c } void 0 == this.invertedIndex[h][n][r] && (this.invertedIndex[h][n][r] = Object.create(null)); for (var p = 0; p < this.metadataWhitelist.length; p++) { var y = this.metadataWhitelist[p], m = h.metadata[y]; void 0 == this.invertedIndex[h][n][r][y] && (this.invertedIndex[h][n][r][y] = []), this.invertedIndex[h][n][r][y].push(m) } } } }, e.Builder.prototype.calculateAverageFieldLengths = function () { for (var t = Object.keys(this.fieldLengths), r = t.length, i = {}, n = {}, s = 0; r > s; s++) { var o = e.FieldRef.fromString(t[s]), a = o.fieldName; n[a] || (n[a] = 0), n[a] += 1, i[a] || (i[a] = 0), i[a] += this.fieldLengths[o] } for (var s = 0; s < this._fields.length; s++) { var a = this._fields[s]; i[a] = i[a] / n[a] } this.averageFieldLength = i }, e.Builder.prototype.createFieldVectors = function () { for (var t = {}, r = Object.keys(this.fieldTermFrequencies), i = r.length, n = Object.create(null), s = 0; i > s; s++) { for (var o = e.FieldRef.fromString(r[s]), a = o.fieldName, u = this.fieldLengths[o], l = new e.Vector, d = this.fieldTermFrequencies[o], h = Object.keys(d), c = h.length, f = 0; c > f; f++) { var p, y, m, v = h[f], g = d[v], x = this.invertedIndex[v]._index; void 0 === n[v] ? (p = e.idf(this.invertedIndex[v], this.documentCount), n[v] = p) : p = n[v], y = p * ((this._k1 + 1) * g) / (this._k1 * (1 - this._b + this._b * (u / this.averageFieldLength[a])) + g), m = Math.round(1e3 * y) / 1e3, l.insert(x, m) } t[o] = l } this.fieldVectors = t }, e.Builder.prototype.createTokenSet = function () { this.tokenSet = e.TokenSet.fromArray(Object.keys(this.invertedIndex).sort()) }, e.Builder.prototype.build = function () { return this.calculateAverageFieldLengths(), this.createFieldVectors(), this.createTokenSet(), new e.Index({ invertedIndex: this.invertedIndex, fieldVectors: this.fieldVectors, tokenSet: this.tokenSet, fields: this._fields, pipeline: this.searchPipeline }) }, e.Builder.prototype.use = function (e) { var t = Array.prototype.slice.call(arguments, 1); t.unshift(this), e.apply(this, t) }, e.MatchData = function (e, t, r) { for (var i = Object.create(null), n = Object.keys(r), s = 0; s < n.length; s++) { var o = n[s]; i[o] = r[o].slice() } this.metadata = Object.create(null), this.metadata[e] = Object.create(null), this.metadata[e][t] = i }, e.MatchData.prototype.combine = function (e) { for (var t = Object.keys(e.metadata), r = 0; r < t.length; r++) { var i = t[r], n = Object.keys(e.metadata[i]); void 0 == this.metadata[i] && (this.metadata[i] = Object.create(null)); for (var s = 0; s < n.length; s++) { var o = n[s], a = Object.keys(e.metadata[i][o]); void 0 == this.metadata[i][o] && (this.metadata[i][o] = Object.create(null)); for (var u = 0; u < a.length; u++) { var l = a[u]; void 0 == this.metadata[i][o][l] ? 
this.metadata[i][o][l] = e.metadata[i][o][l] : this.metadata[i][o][l] = this.metadata[i][o][l].concat(e.metadata[i][o][l]) } } } }, e.MatchData.prototype.add = function (e, t, r) { if (!(e in this.metadata)) return this.metadata[e] = Object.create(null), void (this.metadata[e][t] = r); if (!(t in this.metadata[e])) return void (this.metadata[e][t] = r); for (var i = Object.keys(r), n = 0; n < i.length; n++) { var s = i[n]; s in this.metadata[e][t] ? this.metadata[e][t][s] = this.metadata[e][t][s].concat(r[s]) : this.metadata[e][t][s] = r[s] } }, e.Query = function (e) { this.clauses = [], this.allFields = e }, e.Query.wildcard = new String("*"), e.Query.wildcard.NONE = 0, e.Query.wildcard.LEADING = 1, e.Query.wildcard.TRAILING = 2, e.Query.prototype.clause = function (t) { return "fields" in t || (t.fields = this.allFields), "boost" in t || (t.boost = 1), "usePipeline" in t || (t.usePipeline = !0), "wildcard" in t || (t.wildcard = e.Query.wildcard.NONE), t.wildcard & e.Query.wildcard.LEADING && t.term.charAt(0) != e.Query.wildcard && (t.term = "*" + t.term), t.wildcard & e.Query.wildcard.TRAILING && t.term.slice(-1) != e.Query.wildcard && (t.term = "" + t.term + "*"), this.clauses.push(t), this }, e.Query.prototype.term = function (e, t) { var r = t || {}; return r.term = e, this.clause(r), this }, e.QueryParseError = function (e, t, r) { this.name = "QueryParseError", this.message = e, this.start = t, this.end = r }, e.QueryParseError.prototype = new Error, e.QueryLexer = function (e) { this.lexemes = [], this.str = e, this.length = e.length, this.pos = 0, this.start = 0, this.escapeCharPositions = [] }, e.QueryLexer.prototype.run = function () { for (var t = e.QueryLexer.lexText; t;)t = t(this) }, e.QueryLexer.prototype.sliceString = function () { for (var e = [], t = this.start, r = this.pos, i = 0; i < this.escapeCharPositions.length; i++)r = this.escapeCharPositions[i], e.push(this.str.slice(t, r)), t = r + 1; return e.push(this.str.slice(t, this.pos)), this.escapeCharPositions.length = 0, e.join("") }, e.QueryLexer.prototype.emit = function (e) { this.lexemes.push({ type: e, str: this.sliceString(), start: this.start, end: this.pos }), this.start = this.pos }, e.QueryLexer.prototype.escapeCharacter = function () { this.escapeCharPositions.push(this.pos - 1), this.pos += 1 }, e.QueryLexer.prototype.next = function () { if (this.pos >= this.length) return e.QueryLexer.EOS; var t = this.str.charAt(this.pos); return this.pos += 1, t }, e.QueryLexer.prototype.width = function () { return this.pos - this.start }, e.QueryLexer.prototype.ignore = function () { this.start == this.pos && (this.pos += 1), this.start = this.pos }, e.QueryLexer.prototype.backup = function () { this.pos -= 1 }, e.QueryLexer.prototype.acceptDigitRun = function () { var t, r; do t = this.next(), r = t.charCodeAt(0); while (r > 47 && 58 > r); t != e.QueryLexer.EOS && this.backup() }, e.QueryLexer.prototype.more = function () { return this.pos < this.length }, e.QueryLexer.EOS = "EOS", e.QueryLexer.FIELD = "FIELD", e.QueryLexer.TERM = "TERM", e.QueryLexer.EDIT_DISTANCE = "EDIT_DISTANCE", e.QueryLexer.BOOST = "BOOST", e.QueryLexer.lexField = function (t) { return t.backup(), t.emit(e.QueryLexer.FIELD), t.ignore(), e.QueryLexer.lexText }, e.QueryLexer.lexTerm = function (t) { return t.width() > 1 && (t.backup(), t.emit(e.QueryLexer.TERM)), t.ignore(), t.more() ? 
e.QueryLexer.lexText : void 0 }, e.QueryLexer.lexEditDistance = function (t) { return t.ignore(), t.acceptDigitRun(), t.emit(e.QueryLexer.EDIT_DISTANCE), e.QueryLexer.lexText }, e.QueryLexer.lexBoost = function (t) { return t.ignore(), t.acceptDigitRun(), t.emit(e.QueryLexer.BOOST), e.QueryLexer.lexText }, e.QueryLexer.lexEOS = function (t) { t.width() > 0 && t.emit(e.QueryLexer.TERM) }, e.QueryLexer.termSeparator = e.tokenizer.separator, e.QueryLexer.lexText = function (t) { for (; ;) { var r = t.next(); if (r == e.QueryLexer.EOS) return e.QueryLexer.lexEOS; if (92 != r.charCodeAt(0)) { if (":" == r) return e.QueryLexer.lexField; if ("~" == r) return t.backup(), t.width() > 0 && t.emit(e.QueryLexer.TERM), e.QueryLexer.lexEditDistance; if ("^" == r) return t.backup(), t.width() > 0 && t.emit(e.QueryLexer.TERM), e.QueryLexer.lexBoost; if (r.match(e.QueryLexer.termSeparator)) return e.QueryLexer.lexTerm } else t.escapeCharacter() } }, e.QueryParser = function (t, r) { this.lexer = new e.QueryLexer(t), this.query = r, this.currentClause = {}, this.lexemeIdx = 0 }, e.QueryParser.prototype.parse = function () { this.lexer.run(), this.lexemes = this.lexer.lexemes; for (var t = e.QueryParser.parseFieldOrTerm; t;)t = t(this); return this.query }, e.QueryParser.prototype.peekLexeme = function () { return this.lexemes[this.lexemeIdx] }, e.QueryParser.prototype.consumeLexeme = function () { var e = this.peekLexeme(); return this.lexemeIdx += 1, e }, e.QueryParser.prototype.nextClause = function () { var e = this.currentClause; this.query.clause(e), this.currentClause = {} }, e.QueryParser.parseFieldOrTerm = function (t) { var r = t.peekLexeme(); if (void 0 != r) switch (r.type) { case e.QueryLexer.FIELD: return e.QueryParser.parseField; case e.QueryLexer.TERM: return e.QueryParser.parseTerm; default: var i = "expected either a field or a term, found " + r.type; throw r.str.length >= 1 && (i += " with value '" + r.str + "'"), new e.QueryParseError(i, r.start, r.end) } }, e.QueryParser.parseField = function (t) { var r = t.consumeLexeme(); if (void 0 != r) { if (-1 == t.query.allFields.indexOf(r.str)) { var i = t.query.allFields.map(function (e) { return "'" + e + "'" }).join(", "), n = "unrecognised field '" + r.str + "', possible fields: " + i; throw new e.QueryParseError(n, r.start, r.end) } t.currentClause.fields = [r.str]; var s = t.peekLexeme(); if (void 0 == s) { var n = "expecting term, found nothing"; throw new e.QueryParseError(n, r.start, r.end) } switch (s.type) { case e.QueryLexer.TERM: return e.QueryParser.parseTerm; default: var n = "expecting term, found '" + s.type + "'"; throw new e.QueryParseError(n, s.start, s.end) } } }, e.QueryParser.parseTerm = function (t) { var r = t.consumeLexeme(); if (void 0 != r) { t.currentClause.term = r.str.toLowerCase(), -1 != r.str.indexOf("*") && (t.currentClause.usePipeline = !1); var i = t.peekLexeme(); if (void 0 == i) return void t.nextClause(); switch (i.type) { case e.QueryLexer.TERM: return t.nextClause(), e.QueryParser.parseTerm; case e.QueryLexer.FIELD: return t.nextClause(), e.QueryParser.parseField; case e.QueryLexer.EDIT_DISTANCE: return e.QueryParser.parseEditDistance; case e.QueryLexer.BOOST: return e.QueryParser.parseBoost; default: var n = "Unexpected lexeme type '" + i.type + "'"; throw new e.QueryParseError(n, i.start, i.end) } } }, e.QueryParser.parseEditDistance = function (t) { var r = t.consumeLexeme(); if (void 0 != r) { var i = parseInt(r.str, 10); if (isNaN(i)) { var n = "edit distance must be numeric"; throw new 
e.QueryParseError(n, r.start, r.end) } t.currentClause.editDistance = i; var s = t.peekLexeme(); if (void 0 == s) return void t.nextClause(); switch (s.type) { case e.QueryLexer.TERM: return t.nextClause(), e.QueryParser.parseTerm; case e.QueryLexer.FIELD: return t.nextClause(), e.QueryParser.parseField; case e.QueryLexer.EDIT_DISTANCE: return e.QueryParser.parseEditDistance; case e.QueryLexer.BOOST: return e.QueryParser.parseBoost; default: var n = "Unexpected lexeme type '" + s.type + "'"; throw new e.QueryParseError(n, s.start, s.end) } } }, e.QueryParser.parseBoost = function (t) { var r = t.consumeLexeme(); if (void 0 != r) { var i = parseInt(r.str, 10); if (isNaN(i)) { var n = "boost must be numeric"; throw new e.QueryParseError(n, r.start, r.end) } t.currentClause.boost = i; var s = t.peekLexeme(); if (void 0 == s) return void t.nextClause(); switch (s.type) { case e.QueryLexer.TERM: return t.nextClause(), e.QueryParser.parseTerm; case e.QueryLexer.FIELD: return t.nextClause(), e.QueryParser.parseField; case e.QueryLexer.EDIT_DISTANCE: return e.QueryParser.parseEditDistance; case e.QueryLexer.BOOST: return e.QueryParser.parseBoost; default: var n = "Unexpected lexeme type '" + s.type + "'"; throw new e.QueryParseError(n, s.start, s.end) } } }, function (e, t) { "function" == typeof define && define.amd ? define(t) : "object" == typeof exports ? module.exports = t() : e.lunr = t() }(this, function () { return e }) }();
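/*
 * Usage sketch (illustrative only, not part of the library build): how an
 * index built with the lunr() convenience wrapper defined above can be
 * created and queried. The document shape and the 'id', 'title' and 'body'
 * field names are assumptions for the example; only lunr(), ref(), field(),
 * add() and search() come from the code in this file.
 *
 *   var idx = lunr(function () {
 *     this.ref('id')       // Builder.prototype.ref: the unique document key
 *     this.field('title')  // Builder.prototype.field: fields to index
 *     this.field('body')
 *
 *     // documents must be added inside this callback, before build() runs
 *     this.add({ id: '1', title: 'Example', body: 'a bit like Solr, but much smaller' })
 *   })
 *
 *   // returns an array of { ref, score, matchData } ordered by descending score
 *   var results = idx.search('smaller')
 */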