fix
book/node_modules/lunr/test/env/augment.min.js (8 lines added, generated, vendored, new file)
File diff suppressed because one or more lines are too long
book/node_modules/lunr/test/env/jquery.js (16 lines added, generated, vendored, new file)
File diff suppressed because one or more lines are too long
book/node_modules/lunr/test/env/qunit.css (235 lines added, generated, vendored, new file)
@@ -0,0 +1,235 @@
/**
 * QUnit v1.10.0 - A JavaScript Unit Testing Framework
 *
 * http://qunitjs.com
 *
 * Copyright 2012 jQuery Foundation and other contributors
 * Released under the MIT license.
 * http://jquery.org/license
 */

/** Font Family and Sizes */

#qunit-tests, #qunit-header, #qunit-banner, #qunit-testrunner-toolbar, #qunit-userAgent, #qunit-testresult {
  font-family: "Helvetica Neue Light", "HelveticaNeue-Light", "Helvetica Neue", Calibri, Helvetica, Arial, sans-serif;
}

#qunit-testrunner-toolbar, #qunit-userAgent, #qunit-testresult, #qunit-tests li { font-size: small; }
#qunit-tests { font-size: smaller; }


/** Resets */

#qunit-tests, #qunit-tests ol, #qunit-header, #qunit-banner, #qunit-userAgent, #qunit-testresult, #qunit-modulefilter {
  margin: 0;
  padding: 0;
}


/** Header */

#qunit-header {
  padding: 0.5em 0 0.5em 1em;

  color: #8699a4;
  background-color: #0d3349;

  font-size: 1.5em;
  line-height: 1em;
  font-weight: normal;

  border-radius: 5px 5px 0 0;
  -moz-border-radius: 5px 5px 0 0;
  -webkit-border-top-right-radius: 5px;
  -webkit-border-top-left-radius: 5px;
}

#qunit-header a {
  text-decoration: none;
  color: #c2ccd1;
}

#qunit-header a:hover,
#qunit-header a:focus {
  color: #fff;
}

#qunit-testrunner-toolbar label {
  display: inline-block;
  padding: 0 .5em 0 .1em;
}

#qunit-banner {
  height: 5px;
}

#qunit-testrunner-toolbar {
  padding: 0.5em 0 0.5em 2em;
  color: #5E740B;
  background-color: #eee;
  overflow: hidden;
}

#qunit-userAgent {
  padding: 0.5em 0 0.5em 2.5em;
  background-color: #2b81af;
  color: #fff;
  text-shadow: rgba(0, 0, 0, 0.5) 2px 2px 1px;
}

#qunit-modulefilter-container {
  float: right;
}

/** Tests: Pass/Fail */

#qunit-tests {
  list-style-position: inside;
}

#qunit-tests li {
  padding: 0.4em 0.5em 0.4em 2.5em;
  border-bottom: 1px solid #fff;
  list-style-position: inside;
}

#qunit-tests.hidepass li.pass, #qunit-tests.hidepass li.running {
  display: none;
}

#qunit-tests li strong {
  cursor: pointer;
}

#qunit-tests li a {
  padding: 0.5em;
  color: #c2ccd1;
  text-decoration: none;
}
#qunit-tests li a:hover,
#qunit-tests li a:focus {
  color: #000;
}

#qunit-tests ol {
  margin-top: 0.5em;
  padding: 0.5em;

  background-color: #fff;

  border-radius: 5px;
  -moz-border-radius: 5px;
  -webkit-border-radius: 5px;
}

#qunit-tests table {
  border-collapse: collapse;
  margin-top: .2em;
}

#qunit-tests th {
  text-align: right;
  vertical-align: top;
  padding: 0 .5em 0 0;
}

#qunit-tests td {
  vertical-align: top;
}

#qunit-tests pre {
  margin: 0;
  white-space: pre-wrap;
  word-wrap: break-word;
}

#qunit-tests del {
  background-color: #e0f2be;
  color: #374e0c;
  text-decoration: none;
}

#qunit-tests ins {
  background-color: #ffcaca;
  color: #500;
  text-decoration: none;
}

/*** Test Counts */

#qunit-tests b.counts { color: black; }
#qunit-tests b.passed { color: #5E740B; }
#qunit-tests b.failed { color: #710909; }

#qunit-tests li li {
  padding: 5px;
  background-color: #fff;
  border-bottom: none;
  list-style-position: inside;
}

/*** Passing Styles */

#qunit-tests li li.pass {
  color: #3c510c;
  background-color: #fff;
  border-left: 10px solid #C6E746;
}

#qunit-tests .pass { color: #528CE0; background-color: #D2E0E6; }
#qunit-tests .pass .test-name { color: #366097; }

#qunit-tests .pass .test-actual,
#qunit-tests .pass .test-expected { color: #999999; }

#qunit-banner.qunit-pass { background-color: #C6E746; }

/*** Failing Styles */

#qunit-tests li li.fail {
  color: #710909;
  background-color: #fff;
  border-left: 10px solid #EE5757;
  white-space: pre;
}

#qunit-tests > li:last-child {
  border-radius: 0 0 5px 5px;
  -moz-border-radius: 0 0 5px 5px;
  -webkit-border-bottom-right-radius: 5px;
  -webkit-border-bottom-left-radius: 5px;
}

#qunit-tests .fail { color: #000000; background-color: #EE5757; }
#qunit-tests .fail .test-name,
#qunit-tests .fail .module-name { color: #000000; }

#qunit-tests .fail .test-actual { color: #EE5757; }
#qunit-tests .fail .test-expected { color: green; }

#qunit-banner.qunit-fail { background-color: #EE5757; }


/** Result */

#qunit-testresult {
  padding: 0.5em 0.5em 0.5em 2.5em;

  color: #2b81af;
  background-color: #D2E0E6;

  border-bottom: 1px solid white;
}
#qunit-testresult .module-name {
  font-weight: bold;
}

/** Fixture */

#qunit-fixture {
  position: absolute;
  top: -10000px;
  left: -10000px;
  width: 1000px;
  height: 1000px;
}
book/node_modules/lunr/test/env/qunit.js (1977 lines added, generated, vendored, new file)
File diff suppressed because it is too large
book/node_modules/lunr/test/env/runner.js (125 lines added, generated, vendored, new file)
@@ -0,0 +1,125 @@
/*
 * QtWebKit-powered headless test runner using PhantomJS
 *
 * PhantomJS binaries: http://phantomjs.org/download.html
 * Requires PhantomJS 1.6+ (1.7+ recommended)
 *
 * Run with:
 *  phantomjs runner.js [url-of-your-qunit-testsuite]
 *
 * e.g.
 *  phantomjs runner.js http://localhost/qunit/test/index.html
 */


(function() {
  'use strict';

  var args = require('system').args;

  // arg[0]: scriptName, args[1...]: arguments
  if (args.length !== 2) {
    console.error('Usage:\n  phantomjs runner.js [url-of-your-qunit-testsuite]');
    phantom.exit(1);
  }

  var url = args[1],
      page = require('webpage').create();

  // Route `console.log()` calls from within the Page context to the main Phantom context (i.e. current `this`)
  page.onConsoleMessage = function(msg) {
    console.log(msg);
  };

  page.onInitialized = function() {
    page.evaluate(addLogging);
  };

  page.onCallback = function(message) {
    var result,
        failed;

    if (message) {
      if (message.name === 'QUnit.done') {
        result = message.data;
        failed = !result || result.failed;

        phantom.exit(failed ? 1 : 0);
      }
    }
  };

  page.open(url, function(status) {
    if (status !== 'success') {
      console.error('Unable to access network: ' + status);
      phantom.exit(1);
    } else {
      // Cannot do this verification with the 'DOMContentLoaded' handler because it
      // will be too late to attach it if a page does not have any script tags.
      var qunitMissing = page.evaluate(function() { return (typeof QUnit === 'undefined' || !QUnit); });
      if (qunitMissing) {
        console.error('The `QUnit` object is not present on this page.');
        phantom.exit(1);
      }

      // Do nothing... the callback mechanism will handle everything!
    }
  });

  function addLogging() {
    window.document.addEventListener('DOMContentLoaded', function() {
      var current_test_assertions = [];

      QUnit.log(function(details) {
        var response;

        // Ignore passing assertions
        if (details.result) {
          return;
        }

        response = details.message || '';

        if (typeof details.expected !== 'undefined') {
          if (response) {
            response += ', ';
          }

          response += 'expected: ' + details.expected + ', but was: ' + details.actual;
          if (details.source) {
            response += "\n" + details.source;
          }
        }

        current_test_assertions.push('Failed assertion: ' + response);
      });

      QUnit.testDone(function(result) {
        var i,
            len,
            name = result.module + ': ' + result.name;

        if (result.failed) {
          console.log('Test failed: ' + name);

          for (i = 0, len = current_test_assertions.length; i < len; i++) {
            console.log('    ' + current_test_assertions[i]);
          }
        }

        current_test_assertions.length = 0;
      });

      QUnit.done(function(result) {
        console.log('Took ' + result.runtime + 'ms to run ' + result.total + ' tests. ' + result.passed + ' passed, ' + result.failed + ' failed.');

        if (typeof window.callPhantom === 'function') {
          window.callPhantom({
            'name': 'QUnit.done',
            'data': result
          });
        }
      });
    }, false);
  }
})();
book/node_modules/lunr/test/event_emitter_test.js (75 lines added, generated, vendored, new file)
@@ -0,0 +1,75 @@
module('lunr.EventEmitter')

test('adding an event listener', function () {
  var emitter = new lunr.EventEmitter,
      handler = function () {}

  emitter.addListener('test', handler)

  ok('test' in emitter.events)
  ok(emitter.events.test.indexOf(handler) > -1)
})

test('adding a listener to multiple events', function () {
  var emitter = new lunr.EventEmitter,
      handler = function () {}

  emitter.addListener('foo', 'bar', 'baz', handler)

  ok('foo' in emitter.events)
  ok('bar' in emitter.events)
  ok('baz' in emitter.events)

  ok(emitter.events.foo.indexOf(handler) > -1)
  ok(emitter.events.bar.indexOf(handler) > -1)
  ok(emitter.events.baz.indexOf(handler) > -1)
})

test('removing a single event listener', function () {
  var emitter = new lunr.EventEmitter,
      handler = function () {}

  emitter.addListener('test', handler)

  ok('test' in emitter.events)
  ok(emitter.events.test.indexOf(handler) > -1)

  emitter.removeListener('test', handler)

  ok(!('test' in emitter.events))
})

test('removing a single event listener from many listeners', function () {
  var emitter = new lunr.EventEmitter,
      handler = function () {},
      otherHandler = function () {}

  emitter.addListener('test', handler)
  emitter.addListener('test', otherHandler)

  ok('test' in emitter.events)
  ok(emitter.events.test.indexOf(handler) > -1)

  emitter.removeListener('test', handler)

  ok('test' in emitter.events)
  equal(emitter.events.test.indexOf(handler), -1)
  ok(emitter.events.test.indexOf(otherHandler) > -1)
})

test('emitting events', function () {
  var emitter = new lunr.EventEmitter,
      callbackCalled = false,
      callbackArguments = [],
      callback = function () {
        callbackArguments = Array.prototype.slice.call(arguments)
        callbackCalled = true
      }

  emitter.emit('test', 1, 'a')
  emitter.addListener('test', callback)
  emitter.emit('test', 1, 'a')

  ok(callbackCalled)
  deepEqual(callbackArguments, [1, 'a'])
})
book/node_modules/lunr/test/fixtures/stemming_vocab.json (1 line added, generated, vendored, new file)
@@ -0,0 +1 @@
var stemmingFixture = {"consign":"consign","consigned":"consign","consigning":"consign","consignment":"consign","consist":"consist","consisted":"consist","consistency":"consist","consistent":"consist","consistently":"consist","consisting":"consist","consists":"consist","consolation":"consol","consolations":"consol","consolatory":"consolatori","console":"consol","consoled":"consol","consoles":"consol","consolidate":"consolid","consolidated":"consolid","consolidating":"consolid","consoling":"consol","consols":"consol","consonant":"conson","consort":"consort","consorted":"consort","consorting":"consort","conspicuous":"conspicu","conspicuously":"conspicu","conspiracy":"conspiraci","conspirator":"conspir","conspirators":"conspir","conspire":"conspir","conspired":"conspir","conspiring":"conspir","constable":"constabl","constables":"constabl","constance":"constanc","constancy":"constanc","constant":"constant","knack":"knack","knackeries":"knackeri","knacks":"knack","knag":"knag","knave":"knave","knaves":"knave","knavish":"knavish","kneaded":"knead","kneading":"knead","knee":"knee","kneel":"kneel","kneeled":"kneel","kneeling":"kneel","kneels":"kneel","knees":"knee","knell":"knell","knelt":"knelt","knew":"knew","knick":"knick","knif":"knif","knife":"knife","knight":"knight","knights":"knight","knit":"knit","knits":"knit","knitted":"knit","knitting":"knit","knives":"knive","knob":"knob","knobs":"knob","knock":"knock","knocked":"knock","knocker":"knocker","knockers":"knocker","knocking":"knock","knocks":"knock","knopp":"knopp","knot":"knot","knots":"knot","lay":"lay","try":"tri"}
book/node_modules/lunr/test/index.html (52 lines added, generated, vendored, new file)
@@ -0,0 +1,52 @@
<!DOCTYPE HTML>
<html>
<head>
  <title>Lunr tests</title>

  <!-- dependencies -->
  <script src="/test/env/jquery.js" type="text/javascript" charset="utf-8"></script>
  <script src="/test/env/augment.min.js" type="text/javascript" charset="utf-8"></script>

  <!-- QUnit -->
  <script src="/test/env/qunit.js"></script>
  <link rel="stylesheet" href="/test/env/qunit.css" type="text/css" media="screen" />

  <!-- Lunr -->
  <script src="/lib/lunr.js" type="text/javascript" charset="utf-8"></script>
  <script src="/lib/utils.js" type="text/javascript" charset="utf-8"></script>
  <script src="/lib/tokenizer.js" type="text/javascript" charset="utf-8"></script>
  <script src="/lib/pipeline.js" type="text/javascript" charset="utf-8"></script>
  <script src="/lib/vector.js" type="text/javascript" charset="utf-8"></script>
  <script src="/lib/sorted_set.js" type="text/javascript" charset="utf-8"></script>
  <script src="/lib/event_emitter.js" type="text/javascript" charset="utf-8"></script>
  <script src="/lib/index.js" type="text/javascript" charset="utf-8"></script>
  <script src="/lib/document_store.js" type="text/javascript" charset="utf-8"></script>
  <script src="/lib/stemmer.js" type="text/javascript" charset="utf-8"></script>
  <script src="/lib/stop_word_filter.js" type="text/javascript" charset="utf-8"></script>
  <script src="/lib/trimmer.js" type="text/javascript" charset="utf-8"></script>
  <script src="/lib/token_store.js" type="text/javascript" charset="utf-8"></script>

  <!-- Fixtures -->
  <script src="/test/fixtures/stemming_vocab.json"></script>

  <!-- Tests -->
  <script src="/test/tokenizer_test.js"></script>
  <script src="/test/pipeline_test.js"></script>
  <script src="/test/vector_test.js"></script>
  <script src="/test/sorted_set_test.js"></script>
  <script src="/test/event_emitter_test.js"></script>
  <script src="/test/index_test.js"></script>
  <script src="/test/store_test.js"></script>
  <script src="/test/search_test.js"></script>
  <script src="/test/serialisation_test.js"></script>
  <script src="/test/stemmer_test.js"></script>
  <script src="/test/stop_word_filter_test.js"></script>
  <script src="/test/lunr_test.js"></script>
  <script src="/test/token_store_test.js"></script>
  <script src="/test/trimmer_test.js"></script>

</head>
<body>
  <div id="qunit"></div>
</body>
</html>
book/node_modules/lunr/test/index_test.js (336 lines added, generated, vendored, new file)
@@ -0,0 +1,336 @@
module('lunr.Index')

test("defining what fields to index", function () {
  var idx = new lunr.Index
  idx.field('foo')

  deepEqual(idx._fields[0], {name: 'foo', boost: 1})
})

test("giving a particular field a weighting", function () {
  var idx = new lunr.Index
  idx.field('foo', { boost: 10 })

  deepEqual(idx._fields[0], {name: 'foo', boost: 10})
})

test('default reference should be id', function () {
  var idx = new lunr.Index
  equal(idx._ref, 'id')
})

test("defining the reference field for the index", function () {
  var idx = new lunr.Index
  idx.ref('foo')

  deepEqual(idx._ref, 'foo')
})

test('adding a document to the index', function () {
  var idx = new lunr.Index,
      doc = {id: 1, body: 'this is a test'}

  idx.field('body')
  idx.add(doc)

  equal(idx.documentStore.length, 1)
  ok(!!idx.documentStore.get(1))
})

test('adding a document with an empty field', function () {
  var idx = new lunr.Index,
      doc = {id: 1, body: 'test', title: ''}

  idx.field('title')
  idx.field('body')

  idx.add(doc)
  ok(!isNaN(idx.tokenStore.get('test')[1].tf))
})

test('ignore empty tokens', function () {
  var idx = new lunr.Index,
      doc = {id: 1, body: 'test ???'}

  idx.field('body')
  idx.pipeline.add(lunr.trimmer)

  idx.add(doc)

  var tokens = idx.documentStore.get(1).toArray()
  equal(tokens.length, 1)
  deepEqual(tokens, ['test']) // ??? should be ignored
})

test('triggering add events', function () {
  var idx = new lunr.Index,
      doc = {id: 1, body: 'this is a test'},
      callbackCalled = false,
      callbackArgs = []

  idx.on('add', function (doc, index) {
    callbackCalled = true
    callbackArgs = Array.prototype.slice.call(arguments)
  })

  idx.field('body')
  idx.add(doc)

  ok(callbackCalled)
  equal(callbackArgs.length, 2)
  deepEqual(callbackArgs[0], doc)
  deepEqual(callbackArgs[1], idx)
})

test('silencing add events', function () {
  var idx = new lunr.Index,
      doc = {id: 1, body: 'this is a test'},
      callbackCalled = false,
      callbackArgs = []

  idx.on('add', function (doc, index) {
    callbackCalled = true
    callbackArgs = Array.prototype.slice.call(arguments)
  })

  idx.field('body')
  idx.add(doc, false)

  ok(!callbackCalled)
})

test('removing a document from the index', function () {
  var idx = new lunr.Index,
      doc = {id: 1, body: 'this is a test'}

  idx.field('body')
  equal(idx.documentStore.length, 0)

  idx.add(doc)
  equal(idx.documentStore.length, 1)

  idx.remove(doc)
  equal(idx.documentStore.length, 0)
})

test('triggering remove events', function () {
  var idx = new lunr.Index,
      doc = {id: 1, body: 'this is a test'},
      callbackCalled = false,
      callbackArgs = []

  idx.on('remove', function (doc, index) {
    callbackCalled = true
    callbackArgs = Array.prototype.slice.call(arguments)
  })

  idx.field('body')
  idx.add(doc)
  idx.remove(doc)

  ok(callbackCalled)
  equal(callbackArgs.length, 2)
  deepEqual(callbackArgs[0], doc)
  deepEqual(callbackArgs[1], idx)
})

test('silencing remove events', function () {
  var idx = new lunr.Index,
      doc = {id: 1, body: 'this is a test'},
      callbackCalled = false,
      callbackArgs = []

  idx.on('remove', function (doc, index) {
    callbackCalled = true
    callbackArgs = Array.prototype.slice.call(arguments)
  })

  idx.field('body')
  idx.add(doc)
  idx.remove(doc, false)

  ok(!callbackCalled)
})

test('removing a non-existent document from the index', function () {
  var idx = new lunr.Index,
      doc = {id: 1, body: 'this is a test'},
      doc2 = {id: 2, body: 'i dont exist'},
      callbackCalled = false

  idx.on('remove', function (doc, index) {
    callbackCalled = true
  })

  idx.field('body')
  equal(idx.documentStore.length, 0)

  idx.add(doc)
  equal(idx.documentStore.length, 1)

  idx.remove(doc2)
  equal(idx.documentStore.length, 1)

  ok(!callbackCalled)
})

test('updating a document', function () {
  var idx = new lunr.Index,
      doc = {id: 1, body: 'foo'}

  idx.field('body')
  idx.add(doc)
  equal(idx.documentStore.length, 1)
  ok(idx.tokenStore.has('foo'))

  doc.body = 'bar'
  idx.update(doc)

  equal(idx.documentStore.length, 1)
  ok(idx.tokenStore.has('bar'))
})

test('emitting update events', function () {
  var idx = new lunr.Index,
      doc = {id: 1, body: 'foo'},
      addCallbackCalled = false,
      removeCallbackCalled = false,
      updateCallbackCalled = false,
      callbackArgs = []

  idx.field('body')
  idx.add(doc)
  equal(idx.documentStore.length, 1)
  ok(idx.tokenStore.has('foo'))

  idx.on('update', function (doc, index) {
    updateCallbackCalled = true
    callbackArgs = Array.prototype.slice.call(arguments)
  })

  idx.on('add', function () {
    addCallbackCalled = true
  })

  idx.on('remove', function () {
    removeCallbackCalled = true
  })


  doc.body = 'bar'
  idx.update(doc)

  ok(updateCallbackCalled)
  equal(callbackArgs.length, 2)
  deepEqual(callbackArgs[0], doc)
  deepEqual(callbackArgs[1], idx)

  ok(!addCallbackCalled)
  ok(!removeCallbackCalled)
})

test('silencing update events', function () {
  var idx = new lunr.Index,
      doc = {id: 1, body: 'foo'},
      callbackCalled = false

  idx.field('body')
  idx.add(doc)
  equal(idx.documentStore.length, 1)
  ok(idx.tokenStore.has('foo'))

  idx.on('update', function (doc, index) {
    callbackCalled = true
  })

  doc.body = 'bar'
  idx.update(doc, false)

  ok(!callbackCalled)
})

test('serialising', function () {
  var idx = new lunr.Index,
      mockDocumentStore = { toJSON: function () { return 'documentStore' }},
      mockTokenStore = { toJSON: function () { return 'tokenStore' }},
      mockCorpusTokens = { toJSON: function () { return 'corpusTokens' }},
      mockPipeline = { toJSON: function () { return 'pipeline' }}

  idx.documentStore = mockDocumentStore
  idx.tokenStore = mockTokenStore
  idx.corpusTokens = mockCorpusTokens
  idx.pipeline = mockPipeline

  idx.ref('id')

  idx.field('title', { boost: 10 })
  idx.field('body')

  deepEqual(idx.toJSON(), {
    version: '@VERSION', // this is what the lunr version is set to before being built
    fields: [
      { name: 'title', boost: 10 },
      { name: 'body', boost: 1 }
    ],
    ref: 'id',
    documentStore: 'documentStore',
    tokenStore: 'tokenStore',
    corpusTokens: 'corpusTokens',
    pipeline: 'pipeline'
  })
})

test('loading a serialised index', function () {
  var serialisedData = {
    version: '@VERSION', // this is what the lunr version is set to before being built
    fields: [
      { name: 'title', boost: 10 },
      { name: 'body', boost: 1 }
    ],
    ref: 'id',
    documentStore: { store: {}, length: 0 },
    tokenStore: { root: {}, length: 0 },
    corpusTokens: [],
    pipeline: ['stopWordFilter', 'stemmer']
  }

  var idx = lunr.Index.load(serialisedData)

  deepEqual(idx._fields, serialisedData.fields)
  equal(idx._ref, 'id')
})

test('idf cache with reserved words', function () {
  var idx = new lunr.Index

  var troublesomeTokens = [
    'constructor',
    '__proto__',
    'hasOwnProperty',
    'isPrototypeOf',
    'propertyIsEnumerable',
    'toLocaleString',
    'toString',
    'valueOf'
  ]

  troublesomeTokens.forEach(function (token) {
    equal(typeof(idx.idf(token)), 'number', 'Using token: ' + token)
  })
})

test('using a plugin', function () {
  var idx = new lunr.Index,
      ctx, args,
      plugin = function () {
        ctx = this
        args = Array.prototype.slice.call(arguments)
        this.pluginLoaded = true
      }

  idx.use(plugin, 'foo', 'bar')

  equal(ctx, idx)
  deepEqual(args, [idx, 'foo', 'bar'])
  ok(idx.pluginLoaded)
})
book/node_modules/lunr/test/lunr_test.js (37 lines added, generated, vendored, new file)
@@ -0,0 +1,37 @@
module('lunr')

test('returns a new instance of lunr.Index', function () {
  var index = lunr()

  equal(index.constructor, lunr.Index)
})

test('should set up the pipeline', function () {
  var index = lunr(),
      stack = index.pipeline._stack

  equal(stack.length, 3)
  equal(stack.indexOf(lunr.trimmer), 0)
  equal(stack.indexOf(lunr.stopWordFilter), 1)
  equal(stack.indexOf(lunr.stemmer), 2)
})

test('passing a config fn which is called with the new index', function () {
  var configCtx, configArg

  var index = lunr(function (idx) {
    configCtx = this
    configArg = idx

    this.ref('cid')

    this.field('title', 10)
    this.field('body')
  })

  equal(configCtx, index)
  equal(configArg, index)

  equal(index._ref, 'cid')
  equal(index._fields.length, 2)
})
book/node_modules/lunr/test/pipeline_test.js (227 lines added, generated, vendored, new file)
@@ -0,0 +1,227 @@
module('lunr.Pipeline', {
  setup: function () {
    this.existingRegisteredFunctions = lunr.Pipeline.registeredFunctions
    lunr.Pipeline.registeredFunctions = {}

    this.existingWarnIfFunctionNotRegistered = lunr.Pipeline.warnIfFunctionNotRegistered
    lunr.Pipeline.warnIfFunctionNotRegistered = $.noop
  },
  teardown: function () {
    lunr.Pipeline.registeredFunctions = this.existingRegisteredFunctions
    lunr.Pipeline.warnIfFunctionNotRegistered = this.existingWarnIfFunctionNotRegistered
  }
})

test("adding a new item to the pipeline", function () {
  var pipeline = new lunr.Pipeline
  equal(pipeline._stack.length, 0)

  pipeline.add($.noop)
  equal(pipeline._stack.length, 1)
})

test("adding multiple items to the pipeline in one go", function () {
  var pipeline = new lunr.Pipeline

  pipeline.add($.noop, $.noop)
  equal(pipeline._stack.length, 2)
})

test("removing an item from the pipeline", function () {
  var pipeline = new lunr.Pipeline,
      fn = $.noop

  pipeline.add(fn)
  equal(pipeline._stack.length, 1)

  pipeline.remove(fn)
  equal(pipeline._stack.length, 0)
})

test("removing a nonexistent item from the pipeline", function () {
  var pipeline = new lunr.Pipeline,
      fn1 = $.noop,
      fn2 = function () {}

  pipeline.add(fn1)
  equal(pipeline._stack.length, 1)

  pipeline.remove(fn2)
  equal(pipeline._stack.length, 1)
})

test("adding an item to the pipeline before another item", function () {
  var pipeline = new lunr.Pipeline,
      fn1 = $.noop,
      fn2 = function () {}

  pipeline.add(fn1)
  pipeline.before(fn1, fn2)

  deepEqual(pipeline._stack, [fn2, fn1])
})

test("adding an item to the pipeline before nonexistent item", function () {
  var pipeline = new lunr.Pipeline,
      fn1 = $.noop,
      fn2 = function () {},
      fn3 = function () {}

  pipeline.add(fn1, fn2)

  throws(function () {
    pipeline.before(fn3, fn1)
  })

  deepEqual(pipeline._stack, [fn1, fn2])
})

test("adding an item to the pipeline after another item", function () {
  var pipeline = new lunr.Pipeline,
      fn1 = $.noop,
      fn2 = function () {},
      fn3 = function () {}

  pipeline.add(fn1, fn2)
  pipeline.after(fn1, fn3)

  deepEqual(pipeline._stack, [fn1, fn3, fn2])
})

test("adding an item to the pipeline after nonexistent item", function () {
  var pipeline = new lunr.Pipeline,
      fn1 = $.noop,
      fn2 = function () {},
      fn3 = function () {}

  pipeline.add(fn1, fn2)

  throws(function () {
    pipeline.after(fn3, fn1)
  })

  deepEqual(pipeline._stack, [fn1, fn2])
})

test("run calls each member of the pipeline for each input", function () {
  var pipeline = new lunr.Pipeline,
      count1 = 0, count2 = 0,
      fn1 = function (token) { count1++ ; return token },
      fn2 = function (token) { count2++ ; return token }

  pipeline.add(fn1, fn2)

  pipeline.run([1,2,3])

  equal(count1, 3)
  equal(count2, 3)
})

test("run should pass three inputs to the pipeline fn", function () {
  var pipeline = new lunr.Pipeline,
      input, index, arr,
      fn1 = function () { input = arguments[0], index = arguments[1], arr = arguments[2] }

  pipeline.add(fn1)

  pipeline.run(['a'])

  equal(input, 'a')
  equal(index, 0)
  deepEqual(arr, ['a'])
})

test("run should pass the output of one into the input of the next", function () {
  var pipeline = new lunr.Pipeline,
      output,
      fn1 = function (t1) { return t1.toUpperCase() },
      fn2 = function (t2) { output = t2 }

  pipeline.add(fn1)
  pipeline.add(fn2)

  pipeline.run(['a'])

  equal(output, 'A')
})

test("run should return the result of running the entire pipeline on each element", function () {
  var pipeline = new lunr.Pipeline,
      fn1 = function (t1) { return t1.toUpperCase() }
  pipeline.add(fn1)
  deepEqual(pipeline.run(['a']), ['A'])
})

test("run should filter out any undefined values at each stage in the pipeline", function () {
  var pipeline = new lunr.Pipeline,
      fn2Count = 0,
      fn1 = function (t) { if (t < 5) return t },
      fn2 = function (t) { fn2Count++ ; return t }

  pipeline.add(fn1, fn2)

  var output = pipeline.run([0,1,2,3,4,5,6,7,8,9])
  equal(fn2Count, 5)
  equal(output.length, 5)
})

test('toJSON', function () {
  var pipeline = new lunr.Pipeline,
      fn1 = function () {},
      fn2 = function () {}

  lunr.Pipeline.registerFunction(fn1, 'fn1')
  lunr.Pipeline.registerFunction(fn2, 'fn2')

  pipeline.add(fn1, fn2)

  deepEqual(pipeline.toJSON(), ['fn1', 'fn2'])
})

test('registering a pipeline function', function () {
  var fn1 = function () {}

  equal(Object.keys(lunr.Pipeline.registeredFunctions).length, 0)

  lunr.Pipeline.registerFunction(fn1, 'fn1')

  equal(fn1.label, 'fn1')
  equal(Object.keys(lunr.Pipeline.registeredFunctions).length, 1)
  deepEqual(lunr.Pipeline.registeredFunctions['fn1'], fn1)
})

test('load', function () {
  var fn1 = function () {},
      fn2 = function () {}

  lunr.Pipeline.registerFunction(fn1, 'fn1')
  lunr.Pipeline.registerFunction(fn2, 'fn2')

  var serialised = ['fn1', 'fn2']

  var pipeline = lunr.Pipeline.load(serialised)

  equal(pipeline._stack.length, 2)
  deepEqual(pipeline._stack[0], fn1)
  deepEqual(pipeline._stack[1], fn2)
})

test('loading an un-registered pipeline function', function () {
  var serialised = ['fn1']

  throws(function () {
    lunr.Pipeline.load(serialised)
  })
})

test('resetting the pipeline', function () {
  var fn1 = function () {},
      fn2 = function () {},
      pipeline = new lunr.Pipeline

  pipeline.add(fn1, fn2)
  deepEqual(pipeline._stack, [fn1, fn2])

  pipeline.reset()
  deepEqual(pipeline._stack, [])
})
book/node_modules/lunr/test/runner.sh (9 lines added, generated, vendored, new executable file)
@@ -0,0 +1,9 @@
#!/usr/bin/env bash

NODE=/usr/local/bin/node
PHANTOMJS=./node_modules/.bin/phantomjs
SERVER_PORT=${1:-54545}

echo "Starting test server at http://localhost:$SERVER_PORT"
$NODE server.js "$SERVER_PORT" > /dev/null 2>&1 &
$PHANTOMJS ./test/env/runner.js "http://localhost:$SERVER_PORT/test" 2> /dev/null
book/node_modules/lunr/test/search_test.js (77 lines added, generated, vendored, new file)
@@ -0,0 +1,77 @@
module('search', {
  setup: function () {
    var idx = new lunr.Index
    idx.field('body')
    idx.field('title', { boost: 10 })

    ;([{
      id: 'a',
      title: 'Mr. Green kills Colonel Mustard',
      body: 'Mr. Green killed Colonel Mustard in the study with the candlestick. Mr. Green is not a very nice fellow.',
      wordCount: 19
    },{
      id: 'b',
      title: 'Plumb waters plant',
      body: 'Professor Plumb has a green plant in his study',
      wordCount: 9
    },{
      id: 'c',
      title: 'Scarlett helps Professor',
      body: 'Miss Scarlett watered Professor Plumbs green plant while he was away from his office last week.',
      wordCount: 16
    },{
      id: 'd',
      title: 'title',
      body: 'handsome',
    },{
      id: 'e',
      title: 'title',
      body: 'hand',
    }]).forEach(function (doc) { idx.add(doc) })

    this.idx = idx
  }
})

test('returning the correct results', function () {
  var results = this.idx.search('green plant')

  equal(results.length, 2)
  equal(results[0].ref, 'b')
})

test('search term not in the index', function () {
  var results = this.idx.search('foo')

  equal(results.length, 0)
})

test('one search term not in the index', function () {
  var results = this.idx.search('foo green')

  equal(results.length, 0)
})

test('search contains one term not in the index', function () {
  var results = this.idx.search('green foo')

  equal(results.length, 0)
})

test('search takes into account boosts', function () {
  var results = this.idx.search('professor')

  equal(results.length, 2)
  equal(results[0].ref, 'c')

  ok(results[0].score > 10 * results[1].score)
})

test('search boosts exact matches', function () {
  var results = this.idx.search('hand')

  equal(results.length, 2)
  equal(results[0].ref, 'e')

  ok(results[0].score > results[1].score)
})
book/node_modules/lunr/test/serialisation_test.js (46 lines added, generated, vendored, new file)
@@ -0,0 +1,46 @@
module('serialisation', {
  setup: function () {
    this.corpus = [{
      id: 'a',
      title: 'Mr. Green kills Colonel Mustard',
      body: 'Mr. Green killed Colonel Mustard in the study with the candlestick. Mr. Green is not a very nice fellow.'
    },{
      id: 'b',
      title: 'Plumb waters plant',
      body: 'Professor Plumb has a green plant in his study'
    },{
      id: 'c',
      title: 'Scarlett helps Professor',
      body: 'Miss Scarlett watered Professor Plumbs green plant while he was away from his office last week.'
    }]
  }
})

test('dumping and loading an index', function () {
  var idx = new lunr.Index

  idx.field('title', { boost: 10 })
  idx.field('body')

  this.corpus.forEach(function (doc) { idx.add(doc) })

  var dumpedIdx = JSON.stringify(idx),
      clonedIdx = lunr.Index.load(JSON.parse(dumpedIdx))

  deepEqual(idx.search('green plant'), clonedIdx.search('green plant'))
})

test('dumping and loading an index with a populated pipeline', function () {
  var idx = lunr(function () {
    this.field('title', { boost: 10 })
    this.field('body')
  })

  this.corpus.forEach(function (doc) { idx.add(doc) })

  var dumpedIdx = JSON.stringify(idx),
      clonedIdx = lunr.Index.load(JSON.parse(dumpedIdx))

  deepEqual(idx.pipeline._stack, clonedIdx.pipeline._stack)
  deepEqual(idx.search('water'), clonedIdx.search('water'))
})
book/node_modules/lunr/test/size.html (21 lines added, generated, vendored, new file)
@@ -0,0 +1,21 @@
<script src="/test/env/jquery.js" type="text/javascript" charset="utf-8"></script>

<script src="/words.json"></script>
<script src="/lib/lunr.js" type="text/javascript" charset="utf-8"></script>

<script src="/lib/token_store.js" type="text/javascript" charset="utf-8"></script>

<script src="/lib/reverse_index.js" type="text/javascript" charset="utf-8"></script>
<script src="/lib/ternary_tree.js" type="text/javascript" charset="utf-8"></script>

<script>
  tokenStore = new lunr.TokenStore
  ternaryTree = new lunr.TernaryTree
  reverseIndex = new lunr.ReverseIndex

  words.forEach(function (word) {
    ternaryTree.add(word)
    reverseIndex.set(word, 1, 1)
    tokenStore.add(word, {ref: 1, tf: 1})
  })
</script>
book/node_modules/lunr/test/sorted_set_test.js (118 lines added, generated, vendored, new file)
@@ -0,0 +1,118 @@
module('lunr.SortedSet')

test('adding an element that doesn\'t exist into the set', function () {
  var set = new lunr.SortedSet

  equal(set.length, 0)
  set.add('foo')
  equal(set.length, 1)
})

test('adding an element that does exist into the set', function () {
  var set = new lunr.SortedSet
  set.add('foo')
  equal(set.length, 1)

  set.add('foo')
  equal(set.length, 1)
})

test('sort is maintained when adding elements to the set', function () {
  var set = new lunr.SortedSet

  set.add('b')
  set.add('d')
  set.add('a')
  set.add('c')

  deepEqual(set.elements, ['a', 'b', 'c', 'd'])
})

test('adding more than one element to the set in one go', function () {
  var set = new lunr.SortedSet
  set.add('foo', 'bar', 'baz', 'foo')
  equal(set.length, 3)
})

test('converting to an array', function () {
  var set = new lunr.SortedSet
  set.add('foo', 'bar', 'baz')
  deepEqual(set.toArray(), ['bar', 'baz', 'foo'])
})

test('mapping the set', function () {
  var set = new lunr.SortedSet, a = []

  set.add('foo', 'bar')

  set.forEach(function (t) { a.push(t) })

  deepEqual(a, ['bar', 'foo'])
})

test('getting the index of an item in the set', function () {
  var set = new lunr.SortedSet

  equal(set.indexOf('non member'), -1)

  set.add('foo')

  equal(set.indexOf('foo'), 0)
  equal(set.indexOf('non member'), -1)

  set.add('bar')

  equal(set.indexOf('foo'), 1)
  equal(set.indexOf('bar'), 0)
  equal(set.indexOf('non member'), -1)
})

test('intersecting this set with another set', function () {
  var set1 = new lunr.SortedSet,
      set2 = new lunr.SortedSet,
      setIntersect

  set1.add('foo', 'bar')
  set2.add('baz', 'foo')

  setIntersect = set1.intersect(set2)

  ok(setIntersect.indexOf('foo') > -1)
  ok(setIntersect.indexOf('bar') == -1)
  ok(setIntersect.indexOf('baz') == -1)
})

test('unioning this set with another set', function () {
  var set1 = new lunr.SortedSet,
      set2 = new lunr.SortedSet,
      setUnion

  set1.add('foo', 'bar')
  set2.add('baz', 'foo')

  setUnion = set1.union(set2)

  ok(setUnion.indexOf('foo') > -1)
  ok(setUnion.indexOf('bar') > -1)
  ok(setUnion.indexOf('baz') > -1)

  equal(setUnion.length ,3)
})

test('serialising', function () {
  var emptySet = new lunr.SortedSet,
      nonEmptySet = new lunr.SortedSet

  nonEmptySet.add(1,2,3,4)

  deepEqual(emptySet.toJSON(), [])
  deepEqual(nonEmptySet.toJSON(), [1,2,3,4])
})

test('loading serialised dump', function () {
  var serialisedData = [1,2,3,4],
      set = lunr.SortedSet.load(serialisedData)

  equal(set.length, 4)
  deepEqual(set.elements, [1,2,3,4])
})
book/node_modules/lunr/test/stemmer_test.js (14 lines added, generated, vendored, new file)
@@ -0,0 +1,14 @@
module('lunr.stemmer')

test('should stem words correctly', function () {
  Object.keys(stemmingFixture).forEach(function (testWord) {
    var expected = stemmingFixture[testWord]

    equal(lunr.stemmer(testWord), expected)
  })
})

test('should be registered with lunr.Pipeline', function () {
  equal(lunr.stemmer.label, 'stemmer')
  deepEqual(lunr.Pipeline.registeredFunctions['stemmer'], lunr.stemmer)
})
book/node_modules/lunr/test/stop_word_filter_test.js (30 lines added, generated, vendored, new file)
@@ -0,0 +1,30 @@
module('lunr.stopWordFilter')

test('stops stop words', function () {
  var stopWords = ['the', 'and', 'but', 'than', 'when']

  stopWords.forEach(function (word) {
    equal(lunr.stopWordFilter(word), undefined)
  })
})

test('non stop words pass through', function () {
  var nonStopWords = ['interesting', 'words', 'pass', 'through']

  nonStopWords.forEach(function (word) {
    equal(lunr.stopWordFilter(word), word)
  })
})

test('should not filter Object.prototype terms', function () {
  var nonStopWords = ['constructor', 'hasOwnProperty', 'toString', 'valueOf']

  nonStopWords.forEach(function (word) {
    equal(lunr.stopWordFilter(word), word)
  })
})

test('should be registered with lunr.Pipeline', function () {
  equal(lunr.stopWordFilter.label, 'stopWordFilter')
  deepEqual(lunr.Pipeline.registeredFunctions['stopWordFilter'], lunr.stopWordFilter)
})
book/node_modules/lunr/test/store_node_test.js (17 lines added, generated, vendored, new file)
@@ -0,0 +1,17 @@
module('store node')

test("get all children", function() {
  var node = new lunr.StoreNode,
      childNode = node.at('a'),
      otherChildNode = node.at('a'),
      grandChildNode = childNode.at('a')

  childNode.push('childNode')
  otherChildNode.push('otherChildNode')
  grandChildNode.push('grandChildNode')

  equal(node.allChildren().length, 3)
  ok(node.allChildren().indexOf(childNode) > -1)
  ok(node.allChildren().indexOf(otherChildNode) > -1)
  ok(node.allChildren().indexOf(grandChildNode) > -1)
})
book/node_modules/lunr/test/store_test.js (60 lines added, generated, vendored, new file)
@@ -0,0 +1,60 @@
module('lunr.Store')

test('adding document tokens to the document store', function () {
  var docStore = new lunr.Store,
      tokens = ['eggs', 'ham']

  docStore.set(1, tokens)
  deepEqual(docStore.get(1), tokens)
})

test('getting the number of items in the document store', function () {
  var docStore = new lunr.Store

  equal(docStore.length, 0)
  docStore.set(1, 'foo')
  equal(docStore.length, 1)
})

test('checking whether the store contains a key', function () {
  var store = new lunr.Store

  ok(!store.has('foo'))
  store.set('foo', 1)
  ok(store.has('foo'))
})

test('removing an element from the store', function () {
  var store = new lunr.Store

  store.set('foo', 1)
  ok(store.has('foo'))
  equal(store.length, 1)
  store.remove('foo')
  ok(!store.has('foo'))
  equal(store.length, 0)
})

test('serialising', function () {
  var store = new lunr.Store

  deepEqual(store.toJSON(), { store: {}, length: 0 })

  store.set(1, ['eggs', 'ham'])

  deepEqual(store.toJSON(), { store: { 1: ['eggs', 'ham'] }, length: 1 })
})

test('loading serialised data', function () {
  var serialisedData = {
    length: 1,
    store: {
      1: ['eggs', 'ham']
    }
  }

  var store = lunr.Store.load(serialisedData)

  equal(store.length, 1)
  deepEqual(store.get(1), lunr.SortedSet.load(['eggs', 'ham']))
})
book/node_modules/lunr/test/test_helper.js (23 lines added, generated, vendored, new file)
@@ -0,0 +1,23 @@
var helpers = require('./../lib/helpers')

var extensions = function () {
  this.equalNumber = function (lambdaNum, num, desc) {
    return this.equal.call(this, helpers.toNumber(lambdaNum), num, desc)
  },

  this.isTrue = function (lambdaBool, desc) {
    return this.ok.call(this, helpers.toBoolean(lambdaBool), desc)
  },

  this.isFalse = function (lambdaBool, desc) {
    return this.ok.call(this, !helpers.toBoolean(lambdaBool), desc)
  }
}

module.exports = function (testName, testFn) {
  module.exports[testName] = function (test) {
    extensions.call(test)
    testFn.call(test, test)
    test.done()
  }
}
book/node_modules/lunr/test/token_store_test.js (177 lines added, generated, vendored, new file)
@@ -0,0 +1,177 @@
module('lunr.TokenStore')

test('adding a token to the store', function () {
  var store = new lunr.TokenStore,
      doc = { ref: 123, tf: 1 },
      token = 'foo'

  store.add(token, doc)

  ok(store.root['f']['o']['o']['docs'][123] === doc)
  equal(store.length, 1)
})

test('adding another document to the token', function () {
  var store = new lunr.TokenStore,
      doc1 = { ref: 123, tf: 1 },
      doc2 = { ref: 456, tf: 1 },
      token = 'foo'

  store.add(token, doc1)
  store.add(token, doc2)

  ok(store.root['f']['o']['o']['docs'][123] === doc1)
  ok(store.root['f']['o']['o']['docs'][456] === doc2)
})

test('checking if a token exists in the store', function () {
  var store = new lunr.TokenStore,
      doc = { ref: 123, tf: 1 },
      token = 'foo'

  store.add(token, doc)

  ok(store.has(token))
})

test('checking if a token does not exist in the store', function () {
  var store = new lunr.TokenStore,
      doc = { ref: 123, tf: 1 },
      token = 'foo'

  ok(!store.has('bar'))
  store.add(token, doc)
  ok(!store.has('bar'))
})

test('retrieving items from the store', function () {
  var store = new lunr.TokenStore,
      doc = { ref: 123, tf: 1 },
      token = 'foo'

  store.add(token, doc)
  deepEqual(store.get(token), {
    '123': doc
  })

  deepEqual(store.get(''), {})
})

test('retrieving items that do not exist in the store', function () {
  var store = new lunr.TokenStore

  deepEqual(store.get('foo'), {})
})

test('counting items in the store', function () {
  var store = new lunr.TokenStore,
      doc1 = { ref: 123, tf: 1 },
      doc2 = { ref: 456, tf: 1 },
      doc3 = { ref: 789, tf: 1 }

  store.add('foo', doc1)
  store.add('foo', doc2)
  store.add('bar', doc3)

  equal(store.count('foo'), 2)
  equal(store.count('bar'), 1)
  equal(store.count('baz'), 0)
})

test('removing a document from the token store', function () {
  var store = new lunr.TokenStore,
      doc = { ref: 123, tf: 1 }

  deepEqual(store.get('foo'), {})
  store.add('foo', doc)
  deepEqual(store.get('foo'), {
    '123': doc
  })

  store.remove('foo', 123)
  deepEqual(store.get('foo'), {})
})

test('removing a document that is not in the store', function () {
  var store = new lunr.TokenStore,
      doc1 = { ref: 123, tf: 1 },
      doc2 = { ref: 567, tf: 1 }

  store.add('foo', doc1)
  store.add('bar', doc2)
  store.remove('foo', 456)

  deepEqual(store.get('foo'), { 123: doc1 })
})

test('removing a document from a key that does not exist', function () {
  var store = new lunr.TokenStore

  store.remove('foo', 123)
  ok(!store.has('foo'))
})

test('expand a token into all descendent tokens', function () {
  var store = new lunr.TokenStore,
      doc = { ref: 123, tf: 1 }

  store.add('hell', doc)
  store.add('hello', doc)
  store.add('help', doc)
  store.add('held', doc)
  store.add('foo', doc)
  store.add('bar', doc)

  var tokens = store.expand('hel')
  deepEqual(tokens, ['hell', 'hello', 'help', 'held'])
})

test('serialisation', function () {
  var store = new lunr.TokenStore

  deepEqual(store.toJSON(), { root: { docs: {} }, length: 0 })

  store.add('foo', { ref: 123, tf: 1 })

  deepEqual(store.toJSON(),
    {
      root: {
        docs: {},
        f: {
          docs: {},
          o: {
            docs: {},
            o: {
              docs: { 123: { ref: 123, tf: 1 } }
            }
          }
        }
      },
      length: 1
    }
  )
})

test('loading a serialised story', function () {
  var serialisedData = {
    root: {
      docs: {},
      f: {
        docs: {},
        o: {
          docs: {},
          o: {
            docs: { 123: { ref: 123, tf: 1 } }
          }
        }
      }
    },
    length: 1
  }

  var store = lunr.TokenStore.load(serialisedData),
      documents = store.get('foo')

  equal(store.length, 1)
  deepEqual(documents, { 123: { ref: 123, tf: 1 }})
})
book/node_modules/lunr/test/tokenizer_test.js (65 lines added, generated, vendored, new file)
@@ -0,0 +1,65 @@
module('lunr.tokenizer')

test("splitting simple strings into tokens", function () {
  var simpleString = "this is a simple string",
      tokens = lunr.tokenizer(simpleString)

  deepEqual(tokens, ['this', 'is', 'a', 'simple', 'string'])
})

test('downcasing tokens', function () {
  var simpleString = 'FOO BAR',
      tags = ['Foo', 'BAR']

  deepEqual(lunr.tokenizer(simpleString), ['foo', 'bar'])
  deepEqual(lunr.tokenizer(tags), ['foo', 'bar'])
})

test('handling arrays', function () {
  var tags = ['foo', 'bar'],
      tokens = lunr.tokenizer(tags)

  deepEqual(tokens, tags)
})

test('handling multiple white spaces', function () {
  var testString = ' foo bar ',
      tokens = lunr.tokenizer(testString)

  deepEqual(tokens, ['foo', 'bar'])
})

test('handling null-like arguments', function () {
  deepEqual(lunr.tokenizer(), [])
  deepEqual(lunr.tokenizer(null), [])
  deepEqual(lunr.tokenizer(undefined), [])
})

test('calling to string on passed val', function () {
  var date = new Date (Date.UTC(2013, 0, 1, 12)),
      obj = {
        toString: function () { return 'custom object' }
      }

  equal(lunr.tokenizer(41), '41')
  equal(lunr.tokenizer(false), 'false')
  deepEqual(lunr.tokenizer(obj), ['custom', 'object'])

  // slicing here to avoid asserting on the timezone part of the date
  // that will be different whereever the test is run.
  deepEqual(lunr.tokenizer(date).slice(0, 4), ['tue', 'jan', '01', '2013'])
})

test("splitting strings with hyphens", function () {
  var simpleString = "take the New York-San Francisco flight",
      tokens = lunr.tokenizer(simpleString)

  deepEqual(tokens, ['take', 'the', 'new', 'york', 'san', 'francisco', 'flight'])
})

test("splitting strings with hyphens and spaces", function () {
  var simpleString = "Solve for A - B",
      tokens = lunr.tokenizer(simpleString)

  deepEqual(tokens, ['solve', 'for', 'a', 'b'])
})
book/node_modules/lunr/test/trimmer_test.js (32 lines added, generated, vendored, new file)
@@ -0,0 +1,32 @@
module('lunr.trimmer')

test('latin characters', function () {
  var token = 'hello'
  equal(lunr.trimmer(token), token)
})

test('removing leading and trailing punctuation', function () {
  var fullStop = 'hello.',
      innerApostrophe = "it's",
      trailingApostrophe = "james'",
      exclamationMark = 'stop!',
      comma = 'first,',
      brackets = '[tag]'

  deepEqual(lunr.trimmer(fullStop), 'hello')
  deepEqual(lunr.trimmer(innerApostrophe), "it's")
  deepEqual(lunr.trimmer(trailingApostrophe), "james")
  deepEqual(lunr.trimmer(exclamationMark), 'stop')
  deepEqual(lunr.trimmer(comma), 'first')
  deepEqual(lunr.trimmer(brackets), 'tag')
})

test('should be registered with lunr.Pipeline', function () {
  equal(lunr.trimmer.label, 'trimmer')
  deepEqual(lunr.Pipeline.registeredFunctions['trimmer'], lunr.trimmer)
})

test('empty tokens should return undefined', function () {
  var token = '???'
  equal(lunr.trimmer(token), void 0)
})
book/node_modules/lunr/test/vector_test.js (64 lines added, generated, vendored, new file)
@@ -0,0 +1,64 @@
module("lunr.Vector")

test("calculating the magnitude of a vector", function () {
  var vector = new lunr.Vector,
      elements = [4,5,6]

  elements.forEach(function (el, i) { vector.insert(i, el) })

  equal(vector.magnitude(), Math.sqrt(77))
})

test("calculating the dot product with another vector", function () {
  var v1 = new lunr.Vector,
      v2 = new lunr.Vector,
      els1 = [1, 3, -5],
      els2 = [4, -2, -1]


  els1.forEach(function (el, i) { v1.insert(i, el) })
  els2.forEach(function (el, i) { v2.insert(i, el) })

  equal(v1.dot(v2), 3)
})

test("calculating the similarity between two vectors", function () {
  var v1 = new lunr.Vector,
      v2 = new lunr.Vector,
      els1 = [1, 3, -5],
      els2 = [4, -2, -1]

  els1.forEach(function (el, i) { v1.insert(i, el) })
  els2.forEach(function (el, i) { v2.insert(i, el) })

  var similarity = v1.similarity(v2),
      roundedSimilarity = Math.round(similarity * 1000) / 1000

  equal(roundedSimilarity, 0.111)
})

test("inserting an element invalidates the magnitude cache", function () {
  var vector = new lunr.Vector,
      elements = [4,5,6]

  elements.forEach(function (el, i) { vector.insert(i, el) })

  equal(vector.magnitude(), Math.sqrt(77))

  vector.insert(3, 7)

  equal(vector.magnitude(), Math.sqrt(126))
})

test("inserted elements are kept in index order", function () {
  var vector = new lunr.Vector,
      elements = [6,5,4]

  vector.insert(2, 4)
  vector.insert(1, 5)
  vector.insert(0, 6)

  equal(vector.list.idx, 0)
  equal(vector.list.next.idx, 1)
  equal(vector.list.next.next.idx, 2)
})