Skip to content

Commit 5ac33db

Browse files
committed
test: add unit and integration tests for new discovery methods
1 parent d5ba660 commit 5ac33db

2 files changed

Lines changed: 84 additions & 0 deletions

File tree

test/integration/test.discovery.js

Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,12 +22,14 @@ describe('discovery_integration', function() {
2222
let configuration_id;
2323
let collection_id;
2424
let collection_id2;
25+
let japanese_collection_id;
2526

2627
before(function() {
2728
environment_id = auth.discovery.environment_id;
2829
configuration_id = auth.discovery.configuration_id;
2930
collection_id = auth.discovery.collection_id;
3031
collection_id2 = auth.discovery.collection_id_2;
32+
japanese_collection_id = auth.discovery.japanese_collection_id;
3133

3234
nock.enableNetConnect();
3335
discovery = new DiscoveryV1(
@@ -481,4 +483,55 @@ describe('discovery_integration', function() {
481483
});
482484
});
483485
});
486+
487+
describe('tokenization dictionary tests @slow', function() {
  // All three tests target the same Japanese-language collection. The ids are
  // populated by the suite's `before` hook, so build the params lazily inside
  // each test rather than at describe time.
  const japaneseCollectionParams = () => ({
    environment_id,
    collection_id: japanese_collection_id,
  });

  it('should createTokenizationDictionary', (done) => {
    // One custom rule: a compound word, its token split, readings, and POS tag.
    const params = Object.assign(japaneseCollectionParams(), {
      tokenization_rules: [
        {
          text: 'すしネコ',
          tokens: ['すし', 'ネコ'],
          readings: ['寿司', 'ネコ'],
          part_of_speech: 'カスタム名詞',
        },
      ],
    });

    discovery.createTokenizationDictionary(params, (err, res) => {
      assert.ifError(err);
      assert(res.status);
      assert(res.type);
      done();
    });
  });

  it('should getTokenizationDictionaryStatus', (done) => {
    discovery.getTokenizationDictionaryStatus(japaneseCollectionParams(), (err, res) => {
      assert.ifError(err);
      assert(res.status);
      assert(res.type);
      done();
    });
  });

  it('should deleteTokenizationDictionary', (done) => {
    discovery.deleteTokenizationDictionary(japaneseCollectionParams(), (err, res) => {
      assert.ifError(err);
      // A successful delete comes back with an empty response body.
      assert.equal(res, '');
      done();
    });
  });
});
484537
});

test/unit/test.discovery.v1.js

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -76,6 +76,8 @@ describe('discovery-v1', function() {
7676
events: '/v1/events',
7777
metrics: '/v1/metrics',
7878
logs: '/v1/logs',
79+
tokenization_dictionaries:
80+
'/v1/environments/env-guid/collections/col-guid/word_lists/tokenization_dictionary',
7981
};
8082

8183
it('should generate version was not specified (negative test)', function() {
@@ -1008,6 +1010,35 @@ describe('discovery-v1', function() {
10081010
assert.equal(req.method, 'GET');
10091011
});
10101012
});
1013+
1014+
describe('tokenization dictionary tests', function() {
  // Create, status, and delete all hit the same tokenization-dictionary
  // endpoint; only the HTTP verb distinguishes them, so share the expected URL.
  const expectedHref =
    service.url + paths.tokenization_dictionaries + '?version=' + service.version;

  it('createTokenizationDictionary', function() {
    const req = discovery.createTokenizationDictionary(queryPayload, noop);
    assert.equal(req.uri.href, expectedHref);
    assert.equal(req.method, 'POST');
  });

  it('deleteTokenizationDictionary', function() {
    const req = discovery.deleteTokenizationDictionary(queryPayload, noop);
    assert.equal(req.uri.href, expectedHref);
    assert.equal(req.method, 'DELETE');
  });

  it('getTokenizationDictionaryStatus', function() {
    const req = discovery.getTokenizationDictionaryStatus(queryPayload, noop);
    assert.equal(req.uri.href, expectedHref);
    assert.equal(req.method, 'GET');
  });
});
10111042
});
10121043
});
10131044
});

0 commit comments

Comments
 (0)