Skip to content

Commit 39cc3e3

Browse files
authored Feb 24, 2025
[JS] Preparing the JS package for preview release (openvinotoolkit#1775)
Details: - Code refactoring: - Renamed entity from `Pipeline` to `LLMpipeline` - Renamed JS package from `genai-node` to `openvino-genai-node` - Refactored import of buildings - Improving code style with `ESLint` - Updated version to `2025.1.0-preview` - Adding post-installation binary downloads Tickets: - CVS-161447 --------- Signed-off-by: Kirill Suvorov <kirill.suvorov@intel.com>
1 parent 0073a1e commit 39cc3e3

17 files changed

+1332
-217
lines changed
 

‎.github/workflows/linux.yml

+5-1
Original file line numberDiff line numberDiff line change
@@ -452,11 +452,15 @@ jobs:
452452
rm -rf node_modules/openvino-node/bin
453453
cp -R ${{ env.OV_INSTALL_DIR }}/openvino_js_package node_modules/openvino-node/bin
454454
455+
- name: Check lint
456+
working-directory: ${{ env.SRC_DIR }}/src/js
457+
run: npm run lint
458+
455459
- name: Run npm package tests
456460
working-directory: ${{ env.SRC_DIR }}/src/js
457461
run: npm test
458462

459-
- name: Install genai-node samples dependencies
463+
- name: Install openvino-genai-node samples dependencies
460464
working-directory: ${{ env.SRC_DIR }}/samples/js/text_generation
461465
run: |
462466
npm install --verbose

‎.github/workflows/windows.yml

+5-1
Original file line numberDiff line numberDiff line change
@@ -543,11 +543,15 @@ jobs:
543543
Remove-Item -Recurse -Force node_modules/openvino-node/bin
544544
Copy-Item -Recurse ${{ env.INSTALL_DIR }}/openvino_js_package node_modules/openvino-node/bin
545545
546+
- name: Check lint
547+
working-directory: ${{ env.SRC_DIR }}/src/js
548+
run: npm run lint
549+
546550
- name: Run npm package tests
547551
working-directory: ${{ env.SRC_DIR }}/src/js
548552
run: npm test
549553

550-
- name: Install genai-node samples dependencies
554+
- name: Install openvino-genai-node samples dependencies
551555
working-directory: ${{ env.SRC_DIR }}/samples/js/text_generation
552556
run: |
553557
npm install --verbose

‎samples/js/text_generation/chat_sample.js

+2-2
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
import readline from 'readline';
2-
import { Pipeline } from 'genai-node';
2+
import { LLMPipeline } from 'openvino-genai-node';
33

44
main();
55

@@ -24,7 +24,7 @@ async function main() {
2424
output: process.stdout,
2525
});
2626

27-
const pipe = await Pipeline.LLMPipeline(MODEL_PATH, device);
27+
const pipe = await LLMPipeline(MODEL_PATH, device);
2828
const config = { 'max_new_tokens': 100 };
2929

3030
await pipe.startChat();

‎samples/js/text_generation/package-lock.json

+10-9
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

‎samples/js/text_generation/package.json

+2-2
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,11 @@
11
{
2-
"name": "genai-node-demo",
2+
"name": "openvino-genai-node-demo",
33
"version": "1.0.0",
44
"license": "Apache-2.0",
55
"type": "module",
66
"devDependencies": {
77
"openvino-node": "^2025.0.0",
8-
"genai-node": "file:../../../src/js/"
8+
"openvino-genai-node": "file:../../../src/js/"
99
},
1010
"engines": {
1111
"node": ">=21.0.0"

‎src/js/README.md

+4-4
Original file line numberDiff line numberDiff line change
@@ -27,17 +27,17 @@ Then you can use OpenVINO™ GenAI JavaScript Bindings in one of the following w
2727
#### Option 1 - using npm:
2828

2929
To use this package locally use `npm link` in `src/js/` directory
30-
and `npm link genai-node` in the folder where you want to add this package as a dependency
30+
and `npm link openvino-genai-node` in the folder where you want to add this package as a dependency
3131

3232
#### Option 2 - using package.json:
3333

34-
Add the `genai-node` package manually by specifying the path to the `src/js/` directory in your `package.json`:
34+
Add the `openvino-genai-node` package manually by specifying the path to the `src/js/` directory in your `package.json`:
3535

3636
```
37-
"genai-node": "file:*path-to-current-directory*"
37+
"openvino-genai-node": "file:*path-to-current-directory*"
3838
```
3939

4040
### Verify the installation:
4141
```sh
42-
node -e "const { Pipeline } = require('genai-node'); console.log(Pipeline);"
42+
node -e "const { Pipeline } = require('openvino-genai-node'); console.log(Pipeline);"
4343
```

‎src/js/eslint.config.js

+35
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
1+
import globals from 'globals';
2+
3+
/** @type {import('eslint').Linter.Config[]} */
4+
export default [
5+
{
6+
languageOptions: { globals: globals.node },
7+
rules: {
8+
'semi': ['error'],
9+
'no-var': ['error'],
10+
'max-len': ['error', { 'ignoreUrls': true }],
11+
'eol-last': ['error'],
12+
'indent': ['error', 2],
13+
'camelcase': ['error'],
14+
'semi-spacing': ['error'],
15+
'arrow-spacing': ['error'],
16+
'comma-spacing': ['error'],
17+
'no-multi-spaces': ['error'],
18+
'quotes': ['error', 'single'],
19+
'no-trailing-spaces': ['error'],
20+
'space-before-blocks': ['error'],
21+
'newline-before-return': ['error'],
22+
'comma-dangle': ['error', 'always-multiline'],
23+
'space-before-function-paren': ['error', {
24+
named: 'never',
25+
anonymous: 'never',
26+
asyncArrow: 'always',
27+
}],
28+
'key-spacing': ['error', { beforeColon: false }],
29+
'no-multiple-empty-lines': ['error', { max: 1, maxBOF: 0, maxEOF: 0 }],
30+
'keyword-spacing': ['error', { overrides: { catch: { after: false } } }],
31+
'prefer-destructuring': ['error', { object: true, array: false }],
32+
'@typescript-eslint/no-var-requires': 0,
33+
},
34+
},
35+
];

‎src/js/lib/addon.js

+20
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
import { createRequire } from 'module';
2+
import { platform } from 'node:os';
3+
import { join, dirname, resolve } from 'node:path';
4+
5+
// We need to use delayed import to get an updated Path if required
6+
function getGenAIAddon() {
7+
const require = createRequire(import.meta.url);
8+
const ovPath = require.resolve('openvino-node');
9+
if (platform() == 'win32') {
10+
// Find the openvino binaries that are required for openvino-genai-node
11+
const pathToOpenVino = join(dirname(ovPath), '../bin');
12+
if (!process.env.PATH.includes('openvino-node')) {
13+
process.env.PATH += ';' + resolve(pathToOpenVino);
14+
}
15+
}
16+
17+
return require('../bin/genai_node_addon.node');
18+
}
19+
20+
export default getGenAIAddon();

‎src/js/lib/bindings.cjs

-2
This file was deleted.

‎src/js/lib/module.js

+4-156
Original file line numberDiff line numberDiff line change
@@ -1,164 +1,12 @@
1-
import { createRequire } from 'module'
2-
import path from 'path';
3-
import util from 'node:util';
4-
import { platform } from 'node:os';
5-
import getGenAIAddon from './bindings.cjs';
1+
import { LLMPipeline as LLM } from './pipelines/llmPipeline.js';
62

7-
let ovPath;
8-
const require = createRequire(import.meta.url);
9-
try {
10-
ovPath = require.resolve('openvino-node');
11-
} catch (error) {
12-
if (error.code === 'MODULE_NOT_FOUND') {
13-
console.error(error.message);
14-
}
15-
throw error;
16-
}
17-
18-
if (platform() == "win32") {
19-
// Find the openvino libraries that are required for genai-node
20-
const pathToOpenVino = path.join(path.dirname(ovPath), '../bin');
21-
if (!process.env.PATH.includes('openvino-node')) {
22-
process.env.PATH += ';' + path.resolve(pathToOpenVino);
23-
}
24-
}
25-
26-
const addon = getGenAIAddon();
27-
28-
class LLMPipeline {
29-
modelPath = null;
30-
device = null;
31-
pipeline = null;
32-
isInitialized = false;
33-
isChatStarted = false;
34-
35-
constructor(modelPath, device) {
36-
this.modelPath = modelPath;
37-
this.device = device;
38-
}
39-
40-
async init() {
41-
if (this.isInitialized)
42-
throw new Error('Pipeline is already initialized');
43-
44-
this.pipeline = new addon.LLMPipeline();
45-
46-
const init = util.promisify(this.pipeline.init.bind(this.pipeline));
47-
const result = await init(this.modelPath, this.device);
48-
49-
this.isInitialized = true;
50-
51-
return result;
52-
}
53-
54-
async startChat() {
55-
if (this.isChatStarted)
56-
throw new Error('Chat is already started');
57-
58-
const startChatPromise = util.promisify(
59-
this.pipeline.startChat.bind(this.pipeline)
60-
);
61-
const result = await startChatPromise();
62-
63-
this.isChatStarted = true;
64-
65-
return result;
66-
}
67-
async finishChat() {
68-
if (!this.isChatStarted)
69-
throw new Error('Chat is not started');
70-
71-
const finishChatPromise = util.promisify(
72-
this.pipeline.finishChat.bind(this.pipeline)
73-
);
74-
const result = await finishChatPromise();
75-
76-
this.isChatStarted = false;
77-
78-
return result;
79-
}
80-
81-
static castOptionsToString(options) {
82-
const castedOptions = {};
83-
84-
for (const key in options)
85-
castedOptions[key] = String(options[key]);
86-
87-
return castedOptions;
88-
}
89-
90-
getAsyncGenerator(prompt, generationOptions = {}) {
91-
if (!this.isInitialized)
92-
throw new Error('Pipeline is not initialized');
93-
94-
if (typeof prompt !== 'string')
95-
throw new Error('Prompt must be a string');
96-
if (typeof generationOptions !== 'object')
97-
throw new Error('Options must be an object');
98-
99-
const castedOptions = LLMPipeline.castOptionsToString(generationOptions);
100-
101-
const queue = [];
102-
let resolvePromise;
103-
104-
// Callback function that C++ will call when a chunk is ready
105-
function chunkOutput(isDone, subword) {
106-
if (resolvePromise) {
107-
resolvePromise({ value: subword, done: isDone }); // Fulfill pending request
108-
resolvePromise = null; // Reset promise resolver
109-
} else {
110-
queue.push({ isDone, subword }); // Add data to queue if no pending promise
111-
}
112-
}
113-
114-
this.pipeline.generate(prompt, chunkOutput, castedOptions);
115-
116-
return {
117-
async next() {
118-
// If there is data in the queue, return it
119-
// Otherwise, return a promise that will resolve when data is available
120-
if (queue.length > 0) {
121-
const { isDone, subword } = queue.shift();
122-
123-
return { value: subword, done: isDone };
124-
}
125-
126-
return new Promise((resolve) => (resolvePromise = resolve));
127-
},
128-
[Symbol.asyncIterator]() { return this; }
129-
};
130-
}
131-
132-
async generate(prompt, generationOptions, generationCallback) {
133-
const options = generationOptions || {};
134-
135-
if (generationCallback !== undefined && typeof generationCallback !== 'function')
136-
throw new Error('Generation callback must be a function');
137-
138-
const g = this.getAsyncGenerator(prompt, options);
139-
const result = [];
140-
141-
for await (const chunk of g) {
142-
result.push(chunk);
143-
144-
if (generationCallback) generationCallback(chunk);
145-
}
146-
147-
return result.join('');
148-
}
149-
}
150-
151-
class Pipeline {
3+
class PipelineFactory {
1524
static async LLMPipeline(modelPath, device = 'CPU') {
153-
const pipeline = new LLMPipeline(modelPath, device);
5+
const pipeline = new LLM(modelPath, device);
1546
await pipeline.init();
1557

1568
return pipeline;
1579
}
15810
}
15911

160-
161-
export {
162-
addon,
163-
Pipeline,
164-
};
12+
export const {LLMPipeline} = PipelineFactory;

‎src/js/lib/pipelines/llmPipeline.js

+127
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,127 @@
1+
import util from 'node:util';
2+
import addon from '../addon.js';
3+
4+
export class LLMPipeline {
5+
modelPath = null;
6+
device = null;
7+
pipeline = null;
8+
isInitialized = false;
9+
isChatStarted = false;
10+
11+
constructor(modelPath, device) {
12+
this.modelPath = modelPath;
13+
this.device = device;
14+
}
15+
16+
async init() {
17+
if (this.isInitialized)
18+
throw new Error('LLMPipeline is already initialized');
19+
20+
this.pipeline = new addon.LLMPipeline();
21+
22+
const initPromise = util.promisify(this.pipeline.init.bind(this.pipeline));
23+
const result = await initPromise(this.modelPath, this.device);
24+
25+
this.isInitialized = true;
26+
27+
return result;
28+
}
29+
30+
async startChat() {
31+
if (this.isChatStarted)
32+
throw new Error('Chat is already started');
33+
34+
const startChatPromise = util.promisify(
35+
this.pipeline.startChat.bind(this.pipeline),
36+
);
37+
const result = await startChatPromise();
38+
39+
this.isChatStarted = true;
40+
41+
return result;
42+
}
43+
async finishChat() {
44+
if (!this.isChatStarted)
45+
throw new Error('Chat is not started');
46+
47+
const finishChatPromise = util.promisify(
48+
this.pipeline.finishChat.bind(this.pipeline),
49+
);
50+
const result = await finishChatPromise();
51+
52+
this.isChatStarted = false;
53+
54+
return result;
55+
}
56+
57+
static castOptionsToString(options) {
58+
const castedOptions = {};
59+
60+
for (const key in options)
61+
castedOptions[key] = String(options[key]);
62+
63+
return castedOptions;
64+
}
65+
66+
getAsyncGenerator(prompt, generationOptions = {}) {
67+
if (!this.isInitialized)
68+
throw new Error('Pipeline is not initialized');
69+
70+
if (typeof prompt !== 'string')
71+
throw new Error('Prompt must be a string');
72+
if (typeof generationOptions !== 'object')
73+
throw new Error('Options must be an object');
74+
75+
const castedOptions = LLMPipeline.castOptionsToString(generationOptions);
76+
77+
const queue = [];
78+
let resolvePromise;
79+
80+
// Callback function that C++ will call when a chunk is ready
81+
function chunkOutput(isDone, subword) {
82+
if (resolvePromise) {
83+
// Fulfill pending request
84+
resolvePromise({ value: subword, done: isDone });
85+
resolvePromise = null; // Reset promise resolver
86+
} else {
87+
// Add data to queue if no pending promise
88+
queue.push({ isDone, subword });
89+
}
90+
}
91+
92+
this.pipeline.generate(prompt, chunkOutput, castedOptions);
93+
94+
return {
95+
async next() {
96+
// If there is data in the queue, return it
97+
// Otherwise, return a promise that will resolve when data is available
98+
if (queue.length > 0) {
99+
const { isDone, subword } = queue.shift();
100+
101+
return { value: subword, done: isDone };
102+
}
103+
104+
return new Promise((resolve) => (resolvePromise = resolve));
105+
},
106+
[Symbol.asyncIterator]() { return this; },
107+
};
108+
}
109+
110+
async generate(prompt, generationOptions, generationCallback) {
111+
112+
if (generationCallback !== undefined
113+
&& typeof generationCallback !== 'function')
114+
throw new Error('Generation callback must be a function');
115+
116+
const g = this.getAsyncGenerator(prompt, generationOptions);
117+
const result = [];
118+
119+
for await (const chunk of g) {
120+
result.push(chunk);
121+
122+
if (generationCallback) generationCallback(chunk);
123+
}
124+
125+
return result.join('');
126+
}
127+
}

‎src/js/package-lock.json

+1,042-7
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

‎src/js/package.json

+14-3
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
{
2-
"name": "genai-node",
2+
"name": "openvino-genai-node",
33
"type": "module",
4-
"version": "2024.5.0-preview",
4+
"version": "2025.1.0-preview",
55
"description": "OpenVINO™ GenAI pipelines for using from Node.js environment",
66
"license": "Apache-2.0",
77
"main": "./lib/module.js",
@@ -19,15 +19,26 @@
1919
"GenAI"
2020
],
2121
"scripts": {
22+
"lint": "eslint .",
2223
"test_setup": "node ./tests/setup.js",
23-
"test": "npm run test_setup && node --test ./tests/*.test.js"
24+
"test": "npm run test_setup && node --test ./tests/*.test.js",
25+
"postinstall": "node ./scripts/download-runtime.cjs --ignore-if-exists"
2426
},
2527
"dependencies": {
2628
"openvino-node": "^2025.0.0"
2729
},
2830
"devDependencies": {
2931
"@huggingface/hub": "^0.21.0",
32+
"eslint": "^9.20.1",
3033
"global-agent": "^3.0.0",
34+
"globals": "^15.15.0",
3135
"node-fetch": "^3.3.2"
36+
},
37+
"binary": {
38+
"version": "2025.1.0.0",
39+
"module_path": "./bin/",
40+
"remote_path": "./repositories/openvino_genai/nodejs_bindings/{version}/{platform}/",
41+
"package_name": "openvino_genai_nodejs_bindings_{platform}_{version}_{arch}.tar.gz",
42+
"host": "https://storage.openvinotoolkit.org"
3243
}
3344
}

‎src/js/scripts/download-runtime.cjs

+23
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
const { join } = require('node:path');
2+
const BinaryManager = require('openvino-node/scripts/lib/binary-manager');
3+
4+
const packageJson = require('../package.json');
5+
6+
if (require.main === module) main();
7+
8+
async function main() {
9+
if (!BinaryManager.isCompatible()) process.exit(1);
10+
11+
const force = process.argv.includes('-f');
12+
const ignoreIfExists = process.argv.includes('--ignore-if-exists');
13+
14+
const { env } = process;
15+
const proxy = env.http_proxy || env.HTTP_PROXY || env.npm_config_proxy;
16+
17+
await BinaryManager.prepareBinary(
18+
join(__dirname, '..'),
19+
packageJson.binary.version || packageJson.version,
20+
packageJson.binary,
21+
{ force, ignoreIfExists, proxy },
22+
);
23+
}

‎src/js/src/addon.cpp

+1-1
Original file line numberDiff line numberDiff line change
@@ -27,4 +27,4 @@ Napi::Object init_module(Napi::Env env, Napi::Object exports) {
2727
}
2828

2929
// Register the addon with Node.js
30-
NODE_API_MODULE(genai-node, init_module)
30+
NODE_API_MODULE(openvino-genai-node, init_module)

‎src/js/tests/bindings.test.js

+2-1
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { addon } from '../lib/module.js';
1+
import addon from '../lib/addon.js';
22

33
import assert from 'node:assert';
44
import { describe, it, before, after } from 'node:test';
@@ -53,6 +53,7 @@ describe('bindings', () => {
5353

5454
assert.ok(output.length > 0);
5555
done();
56+
// eslint-disable-next-line camelcase
5657
}, { temperature: '0', max_new_tokens: '4' });
5758
});
5859
});

‎src/js/tests/module.test.js

+36-28
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { Pipeline } from '../lib/module.js';
1+
import { LLMPipeline } from '../lib/module.js';
22

33
import assert from 'node:assert/strict';
44
import { describe, it, before, after } from 'node:test';
@@ -11,7 +11,7 @@ describe('module', async () => {
1111
let pipeline = null;
1212

1313
await before(async () => {
14-
pipeline = await Pipeline.LLMPipeline(MODEL_PATH, 'CPU');
14+
pipeline = await LLMPipeline(MODEL_PATH, 'CPU');
1515

1616
await pipeline.startChat();
1717
});
@@ -23,6 +23,7 @@ describe('module', async () => {
2323
await it('should generate non empty string', async () => {
2424
const result = await pipeline.generate(
2525
'Type something in English',
26+
// eslint-disable-next-line camelcase
2627
{ temperature: '0', max_new_tokens: '4' },
2728
() => {},
2829
);
@@ -33,19 +34,19 @@ describe('module', async () => {
3334

3435
describe('corner cases', async () => {
3536
it('should throw an error if pipeline is already initialized', async () => {
36-
const pipeline = await Pipeline.LLMPipeline(MODEL_PATH, 'CPU');
37+
const pipeline = await LLMPipeline(MODEL_PATH, 'CPU');
3738

3839
await assert.rejects(
3940
async () => await pipeline.init(),
4041
{
4142
name: 'Error',
42-
message: 'Pipeline is already initialized',
43+
message: 'LLMPipeline is already initialized',
4344
},
4445
);
4546
});
4647

4748
it('should throw an error if chat is already started', async () => {
48-
const pipeline = await Pipeline.LLMPipeline(MODEL_PATH, 'CPU');
49+
const pipeline = await LLMPipeline(MODEL_PATH, 'CPU');
4950

5051
await pipeline.startChat();
5152

@@ -59,7 +60,7 @@ describe('corner cases', async () => {
5960
});
6061

6162
it('should throw an error if chat is not started', async () => {
62-
const pipeline = await Pipeline.LLMPipeline(MODEL_PATH, 'CPU');
63+
const pipeline = await LLMPipeline(MODEL_PATH, 'CPU');
6364

6465
await assert.rejects(
6566
() => pipeline.finishChat(),
@@ -75,7 +76,7 @@ describe('generation parameters validation', () => {
7576
let pipeline = null;
7677

7778
before(async () => {
78-
pipeline = await Pipeline.LLMPipeline(MODEL_PATH, 'CPU');
79+
pipeline = await LLMPipeline(MODEL_PATH, 'CPU');
7980

8081
await pipeline.startChat();
8182
});
@@ -94,47 +95,54 @@ describe('generation parameters validation', () => {
9495
);
9596
});
9697

97-
it('should throw an error if generationCallback is not a function', async () => {
98-
const pipeline = await Pipeline.LLMPipeline(MODEL_PATH, 'CPU');
98+
it(
99+
'should throw an error if generationCallback is not a function',
100+
async () => {
101+
const pipeline = await LLMPipeline(MODEL_PATH, 'CPU');
99102

100-
await pipeline.startChat();
103+
await pipeline.startChat();
101104

102-
await assert.rejects(
103-
async () => await pipeline.generate('prompt', {}, false),
104-
{
105-
name: 'Error',
106-
message: 'Generation callback must be a function',
107-
},
108-
);
109-
});
105+
await assert.rejects(
106+
async () => await pipeline.generate('prompt', {}, false),
107+
{
108+
name: 'Error',
109+
message: 'Generation callback must be a function',
110+
},
111+
);
112+
});
110113

111-
it('should throw an error if options specified but not an object', async () => {
112-
await assert.rejects(
113-
async () => await pipeline.generate('prompt', 'options', () => {}),
114-
{
115-
name: 'Error',
116-
message: 'Options must be an object',
117-
},
118-
);
119-
});
114+
it(
115+
'should throw an error if options specified but not an object',
116+
async () => {
117+
await assert.rejects(
118+
async () => await pipeline.generate('prompt', 'options', () => {}),
119+
{
120+
name: 'Error',
121+
message: 'Options must be an object',
122+
},
123+
);
124+
});
120125

121126
it('should perform generation with default options', async () => {
122127
try {
128+
// eslint-disable-next-line camelcase
123129
await pipeline.generate('prompt', { max_new_tokens: 1 });
124-
} catch (error) {
130+
} catch(error) {
125131
assert.fail(error);
126132
}
127133

128134
assert.ok(true);
129135
});
130136

131137
it('should return a string as generation result', async () => {
138+
// eslint-disable-next-line camelcase
132139
const reply = await pipeline.generate('prompt', { max_new_tokens: 1 });
133140

134141
assert.strictEqual(typeof reply, 'string');
135142
});
136143

137144
it('should call generationCallback with string chunk', async () => {
145+
// eslint-disable-next-line camelcase
138146
await pipeline.generate('prompt', { max_new_tokens: 1 }, (chunk) => {
139147
assert.strictEqual(typeof chunk, 'string');
140148
});

0 commit comments

Comments
 (0)
Please sign in to comment.