mirror of https://github.com/grpc/grpc-node.git
Merge branch 'master' into multi-filenames
commit 8e2e2b25c2
@ -20,3 +20,6 @@ package-lock.json
# Test generated files
coverage

# Node's bash completion file
.node_bash_completion

@ -22,7 +22,7 @@ Load Balancing | :heavy_check_mark: | :x:
Other Properties | `grpc` | `@grpc/grpc-js`
-----------------|--------|----------------
Pure JavaScript Code | :x: | :heavy_check_mark:
Supported Node Versions | >= 4 | ^8.11.2 or >=9.4
Supported Node Versions | >= 4 | ^8.13.0 or >=10.10.0
Supported Electron Versions | All | >= 3
Supported Platforms | Linux, Windows, MacOS | All
Supported Architectures | x86, x86-64, ARM7+ | All

@ -36,4 +36,4 @@ In addition, all channel arguments defined in [this header file](https://github.
- `grpc.keepalive_time_ms`
- `grpc.keepalive_timeout_ms`
- `channelOverride`
- `channelFactoryOverride`
- `channelFactoryOverride`
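
A rough sketch of how the options listed above might be passed when constructing a client. It assumes the `ClientOptions` shape from `packages/grpc-js/src/client.ts` later in this diff and that the package exports `Client` and `credentials` at its root, as the native `grpc` package does; the address and keepalive values are illustrative.

import * as grpc from '@grpc/grpc-js';

// `channelOverride` / `channelFactoryOverride` would go in the same options
// object as the channel arguments below.
const client = new grpc.Client(
    'localhost:50051', grpc.credentials.createInsecure(),
    {'grpc.keepalive_time_ms': 120000, 'grpc.keepalive_timeout_ms': 20000});
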
@ -19,7 +19,7 @@ Directory: [`packages/grpc-js`](https://github.com/grpc/grpc-node/tree/master/pa
npm package: [@grpc/grpc-js](https://www.npmjs.com/package/@grpc/grpc-js)

**This library is currently incomplete and experimental, built on the [experimental http2 Node module](https://nodejs.org/api/http2.html).**
**This library is currently incomplete and experimental. It is built on the [http2 Node module](https://nodejs.org/api/http2.html).**

This library implements the core functionality of gRPC purely in JavaScript, without a C++ addon. It works on the latest version of Node.js on all platforms that Node.js runs on.
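
A minimal, hedged sketch of what the pure-JavaScript implementation looks like from the caller's side. The `helloworld.proto` file and the `Greeter` service are hypothetical; only the package names, `loadPackageDefinition`, and the credentials surface come from this repository.

import * as grpc from '@grpc/grpc-js';
import * as protoLoader from '@grpc/proto-loader';

// Load a service definition at runtime and call a unary method on it.
const packageDefinition = protoLoader.loadSync('helloworld.proto');
const proto: any = grpc.loadPackageDefinition(packageDefinition);
const client = new proto.helloworld.Greeter(
    'localhost:50051', grpc.credentials.createInsecure());
client.sayHello({name: 'world'}, (err: Error|null, response: any) => {
  if (err) throw err;
  console.log(response.message);
});
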
gulpfile.ts
@ -15,96 +15,58 @@
 *
 */

import * as _gulp from 'gulp';
import * as help from 'gulp-help';

// gulp-help monkeypatches tasks to have an additional description parameter
const gulp = help(_gulp);

const runSequence = require('run-sequence');

/**
 * Require a module at the given path with a patched gulp object that prepends
 * the given prefix to each task name.
 * @param path The path to require.
 * @param prefix The string to use as a prefix. This will be prepended to a task
 * name with a '.' separator.
 */
function loadGulpTasksWithPrefix(path: string, prefix: string) {
  const gulpTask = gulp.task;
  gulp.task = ((taskName: string, ...args: any[]) => {
    // Don't create a task for ${prefix}.help
    if (taskName === 'help') {
      return;
    }
    // The only array passed to gulp.task must be a list of dependent tasks.
    const newArgs = args.map(arg => Array.isArray(arg) ?
        arg.map(dep => `${prefix}.${dep}`) : arg);
    gulpTask(`${prefix}.${taskName}`, ...newArgs);
  });
  const result = require(path);
  gulp.task = gulpTask;
  return result;
}

[
  ['./packages/grpc-health-check/gulpfile', 'health-check'],
  ['./packages/grpc-js/gulpfile', 'js.core'],
  ['./packages/grpc-native-core/gulpfile', 'native.core'],
  ['./packages/proto-loader/gulpfile', 'protobuf'],
  ['./test/gulpfile', 'internal.test'],
].forEach((args) => loadGulpTasksWithPrefix(args[0], args[1]));
import * as gulp from 'gulp';
import * as healthCheck from './packages/grpc-health-check/gulpfile';
import * as jsCore from './packages/grpc-js/gulpfile';
import * as nativeCore from './packages/grpc-native-core/gulpfile';
import * as protobuf from './packages/proto-loader/gulpfile';
import * as internalTest from './test/gulpfile';

const root = __dirname;

gulp.task('install.all', 'Install dependencies for all subdirectory packages',
          ['js.core.install', 'native.core.install', 'health-check.install', 'protobuf.install', 'internal.test.install']);
const installAll = gulp.parallel(jsCore.install, nativeCore.install, healthCheck.install, protobuf.install, internalTest.install);

gulp.task('install.all.windows', 'Install dependencies for all subdirectory packages for MS Windows',
          ['js.core.install', 'native.core.install.windows', 'health-check.install', 'protobuf.install', 'internal.test.install']);
const installAllWindows = gulp.parallel(jsCore.install, nativeCore.installWindows, healthCheck.install, protobuf.install, internalTest.install);

gulp.task('lint', 'Emit linting errors in source and test files',
          ['js.core.lint', 'native.core.lint']);
const lint = gulp.parallel(jsCore.lint, nativeCore.lint);

gulp.task('build', 'Build packages', ['js.core.compile', 'native.core.build', 'protobuf.compile']);
const build = gulp.parallel(jsCore.compile, nativeCore.build, protobuf.compile);

gulp.task('link.surface', 'Link to surface packages',
          ['health-check.link.add']);
const link = gulp.series(healthCheck.linkAdd);

gulp.task('link', 'Link together packages', (callback) => {
  /**
   * We use workarounds for linking in some modules. See npm/npm#18835
   */
  runSequence('link.surface', callback);
});
const setup = gulp.series(installAll, link);

gulp.task('setup', 'One-time setup for a clean repository', (callback) => {
  runSequence('install.all', 'link', callback);
});
gulp.task('setup.windows', 'One-time setup for a clean repository for MS Windows', (callback) => {
  runSequence('install.all.windows', 'link', callback);
});
const setupWindows = gulp.series(installAllWindows, link);

gulp.task('clean', 'Delete generated files', ['js.core.clean', 'native.core.clean', 'protobuf.clean']);
const setupPureJSInterop = gulp.parallel(jsCore.install, protobuf.install, internalTest.install);

gulp.task('clean.all', 'Delete all files created by tasks',
          ['js.core.clean.all', 'native.core.clean.all', 'health-check.clean.all',
           'internal.test.clean.all', 'protobuf.clean.all']);
const clean = gulp.parallel(jsCore.clean, nativeCore.clean, protobuf.clean);

gulp.task('native.test.only', 'Run tests of native code without rebuilding anything',
          ['native.core.test', 'health-check.test']);
const cleanAll = gulp.parallel(jsCore.cleanAll, nativeCore.cleanAll, healthCheck.cleanAll, internalTest.cleanAll, protobuf.cleanAll);

gulp.task('native.test', 'Run tests of native code', (callback) => {
  runSequence('build', 'native.test.only', callback);
});
const nativeTestOnly = gulp.parallel(nativeCore.test, healthCheck.test);

gulp.task('test.only', 'Run tests without rebuilding anything',
          ['js.core.test', 'native.test.only', 'protobuf.test']);
const nativeTest = gulp.series(build, nativeTestOnly);

gulp.task('test', 'Run all tests', (callback) => {
  runSequence('build', 'test.only', 'internal.test.test', callback);
});
const testOnly = gulp.parallel(jsCore.test, nativeTestOnly, protobuf.test);

gulp.task('doc.gen', 'Generate documentation', ['native.core.doc.gen']);
const test = gulp.series(build, testOnly, internalTest.test);

gulp.task('default', ['help']);
const docGen = gulp.series(nativeCore.docGen);

export {
  installAll,
  installAllWindows,
  lint,
  build,
  link,
  setup,
  setupWindows,
  setupPureJSInterop,
  clean,
  cleanAll,
  nativeTestOnly,
  nativeTest,
  test,
  docGen
};

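For context on the gulpfile rewrite above: gulp 4 dropped the dependency-array form of `gulp.task()` that gulp-help and run-sequence built on, so tasks are now plain functions composed with `gulp.series` (run in order) and `gulp.parallel` (run concurrently) and then exported. A minimal sketch of that pattern, with illustrative task bodies:

import * as gulp from 'gulp';

const compile = () => Promise.resolve();   // placeholder task body
const runTests = () => Promise.resolve();  // placeholder task body

// Rough equivalent of the old runSequence('compile', 'test.only', cb) style.
const test = gulp.series(compile, runTests);

export {compile, test};
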
@ -25,6 +25,30 @@ const readDir = util.promisify(fs.readdir);
const rootDir = __dirname;

// Fake test suite log with a failure if log parsing failed
const parseFailureLog = [
  {
    $: {
      name: 'Unknown Test Suite',
      tests: '1',
      failures: '1',
    },
    testcase: [
      {
        $: {
          classname: 'Test Log Parsing',
          name: 'Test Log Parsing',
          failure: {
            $: {
              message: "Log parsing failed"
            }
          }
        }
      }
    ]
  }
];

readDir(rootDir + '/reports')
  .then((dirNames) =>
    Promise.all(dirNames.map((dirName) =>

@ -41,7 +65,12 @@ readDir(rootDir + '/reports')
  )
  .then((objects) => {
    let merged = objects[0];
    merged.testsuites.testsuite = Array.prototype.concat.apply([], objects.map((obj) => obj.testsuites.testsuite));
    merged.testsuites.testsuite = Array.prototype.concat.apply([], objects.map((obj) => {
      if (obj) {
        return obj.testsuites.testsuite;
      } else {
        return parseFailureLog;
      }}));
    let builder = new xml2js.Builder();
    let xml = builder.buildObject(merged);
    let resultName = path.resolve(rootDir, 'reports', dirName, 'sponge_log.xml');

@ -11,7 +11,6 @@
  "devDependencies": {
    "@types/execa": "^0.8.0",
    "@types/gulp": "^4.0.5",
    "@types/gulp-help": "0.0.34",
    "@types/gulp-mocha": "0.0.31",
    "@types/ncp": "^2.0.1",
    "@types/node": "^8.0.32",
@ -20,8 +19,7 @@
    "coveralls": "^3.0.1",
    "del": "^3.0.0",
    "execa": "^0.8.0",
    "gulp": "^3.9.1",
    "gulp-help": "^1.6.1",
    "gulp": "^4.0.1",
    "gulp-jsdoc3": "^1.0.1",
    "gulp-jshint": "^2.0.4",
    "gulp-mocha": "^4.3.1",
@ -42,7 +40,7 @@
    "semver": "^5.5.0",
    "symlink": "^2.1.0",
    "through2": "^2.0.3",
    "ts-node": "^3.3.0",
    "ts-node": "^8.1.0",
    "tslint": "^5.5.0",
    "typescript": "~3.3.3333",
    "xml2js": "^0.4.19"

@ -1,50 +0,0 @@
/*
 * Copyright 2017 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

const _gulp = require('gulp');
const help = require('gulp-help');
const mocha = require('gulp-mocha');
const execa = require('execa');
const path = require('path');
const del = require('del');
const linkSync = require('../../util').linkSync;

const gulp = help(_gulp);

const healthCheckDir = __dirname;
const baseDir = path.resolve(healthCheckDir, '..', '..');
const testDir = path.resolve(healthCheckDir, 'test');

gulp.task('clean.links', 'Delete npm links', () => {
  return del(path.resolve(healthCheckDir, 'node_modules/grpc'));
});

gulp.task('clean.all', 'Delete all code created by tasks',
          ['clean.links']);

gulp.task('install', 'Install health check dependencies', ['clean.links'], () => {
  return execa('npm', ['install', '--unsafe-perm'], {cwd: healthCheckDir, stdio: 'inherit'});
});

gulp.task('link.add', 'Link local copy of grpc', () => {
  linkSync(healthCheckDir, './node_modules/grpc', '../grpc-native-core');
});

gulp.task('test', 'Run health check tests',
          () => {
  return gulp.src(`${testDir}/*.js`).pipe(mocha({reporter: 'mocha-jenkins-reporter'}));
});

@ -0,0 +1,50 @@
/*
 * Copyright 2019 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

import * as gulp from 'gulp';
import * as mocha from 'gulp-mocha';
import * as execa from 'execa';
import * as path from 'path';
import * as del from 'del';
import {linkSync} from '../../util';

const healthCheckDir = __dirname;
const baseDir = path.resolve(healthCheckDir, '..', '..');
const testDir = path.resolve(healthCheckDir, 'test');

const cleanLinks = () => del(path.resolve(healthCheckDir, 'node_modules/grpc'));

const cleanAll = gulp.parallel(cleanLinks);

const runInstall = () => execa('npm', ['install', '--unsafe-perm'], {cwd: healthCheckDir, stdio: 'inherit'});

const install = gulp.series(cleanLinks, runInstall);

const linkAdd = (callback) => {
  linkSync(healthCheckDir, './node_modules/grpc', '../grpc-native-core');
  callback();
}

const test = () => gulp.src(`${testDir}/*.js`).pipe(mocha({reporter: 'mocha-jenkins-reporter'}));

export {
  cleanLinks,
  cleanAll,
  install,
  linkAdd,
  test
}

@ -15,8 +15,7 @@
|
|||
*
|
||||
*/
|
||||
|
||||
import * as _gulp from 'gulp';
|
||||
import * as help from 'gulp-help';
|
||||
import * as gulp from 'gulp';
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as mocha from 'gulp-mocha';
|
||||
|
@ -26,9 +25,6 @@ import * as pify from 'pify';
|
|||
import * as semver from 'semver';
|
||||
import { ncp } from 'ncp';
|
||||
|
||||
// gulp-help monkeypatches tasks to have an additional description parameter
|
||||
const gulp = help(_gulp);
|
||||
|
||||
const ncpP = pify(ncp);
|
||||
|
||||
Error.stackTraceLimit = Infinity;
|
||||
|
@ -44,35 +40,29 @@ const execNpmVerb = (verb: string, ...args: string[]) =>
|
|||
execa('npm', [verb, ...args], {cwd: jsCoreDir, stdio: 'inherit'});
|
||||
const execNpmCommand = execNpmVerb.bind(null, 'run');
|
||||
|
||||
gulp.task('install', 'Install native core dependencies', () =>
|
||||
execNpmVerb('install', '--unsafe-perm'));
|
||||
const install = () => execNpmVerb('install', '--unsafe-perm');
|
||||
|
||||
/**
|
||||
* Runs tslint on files in src/, with linting rules defined in tslint.json.
|
||||
*/
|
||||
gulp.task('lint', 'Emits linting errors found in src/ and test/.', () =>
|
||||
execNpmCommand('check'));
|
||||
const lint = () => execNpmCommand('check');
|
||||
|
||||
gulp.task('clean', 'Deletes transpiled code.', ['install'],
|
||||
() => execNpmCommand('clean'));
|
||||
const cleanFiles = () => execNpmCommand('clean');
|
||||
|
||||
gulp.task('clean.all', 'Deletes all files added by targets', ['clean']);
|
||||
const clean = gulp.series(install, cleanFiles);
|
||||
|
||||
const cleanAll = gulp.parallel(clean);
|
||||
|
||||
/**
|
||||
* Transpiles TypeScript files in src/ to JavaScript according to the settings
|
||||
* found in tsconfig.json.
|
||||
*/
|
||||
gulp.task('compile', 'Transpiles src/.', () => execNpmCommand('compile'));
|
||||
const compile = () => execNpmCommand('compile');
|
||||
|
||||
gulp.task('copy-test-fixtures', 'Copy test fixtures.', () => {
|
||||
return ncpP(`${jsCoreDir}/test/fixtures`, `${outDir}/test/fixtures`);
|
||||
});
|
||||
const copyTestFixtures = () => ncpP(`${jsCoreDir}/test/fixtures`, `${outDir}/test/fixtures`);
|
||||
|
||||
/**
|
||||
* Transpiles src/ and test/, and then runs all tests.
|
||||
*/
|
||||
gulp.task('test', 'Runs all tests.', ['lint', 'copy-test-fixtures'], () => {
|
||||
if (semver.satisfies(process.version, '^8.11.2 || >=9.4')) {
|
||||
const runTests = () => {
|
||||
if (semver.satisfies(process.version, '^8.13.0 || >=10.10.0')) {
|
||||
return gulp.src(`${outDir}/test/**/*.js`)
|
||||
.pipe(mocha({reporter: 'mocha-jenkins-reporter',
|
||||
require: ['ts-node/register']}));
|
||||
|
@ -80,4 +70,15 @@ gulp.task('test', 'Runs all tests.', ['lint', 'copy-test-fixtures'], () => {
|
|||
console.log(`Skipping grpc-js tests for Node ${process.version}`);
|
||||
return Promise.resolve(null);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
const test = gulp.series(install, copyTestFixtures, runTests);
|
||||
|
||||
export {
|
||||
install,
|
||||
lint,
|
||||
clean,
|
||||
cleanAll,
|
||||
compile,
|
||||
test
|
||||
}
|
|
@ -1,12 +1,12 @@
{
  "name": "@grpc/grpc-js",
  "version": "0.3.6",
  "version": "0.4.0",
  "description": "gRPC Library for Node - pure JS implementation",
  "homepage": "https://grpc.io/",
  "repository": "https://github.com/grpc/grpc-node/tree/master/packages/grpc-js",
  "main": "build/src/index.js",
  "engines": {
    "node": "^8.11.2 || >=9.4"
    "node": "^8.13.0 || >=10.10.0"
  },
  "keywords": [],
  "author": {

@ -15,11 +15,12 @@
  "types": "build/src/index.d.ts",
  "license": "Apache-2.0",
  "devDependencies": {
    "@grpc/proto-loader": "^0.4.0",
    "@types/lodash": "^4.14.108",
    "@types/mocha": "^2.2.43",
    "@types/node": "^10.5.4",
    "@types/mocha": "^5.2.6",
    "@types/node": "^11.13.2",
    "clang-format": "^1.0.55",
    "gts": "^0.5.1",
    "gts": "^0.9.0",
    "lodash": "^4.17.4",
    "typescript": "~3.3.3333"
  },

@ -42,7 +43,7 @@
    "posttest": "npm run check"
  },
  "dependencies": {
    "semver": "^5.5.0"
    "semver": "^6.0.0"
  },
  "files": [
    "build/src/*.{js,d.ts}",

@ -26,6 +26,7 @@ import {Filter} from './filter';
|
|||
import {FilterStackFactory} from './filter-stack';
|
||||
import {Metadata} from './metadata';
|
||||
import {ObjectDuplex, WriteCallback} from './object-stream';
|
||||
import {StreamDecoder} from './stream-decoder';
|
||||
|
||||
const {HTTP2_HEADER_STATUS, HTTP2_HEADER_CONTENT_TYPE, NGHTTP2_CANCEL} =
|
||||
http2.constants;
|
||||
|
@ -77,12 +78,6 @@ export type Call = {
|
|||
EmitterAugmentation1<'status', StatusObject>&
|
||||
ObjectDuplex<WriteObject, Buffer>;
|
||||
|
||||
enum ReadState {
|
||||
NO_DATA,
|
||||
READING_SIZE,
|
||||
READING_MESSAGE
|
||||
}
|
||||
|
||||
export class Http2CallStream extends Duplex implements Call {
|
||||
credentials: CallCredentials = CallCredentials.createEmpty();
|
||||
filterStack: Filter;
|
||||
|
@ -92,13 +87,7 @@ export class Http2CallStream extends Duplex implements Call {
|
|||
private pendingWriteCallback: WriteCallback|null = null;
|
||||
private pendingFinalCallback: Function|null = null;
|
||||
|
||||
private readState: ReadState = ReadState.NO_DATA;
|
||||
private readCompressFlag: Buffer = Buffer.alloc(1);
|
||||
private readPartialSize: Buffer = Buffer.alloc(4);
|
||||
private readSizeRemaining = 4;
|
||||
private readMessageSize = 0;
|
||||
private readPartialMessage: Buffer[] = [];
|
||||
private readMessageRemaining = 0;
|
||||
private decoder = new StreamDecoder();
|
||||
|
||||
private isReadFilterPending = false;
|
||||
private canPush = false;
|
||||
|
@ -292,62 +281,10 @@ export class Http2CallStream extends Duplex implements Call {
|
|||
});
|
||||
stream.on('trailers', this.handleTrailers.bind(this));
|
||||
stream.on('data', (data: Buffer) => {
|
||||
let readHead = 0;
|
||||
let toRead: number;
|
||||
while (readHead < data.length) {
|
||||
switch (this.readState) {
|
||||
case ReadState.NO_DATA:
|
||||
this.readCompressFlag = data.slice(readHead, readHead + 1);
|
||||
readHead += 1;
|
||||
this.readState = ReadState.READING_SIZE;
|
||||
this.readPartialSize.fill(0);
|
||||
this.readSizeRemaining = 4;
|
||||
this.readMessageSize = 0;
|
||||
this.readMessageRemaining = 0;
|
||||
this.readPartialMessage = [];
|
||||
break;
|
||||
case ReadState.READING_SIZE:
|
||||
toRead = Math.min(data.length - readHead, this.readSizeRemaining);
|
||||
data.copy(
|
||||
this.readPartialSize, 4 - this.readSizeRemaining, readHead,
|
||||
readHead + toRead);
|
||||
this.readSizeRemaining -= toRead;
|
||||
readHead += toRead;
|
||||
// readSizeRemaining >=0 here
|
||||
if (this.readSizeRemaining === 0) {
|
||||
this.readMessageSize = this.readPartialSize.readUInt32BE(0);
|
||||
this.readMessageRemaining = this.readMessageSize;
|
||||
if (this.readMessageRemaining > 0) {
|
||||
this.readState = ReadState.READING_MESSAGE;
|
||||
} else {
|
||||
this.tryPush(Buffer.concat(
|
||||
[this.readCompressFlag, this.readPartialSize]));
|
||||
this.readState = ReadState.NO_DATA;
|
||||
}
|
||||
}
|
||||
break;
|
||||
case ReadState.READING_MESSAGE:
|
||||
toRead =
|
||||
Math.min(data.length - readHead, this.readMessageRemaining);
|
||||
this.readPartialMessage.push(
|
||||
data.slice(readHead, readHead + toRead));
|
||||
this.readMessageRemaining -= toRead;
|
||||
readHead += toRead;
|
||||
// readMessageRemaining >=0 here
|
||||
if (this.readMessageRemaining === 0) {
|
||||
// At this point, we have read a full message
|
||||
const framedMessageBuffers = [
|
||||
this.readCompressFlag, this.readPartialSize
|
||||
].concat(this.readPartialMessage);
|
||||
const framedMessage = Buffer.concat(
|
||||
framedMessageBuffers, this.readMessageSize + 5);
|
||||
this.tryPush(framedMessage);
|
||||
this.readState = ReadState.NO_DATA;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
throw new Error('This should never happen');
|
||||
}
|
||||
const message = this.decoder.write(data);
|
||||
|
||||
if (message !== null) {
|
||||
this.tryPush(message);
|
||||
}
|
||||
});
|
||||
stream.on('end', () => {
|
||||
|
|
|
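
Both the hand-rolled read state machine removed above and the `StreamDecoder` that replaces it deal with the same gRPC wire framing: each message on the HTTP/2 data stream is a 1-byte compression flag, a 4-byte big-endian length, then the message bytes. A sketch of that framing, mirroring `serializeMessage()` in `server-call.ts` further down; the decoder side simply buffers incoming chunks until a full 5-byte header plus payload has arrived.

// Frame a serialized message: [compress flag (1 byte)][length (4 bytes, BE)][payload].
function frameMessage(payload: Buffer): Buffer {
  const output = Buffer.allocUnsafe(payload.byteLength + 5);
  output.writeUInt8(0, 0);                      // 0 = uncompressed
  output.writeUInt32BE(payload.byteLength, 1);  // big-endian message length
  payload.copy(output, 5);
  return output;
}
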
@ -24,9 +24,7 @@ import {ChannelOptions} from './channel-options';
|
|||
import {Status} from './constants';
|
||||
import {Metadata} from './metadata';
|
||||
|
||||
// This symbol must be exported (for now).
|
||||
// See: https://github.com/Microsoft/TypeScript/issues/20080
|
||||
export const kChannel = Symbol();
|
||||
const CHANNEL_SYMBOL = Symbol();
|
||||
|
||||
export interface UnaryCallback<ResponseType> {
|
||||
(err: ServiceError|null, value?: ResponseType): void;
|
||||
|
@ -52,26 +50,26 @@ export type ClientOptions = Partial<ChannelOptions>&{
|
|||
* clients.
|
||||
*/
|
||||
export class Client {
|
||||
private readonly[kChannel]: Channel;
|
||||
private readonly[CHANNEL_SYMBOL]: Channel;
|
||||
constructor(
|
||||
address: string, credentials: ChannelCredentials,
|
||||
options: ClientOptions = {}) {
|
||||
if (options.channelOverride) {
|
||||
this[kChannel] = options.channelOverride;
|
||||
this[CHANNEL_SYMBOL] = options.channelOverride;
|
||||
} else if (options.channelFactoryOverride) {
|
||||
this[kChannel] =
|
||||
this[CHANNEL_SYMBOL] =
|
||||
options.channelFactoryOverride(address, credentials, options);
|
||||
} else {
|
||||
this[kChannel] = new Http2Channel(address, credentials, options);
|
||||
this[CHANNEL_SYMBOL] = new Http2Channel(address, credentials, options);
|
||||
}
|
||||
}
|
||||
|
||||
close(): void {
|
||||
this[kChannel].close();
|
||||
this[CHANNEL_SYMBOL].close();
|
||||
}
|
||||
|
||||
getChannel(): Channel {
|
||||
return this[kChannel];
|
||||
return this[CHANNEL_SYMBOL];
|
||||
}
|
||||
|
||||
waitForReady(deadline: Deadline, callback: (error?: Error) => void): void {
|
||||
|
@ -82,7 +80,7 @@ export class Client {
|
|||
}
|
||||
let newState;
|
||||
try {
|
||||
newState = this[kChannel].getConnectivityState(true);
|
||||
newState = this[CHANNEL_SYMBOL].getConnectivityState(true);
|
||||
} catch (e) {
|
||||
callback(new Error('The channel has been closed'));
|
||||
return;
|
||||
|
@ -91,7 +89,8 @@ export class Client {
|
|||
callback();
|
||||
} else {
|
||||
try {
|
||||
this[kChannel].watchConnectivityState(newState, deadline, checkState);
|
||||
this[CHANNEL_SYMBOL].watchConnectivityState(
|
||||
newState, deadline, checkState);
|
||||
} catch (e) {
|
||||
callback(new Error('The channel has been closed'));
|
||||
}
|
||||
|
@ -188,7 +187,7 @@ export class Client {
|
|||
({metadata, options, callback} =
|
||||
this.checkOptionalUnaryResponseArguments<ResponseType>(
|
||||
metadata, options, callback));
|
||||
const call: Call = this[kChannel].createCall(
|
||||
const call: Call = this[CHANNEL_SYMBOL].createCall(
|
||||
method, options.deadline, options.host, null, options.propagate_flags);
|
||||
if (options.credentials) {
|
||||
call.setCredentials(options.credentials);
|
||||
|
@ -229,7 +228,7 @@ export class Client {
|
|||
({metadata, options, callback} =
|
||||
this.checkOptionalUnaryResponseArguments<ResponseType>(
|
||||
metadata, options, callback));
|
||||
const call: Call = this[kChannel].createCall(
|
||||
const call: Call = this[CHANNEL_SYMBOL].createCall(
|
||||
method, options.deadline, options.host, null, options.propagate_flags);
|
||||
if (options.credentials) {
|
||||
call.setCredentials(options.credentials);
|
||||
|
@ -277,7 +276,7 @@ export class Client {
|
|||
metadata?: Metadata|CallOptions,
|
||||
options?: CallOptions): ClientReadableStream<ResponseType> {
|
||||
({metadata, options} = this.checkMetadataAndOptions(metadata, options));
|
||||
const call: Call = this[kChannel].createCall(
|
||||
const call: Call = this[CHANNEL_SYMBOL].createCall(
|
||||
method, options.deadline, options.host, null, options.propagate_flags);
|
||||
if (options.credentials) {
|
||||
call.setCredentials(options.credentials);
|
||||
|
@ -304,7 +303,7 @@ export class Client {
|
|||
metadata?: Metadata|CallOptions,
|
||||
options?: CallOptions): ClientDuplexStream<RequestType, ResponseType> {
|
||||
({metadata, options} = this.checkMetadataAndOptions(metadata, options));
|
||||
const call: Call = this[kChannel].createCall(
|
||||
const call: Call = this[CHANNEL_SYMBOL].createCall(
|
||||
method, options.deadline, options.host, null, options.propagate_flags);
|
||||
if (options.credentials) {
|
||||
call.setCredentials(options.credentials);
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
|
||||
import * as semver from 'semver';
|
||||
|
||||
import {ClientDuplexStream, ClientReadableStream, ClientUnaryCall, ClientWritableStream} from './call';
|
||||
import {ClientDuplexStream, ClientReadableStream, ClientUnaryCall, ClientWritableStream, ServiceError} from './call';
|
||||
import {CallCredentials} from './call-credentials';
|
||||
import {Deadline, StatusObject} from './call-stream';
|
||||
import {Channel, ConnectivityState, Http2Channel} from './channel';
|
||||
|
@ -27,9 +27,10 @@ import {LogVerbosity, Status} from './constants';
|
|||
import * as logging from './logging';
|
||||
import {Deserialize, loadPackageDefinition, makeClientConstructor, Serialize} from './make-client';
|
||||
import {Metadata} from './metadata';
|
||||
import {KeyCertPair, ServerCredentials} from './server-credentials';
|
||||
import {StatusBuilder} from './status-builder';
|
||||
|
||||
const supportedNodeVersions = '^8.11.2 || >=9.4';
|
||||
const supportedNodeVersions = '^8.13.0 || >=10.10.0';
|
||||
if (!semver.satisfies(process.version, supportedNodeVersions)) {
|
||||
throw new Error(`@grpc/grpc-js only works on Node ${supportedNodeVersions}`);
|
||||
}
|
||||
|
@ -179,7 +180,8 @@ export {
|
|||
ClientWritableStream,
|
||||
ClientDuplexStream,
|
||||
CallOptions,
|
||||
StatusObject
|
||||
StatusObject,
|
||||
ServiceError
|
||||
};
|
||||
|
||||
/* tslint:disable:no-any */
|
||||
|
@ -226,15 +228,9 @@ export const Server = (options: any) => {
|
|||
throw new Error('Not yet implemented');
|
||||
};
|
||||
|
||||
export const ServerCredentials = {
|
||||
createSsl:
|
||||
(rootCerts: any, keyCertPairs: any, checkClientCertificate: any) => {
|
||||
throw new Error('Not yet implemented');
|
||||
},
|
||||
createInsecure: () => {
|
||||
throw new Error('Not yet implemented');
|
||||
}
|
||||
};
|
||||
export {ServerCredentials};
|
||||
export {KeyCertPair};
|
||||
|
||||
|
||||
export const getClientChannel = (client: Client) => {
|
||||
return Client.prototype.getChannel.call(client);
|
||||
|
@ -253,3 +249,5 @@ export const InterceptorBuilder = () => {
|
|||
export const InterceptingCall = () => {
|
||||
throw new Error('Not yet implemented');
|
||||
};
|
||||
|
||||
export {GrpcObject} from './make-client';
|
||||
|
|
|
@ -19,9 +19,13 @@ import {ChannelCredentials} from './channel-credentials';
import {ChannelOptions} from './channel-options';
import {Client} from './client';

export interface Serialize<T> { (value: T): Buffer; }
export interface Serialize<T> {
  (value: T): Buffer;
}

export interface Deserialize<T> { (bytes: Buffer): T; }
export interface Deserialize<T> {
  (bytes: Buffer): T;
}

export interface MethodDefinition<RequestType, ResponseType> {
  path: string;

@ -0,0 +1,525 @@
|
|||
/*
|
||||
* Copyright 2019 gRPC authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
import {EventEmitter} from 'events';
|
||||
import * as http2 from 'http2';
|
||||
import {Duplex, Readable, Writable} from 'stream';
|
||||
|
||||
import {ServiceError} from './call';
|
||||
import {StatusObject} from './call-stream';
|
||||
import {Status} from './constants';
|
||||
import {Deserialize, Serialize} from './make-client';
|
||||
import {Metadata} from './metadata';
|
||||
import {StreamDecoder} from './stream-decoder';
|
||||
|
||||
function noop(): void {}
|
||||
|
||||
export type PartialServiceError = Partial<ServiceError>;
|
||||
|
||||
type DeadlineUnitIndexSignature = {
|
||||
[name: string]: number
|
||||
};
|
||||
|
||||
const GRPC_ACCEPT_ENCODING_HEADER = 'grpc-accept-encoding';
|
||||
const GRPC_ENCODING_HEADER = 'grpc-encoding';
|
||||
const GRPC_MESSAGE_HEADER = 'grpc-message';
|
||||
const GRPC_STATUS_HEADER = 'grpc-status';
|
||||
const GRPC_TIMEOUT_HEADER = 'grpc-timeout';
|
||||
const DEADLINE_REGEX = /(\d{1,8})\s*([HMSmun])/;
|
||||
const deadlineUnitsToMs: DeadlineUnitIndexSignature = {
|
||||
H: 3600000,
|
||||
M: 60000,
|
||||
S: 1000,
|
||||
m: 1,
|
||||
u: 0.001,
|
||||
n: 0.000001
|
||||
};
|
||||
const defaultResponseHeaders = {
|
||||
// TODO(cjihrig): Remove these encoding headers from the default response
|
||||
// once compression is integrated.
|
||||
[GRPC_ACCEPT_ENCODING_HEADER]: 'identity',
|
||||
[GRPC_ENCODING_HEADER]: 'identity',
|
||||
[http2.constants.HTTP2_HEADER_STATUS]: http2.constants.HTTP_STATUS_OK,
|
||||
[http2.constants.HTTP2_HEADER_CONTENT_TYPE]: 'application/grpc+proto'
|
||||
};
|
||||
const defaultResponseOptions = {
|
||||
waitForTrailers: true
|
||||
} as http2.ServerStreamResponseOptions;
|
||||
|
||||
|
||||
export type ServerSurfaceCall = {
|
||||
cancelled: boolean; getPeer(): string;
|
||||
sendMetadata(responseMetadata: Metadata): void
|
||||
};
|
||||
|
||||
export type ServerUnaryCall<RequestType, ResponseType> =
|
||||
ServerSurfaceCall&{request: RequestType | null};
|
||||
export type ServerReadableStream<RequestType, ResponseType> =
|
||||
ServerSurfaceCall&Readable;
|
||||
export type ServerWritableStream<RequestType, ResponseType> =
|
||||
ServerSurfaceCall&Writable&{request: RequestType | null};
|
||||
export type ServerDuplexStream<RequestType, ResponseType> =
|
||||
ServerSurfaceCall&Duplex;
|
||||
|
||||
export class ServerUnaryCallImpl<RequestType, ResponseType> extends EventEmitter
|
||||
implements ServerUnaryCall<RequestType, ResponseType> {
|
||||
cancelled: boolean;
|
||||
request: RequestType|null;
|
||||
|
||||
constructor(
|
||||
private call: Http2ServerCallStream<RequestType, ResponseType>,
|
||||
public metadata: Metadata) {
|
||||
super();
|
||||
this.cancelled = false;
|
||||
this.request = null;
|
||||
}
|
||||
|
||||
getPeer(): string {
|
||||
throw new Error('not implemented yet');
|
||||
}
|
||||
|
||||
sendMetadata(responseMetadata: Metadata): void {
|
||||
this.call.sendMetadata(responseMetadata);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export class ServerReadableStreamImpl<RequestType, ResponseType> extends
|
||||
Readable implements ServerReadableStream<RequestType, ResponseType> {
|
||||
cancelled: boolean;
|
||||
|
||||
constructor(
|
||||
private call: Http2ServerCallStream<RequestType, ResponseType>,
|
||||
public metadata: Metadata,
|
||||
private _deserialize: Deserialize<RequestType>) {
|
||||
super({objectMode: true});
|
||||
this.cancelled = false;
|
||||
this.call.setupReadable(this);
|
||||
}
|
||||
|
||||
_read(size: number) {
|
||||
this.call.resume();
|
||||
}
|
||||
|
||||
deserialize(input: Buffer): RequestType {
|
||||
return this._deserialize(input);
|
||||
}
|
||||
|
||||
getPeer(): string {
|
||||
throw new Error('not implemented yet');
|
||||
}
|
||||
|
||||
sendMetadata(responseMetadata: Metadata): void {
|
||||
this.call.sendMetadata(responseMetadata);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export class ServerWritableStreamImpl<RequestType, ResponseType> extends
|
||||
Writable implements ServerWritableStream<RequestType, ResponseType> {
|
||||
cancelled: boolean;
|
||||
request: RequestType|null;
|
||||
private trailingMetadata: Metadata;
|
||||
|
||||
constructor(
|
||||
private call: Http2ServerCallStream<RequestType, ResponseType>,
|
||||
public metadata: Metadata, private _serialize: Serialize<ResponseType>) {
|
||||
super({objectMode: true});
|
||||
this.cancelled = false;
|
||||
this.request = null;
|
||||
this.trailingMetadata = new Metadata();
|
||||
|
||||
this.on('error', (err) => {
|
||||
this.call.sendError(err as ServiceError);
|
||||
this.end();
|
||||
});
|
||||
}
|
||||
|
||||
getPeer(): string {
|
||||
throw new Error('not implemented yet');
|
||||
}
|
||||
|
||||
sendMetadata(responseMetadata: Metadata): void {
|
||||
this.call.sendMetadata(responseMetadata);
|
||||
}
|
||||
|
||||
async _write(
|
||||
chunk: ResponseType, encoding: string,
|
||||
// tslint:disable-next-line:no-any
|
||||
callback: (...args: any[]) => void) {
|
||||
try {
|
||||
const response = await this.call.serializeMessage(chunk);
|
||||
|
||||
if (!this.call.write(response)) {
|
||||
this.call.once('drain', callback);
|
||||
return;
|
||||
}
|
||||
} catch (err) {
|
||||
err.code = Status.INTERNAL;
|
||||
this.emit('error', err);
|
||||
}
|
||||
|
||||
callback();
|
||||
}
|
||||
|
||||
_final(callback: Function): void {
|
||||
this.call.sendStatus(
|
||||
{code: Status.OK, details: 'OK', metadata: this.trailingMetadata});
|
||||
callback(null);
|
||||
}
|
||||
|
||||
// tslint:disable-next-line:no-any
|
||||
end(metadata?: any) {
|
||||
if (metadata) {
|
||||
this.trailingMetadata = metadata;
|
||||
}
|
||||
|
||||
super.end();
|
||||
}
|
||||
|
||||
serialize(input: ResponseType): Buffer|null {
|
||||
if (input === null || input === undefined) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return this._serialize(input);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export class ServerDuplexStreamImpl<RequestType, ResponseType> extends Duplex
|
||||
implements ServerDuplexStream<RequestType, ResponseType> {
|
||||
cancelled: boolean;
|
||||
|
||||
constructor(
|
||||
private call: Http2ServerCallStream<RequestType, ResponseType>,
|
||||
public metadata: Metadata, private _serialize: Serialize<ResponseType>,
|
||||
private _deserialize: Deserialize<RequestType>) {
|
||||
super({objectMode: true});
|
||||
this.cancelled = false;
|
||||
}
|
||||
|
||||
getPeer(): string {
|
||||
throw new Error('not implemented yet');
|
||||
}
|
||||
|
||||
sendMetadata(responseMetadata: Metadata): void {
|
||||
this.call.sendMetadata(responseMetadata);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Unary response callback signature.
|
||||
export type sendUnaryData<ResponseType> =
|
||||
(error: ServiceError|null, value: ResponseType|null, trailer?: Metadata,
|
||||
flags?: number) => void;
|
||||
|
||||
// User provided handler for unary calls.
|
||||
export type handleUnaryCall<RequestType, ResponseType> =
|
||||
(call: ServerUnaryCall<RequestType, ResponseType>,
|
||||
callback: sendUnaryData<ResponseType>) => void;
|
||||
|
||||
// User provided handler for client streaming calls.
|
||||
export type handleClientStreamingCall<RequestType, ResponseType> =
|
||||
(call: ServerReadableStream<RequestType, ResponseType>,
|
||||
callback: sendUnaryData<ResponseType>) => void;
|
||||
|
||||
// User provided handler for server streaming calls.
|
||||
export type handleServerStreamingCall<RequestType, ResponseType> =
|
||||
(call: ServerWritableStream<RequestType, ResponseType>) => void;
|
||||
|
||||
// User provided handler for bidirectional streaming calls.
|
||||
export type handleBidiStreamingCall<RequestType, ResponseType> =
|
||||
(call: ServerDuplexStream<RequestType, ResponseType>) => void;
|
||||
|
||||
export type HandleCall<RequestType, ResponseType> =
|
||||
handleUnaryCall<RequestType, ResponseType>|
|
||||
handleClientStreamingCall<RequestType, ResponseType>|
|
||||
handleServerStreamingCall<RequestType, ResponseType>|
|
||||
handleBidiStreamingCall<RequestType, ResponseType>;
|
||||
|
||||
export type UnaryHandler<RequestType, ResponseType> = {
|
||||
func: handleUnaryCall<RequestType, ResponseType>;
|
||||
serialize: Serialize<ResponseType>;
|
||||
deserialize: Deserialize<RequestType>;
|
||||
type: HandlerType;
|
||||
};
|
||||
|
||||
export type ClientStreamingHandler<RequestType, ResponseType> = {
|
||||
func: handleClientStreamingCall<RequestType, ResponseType>;
|
||||
serialize: Serialize<ResponseType>;
|
||||
deserialize: Deserialize<RequestType>;
|
||||
type: HandlerType;
|
||||
};
|
||||
|
||||
export type ServerStreamingHandler<RequestType, ResponseType> = {
|
||||
func: handleServerStreamingCall<RequestType, ResponseType>;
|
||||
serialize: Serialize<ResponseType>;
|
||||
deserialize: Deserialize<RequestType>;
|
||||
type: HandlerType;
|
||||
};
|
||||
|
||||
export type BidiStreamingHandler<RequestType, ResponseType> = {
|
||||
func: handleBidiStreamingCall<RequestType, ResponseType>;
|
||||
serialize: Serialize<ResponseType>;
|
||||
deserialize: Deserialize<RequestType>;
|
||||
type: HandlerType;
|
||||
};
|
||||
|
||||
export type Handler<RequestType, ResponseType> =
|
||||
UnaryHandler<RequestType, ResponseType>|
|
||||
ClientStreamingHandler<RequestType, ResponseType>|
|
||||
ServerStreamingHandler<RequestType, ResponseType>|
|
||||
BidiStreamingHandler<RequestType, ResponseType>;
|
||||
|
||||
export type HandlerType = 'bidi'|'clientStream'|'serverStream'|'unary';
|
||||
|
||||
const noopTimer: NodeJS.Timer = setTimeout(() => {}, 0);
|
||||
|
||||
// Internal class that wraps the HTTP2 request.
|
||||
export class Http2ServerCallStream<RequestType, ResponseType> extends
|
||||
EventEmitter {
|
||||
cancelled = false;
|
||||
deadline: NodeJS.Timer = noopTimer;
|
||||
private wantTrailers = false;
|
||||
private metadataSent = false;
|
||||
|
||||
constructor(
|
||||
private stream: http2.ServerHttp2Stream,
|
||||
private handler: Handler<RequestType, ResponseType>) {
|
||||
super();
|
||||
|
||||
this.stream.once('error', (err: ServiceError) => {
|
||||
err.code = Status.INTERNAL;
|
||||
this.sendError(err);
|
||||
});
|
||||
|
||||
this.stream.once('close', () => {
|
||||
if (this.stream.rstCode === http2.constants.NGHTTP2_CANCEL) {
|
||||
this.cancelled = true;
|
||||
this.emit('cancelled', 'cancelled');
|
||||
}
|
||||
});
|
||||
|
||||
this.stream.on('drain', () => {
|
||||
this.emit('drain');
|
||||
});
|
||||
}
|
||||
|
||||
sendMetadata(customMetadata?: Metadata) {
|
||||
if (this.metadataSent) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.metadataSent = true;
|
||||
const custom = customMetadata ? customMetadata.toHttp2Headers() : null;
|
||||
// TODO(cjihrig): Include compression headers.
|
||||
const headers = Object.assign(defaultResponseHeaders, custom);
|
||||
this.stream.respond(headers, defaultResponseOptions);
|
||||
}
|
||||
|
||||
receiveMetadata(headers: http2.IncomingHttpHeaders) {
|
||||
const metadata = Metadata.fromHttp2Headers(headers);
|
||||
|
||||
// TODO(cjihrig): Receive compression metadata.
|
||||
|
||||
const timeoutHeader = metadata.get(GRPC_TIMEOUT_HEADER);
|
||||
|
||||
if (timeoutHeader.length > 0) {
|
||||
const match = timeoutHeader[0].toString().match(DEADLINE_REGEX);
|
||||
|
||||
if (match === null) {
|
||||
const err = new Error('Invalid deadline') as ServiceError;
|
||||
err.code = Status.OUT_OF_RANGE;
|
||||
this.sendError(err);
|
||||
return;
|
||||
}
|
||||
|
||||
const timeout = (+match[1] * deadlineUnitsToMs[match[2]]) | 0;
|
||||
|
||||
this.deadline = setTimeout(handleExpiredDeadline, timeout, this);
|
||||
metadata.remove(GRPC_TIMEOUT_HEADER);
|
||||
}
|
||||
|
||||
return metadata;
|
||||
}
|
||||
|
||||
receiveUnaryMessage(): Promise<RequestType> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const stream = this.stream;
|
||||
const chunks: Buffer[] = [];
|
||||
let totalLength = 0;
|
||||
|
||||
stream.on('data', (data: Buffer) => {
|
||||
chunks.push(data);
|
||||
totalLength += data.byteLength;
|
||||
});
|
||||
|
||||
stream.once('end', async () => {
|
||||
try {
|
||||
const requestBytes = Buffer.concat(chunks, totalLength);
|
||||
|
||||
resolve(await this.deserializeMessage(requestBytes));
|
||||
} catch (err) {
|
||||
err.code = Status.INTERNAL;
|
||||
this.sendError(err);
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
serializeMessage(value: ResponseType) {
|
||||
const messageBuffer = this.handler.serialize(value);
|
||||
|
||||
// TODO(cjihrig): Call compression aware serializeMessage().
|
||||
const byteLength = messageBuffer.byteLength;
|
||||
const output = Buffer.allocUnsafe(byteLength + 5);
|
||||
output.writeUInt8(0, 0);
|
||||
output.writeUInt32BE(byteLength, 1);
|
||||
messageBuffer.copy(output, 5);
|
||||
return output;
|
||||
}
|
||||
|
||||
async deserializeMessage(bytes: Buffer) {
|
||||
// TODO(cjihrig): Call compression aware deserializeMessage().
|
||||
const receivedMessage = bytes.slice(5);
|
||||
|
||||
return this.handler.deserialize(receivedMessage);
|
||||
}
|
||||
|
||||
async sendUnaryMessage(
|
||||
err: ServiceError|null, value: ResponseType|null, metadata?: Metadata,
|
||||
flags?: number) {
|
||||
if (!metadata) {
|
||||
metadata = new Metadata();
|
||||
}
|
||||
|
||||
if (err) {
|
||||
err.metadata = metadata;
|
||||
this.sendError(err);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await this.serializeMessage(value!);
|
||||
|
||||
this.write(response);
|
||||
this.sendStatus({code: Status.OK, details: 'OK', metadata});
|
||||
} catch (err) {
|
||||
err.code = Status.INTERNAL;
|
||||
this.sendError(err);
|
||||
}
|
||||
}
|
||||
|
||||
sendStatus(statusObj: StatusObject) {
|
||||
if (this.cancelled) {
|
||||
return;
|
||||
}
|
||||
|
||||
clearTimeout(this.deadline);
|
||||
|
||||
if (!this.wantTrailers) {
|
||||
this.wantTrailers = true;
|
||||
this.stream.once('wantTrailers', () => {
|
||||
const trailersToSend = Object.assign(
|
||||
{
|
||||
[GRPC_STATUS_HEADER]: statusObj.code,
|
||||
[GRPC_MESSAGE_HEADER]: encodeURI(statusObj.details as string)
|
||||
},
|
||||
statusObj.metadata.toHttp2Headers());
|
||||
|
||||
this.stream.sendTrailers(trailersToSend);
|
||||
});
|
||||
this.sendMetadata();
|
||||
this.stream.end();
|
||||
}
|
||||
}
|
||||
|
||||
sendError(error: ServiceError) {
|
||||
const status: StatusObject = {
|
||||
code: Status.UNKNOWN,
|
||||
details: error.hasOwnProperty('message') ? error.message :
|
||||
'Unknown Error',
|
||||
metadata: error.hasOwnProperty('metadata') ? error.metadata :
|
||||
new Metadata()
|
||||
};
|
||||
|
||||
if (error.hasOwnProperty('code') && Number.isInteger(error.code)) {
|
||||
status.code = error.code;
|
||||
|
||||
if (error.hasOwnProperty('details')) {
|
||||
status.details = error.details;
|
||||
}
|
||||
}
|
||||
|
||||
this.sendStatus(status);
|
||||
}
|
||||
|
||||
write(chunk: Buffer) {
|
||||
if (this.cancelled) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.sendMetadata();
|
||||
return this.stream.write(chunk);
|
||||
}
|
||||
|
||||
resume() {
|
||||
this.stream.resume();
|
||||
}
|
||||
|
||||
setupReadable(readable: ServerReadableStream<RequestType, ResponseType>|
|
||||
ServerDuplexStream<RequestType, ResponseType>) {
|
||||
const decoder = new StreamDecoder();
|
||||
|
||||
this.stream.on('data', async (data: Buffer) => {
|
||||
const message = decoder.write(data);
|
||||
|
||||
if (message === null) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const deserialized = await this.deserializeMessage(message);
|
||||
|
||||
if (!readable.push(deserialized)) {
|
||||
this.stream.pause();
|
||||
}
|
||||
} catch (err) {
|
||||
err.code = Status.INTERNAL;
|
||||
readable.emit('error', err);
|
||||
}
|
||||
});
|
||||
|
||||
this.stream.once('end', () => {
|
||||
readable.push(null);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// tslint:disable:no-any
|
||||
type UntypedServerCall = Http2ServerCallStream<any, any>;
|
||||
|
||||
function handleExpiredDeadline(call: UntypedServerCall) {
|
||||
const err = new Error('Deadline exceeded') as ServiceError;
|
||||
err.code = Status.DEADLINE_EXCEEDED;
|
||||
|
||||
call.sendError(err);
|
||||
call.cancelled = true;
|
||||
call.emit('cancelled', 'deadline');
|
||||
}
|
|
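
A worked example of the deadline handling above: the client sends a `grpc-timeout` header whose value is a number plus a unit character, and `receiveMetadata()` converts it to milliseconds with `DEADLINE_REGEX` and `deadlineUnitsToMs` before arming the timer. The constants below are copied from the file above; only the small wrapper function is new.

const DEADLINE_REGEX = /(\d{1,8})\s*([HMSmun])/;
const deadlineUnitsToMs: {[name: string]: number} =
    {H: 3600000, M: 60000, S: 1000, m: 1, u: 0.001, n: 0.000001};

function timeoutMsFromHeader(header: string): number {
  const match = header.match(DEADLINE_REGEX);
  if (match === null) throw new Error('Invalid deadline');
  return (+match[1] * deadlineUnitsToMs[match[2]]) | 0;
}

timeoutMsFromHeader('100m');  // 100 (milliseconds)
timeoutMsFromHeader('5S');    // 5000 (milliseconds)
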
@ -0,0 +1,108 @@
|
|||
/*
|
||||
* Copyright 2019 gRPC authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
import {SecureServerOptions} from 'http2';
|
||||
|
||||
|
||||
export type KeyCertPair = {
|
||||
private_key: Buffer,
|
||||
cert_chain: Buffer
|
||||
};
|
||||
|
||||
|
||||
export abstract class ServerCredentials {
|
||||
abstract _isSecure(): boolean;
|
||||
abstract _getSettings(): SecureServerOptions|null;
|
||||
|
||||
static createInsecure(): ServerCredentials {
|
||||
return new InsecureServerCredentials();
|
||||
}
|
||||
|
||||
static createSsl(
|
||||
rootCerts: Buffer|null, keyCertPairs: KeyCertPair[],
|
||||
checkClientCertificate = false): ServerCredentials {
|
||||
if (rootCerts !== null && !Buffer.isBuffer(rootCerts)) {
|
||||
throw new TypeError('rootCerts must be null or a Buffer');
|
||||
}
|
||||
|
||||
if (!Array.isArray(keyCertPairs)) {
|
||||
throw new TypeError('keyCertPairs must be an array');
|
||||
}
|
||||
|
||||
if (typeof checkClientCertificate !== 'boolean') {
|
||||
throw new TypeError('checkClientCertificate must be a boolean');
|
||||
}
|
||||
|
||||
const cert = [];
|
||||
const key = [];
|
||||
|
||||
for (let i = 0; i < keyCertPairs.length; i++) {
|
||||
const pair = keyCertPairs[i];
|
||||
|
||||
if (pair === null || typeof pair !== 'object') {
|
||||
throw new TypeError(`keyCertPair[${i}] must be an object`);
|
||||
}
|
||||
|
||||
if (!Buffer.isBuffer(pair.private_key)) {
|
||||
throw new TypeError(`keyCertPair[${i}].private_key must be a Buffer`);
|
||||
}
|
||||
|
||||
if (!Buffer.isBuffer(pair.cert_chain)) {
|
||||
throw new TypeError(`keyCertPair[${i}].cert_chain must be a Buffer`);
|
||||
}
|
||||
|
||||
cert.push(pair.cert_chain);
|
||||
key.push(pair.private_key);
|
||||
}
|
||||
|
||||
return new SecureServerCredentials({
|
||||
ca: rootCerts || undefined,
|
||||
cert,
|
||||
key,
|
||||
requestCert: checkClientCertificate
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class InsecureServerCredentials extends ServerCredentials {
|
||||
_isSecure(): boolean {
|
||||
return false;
|
||||
}
|
||||
|
||||
_getSettings(): null {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class SecureServerCredentials extends ServerCredentials {
|
||||
private options: SecureServerOptions;
|
||||
|
||||
constructor(options: SecureServerOptions) {
|
||||
super();
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
_isSecure(): boolean {
|
||||
return true;
|
||||
}
|
||||
|
||||
_getSettings(): SecureServerOptions {
|
||||
return this.options;
|
||||
}
|
||||
}
|
|
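
How server code would construct the credentials defined above; the certificate file names are placeholders, while the method signatures and the snake_case `KeyCertPair` fields come from the file itself.

import * as fs from 'fs';
import {ServerCredentials} from './server-credentials';

// Plaintext server.
const insecureCreds = ServerCredentials.createInsecure();

// TLS server: optional CA bundle, one or more key/cert pairs, and whether to
// request a client certificate.
const secureCreds = ServerCredentials.createSsl(
    fs.readFileSync('ca.pem'),
    [{private_key: fs.readFileSync('server.key'),
      cert_chain: fs.readFileSync('server.pem')}],
    false);
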
@ -0,0 +1,359 @@
|
|||
/*
|
||||
* Copyright 2019 gRPC authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
import * as http2 from 'http2';
|
||||
import {AddressInfo, ListenOptions} from 'net';
|
||||
import {URL} from 'url';
|
||||
|
||||
import {ServiceError} from './call';
|
||||
import {StatusObject} from './call-stream';
|
||||
import {Status} from './constants';
|
||||
import {Deserialize, Serialize, ServiceDefinition} from './make-client';
|
||||
import {Metadata} from './metadata';
|
||||
import {BidiStreamingHandler, ClientStreamingHandler, HandleCall, Handler, HandlerType, Http2ServerCallStream, PartialServiceError, sendUnaryData, ServerDuplexStream, ServerDuplexStreamImpl, ServerReadableStream, ServerReadableStreamImpl, ServerStreamingHandler, ServerUnaryCall, ServerUnaryCallImpl, ServerWritableStream, ServerWritableStreamImpl, UnaryHandler} from './server-call';
|
||||
import {ServerCredentials} from './server-credentials';
|
||||
|
||||
function noop(): void {}
|
||||
|
||||
const unimplementedStatusResponse: PartialServiceError = {
|
||||
code: Status.UNIMPLEMENTED,
|
||||
details: 'The server does not implement this method',
|
||||
};
|
||||
|
||||
// tslint:disable:no-any
|
||||
type UntypedUnaryHandler = UnaryHandler<any, any>;
|
||||
type UntypedClientStreamingHandler = ClientStreamingHandler<any, any>;
|
||||
type UntypedServerStreamingHandler = ServerStreamingHandler<any, any>;
|
||||
type UntypedBidiStreamingHandler = BidiStreamingHandler<any, any>;
|
||||
type UntypedHandleCall = HandleCall<any, any>;
|
||||
type UntypedHandler = Handler<any, any>;
|
||||
type UntypedServiceImplementation = {
|
||||
[name: string]: UntypedHandleCall
|
||||
};
|
||||
|
||||
const defaultHandler = {
|
||||
unary(call: ServerUnaryCall<any, any>, callback: sendUnaryData<any>): void {
|
||||
callback(unimplementedStatusResponse as ServiceError, null);
|
||||
},
|
||||
clientStream(
|
||||
call: ServerReadableStream<any, any>, callback: sendUnaryData<any>):
|
||||
void {
|
||||
callback(unimplementedStatusResponse as ServiceError, null);
|
||||
},
|
||||
serverStream(call: ServerWritableStream<any, any>): void {
|
||||
call.emit('error', unimplementedStatusResponse);
|
||||
},
|
||||
bidi(call: ServerDuplexStream<any, any>): void {
|
||||
call.emit('error', unimplementedStatusResponse);
|
||||
}
|
||||
};
|
||||
// tslint:enable:no-any
|
||||
|
||||
export class Server {
|
||||
private http2Server: http2.Http2Server|http2.Http2SecureServer|null = null;
|
||||
private handlers: Map<string, UntypedHandler> =
|
||||
new Map<string, UntypedHandler>();
|
||||
private started = false;
|
||||
|
||||
constructor(options?: object) {}
|
||||
|
||||
addProtoService(): void {
|
||||
throw new Error('Not implemented. Use addService() instead');
|
||||
}
|
||||
|
||||
addService(service: ServiceDefinition, implementation: object): void {
|
||||
if (this.started === true) {
|
||||
throw new Error('Can\'t add a service to a started server.');
|
||||
}
|
||||
|
||||
if (service === null || typeof service !== 'object' ||
|
||||
implementation === null || typeof implementation !== 'object') {
|
||||
throw new Error('addService() requires two objects as arguments');
|
||||
}
|
||||
|
||||
const serviceKeys = Object.keys(service);
|
||||
|
||||
if (serviceKeys.length === 0) {
|
||||
throw new Error('Cannot add an empty service to a server');
|
||||
}
|
||||
|
||||
const implMap: UntypedServiceImplementation =
|
||||
implementation as UntypedServiceImplementation;
|
||||
|
||||
serviceKeys.forEach((name) => {
|
||||
const attrs = service[name];
|
||||
let methodType: HandlerType;
|
||||
|
||||
if (attrs.requestStream) {
|
||||
if (attrs.responseStream) {
|
||||
methodType = 'bidi';
|
||||
} else {
|
||||
methodType = 'clientStream';
|
||||
}
|
||||
} else {
|
||||
if (attrs.responseStream) {
|
||||
methodType = 'serverStream';
|
||||
} else {
|
||||
methodType = 'unary';
|
||||
}
|
||||
}
|
||||
|
||||
let implFn = implMap[name];
|
||||
let impl;
|
||||
|
||||
if (implFn === undefined && typeof attrs.originalName === 'string') {
|
||||
implFn = implMap[attrs.originalName];
|
||||
}
|
||||
|
||||
if (implFn !== undefined) {
|
||||
impl = implFn.bind(implementation);
|
||||
} else {
|
||||
impl = defaultHandler[methodType];
|
||||
}
|
||||
|
||||
const success = this.register(
|
||||
attrs.path, impl as UntypedHandleCall, attrs.responseSerialize,
|
||||
attrs.requestDeserialize, methodType);
|
||||
|
||||
if (success === false) {
|
||||
throw new Error(`Method handler for ${attrs.path} already provided.`);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
bind(port: string, creds: ServerCredentials): void {
|
||||
throw new Error('Not implemented. Use bindAsync() instead');
|
||||
}
|
||||
|
||||
bindAsync(
|
||||
port: string, creds: ServerCredentials,
|
||||
callback: (error: Error|null, port: number) => void): void {
|
||||
if (this.started === true) {
|
||||
throw new Error('server is already started');
|
||||
}
|
||||
|
||||
if (typeof port !== 'string') {
|
||||
throw new TypeError('port must be a string');
|
||||
}
|
||||
|
||||
if (creds === null || typeof creds !== 'object') {
|
||||
throw new TypeError('creds must be an object');
|
||||
}
|
||||
|
||||
if (typeof callback !== 'function') {
|
||||
throw new TypeError('callback must be a function');
|
||||
}
|
||||
|
||||
const url = new URL(`http://${port}`);
|
||||
const options: ListenOptions = {host: url.hostname, port: +url.port};
|
||||
|
||||
if (creds._isSecure()) {
|
||||
this.http2Server = http2.createSecureServer(
|
||||
creds._getSettings() as http2.SecureServerOptions);
|
||||
} else {
|
||||
this.http2Server = http2.createServer();
|
||||
}
|
||||
|
||||
this._setupHandlers();
|
||||
|
||||
function onError(err: Error): void {
|
||||
callback(err, -1);
|
||||
}
|
||||
|
||||
this.http2Server.once('error', onError);
|
||||
|
||||
this.http2Server.listen(options, () => {
|
||||
const server =
|
||||
this.http2Server as http2.Http2Server | http2.Http2SecureServer;
|
||||
const port = (server.address() as AddressInfo).port;
|
||||
|
||||
server.removeListener('error', onError);
|
||||
callback(null, port);
|
||||
});
|
||||
}
|
||||
|
||||
forceShutdown(): void {
|
||||
throw new Error('Not yet implemented');
|
||||
}
|
||||
|
||||
register<RequestType, ResponseType>(
|
||||
name: string, handler: HandleCall<RequestType, ResponseType>,
|
||||
serialize: Serialize<ResponseType>, deserialize: Deserialize<RequestType>,
|
||||
type: string): boolean {
|
||||
if (this.handlers.has(name)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
this.handlers.set(
|
||||
name, {func: handler, serialize, deserialize, type} as UntypedHandler);
|
||||
return true;
|
||||
}
|
||||
|
||||
start(): void {
|
||||
if (this.http2Server === null || this.http2Server.listening !== true) {
|
||||
throw new Error('server must be bound in order to start');
|
||||
}
|
||||
|
||||
if (this.started === true) {
|
||||
throw new Error('server is already started');
|
||||
}
|
||||
|
||||
this.started = true;
|
||||
}
|
||||
|
||||
tryShutdown(callback: (error?: Error) => void): void {
|
||||
callback = typeof callback === 'function' ? callback : noop;
|
||||
|
||||
if (this.http2Server === null) {
|
||||
callback(new Error('server is not running'));
|
||||
return;
|
||||
}
|
||||
|
||||
this.http2Server.close((err?: Error) => {
|
||||
this.started = false;
|
||||
callback(err);
|
||||
});
|
||||
}
|
||||
|
||||
addHttp2Port(): void {
|
||||
throw new Error('Not yet implemented');
|
||||
}
|
||||
|
||||
private _setupHandlers(): void {
|
||||
if (this.http2Server === null) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.http2Server.on(
|
||||
'stream',
|
||||
(stream: http2.ServerHttp2Stream,
|
||||
headers: http2.IncomingHttpHeaders) => {
|
||||
if (!this.started) {
|
||||
stream.end();
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const path = headers[http2.constants.HTTP2_HEADER_PATH] as string;
|
||||
const handler = this.handlers.get(path);
|
||||
|
||||
if (handler === undefined) {
|
||||
throw unimplementedStatusResponse;
|
||||
}
|
||||
|
||||
const call = new Http2ServerCallStream(stream, handler);
|
||||
const metadata: Metadata =
|
||||
call.receiveMetadata(headers) as Metadata;
|
||||
|
||||
switch (handler.type) {
|
||||
case 'unary':
|
||||
handleUnary(call, handler as UntypedUnaryHandler, metadata);
|
||||
break;
|
||||
case 'clientStream':
|
||||
handleClientStreaming(
|
||||
call, handler as UntypedClientStreamingHandler, metadata);
|
||||
break;
|
||||
case 'serverStream':
|
||||
handleServerStreaming(
|
||||
call, handler as UntypedServerStreamingHandler, metadata);
|
||||
break;
|
||||
case 'bidi':
|
||||
handleBidiStreaming(
|
||||
call, handler as UntypedBidiStreamingHandler, metadata);
|
||||
break;
|
||||
default:
|
||||
throw new Error(`Unknown handler type: ${handler.type}`);
|
||||
}
|
||||
} catch (err) {
|
||||
const call = new Http2ServerCallStream(stream, null!);
|
||||
err.code = Status.INTERNAL;
|
||||
call.sendError(err);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
async function handleUnary<RequestType, ResponseType>(
|
||||
call: Http2ServerCallStream<RequestType, ResponseType>,
|
||||
handler: UnaryHandler<RequestType, ResponseType>,
|
||||
metadata: Metadata): Promise<void> {
|
||||
const emitter =
|
||||
new ServerUnaryCallImpl<RequestType, ResponseType>(call, metadata);
|
||||
const request = await call.receiveUnaryMessage();
|
||||
|
||||
if (request === undefined || call.cancelled) {
|
||||
return;
|
||||
}
|
||||
|
||||
emitter.request = request;
|
||||
handler.func(
|
||||
emitter,
|
||||
(err: ServiceError|null, value: ResponseType|null, trailer?: Metadata,
|
||||
flags?: number) => {
|
||||
call.sendUnaryMessage(err, value, trailer, flags);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
function handleClientStreaming<RequestType, ResponseType>(
|
||||
call: Http2ServerCallStream<RequestType, ResponseType>,
|
||||
handler: ClientStreamingHandler<RequestType, ResponseType>,
|
||||
metadata: Metadata): void {
|
||||
const stream = new ServerReadableStreamImpl<RequestType, ResponseType>(
|
||||
call, metadata, handler.deserialize);
|
||||
|
||||
function respond(
|
||||
err: ServiceError|null, value: ResponseType|null, trailer?: Metadata,
|
||||
flags?: number) {
|
||||
stream.destroy();
|
||||
call.sendUnaryMessage(err, value, trailer, flags);
|
||||
}
|
||||
|
||||
if (call.cancelled) {
|
||||
return;
|
||||
}
|
||||
|
||||
stream.on('error', respond);
|
||||
handler.func(stream, respond);
|
||||
}
|
||||
|
||||
|
||||
async function handleServerStreaming<RequestType, ResponseType>(
|
||||
call: Http2ServerCallStream<RequestType, ResponseType>,
|
||||
handler: ServerStreamingHandler<RequestType, ResponseType>,
|
||||
metadata: Metadata): Promise<void> {
|
||||
const request = await call.receiveUnaryMessage();
|
||||
|
||||
if (request === undefined || call.cancelled) {
|
||||
return;
|
||||
}
|
||||
|
||||
const stream = new ServerWritableStreamImpl<RequestType, ResponseType>(
|
||||
call, metadata, handler.serialize);
|
||||
|
||||
stream.request = request;
|
||||
handler.func(stream);
|
||||
}
|
||||
|
||||
|
||||
function handleBidiStreaming<RequestType, ResponseType>(
|
||||
call: Http2ServerCallStream<RequestType, ResponseType>,
|
||||
handler: BidiStreamingHandler<RequestType, ResponseType>,
|
||||
metadata: Metadata): void {
|
||||
throw new Error('not implemented yet');
|
||||
}
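
For orientation, a small usage sketch (not part of the diff) of the bindAsync API implemented above, assuming insecure credentials and no registered services. The port argument is parsed with new URL(`http://${port}`), so 'localhost:0' means "pick a free port", which is reported through the callback.

import {Server, ServerCredentials} from '../src';

const server = new Server();
server.bindAsync('localhost:0', ServerCredentials.createInsecure(), (err, boundPort) => {
  if (err) {
    // Bind failures surface here; the implementation reports them as callback(err, -1).
    console.error('bind failed:', err.message);
    return;
  }
  console.log('listening on', boundPort);  // the real port chosen by the OS
  server.start();  // start() is only valid after a successful bind
});
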
|
|
@ -0,0 +1,98 @@
|
|||
/*
|
||||
* Copyright 2019 gRPC authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
enum ReadState {
  NO_DATA,
  READING_SIZE,
  READING_MESSAGE
}

export class StreamDecoder {
  private readState: ReadState = ReadState.NO_DATA;
  private readCompressFlag: Buffer = Buffer.alloc(1);
  private readPartialSize: Buffer = Buffer.alloc(4);
  private readSizeRemaining = 4;
  private readMessageSize = 0;
  private readPartialMessage: Buffer[] = [];
  private readMessageRemaining = 0;

  write(data: Buffer): Buffer|null {
    let readHead = 0;
    let toRead: number;

    while (readHead < data.length) {
      switch (this.readState) {
        case ReadState.NO_DATA:
          this.readCompressFlag = data.slice(readHead, readHead + 1);
          readHead += 1;
          this.readState = ReadState.READING_SIZE;
          this.readPartialSize.fill(0);
          this.readSizeRemaining = 4;
          this.readMessageSize = 0;
          this.readMessageRemaining = 0;
          this.readPartialMessage = [];
          break;
        case ReadState.READING_SIZE:
          toRead = Math.min(data.length - readHead, this.readSizeRemaining);
          data.copy(
              this.readPartialSize, 4 - this.readSizeRemaining, readHead,
              readHead + toRead);
          this.readSizeRemaining -= toRead;
          readHead += toRead;
          // readSizeRemaining >=0 here
          if (this.readSizeRemaining === 0) {
            this.readMessageSize = this.readPartialSize.readUInt32BE(0);
            this.readMessageRemaining = this.readMessageSize;
            if (this.readMessageRemaining > 0) {
              this.readState = ReadState.READING_MESSAGE;
            } else {
              const message = Buffer.concat(
                  [this.readCompressFlag, this.readPartialSize], 5);

              this.readState = ReadState.NO_DATA;
              return message;
            }
          }
          break;
        case ReadState.READING_MESSAGE:
          toRead = Math.min(data.length - readHead, this.readMessageRemaining);
          this.readPartialMessage.push(data.slice(readHead, readHead + toRead));
          this.readMessageRemaining -= toRead;
          readHead += toRead;
          // readMessageRemaining >=0 here
          if (this.readMessageRemaining === 0) {
            // At this point, we have read a full message
            const framedMessageBuffers = [
              this.readCompressFlag, this.readPartialSize
            ].concat(this.readPartialMessage);
            const framedMessage =
                Buffer.concat(framedMessageBuffers, this.readMessageSize + 5);

            this.readState = ReadState.NO_DATA;
            return framedMessage;
          }
          break;
        default:
          throw new Error('Unexpected read state');
      }
    }

    return null;
  }
}
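
For context, a small sketch (not part of the diff) of how the decoder above reassembles one length-prefixed gRPC frame that arrives split across two chunks. The payload bytes and the import path are illustrative assumptions.

import {StreamDecoder} from '../src/stream-decoder';

// Build one frame: 1 compression-flag byte + 4-byte big-endian length + payload.
const payload = Buffer.from([0x0a, 0x03, 0x61, 0x62, 0x63]);  // arbitrary example bytes
const header = Buffer.alloc(5);
header.writeUInt8(0, 0);                  // not compressed
header.writeUInt32BE(payload.length, 1);  // message length

const decoder = new StreamDecoder();
// The first chunk stops in the middle of the message, so nothing is returned yet.
const first = decoder.write(Buffer.concat([header, payload.slice(0, 2)]));
// first === null
// The second chunk completes the message; the full framed buffer comes back.
const second = decoder.write(payload.slice(2));
// second.length === 5 + payload.length
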
|
|
@ -15,8 +15,19 @@
|
|||
*
|
||||
*/
|
||||
|
||||
import * as loader from '@grpc/proto-loader';
import * as assert from 'assert';

import {GrpcObject, loadPackageDefinition} from '../src/make-client';

const protoLoaderOptions = {
  keepCase: true,
  longs: String,
  enums: String,
  defaults: true,
  oneofs: true
};

export function mockFunction(): never {
  throw new Error('Not implemented');
}
|
||||
|
@ -100,3 +111,8 @@ export namespace assert2 {
|
|||
}
}
}

export function loadProtoFile(file: string): GrpcObject {
  const packageDefinition = loader.loadSync(file, protoLoaderOptions);
  return loadPackageDefinition(packageDefinition);
}
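
A one-line usage sketch of this helper (the fixture path is illustrative):

import {join} from 'path';
import {loadProtoFile} from './common';

const echoPackage = loadProtoFile(join(__dirname, 'fixtures', 'echo_service.proto'));
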
|
||||
|
|
|
@ -0,0 +1,33 @@
|
|||
/*
|
||||
* Copyright 2019 gRPC authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
message EchoMessage {
|
||||
string value = 1;
|
||||
int32 value2 = 2;
|
||||
}
|
||||
|
||||
service EchoService {
|
||||
rpc Echo (EchoMessage) returns (EchoMessage);
|
||||
|
||||
rpc EchoClientStream (stream EchoMessage) returns (EchoMessage);
|
||||
|
||||
rpc EchoServerStream (EchoMessage) returns (stream EchoMessage);
|
||||
|
||||
rpc EchoBidiStream (stream EchoMessage) returns (stream EchoMessage);
|
||||
}
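
As an aside, the four RPC shapes in this service line up with the HandlerType values computed by Server.addService() earlier in this diff; a small TypeScript illustration (not part of the diff):

// Illustration only: requestStream/responseStream -> HandlerType.
const expectedHandlerTypes = {
  Echo: 'unary',                     // no streaming on either side
  EchoClientStream: 'clientStream',  // requestStream only
  EchoServerStream: 'serverStream',  // responseStream only
  EchoBidiStream: 'bidi'             // both sides stream
};
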
|
|
@ -0,0 +1,50 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package math;
|
||||
|
||||
message DivArgs {
|
||||
int64 dividend = 1;
|
||||
int64 divisor = 2;
|
||||
}
|
||||
|
||||
message DivReply {
|
||||
int64 quotient = 1;
|
||||
int64 remainder = 2;
|
||||
}
|
||||
|
||||
message FibArgs {
|
||||
int64 limit = 1;
|
||||
}
|
||||
|
||||
message Num {
|
||||
int64 num = 1;
|
||||
}
|
||||
|
||||
message FibReply {
|
||||
int64 count = 1;
|
||||
}
|
||||
|
||||
service Math {
|
||||
// Div divides DivArgs.dividend by DivArgs.divisor and returns the quotient
|
||||
// and remainder.
|
||||
rpc Div (DivArgs) returns (DivReply) {
|
||||
}
|
||||
|
||||
// DivMany accepts an arbitrary number of division args from the client stream
|
||||
// and sends back the results in the reply stream. The stream continues until
|
||||
// the client closes its end; the server does the same after sending all the
|
||||
// replies. The stream ends immediately if either end aborts.
|
||||
rpc DivMany (stream DivArgs) returns (stream DivReply) {
|
||||
}
|
||||
|
||||
// Fib generates numbers in the Fibonacci sequence. If FibArgs.limit > 0, Fib
|
||||
// generates up to limit numbers; otherwise it continues until the call is
|
||||
// canceled. Unlike Fib above, Fib has no final FibReply.
|
||||
rpc Fib (FibArgs) returns (stream Num) {
|
||||
}
|
||||
|
||||
// Sum sums a stream of numbers, returning the final result once the stream
|
||||
// is closed.
|
||||
rpc Sum (stream Num) returns (Num) {
|
||||
}
|
||||
}
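
A hypothetical client-side sketch for the server-streaming Fib method above, using the loadProtoFile test helper added in this diff; the address is a placeholder and error handling is omitted.

import {join} from 'path';
import * as grpc from '../src';
import {ServiceClientConstructor} from '../src/make-client';
import {loadProtoFile} from './common';

const MathClient =
    (loadProtoFile(join(__dirname, 'fixtures', 'math.proto')).math as any)
        .Math as ServiceClientConstructor;
const client = new MathClient('localhost:50051', grpc.credentials.createInsecure());

const call = client.fib({limit: 5});
call.on('data', (reply: any) => console.log(reply.num));
call.on('end', () => client.close());
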
|
|
@ -0,0 +1,41 @@
|
|||
/*
|
||||
* Copyright 2019 gRPC authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
message Request {
|
||||
bool error = 1;
|
||||
string message = 2;
|
||||
}
|
||||
|
||||
message Response {
|
||||
int32 count = 1;
|
||||
}
|
||||
|
||||
service TestService {
|
||||
rpc Unary (Request) returns (Response) {
|
||||
}
|
||||
|
||||
rpc ClientStream (stream Request) returns (Response) {
|
||||
}
|
||||
|
||||
rpc ServerStream (Request) returns (stream Response) {
|
||||
}
|
||||
|
||||
rpc BidiStream (stream Request) returns (stream Response) {
|
||||
}
|
||||
}
|
|
@ -64,6 +64,7 @@ class ClientHttp2StreamMock extends stream.Duplex implements
|
|||
endAfterHeaders = false;
|
||||
pending = false;
|
||||
rstCode = 0;
|
||||
readonly bufferSize: number = 0;
|
||||
readonly sentHeaders: OutgoingHttpHeaders = {};
|
||||
readonly sentInfoHeaders?: OutgoingHttpHeaders[] = [];
|
||||
readonly sentTrailers?: OutgoingHttpHeaders = undefined;
|
||||
|
|
|
@ -0,0 +1,126 @@
|
|||
/*
|
||||
* Copyright 2019 gRPC authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
// Allow `any` data type for testing runtime type checking.
|
||||
// tslint:disable no-any
|
||||
import * as assert from 'assert';
|
||||
import {readFileSync} from 'fs';
|
||||
import {join} from 'path';
|
||||
import {ServerCredentials} from '../src';
|
||||
|
||||
const ca = readFileSync(join(__dirname, 'fixtures', 'ca.pem'));
|
||||
const key = readFileSync(join(__dirname, 'fixtures', 'server1.key'));
|
||||
const cert = readFileSync(join(__dirname, 'fixtures', 'server1.pem'));
|
||||
|
||||
describe('Server Credentials', () => {
|
||||
describe('createInsecure', () => {
|
||||
it('creates insecure credentials', () => {
|
||||
const creds = ServerCredentials.createInsecure();
|
||||
|
||||
assert.strictEqual(creds._isSecure(), false);
|
||||
assert.strictEqual(creds._getSettings(), null);
|
||||
});
|
||||
});
|
||||
|
||||
describe('createSsl', () => {
|
||||
it('accepts a buffer and array as the first two arguments', () => {
|
||||
const creds = ServerCredentials.createSsl(ca, []);
|
||||
|
||||
assert.strictEqual(creds._isSecure(), true);
|
||||
assert.deepStrictEqual(
|
||||
creds._getSettings(), {ca, cert: [], key: [], requestCert: false});
|
||||
});
|
||||
|
||||
it('accepts a boolean as the third argument', () => {
|
||||
const creds = ServerCredentials.createSsl(ca, [], true);
|
||||
|
||||
assert.strictEqual(creds._isSecure(), true);
|
||||
assert.deepStrictEqual(
|
||||
creds._getSettings(), {ca, cert: [], key: [], requestCert: true});
|
||||
});
|
||||
|
||||
it('accepts an object with two buffers in the second argument', () => {
|
||||
const keyCertPairs = [{private_key: key, cert_chain: cert}];
|
||||
const creds = ServerCredentials.createSsl(null, keyCertPairs);
|
||||
|
||||
assert.strictEqual(creds._isSecure(), true);
|
||||
assert.deepStrictEqual(
|
||||
creds._getSettings(),
|
||||
{ca: undefined, cert: [cert], key: [key], requestCert: false});
|
||||
});
|
||||
|
||||
it('accepts multiple objects in the second argument', () => {
|
||||
const keyCertPairs = [
|
||||
{private_key: key, cert_chain: cert},
|
||||
{private_key: key, cert_chain: cert}
|
||||
];
|
||||
const creds = ServerCredentials.createSsl(null, keyCertPairs, false);
|
||||
|
||||
assert.strictEqual(creds._isSecure(), true);
|
||||
assert.deepStrictEqual(creds._getSettings(), {
|
||||
ca: undefined,
|
||||
cert: [cert, cert],
|
||||
key: [key, key],
|
||||
requestCert: false
|
||||
});
|
||||
});
|
||||
|
||||
it('fails if the second argument is not an Array', () => {
|
||||
assert.throws(() => {
|
||||
ServerCredentials.createSsl(ca, 'test' as any);
|
||||
}, /TypeError: keyCertPairs must be an array/);
|
||||
});
|
||||
|
||||
it('fails if the first argument is a non-Buffer value', () => {
|
||||
assert.throws(() => {
|
||||
ServerCredentials.createSsl('test' as any, []);
|
||||
}, /TypeError: rootCerts must be null or a Buffer/);
|
||||
});
|
||||
|
||||
it('fails if the third argument is a non-boolean value', () => {
|
||||
assert.throws(() => {
|
||||
ServerCredentials.createSsl(ca, [], 'test' as any);
|
||||
}, /TypeError: checkClientCertificate must be a boolean/);
|
||||
});
|
||||
|
||||
it('fails if the array elements are not objects', () => {
|
||||
assert.throws(() => {
|
||||
ServerCredentials.createSsl(ca, ['test'] as any);
|
||||
}, /TypeError: keyCertPair\[0\] must be an object/);
|
||||
|
||||
assert.throws(() => {
|
||||
ServerCredentials.createSsl(ca, [null] as any);
|
||||
}, /TypeError: keyCertPair\[0\] must be an object/);
|
||||
});
|
||||
|
||||
it('fails if the object does not have a Buffer private key', () => {
|
||||
const keyCertPairs: any = [{private_key: 'test', cert_chain: cert}];
|
||||
|
||||
assert.throws(() => {
|
||||
ServerCredentials.createSsl(null, keyCertPairs);
|
||||
}, /TypeError: keyCertPair\[0\].private_key must be a Buffer/);
|
||||
});
|
||||
|
||||
it('fails if the object does not have a Buffer cert chain', () => {
|
||||
const keyCertPairs: any = [{private_key: key, cert_chain: 'test'}];
|
||||
|
||||
assert.throws(() => {
|
||||
ServerCredentials.createSsl(null, keyCertPairs);
|
||||
}, /TypeError: keyCertPair\[0\].cert_chain must be a Buffer/);
|
||||
});
|
||||
});
|
||||
});
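
Outside the test suite, these credentials might be wired into the server roughly like this sketch (the certificate paths are placeholders):

import {readFileSync} from 'fs';
import {Server, ServerCredentials} from '../src';

const creds = ServerCredentials.createSsl(
    readFileSync('ca.pem'),  // or null if no client-verification roots are needed
    [{private_key: readFileSync('server.key'), cert_chain: readFileSync('server.pem')}],
    false);  // checkClientCertificate

const server = new Server();
server.bindAsync('localhost:0', creds, (err, port) => {
  if (err) throw err;
  server.start();
});
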
|
|
@ -0,0 +1,619 @@
|
|||
/*
|
||||
* Copyright 2019 gRPC authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
// Allow `any` data type for testing runtime type checking.
|
||||
// tslint:disable no-any
|
||||
import * as assert from 'assert';
|
||||
import {join} from 'path';
|
||||
|
||||
import * as grpc from '../src';
|
||||
import {ServiceError} from '../src/call';
|
||||
import {ServiceClient, ServiceClientConstructor} from '../src/make-client';
|
||||
import {Server} from '../src/server';
|
||||
import {sendUnaryData, ServerDuplexStream, ServerReadableStream, ServerUnaryCall, ServerWritableStream} from '../src/server-call';
|
||||
|
||||
import {loadProtoFile} from './common';
|
||||
|
||||
const protoFile = join(__dirname, 'fixtures', 'test_service.proto');
|
||||
const testServiceDef = loadProtoFile(protoFile);
|
||||
const testServiceClient =
|
||||
testServiceDef.TestService as ServiceClientConstructor;
|
||||
const clientInsecureCreds = grpc.credentials.createInsecure();
|
||||
const serverInsecureCreds = grpc.ServerCredentials.createInsecure();
|
||||
|
||||
|
||||
describe('Client malformed response handling', () => {
|
||||
let server: Server;
|
||||
let client: ServiceClient;
|
||||
const badArg = Buffer.from([0xFF]);
|
||||
|
||||
before((done) => {
|
||||
const malformedTestService = {
|
||||
unary: {
|
||||
path: '/TestService/Unary',
|
||||
requestStream: false,
|
||||
responseStream: false,
|
||||
requestDeserialize: identity,
|
||||
responseSerialize: identity
|
||||
},
|
||||
clientStream: {
|
||||
path: '/TestService/ClientStream',
|
||||
requestStream: true,
|
||||
responseStream: false,
|
||||
requestDeserialize: identity,
|
||||
responseSerialize: identity
|
||||
},
|
||||
serverStream: {
|
||||
path: '/TestService/ServerStream',
|
||||
requestStream: false,
|
||||
responseStream: true,
|
||||
requestDeserialize: identity,
|
||||
responseSerialize: identity
|
||||
},
|
||||
bidiStream: {
|
||||
path: '/TestService/BidiStream',
|
||||
requestStream: true,
|
||||
responseStream: true,
|
||||
requestDeserialize: identity,
|
||||
responseSerialize: identity
|
||||
}
|
||||
} as any;
|
||||
|
||||
server = new Server();
|
||||
|
||||
server.addService(malformedTestService, {
|
||||
unary(call: ServerUnaryCall<any, any>, cb: sendUnaryData<any>) {
|
||||
cb(null, badArg);
|
||||
},
|
||||
|
||||
clientStream(
|
||||
stream: ServerReadableStream<any, any>, cb: sendUnaryData<any>) {
|
||||
stream.on('data', noop);
|
||||
stream.on('end', () => {
|
||||
cb(null, badArg);
|
||||
});
|
||||
},
|
||||
|
||||
serverStream(stream: ServerWritableStream<any, any>) {
|
||||
stream.write(badArg);
|
||||
stream.end();
|
||||
},
|
||||
|
||||
bidiStream(stream: ServerDuplexStream<any, any>) {
|
||||
stream.on('data', () => {
|
||||
// Ignore requests
|
||||
stream.write(badArg);
|
||||
});
|
||||
|
||||
stream.on('end', () => {
|
||||
stream.end();
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
server.bindAsync('localhost:0', serverInsecureCreds, (err, port) => {
|
||||
assert.ifError(err);
|
||||
client = new testServiceClient(`localhost:${port}`, clientInsecureCreds);
|
||||
server.start();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
after((done) => {
|
||||
client.close();
|
||||
server.tryShutdown(done);
|
||||
});
|
||||
|
||||
it('should get an INTERNAL status with a unary call', (done) => {
|
||||
client.unary({}, (err: ServiceError, data: any) => {
|
||||
assert(err);
|
||||
assert.strictEqual(err.code, grpc.status.INTERNAL);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should get an INTERNAL status with a client stream call', (done) => {
|
||||
const call = client.clientStream((err: ServiceError, data: any) => {
|
||||
assert(err);
|
||||
assert.strictEqual(err.code, grpc.status.INTERNAL);
|
||||
done();
|
||||
});
|
||||
|
||||
call.write({});
|
||||
call.end();
|
||||
});
|
||||
|
||||
it('should get an INTERNAL status with a server stream call', (done) => {
|
||||
const call = client.serverStream({});
|
||||
|
||||
call.on('data', noop);
|
||||
call.on('error', (err: ServiceError) => {
|
||||
assert(err);
|
||||
assert.strictEqual(err.code, grpc.status.INTERNAL);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Server serialization failure handling', () => {
|
||||
let client: ServiceClient;
|
||||
let server: Server;
|
||||
|
||||
before((done) => {
|
||||
function serializeFail(obj: any) {
|
||||
throw new Error('Serialization failed');
|
||||
}
|
||||
|
||||
const malformedTestService = {
|
||||
unary: {
|
||||
path: '/TestService/Unary',
|
||||
requestStream: false,
|
||||
responseStream: false,
|
||||
requestDeserialize: identity,
|
||||
responseSerialize: serializeFail
|
||||
},
|
||||
clientStream: {
|
||||
path: '/TestService/ClientStream',
|
||||
requestStream: true,
|
||||
responseStream: false,
|
||||
requestDeserialize: identity,
|
||||
responseSerialize: serializeFail
|
||||
},
|
||||
serverStream: {
|
||||
path: '/TestService/ServerStream',
|
||||
requestStream: false,
|
||||
responseStream: true,
|
||||
requestDeserialize: identity,
|
||||
responseSerialize: serializeFail
|
||||
},
|
||||
bidiStream: {
|
||||
path: '/TestService/BidiStream',
|
||||
requestStream: true,
|
||||
responseStream: true,
|
||||
requestDeserialize: identity,
|
||||
responseSerialize: serializeFail
|
||||
}
|
||||
};
|
||||
|
||||
server = new Server();
|
||||
server.addService(malformedTestService as any, {
|
||||
unary(call: ServerUnaryCall<any, any>, cb: sendUnaryData<any>) {
|
||||
cb(null, {});
|
||||
},
|
||||
|
||||
clientStream(
|
||||
stream: ServerReadableStream<any, any>, cb: sendUnaryData<any>) {
|
||||
stream.on('data', noop);
|
||||
stream.on('end', () => {
|
||||
cb(null, {});
|
||||
});
|
||||
},
|
||||
|
||||
serverStream(stream: ServerWritableStream<any, any>) {
|
||||
stream.write({});
|
||||
stream.end();
|
||||
},
|
||||
|
||||
bidiStream(stream: ServerDuplexStream<any, any>) {
|
||||
stream.on('data', () => {
|
||||
// Ignore requests
|
||||
stream.write({});
|
||||
});
|
||||
stream.on('end', () => {
|
||||
stream.end();
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
server.bindAsync('localhost:0', serverInsecureCreds, (err, port) => {
|
||||
assert.ifError(err);
|
||||
client = new testServiceClient(`localhost:${port}`, clientInsecureCreds);
|
||||
server.start();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
after((done) => {
|
||||
client.close();
|
||||
server.tryShutdown(done);
|
||||
});
|
||||
|
||||
it('should get an INTERNAL status with a unary call', (done) => {
|
||||
client.unary({}, (err: ServiceError, data: any) => {
|
||||
assert(err);
|
||||
assert.strictEqual(err.code, grpc.status.INTERNAL);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should get an INTERNAL status with a client stream call', (done) => {
|
||||
const call = client.clientStream((err: ServiceError, data: any) => {
|
||||
assert(err);
|
||||
assert.strictEqual(err.code, grpc.status.INTERNAL);
|
||||
done();
|
||||
});
|
||||
|
||||
call.write({});
|
||||
call.end();
|
||||
});
|
||||
|
||||
it('should get an INTERNAL status with a server stream call', (done) => {
|
||||
const call = client.serverStream({});
|
||||
|
||||
call.on('data', noop);
|
||||
call.on('error', (err: ServiceError) => {
|
||||
assert(err);
|
||||
assert.strictEqual(err.code, grpc.status.INTERNAL);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
describe('Other conditions', () => {
|
||||
let client: ServiceClient;
|
||||
let server: Server;
|
||||
let port: number;
|
||||
|
||||
before((done) => {
|
||||
const trailerMetadata = new grpc.Metadata();
|
||||
|
||||
server = new Server();
|
||||
trailerMetadata.add('trailer-present', 'yes');
|
||||
|
||||
server.addService(testServiceClient.service, {
|
||||
unary(call: ServerUnaryCall<any, any>, cb: sendUnaryData<any>) {
|
||||
const req = call.request;
|
||||
|
||||
if (req.error) {
|
||||
const details = req.message || 'Requested error';
|
||||
|
||||
cb({code: grpc.status.UNKNOWN, details} as ServiceError, null,
|
||||
trailerMetadata);
|
||||
} else {
|
||||
cb(null, {count: 1}, trailerMetadata);
|
||||
}
|
||||
},
|
||||
|
||||
clientStream(
|
||||
stream: ServerReadableStream<any, any>, cb: sendUnaryData<any>) {
|
||||
let count = 0;
|
||||
let errored = false;
|
||||
|
||||
stream.on('data', (data: any) => {
|
||||
if (data.error) {
|
||||
const message = data.message || 'Requested error';
|
||||
errored = true;
|
||||
cb(new Error(message) as ServiceError, null, trailerMetadata);
|
||||
} else {
|
||||
count++;
|
||||
}
|
||||
});
|
||||
|
||||
stream.on('end', () => {
|
||||
if (!errored) {
|
||||
cb(null, {count}, trailerMetadata);
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
serverStream(stream: ServerWritableStream<any, any>) {
|
||||
const req = stream.request;
|
||||
|
||||
if (req.error) {
|
||||
stream.emit('error', {
|
||||
code: grpc.status.UNKNOWN,
|
||||
details: req.message || 'Requested error',
|
||||
metadata: trailerMetadata
|
||||
});
|
||||
} else {
|
||||
for (let i = 0; i < 5; i++) {
|
||||
stream.write({count: i});
|
||||
}
|
||||
|
||||
stream.end(trailerMetadata);
|
||||
}
|
||||
},
|
||||
|
||||
bidiStream(stream: ServerDuplexStream<any, any>) {
|
||||
let count = 0;
|
||||
stream.on('data', (data: any) => {
|
||||
if (data.error) {
|
||||
const message = data.message || 'Requested error';
|
||||
const err = new Error(message) as ServiceError;
|
||||
|
||||
err.metadata = trailerMetadata.clone();
|
||||
err.metadata.add('count', '' + count);
|
||||
stream.emit('error', err);
|
||||
} else {
|
||||
stream.write({count});
|
||||
count++;
|
||||
}
|
||||
});
|
||||
|
||||
stream.on('end', () => {
|
||||
stream.end(trailerMetadata);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
server.bindAsync('localhost:0', serverInsecureCreds, (err, _port) => {
|
||||
assert.ifError(err);
|
||||
port = _port;
|
||||
client = new testServiceClient(`localhost:${port}`, clientInsecureCreds);
|
||||
server.start();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
after((done) => {
|
||||
client.close();
|
||||
server.tryShutdown(done);
|
||||
});
|
||||
|
||||
describe('Server receiving bad input', () => {
|
||||
let misbehavingClient: ServiceClient;
|
||||
const badArg = Buffer.from([0xFF]);
|
||||
|
||||
before(() => {
|
||||
const testServiceAttrs = {
|
||||
unary: {
|
||||
path: '/TestService/Unary',
|
||||
requestStream: false,
|
||||
responseStream: false,
|
||||
requestSerialize: identity,
|
||||
responseDeserialize: identity
|
||||
},
|
||||
clientStream: {
|
||||
path: '/TestService/ClientStream',
|
||||
requestStream: true,
|
||||
responseStream: false,
|
||||
requestSerialize: identity,
|
||||
responseDeserialize: identity
|
||||
},
|
||||
serverStream: {
|
||||
path: '/TestService/ServerStream',
|
||||
requestStream: false,
|
||||
responseStream: true,
|
||||
requestSerialize: identity,
|
||||
responseDeserialize: identity
|
||||
},
|
||||
bidiStream: {
|
||||
path: '/TestService/BidiStream',
|
||||
requestStream: true,
|
||||
responseStream: true,
|
||||
requestSerialize: identity,
|
||||
responseDeserialize: identity
|
||||
}
|
||||
} as any;
|
||||
|
||||
const client =
|
||||
grpc.makeGenericClientConstructor(testServiceAttrs, 'TestService');
|
||||
|
||||
misbehavingClient = new client(`localhost:${port}`, clientInsecureCreds);
|
||||
});
|
||||
|
||||
after(() => {
|
||||
misbehavingClient.close();
|
||||
});
|
||||
|
||||
it('should respond correctly to a unary call', (done) => {
|
||||
misbehavingClient.unary(badArg, (err: ServiceError, data: any) => {
|
||||
assert(err);
|
||||
assert.strictEqual(err.code, grpc.status.INTERNAL);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should respond correctly to a client stream', (done) => {
|
||||
const call =
|
||||
misbehavingClient.clientStream((err: ServiceError, data: any) => {
|
||||
assert(err);
|
||||
assert.strictEqual(err.code, grpc.status.INTERNAL);
|
||||
done();
|
||||
});
|
||||
|
||||
call.write(badArg);
|
||||
call.end();
|
||||
});
|
||||
|
||||
it('should respond correctly to a server stream', (done) => {
|
||||
const call = misbehavingClient.serverStream(badArg);
|
||||
|
||||
call.on('data', (data: any) => {
|
||||
assert.fail(data);
|
||||
});
|
||||
|
||||
call.on('error', (err: ServiceError) => {
|
||||
assert(err);
|
||||
assert.strictEqual(err.code, grpc.status.INTERNAL);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Trailing metadata', () => {
|
||||
it('should be present when a unary call succeeds', (done) => {
|
||||
let count = 0;
|
||||
const call =
|
||||
client.unary({error: false}, (err: ServiceError, data: any) => {
|
||||
assert.ifError(err);
|
||||
|
||||
count++;
|
||||
if (count === 2) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
call.on('status', (status: grpc.StatusObject) => {
|
||||
assert.deepStrictEqual(status.metadata.get('trailer-present'), ['yes']);
|
||||
|
||||
count++;
|
||||
if (count === 2) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('should be present when a unary call fails', (done) => {
|
||||
let count = 0;
|
||||
const call =
|
||||
client.unary({error: true}, (err: ServiceError, data: any) => {
|
||||
assert(err);
|
||||
|
||||
count++;
|
||||
if (count === 2) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
call.on('status', (status: grpc.StatusObject) => {
|
||||
assert.deepStrictEqual(status.metadata.get('trailer-present'), ['yes']);
|
||||
|
||||
count++;
|
||||
if (count === 2) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('should be present when a client stream call succeeds', (done) => {
|
||||
let count = 0;
|
||||
const call = client.clientStream((err: ServiceError, data: any) => {
|
||||
assert.ifError(err);
|
||||
|
||||
count++;
|
||||
if (count === 2) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
call.write({error: false});
|
||||
call.write({error: false});
|
||||
call.end();
|
||||
|
||||
call.on('status', (status: grpc.StatusObject) => {
|
||||
assert.deepStrictEqual(status.metadata.get('trailer-present'), ['yes']);
|
||||
|
||||
count++;
|
||||
if (count === 2) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('should be present when a client stream call fails', (done) => {
|
||||
let count = 0;
|
||||
const call = client.clientStream((err: ServiceError, data: any) => {
|
||||
assert(err);
|
||||
|
||||
count++;
|
||||
if (count === 2) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
call.write({error: false});
|
||||
call.write({error: true});
|
||||
call.end();
|
||||
|
||||
call.on('status', (status: grpc.StatusObject) => {
|
||||
assert.deepStrictEqual(status.metadata.get('trailer-present'), ['yes']);
|
||||
|
||||
count++;
|
||||
if (count === 2) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('should be present when a server stream call succeeds', (done) => {
|
||||
const call = client.serverStream({error: false});
|
||||
|
||||
call.on('data', noop);
|
||||
call.on('status', (status: grpc.StatusObject) => {
|
||||
assert.strictEqual(status.code, grpc.status.OK);
|
||||
assert.deepStrictEqual(status.metadata.get('trailer-present'), ['yes']);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should be present when a server stream call fails', (done) => {
|
||||
const call = client.serverStream({error: true});
|
||||
|
||||
call.on('data', noop);
|
||||
call.on('error', (error: ServiceError) => {
|
||||
assert.deepStrictEqual(error.metadata.get('trailer-present'), ['yes']);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Error object should contain the status', () => {
|
||||
it('for a unary call', (done) => {
|
||||
client.unary({error: true}, (err: ServiceError, data: any) => {
|
||||
assert(err);
|
||||
assert.strictEqual(err.code, grpc.status.UNKNOWN);
|
||||
assert.strictEqual(err.details, 'Requested error');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('for a client stream call', (done) => {
|
||||
const call = client.clientStream((err: ServiceError, data: any) => {
|
||||
assert(err);
|
||||
assert.strictEqual(err.code, grpc.status.UNKNOWN);
|
||||
assert.strictEqual(err.details, 'Requested error');
|
||||
done();
|
||||
});
|
||||
|
||||
call.write({error: false});
|
||||
call.write({error: true});
|
||||
call.end();
|
||||
});
|
||||
|
||||
it('for a server stream call', (done) => {
|
||||
const call = client.serverStream({error: true});
|
||||
|
||||
call.on('data', noop);
|
||||
call.on('error', (error: ServiceError) => {
|
||||
assert.strictEqual(error.code, grpc.status.UNKNOWN);
|
||||
assert.strictEqual(error.details, 'Requested error');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('for a UTF-8 error message', (done) => {
|
||||
client.unary(
|
||||
{error: true, message: '測試字符串'},
|
||||
(err: ServiceError, data: any) => {
|
||||
assert(err);
|
||||
assert.strictEqual(err.code, grpc.status.UNKNOWN);
|
||||
assert.strictEqual(err.details, '測試字符串');
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
function identity(arg: any): any {
|
||||
return arg;
|
||||
}
|
||||
|
||||
|
||||
function noop(): void {}
|
|
@ -0,0 +1,399 @@
|
|||
/*
|
||||
* Copyright 2019 gRPC authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
// Allow `any` data type for testing runtime type checking.
|
||||
// tslint:disable no-any
|
||||
import * as assert from 'assert';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
import * as grpc from '../src';
|
||||
import {ServerCredentials} from '../src';
|
||||
import {ServiceError} from '../src/call';
|
||||
import {ServiceClient, ServiceClientConstructor} from '../src/make-client';
|
||||
import {Server} from '../src/server';
|
||||
import {sendUnaryData, ServerDuplexStream, ServerReadableStream, ServerUnaryCall, ServerWritableStream} from '../src/server-call';
|
||||
|
||||
import {loadProtoFile} from './common';
|
||||
|
||||
const ca = fs.readFileSync(path.join(__dirname, 'fixtures', 'ca.pem'));
|
||||
const key = fs.readFileSync(path.join(__dirname, 'fixtures', 'server1.key'));
|
||||
const cert = fs.readFileSync(path.join(__dirname, 'fixtures', 'server1.pem'));
|
||||
function noop(): void {}
|
||||
|
||||
|
||||
describe('Server', () => {
|
||||
describe('constructor', () => {
|
||||
it('should work with no arguments', () => {
|
||||
assert.doesNotThrow(() => {
|
||||
new Server(); // tslint:disable-line:no-unused-expression
|
||||
});
|
||||
});
|
||||
|
||||
it('should work with an empty object argument', () => {
|
||||
assert.doesNotThrow(() => {
|
||||
new Server({}); // tslint:disable-line:no-unused-expression
|
||||
});
|
||||
});
|
||||
|
||||
it('should be an instance of Server', () => {
|
||||
const server = new Server();
|
||||
|
||||
assert(server instanceof Server);
|
||||
});
|
||||
});
|
||||
|
||||
describe('bindAsync', () => {
|
||||
it('binds with insecure credentials', (done) => {
|
||||
const server = new Server();
|
||||
|
||||
server.bindAsync(
|
||||
'localhost:0', ServerCredentials.createInsecure(), (err, port) => {
|
||||
assert.ifError(err);
|
||||
assert(typeof port === 'number' && port > 0);
|
||||
server.tryShutdown(done);
|
||||
});
|
||||
});
|
||||
|
||||
it('binds with secure credentials', (done) => {
|
||||
const server = new Server();
|
||||
const creds = ServerCredentials.createSsl(
|
||||
ca, [{private_key: key, cert_chain: cert}], true);
|
||||
|
||||
server.bindAsync('localhost:0', creds, (err, port) => {
|
||||
assert.ifError(err);
|
||||
assert(typeof port === 'number' && port > 0);
|
||||
server.tryShutdown(done);
|
||||
});
|
||||
});
|
||||
|
||||
it('throws if bind is called after the server is started', () => {
|
||||
const server = new Server();
|
||||
|
||||
server.bindAsync(
|
||||
'localhost:0', ServerCredentials.createInsecure(), (err, port) => {
|
||||
assert.ifError(err);
|
||||
server.start();
|
||||
assert.throws(() => {
|
||||
server.bindAsync(
|
||||
'localhost:0', ServerCredentials.createInsecure(), noop);
|
||||
}, /server is already started/);
|
||||
});
|
||||
});
|
||||
|
||||
it('throws on invalid inputs', () => {
|
||||
const server = new Server();
|
||||
|
||||
assert.throws(() => {
|
||||
server.bindAsync(null as any, ServerCredentials.createInsecure(), noop);
|
||||
}, /port must be a string/);
|
||||
|
||||
assert.throws(() => {
|
||||
server.bindAsync('localhost:0', null as any, noop);
|
||||
}, /creds must be an object/);
|
||||
|
||||
assert.throws(() => {
|
||||
server.bindAsync(
|
||||
'localhost:0', ServerCredentials.createInsecure(), null as any);
|
||||
}, /callback must be a function/);
|
||||
});
|
||||
});
|
||||
|
||||
describe('tryShutdown', () => {
|
||||
it('calls back with an error if the server is not running', (done) => {
|
||||
const server = new Server();
|
||||
|
||||
server.tryShutdown((err) => {
|
||||
assert(err !== undefined && err.message === 'server is not running');
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('start', () => {
|
||||
let server: Server;
|
||||
|
||||
beforeEach((done) => {
|
||||
server = new Server();
|
||||
server.bindAsync('localhost:0', ServerCredentials.createInsecure(), done);
|
||||
});
|
||||
|
||||
afterEach((done) => {
|
||||
server.tryShutdown(done);
|
||||
});
|
||||
|
||||
it('starts without error', () => {
|
||||
assert.doesNotThrow(() => {
|
||||
server.start();
|
||||
});
|
||||
});
|
||||
|
||||
it('throws if started twice', () => {
|
||||
server.start();
|
||||
assert.throws(() => {
|
||||
server.start();
|
||||
}, /server is already started/);
|
||||
});
|
||||
|
||||
it('throws if the server is not bound', () => {
|
||||
const server = new Server();
|
||||
|
||||
assert.throws(() => {
|
||||
server.start();
|
||||
}, /server must be bound in order to start/);
|
||||
});
|
||||
});
|
||||
|
||||
describe('addService', () => {
|
||||
const mathProtoFile = path.join(__dirname, 'fixtures', 'math.proto');
|
||||
const mathClient = (loadProtoFile(mathProtoFile).math as any).Math;
|
||||
const mathServiceAttrs = mathClient.service;
|
||||
const dummyImpls = {div() {}, divMany() {}, fib() {}, sum() {}};
|
||||
const altDummyImpls = {Div() {}, DivMany() {}, Fib() {}, Sum() {}};
|
||||
|
||||
it('succeeds with a single service', () => {
|
||||
const server = new Server();
|
||||
|
||||
assert.doesNotThrow(() => {
|
||||
server.addService(mathServiceAttrs, dummyImpls);
|
||||
});
|
||||
});
|
||||
|
||||
it('fails to add an empty service', () => {
|
||||
const server = new Server();
|
||||
|
||||
assert.throws(() => {
|
||||
server.addService({}, dummyImpls);
|
||||
}, /Cannot add an empty service to a server/);
|
||||
});
|
||||
|
||||
it('fails with conflicting method names', () => {
|
||||
const server = new Server();
|
||||
|
||||
server.addService(mathServiceAttrs, dummyImpls);
|
||||
assert.throws(() => {
|
||||
server.addService(mathServiceAttrs, dummyImpls);
|
||||
}, /Method handler for .+ already provided/);
|
||||
});
|
||||
|
||||
it('supports method names as originally written', () => {
|
||||
const server = new Server();
|
||||
|
||||
assert.doesNotThrow(() => {
|
||||
server.addService(mathServiceAttrs, altDummyImpls);
|
||||
});
|
||||
});
|
||||
|
||||
it('fails if the server has been started', (done) => {
|
||||
const server = new Server();
|
||||
|
||||
server.bindAsync(
|
||||
'localhost:0', ServerCredentials.createInsecure(), (err, port) => {
|
||||
assert.ifError(err);
|
||||
server.start();
|
||||
assert.throws(() => {
|
||||
server.addService(mathServiceAttrs, dummyImpls);
|
||||
}, /Can't add a service to a started server\./);
|
||||
server.tryShutdown(done);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('throws when unimplemented methods are called', () => {
|
||||
const server = new Server();
|
||||
|
||||
assert.throws(() => {
|
||||
server.addProtoService();
|
||||
}, /Not implemented. Use addService\(\) instead/);
|
||||
|
||||
assert.throws(() => {
|
||||
server.forceShutdown();
|
||||
}, /Not yet implemented/);
|
||||
|
||||
assert.throws(() => {
|
||||
server.addHttp2Port();
|
||||
}, /Not yet implemented/);
|
||||
|
||||
assert.throws(() => {
|
||||
server.bind('localhost:0', ServerCredentials.createInsecure());
|
||||
}, /Not implemented. Use bindAsync\(\) instead/);
|
||||
});
|
||||
|
||||
describe('Default handlers', () => {
|
||||
let server: Server;
|
||||
let client: ServiceClient;
|
||||
|
||||
const mathProtoFile = path.join(__dirname, 'fixtures', 'math.proto');
|
||||
const mathClient = (loadProtoFile(mathProtoFile).math as any).Math;
|
||||
const mathServiceAttrs = mathClient.service;
|
||||
|
||||
beforeEach((done) => {
|
||||
server = new Server();
|
||||
server.addService(mathServiceAttrs, {});
|
||||
server.bindAsync(
|
||||
'localhost:0', ServerCredentials.createInsecure(), (err, port) => {
|
||||
assert.ifError(err);
|
||||
client = new mathClient(
|
||||
`localhost:${port}`, grpc.credentials.createInsecure());
|
||||
server.start();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should respond to a unary call with UNIMPLEMENTED', (done) => {
|
||||
client.div(
|
||||
{divisor: 4, dividend: 3}, (error: ServiceError, response: any) => {
|
||||
assert(error);
|
||||
assert.strictEqual(error.code, grpc.status.UNIMPLEMENTED);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should respond to a client stream with UNIMPLEMENTED', (done) => {
|
||||
const call = client.sum((error: ServiceError, response: any) => {
|
||||
assert(error);
|
||||
assert.strictEqual(error.code, grpc.status.UNIMPLEMENTED);
|
||||
done();
|
||||
});
|
||||
|
||||
call.end();
|
||||
});
|
||||
|
||||
it('should respond to a server stream with UNIMPLEMENTED', (done) => {
|
||||
const call = client.fib({limit: 5});
|
||||
|
||||
call.on('data', (value: any) => {
|
||||
assert.fail('No messages expected');
|
||||
});
|
||||
|
||||
call.on('error', (err: ServiceError) => {
|
||||
assert(err);
|
||||
assert.strictEqual(err.code, grpc.status.UNIMPLEMENTED);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Echo service', () => {
|
||||
let server: Server;
|
||||
let client: ServiceClient;
|
||||
|
||||
before((done) => {
|
||||
const protoFile = path.join(__dirname, 'fixtures', 'echo_service.proto');
|
||||
const echoService =
|
||||
loadProtoFile(protoFile).EchoService as ServiceClientConstructor;
|
||||
|
||||
server = new Server();
|
||||
server.addService(echoService.service, {
|
||||
echo(call: ServerUnaryCall<any, any>, callback: sendUnaryData<any>) {
|
||||
callback(null, call.request);
|
||||
}
|
||||
});
|
||||
|
||||
server.bindAsync(
|
||||
'localhost:0', ServerCredentials.createInsecure(), (err, port) => {
|
||||
assert.ifError(err);
|
||||
client = new echoService(
|
||||
`localhost:${port}`, grpc.credentials.createInsecure());
|
||||
server.start();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
after((done) => {
|
||||
client.close();
|
||||
server.tryShutdown(done);
|
||||
});
|
||||
|
||||
it('should echo the received message directly', (done) => {
|
||||
client.echo(
|
||||
{value: 'test value', value2: 3},
|
||||
(error: ServiceError, response: any) => {
|
||||
assert.ifError(error);
|
||||
assert.deepStrictEqual(response, {value: 'test value', value2: 3});
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Generic client and server', () => {
|
||||
function toString(val: any) {
|
||||
return val.toString();
|
||||
}
|
||||
|
||||
function toBuffer(str: string) {
|
||||
return Buffer.from(str);
|
||||
}
|
||||
|
||||
function capitalize(str: string) {
|
||||
return str.charAt(0).toUpperCase() + str.slice(1);
|
||||
}
|
||||
|
||||
const stringServiceAttrs = {
|
||||
capitalize: {
|
||||
path: '/string/capitalize',
|
||||
requestStream: false,
|
||||
responseStream: false,
|
||||
requestSerialize: toBuffer,
|
||||
requestDeserialize: toString,
|
||||
responseSerialize: toBuffer,
|
||||
responseDeserialize: toString
|
||||
}
|
||||
};
|
||||
|
||||
describe('String client and server', () => {
|
||||
let client: ServiceClient;
|
||||
let server: Server;
|
||||
|
||||
before((done) => {
|
||||
server = new Server();
|
||||
|
||||
server.addService(stringServiceAttrs as any, {
|
||||
capitalize(
|
||||
call: ServerUnaryCall<any, any>, callback: sendUnaryData<any>) {
|
||||
callback(null, capitalize(call.request));
|
||||
}
|
||||
});
|
||||
|
||||
server.bindAsync(
|
||||
'localhost:0', ServerCredentials.createInsecure(), (err, port) => {
|
||||
assert.ifError(err);
|
||||
server.start();
|
||||
const clientConstr = grpc.makeGenericClientConstructor(
|
||||
stringServiceAttrs as any,
|
||||
'unused_but_lets_appease_typescript_anyway');
|
||||
client = new clientConstr(
|
||||
`localhost:${port}`, grpc.credentials.createInsecure());
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
after((done) => {
|
||||
client.close();
|
||||
server.tryShutdown(done);
|
||||
});
|
||||
|
||||
it('Should respond with a capitalized string', (done) => {
|
||||
client.capitalize('abc', (err: ServiceError, response: string) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(response, 'Abc');
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@ -91,7 +91,10 @@
|
|||
'GPR_BACKWARDS_COMPATIBILITY_MODE',
|
||||
'GRPC_ARES=0',
|
||||
'GRPC_UV',
|
||||
'GRPC_NODE_VERSION="1.19.0-pre1"'
|
||||
'GRPC_NODE_VERSION="1.20.3"'
|
||||
],
|
||||
'defines!': [
|
||||
'OPENSSL_THREADS'
|
||||
],
|
||||
'conditions': [
|
||||
['grpc_gcov=="true"', {
|
||||
|
@ -725,7 +728,6 @@
|
|||
'deps/grpc/src/core/lib/iomgr/udp_server.cc',
|
||||
'deps/grpc/src/core/lib/iomgr/unix_sockets_posix.cc',
|
||||
'deps/grpc/src/core/lib/iomgr/unix_sockets_posix_noop.cc',
|
||||
'deps/grpc/src/core/lib/iomgr/wakeup_fd_cv.cc',
|
||||
'deps/grpc/src/core/lib/iomgr/wakeup_fd_eventfd.cc',
|
||||
'deps/grpc/src/core/lib/iomgr/wakeup_fd_nospecial.cc',
|
||||
'deps/grpc/src/core/lib/iomgr/wakeup_fd_pipe.cc',
|
||||
|
@ -765,7 +767,6 @@
|
|||
'deps/grpc/src/core/lib/transport/metadata.cc',
|
||||
'deps/grpc/src/core/lib/transport/metadata_batch.cc',
|
||||
'deps/grpc/src/core/lib/transport/pid_controller.cc',
|
||||
'deps/grpc/src/core/lib/transport/service_config.cc',
|
||||
'deps/grpc/src/core/lib/transport/static_metadata.cc',
|
||||
'deps/grpc/src/core/lib/transport/status_conversion.cc',
|
||||
'deps/grpc/src/core/lib/transport/status_metadata.cc',
|
||||
|
@ -821,6 +822,7 @@
|
|||
'deps/grpc/src/core/lib/security/credentials/plugin/plugin_credentials.cc',
|
||||
'deps/grpc/src/core/lib/security/credentials/ssl/ssl_credentials.cc',
|
||||
'deps/grpc/src/core/lib/security/credentials/tls/grpc_tls_credentials_options.cc',
|
||||
'deps/grpc/src/core/lib/security/credentials/tls/spiffe_credentials.cc',
|
||||
'deps/grpc/src/core/lib/security/security_connector/alts/alts_security_connector.cc',
|
||||
'deps/grpc/src/core/lib/security/security_connector/fake/fake_security_connector.cc',
|
||||
'deps/grpc/src/core/lib/security/security_connector/load_system_roots_fallback.cc',
|
||||
|
@ -829,6 +831,7 @@
|
|||
'deps/grpc/src/core/lib/security/security_connector/security_connector.cc',
|
||||
'deps/grpc/src/core/lib/security/security_connector/ssl/ssl_security_connector.cc',
|
||||
'deps/grpc/src/core/lib/security/security_connector/ssl_utils.cc',
|
||||
'deps/grpc/src/core/lib/security/security_connector/tls/spiffe_security_connector.cc',
|
||||
'deps/grpc/src/core/lib/security/transport/client_auth_filter.cc',
|
||||
'deps/grpc/src/core/lib/security/transport/secure_endpoint.cc',
|
||||
'deps/grpc/src/core/lib/security/transport/security_handshaker.cc',
|
||||
|
@ -893,12 +896,13 @@
|
|||
'deps/grpc/src/core/ext/filters/client_channel/parse_address.cc',
|
||||
'deps/grpc/src/core/ext/filters/client_channel/proxy_mapper.cc',
|
||||
'deps/grpc/src/core/ext/filters/client_channel/proxy_mapper_registry.cc',
|
||||
'deps/grpc/src/core/ext/filters/client_channel/request_routing.cc',
|
||||
'deps/grpc/src/core/ext/filters/client_channel/resolver.cc',
|
||||
'deps/grpc/src/core/ext/filters/client_channel/resolver_registry.cc',
|
||||
'deps/grpc/src/core/ext/filters/client_channel/resolver_result_parsing.cc',
|
||||
'deps/grpc/src/core/ext/filters/client_channel/resolving_lb_policy.cc',
|
||||
'deps/grpc/src/core/ext/filters/client_channel/retry_throttle.cc',
|
||||
'deps/grpc/src/core/ext/filters/client_channel/server_address.cc',
|
||||
'deps/grpc/src/core/ext/filters/client_channel/service_config.cc',
|
||||
'deps/grpc/src/core/ext/filters/client_channel/subchannel.cc',
|
||||
'deps/grpc/src/core/ext/filters/client_channel/subchannel_pool_interface.cc',
|
||||
'deps/grpc/src/core/ext/filters/deadline/deadline_filter.cc',
|
||||
|
@ -963,7 +967,8 @@
|
|||
],
|
||||
'cflags': [
|
||||
'-pthread',
|
||||
'-Wno-error=deprecated-declarations'
|
||||
'-Wno-error=deprecated-declarations',
|
||||
'-Wno-cast-function-type'
|
||||
],
|
||||
"conditions": [
|
||||
['OS=="win" or runtime=="electron"', {
|
||||
|
|
|
@ -1,2 +1,3 @@
|
|||
settings:
|
||||
'#': It's possible to have node_version here as a key to override the core's version.
|
||||
node_version: 1.20.3
|
||||
|
|
|
@ -1 +1 @@
|
|||
Subproject commit 801266f7dc84dd7fc7d0f663a4d28762e7cfb1aa
|
||||
Subproject commit 7741e806a213cba63c96234f16d712a8aa101a49
|
|
@ -289,7 +289,7 @@ NAN_METHOD(Channel::GetConnectivityState) {
|
|||
return Nan::ThrowError(
|
||||
"Cannot call getConnectivityState on a closed Channel");
|
||||
}
|
||||
int try_to_connect = (int)info[0]->Equals(Nan::True());
|
||||
int try_to_connect = (int)info[0]->StrictEquals(Nan::True());
|
||||
info.GetReturnValue().Set(grpc_channel_check_connectivity_state(
|
||||
channel->wrapped_channel, try_to_connect));
|
||||
}
|
||||
|
|
|
@ -194,7 +194,7 @@ NAN_METHOD(ChannelCredentials::CreateSsl) {
|
|||
if (!info[3]->IsObject()) {
|
||||
return Nan::ThrowTypeError("createSsl's fourth argument must be an object");
|
||||
}
|
||||
Local<Object> object = info[3]->ToObject();
|
||||
Local<Object> object = Nan::To<Object>(info[3]).ToLocalChecked();
|
||||
|
||||
Local<Value> checkServerIdentityValue = Nan::Get(object,
|
||||
Nan::New("checkServerIdentity").ToLocalChecked()).ToLocalChecked();
|
||||
|
|
|
@ -66,7 +66,7 @@ void ServerCredentials::Init(Local<Object> exports) {
|
|||
Local<FunctionTemplate> tpl = Nan::New<FunctionTemplate>(New);
|
||||
tpl->SetClassName(Nan::New("ServerCredentials").ToLocalChecked());
|
||||
tpl->InstanceTemplate()->SetInternalFieldCount(1);
|
||||
Local<Function> ctr = tpl->GetFunction();
|
||||
Local<Function> ctr = Nan::GetFunction(tpl).ToLocalChecked();
|
||||
Nan::Set(
|
||||
ctr, Nan::New("createSsl").ToLocalChecked(),
|
||||
Nan::GetFunction(Nan::New<FunctionTemplate>(CreateSsl)).ToLocalChecked());
|
||||
|
|
|
@@ -1,5 +1,5 @@
/*
 * Copyright 2017 gRPC authors.
 * Copyright 2019 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -15,18 +15,13 @@
 *
 */

const _gulp = require('gulp');
const help = require('gulp-help');

// gulp-help monkeypatches tasks to have an additional description parameter
const gulp = help(_gulp);

const jsdoc = require('gulp-jsdoc3');
const jshint = require('gulp-jshint');
const mocha = require('gulp-mocha');
const execa = require('execa');
const path = require('path');
const del = require('del');
import * as gulp from 'gulp';
import * as jsdoc from 'gulp-jsdoc3';
import * as jshint from 'gulp-jshint';
import * as mocha from 'gulp-mocha';
import * as execa from 'execa';
import * as path from 'path';
import * as del from 'del';

const nativeCoreDir = __dirname;
const srcDir = path.resolve(nativeCoreDir, 'src');
@@ -35,44 +30,54 @@ const testDir = path.resolve(nativeCoreDir, 'test');
const pkg = require('./package');
const jshintConfig = pkg.jshintConfig;

gulp.task('clean', 'Delete generated files', () => {
  return del([path.resolve(nativeCoreDir, 'build'),
              path.resolve(nativeCoreDir, 'ext/node')]);
});
const clean = () => del([path.resolve(nativeCoreDir, 'build'),
                         path.resolve(nativeCoreDir, 'ext/node')]);

gulp.task('clean.all', 'Delete all files created by tasks',
          ['clean']);
const cleanAll = gulp.parallel(clean);

gulp.task('install', 'Install native core dependencies', () => {
const install = () => {
  return execa('npm', ['install', '--build-from-source', '--unsafe-perm'],
               {cwd: nativeCoreDir, stdio: 'inherit'});
});
};

gulp.task('install.windows', 'Install native core dependencies for MS Windows', () => {
const installWindows = () => {
  return execa('npm', ['install', '--build-from-source'],
               {cwd: nativeCoreDir, stdio: 'inherit'}).catch(() =>
    del(path.resolve(process.env.USERPROFILE, '.node-gyp', process.versions.node, 'include/node/openssl'), { force: true }).then(() =>
      execa('npm', ['install', '--build-from-source'],
            {cwd: nativeCoreDir, stdio: 'inherit'})
  ))
});
  ));
};

gulp.task('lint', 'Emits linting errors', () => {
const lint = () => {
  return gulp.src([`${nativeCoreDir}/index.js`, `${srcDir}/*.js`, `${testDir}/*.js`])
      .pipe(jshint(pkg.jshintConfig))
      .pipe(jshint.reporter('default'));
});
};

gulp.task('build', 'Build native package', () => {
const build = () => {
  return execa('npm', ['run', 'build'], {cwd: nativeCoreDir, stdio: 'inherit'});
});
};

gulp.task('test', 'Run all tests', ['build'], () => {
const runTests = () => {
  return gulp.src(`${testDir}/*.js`).pipe(mocha({timeout: 5000, reporter: 'mocha-jenkins-reporter'}));
});
}

gulp.task('doc.gen', 'Generate docs', (cb) => {
const test = gulp.series(build, runTests);

const docGen = (cb) => {
  var config = require('./jsdoc_conf.json');
  gulp.src([`${nativeCoreDir}/README.md`, `${nativeCoreDir}/index.js`, `${srcDir}/*.js`], {read: false})
      .pipe(jsdoc(config, cb));
});
  return gulp.src([`${nativeCoreDir}/README.md`, `${nativeCoreDir}/index.js`, `${srcDir}/*.js`], {read: false})
      .pipe(jsdoc(config, cb));
};

export {
  clean,
  cleanAll,
  install,
  installWindows,
  lint,
  build,
  test,
  docGen
};
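
Note on the conversion above: with gulp 4, tasks become plain functions composed with gulp.series()/gulp.parallel() and exposed as module exports, replacing the gulp-help style gulp.task(name, description, deps, fn) registrations. A minimal sketch of that pattern, using hypothetical task bodies rather than this package's real build steps:

    // Minimal gulp 4 sketch (hypothetical tasks, not this repo's actual build).
    import * as gulp from 'gulp';
    import * as execa from 'execa';

    // Each task is an ordinary function returning a Promise or a stream.
    const build = () => execa('npm', ['run', 'build'], {stdio: 'inherit'});
    const runTests = () => execa('npm', ['test'], {stdio: 'inherit'});

    // series() runs tasks in order; parallel() runs them concurrently.
    const test = gulp.series(build, runTests);

    // Exported names become the CLI task names: `gulp build`, `gulp test`.
    export {build, test};
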
@ -963,18 +963,6 @@ declare module "grpc" {
|
|||
* instance.
|
||||
*/
|
||||
compose(callCredentials: CallCredentials): ChannelCredentials;
|
||||
|
||||
/**
|
||||
* Gets the set of per-call credentials associated with this instance.
|
||||
*/
|
||||
getCallCredentials(): CallCredentials;
|
||||
|
||||
/**
|
||||
* Gets a SecureContext object generated from input parameters if this
|
||||
* instance was created with createSsl, or null if this instance was created
|
||||
* with createInsecure.
|
||||
*/
|
||||
getSecureContext(): SecureContext | null;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -1545,7 +1533,7 @@ declare module "grpc" {
|
|||
TRANSIENT_FAILURE = 3,
|
||||
SHUTDOWN = 4
|
||||
}
|
||||
|
||||
|
||||
export class Channel {
|
||||
/**
|
||||
* This constructor API is almost identical to the Client constructor,
|
||||
|
@ -1583,8 +1571,8 @@ declare module "grpc" {
|
|||
watchConnectivityState(currentState: connectivityState, deadline: Date|number, callback: (error?: Error) => void): void;
|
||||
/**
|
||||
* Create a call object. Call is an opaque type that is used by the Client
|
||||
* and Server classes. This function is called by the gRPC library when
|
||||
* starting a request. Implementers should return an instance of Call that
|
||||
* and Server classes. This function is called by the gRPC library when
|
||||
* starting a request. Implementers should return an instance of Call that
|
||||
* is returned from calling createCall on an instance of the provided
|
||||
* Channel class.
|
||||
* @param method The full method string to request.
|
||||
|
@ -1595,5 +1583,5 @@ declare module "grpc" {
|
|||
* that indicates what information to propagate from parentCall.
|
||||
*/
|
||||
createCall(method: string, deadline: Date|number, host: string|null, parentCall: Call|null, propagateFlags: number|null): Call;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -1,6 +1,6 @@
{
  "name": "grpc",
  "version": "1.19.0-pre1",
  "version": "1.20.3",
  "author": "Google Inc.",
  "description": "gRPC Library for Node",
  "homepage": "https://grpc.io/",
@@ -29,10 +29,11 @@
    "node-pre-gyp"
  ],
  "dependencies": {
    "@types/protobufjs": "^5.0.31",
    "lodash.camelcase": "^4.3.0",
    "lodash.clone": "^4.5.0",
    "nan": "^2.0.0",
    "node-pre-gyp": "^0.12.0",
    "nan": "^2.13.2",
    "node-pre-gyp": "^0.13.0",
    "protobufjs": "^5.0.3"
  },
  "devDependencies": {

@@ -376,12 +376,14 @@ function Client(address, credentials, options) {
  }
  self.$interceptors = options.interceptors || [];
  self.$interceptor_providers = options.interceptor_providers || [];
  Object.keys(self.$method_definitions).forEach(method_name => {
    const method_definition = self.$method_definitions[method_name];
    self[method_name].interceptors = client_interceptors
      .resolveInterceptorProviders(self.$interceptor_providers, method_definition)
      .concat(self.$interceptors);
  });
  if (self.$method_definitions) {
    Object.keys(self.$method_definitions).forEach(method_name => {
      const method_definition = self.$method_definitions[method_name];
      self[method_name].interceptors = client_interceptors
        .resolveInterceptorProviders(self.$interceptor_providers, method_definition)
        .concat(self.$interceptors);
    });
  }

  this.$callInvocationTransformer = options.callInvocationTransformer;
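
Note: the new `if (self.$method_definitions)` guard matters for "generic" clients constructed directly from grpc.Client, which have no generated service methods; without it, `Object.keys(undefined)` threw during construction. A rough sketch of that case, assuming the usual grpc import and matching the test added further down:

    // Sketch: constructing a generic client with no service definition.
    import * as grpc from 'grpc';

    const client = new grpc.Client(
        'localhost:50051', grpc.credentials.createInsecure());
    // Calls can still be issued via client.makeUnaryRequest(...) once a method
    // path and serializers are supplied explicitly.
    client.close();
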
@@ -54,6 +54,7 @@ Original error: ${e.message}`;
    error.code = e.code;
    throw error;
  } else {
    e.message = `Failed to load ${binding_path}. ${e.message}`;
    throw e;
  }
}

@@ -85,6 +85,9 @@
        'GRPC_UV',
        'GRPC_NODE_VERSION="${settings.get('node_version', settings.version)}"'
      ],
      'defines!': [
        'OPENSSL_THREADS'
      ],
      'conditions': [
        ['grpc_gcov=="true"', {
          % for arg, prop in [('CPPFLAGS', 'cflags'), ('DEFINES', 'defines'), ('LDFLAGS', 'ldflags')]:
@@ -312,7 +315,8 @@
      ],
      'cflags': [
        '-pthread',
        '-Wno-error=deprecated-declarations'
        '-Wno-error=deprecated-declarations',
        '-Wno-cast-function-type'
      ],
      "conditions": [
        ['OS=="win" or runtime=="electron"', {

@@ -31,10 +31,11 @@
    "node-pre-gyp"
  ],
  "dependencies": {
    "@types/protobufjs": "^5.0.31",
    "lodash.camelcase": "^4.3.0",
    "lodash.clone": "^4.5.0",
    "nan": "^2.0.0",
    "node-pre-gyp": "^0.12.0",
    "nan": "^2.13.2",
    "node-pre-gyp": "^0.13.0",
    "protobufjs": "^5.0.3"
  },
  "devDependencies": {

@@ -220,6 +220,13 @@ describe('Client constructor building', function() {
      assert.strictEqual(Client.prototype.add, Client.prototype.Add);
    });
  });
  describe('Generic client', function() {
    it('Should construct without error', function() {
      assert.doesNotThrow(() => {
        const client = new grpc.Client('localhost: 50051', grpc.credentials.createInsecure());
      });
    });
  });
  describe('waitForClientReady', function() {
    var server;
    var port;
@@ -314,7 +321,7 @@ describe('Echo service', function() {
      });
    });
  });
  describe('Generic client and server', function() {
    describe('Non-protobuf client and server', function() {
      function toString(val) {
        return val.toString();
      }

@@ -14,7 +14,7 @@

set arch_list=ia32 x64

set electron_versions=1.0.0 1.1.0 1.2.0 1.3.0 1.4.0 1.5.0 1.6.0 1.7.0 1.8.0 2.0.0 3.0.0 4.0.0
set electron_versions=1.0.0 1.1.0 1.2.0 1.3.0 1.4.0 1.5.0 1.6.0 1.7.0 1.8.0 2.0.0 3.0.0 3.1.0 4.1.0 5.0.0

set PATH=%PATH%;C:\Program Files\nodejs\;%APPDATA%\npm

@@ -16,7 +16,7 @@
set -ex

arch_list=( ia32 x64 )
electron_versions=( 1.0.0 1.1.0 1.2.0 1.3.0 1.4.0 1.5.0 1.6.0 1.7.0 1.8.0 2.0.0 3.0.0 4.0.0 )
electron_versions=( 1.0.0 1.1.0 1.2.0 1.3.0 1.4.0 1.5.0 1.6.0 1.7.0 1.8.0 2.0.0 3.0.0 3.1.0 4.1.0 5.0.0 )

umask 022

@@ -14,7 +14,7 @@

set arch_list=ia32 x64

set node_versions=4.0.0 5.0.0 6.0.0 7.0.0 8.0.0 9.0.0 10.0.0 11.0.0
set node_versions=4.0.0 5.0.0 6.0.0 7.0.0 8.0.0 9.0.0 10.0.0 11.0.0 12.0.0

set PATH=%PATH%;C:\Program Files\nodejs\;%APPDATA%\npm

@@ -16,7 +16,7 @@
set -ex

arch_list=( ia32 x64 )
node_versions=( 4.0.0 5.0.0 6.0.0 7.0.0 8.0.0 9.0.0 10.0.0 11.0.0 )
node_versions=( 4.0.0 5.0.0 6.0.0 7.0.0 8.0.0 9.0.0 10.0.0 11.0.0 12.0.0 )

while true ; do
  case $1 in

@@ -26,7 +26,7 @@ mkdir -p "${ARTIFACTS_OUT}"

npm update

node_versions=( 4.0.0 5.0.0 6.0.0 7.0.0 8.0.0 9.0.0 10.0.0 11.0.0 )
node_versions=( 4.0.0 5.0.0 6.0.0 7.0.0 8.0.0 9.0.0 10.0.0 11.0.0 12.0.0 )

for version in ${node_versions[@]}
do

@@ -13,6 +13,8 @@ add_subdirectory(${PROTOBUF_ROOT_DIR}/cmake deps/protobuf)
set(protobuf_BUILD_TESTS OFF CACHE BOOL "Build protobuf tests")
set(protobuf_WITH_ZLIB OFF CACHE BOOL "Build protobuf with zlib.")

set(CMAKE_EXE_LINKER_FLAGS "-static-libstdc++")

add_executable(grpc_node_plugin
  src/node_generator.cc
  src/node_plugin.cc

@@ -1,6 +1,6 @@
{
  "name": "grpc-tools",
  "version": "1.7.2",
  "version": "1.7.3",
  "author": "Google Inc.",
  "description": "Tools for developing with gRPC on Node.js",
  "homepage": "https://grpc.io/",

@@ -15,8 +15,7 @@
 *
 */

import * as _gulp from 'gulp';
import * as help from 'gulp-help';
import * as gulp from 'gulp';

import * as fs from 'fs';
import * as mocha from 'gulp-mocha';
@@ -24,9 +23,6 @@ import * as path from 'path';
import * as execa from 'execa';
import * as semver from 'semver';

// gulp-help monkeypatches tasks to have an additional description parameter
const gulp = help(_gulp);

Error.stackTraceLimit = Infinity;

const protojsDir = __dirname;
@@ -40,30 +36,29 @@ const execNpmVerb = (verb: string, ...args: string[]) =>
    execa('npm', [verb, ...args], {cwd: protojsDir, stdio: 'inherit'});
const execNpmCommand = execNpmVerb.bind(null, 'run');

gulp.task('install', 'Install native core dependencies', () =>
  execNpmVerb('install', '--unsafe-perm'));
const install = () => execNpmVerb('install', '--unsafe-perm');

/**
 * Runs tslint on files in src/, with linting rules defined in tslint.json.
 */
gulp.task('lint', 'Emits linting errors found in src/ and test/.', () =>
  execNpmCommand('check'));
const lint = () => execNpmCommand('check');

gulp.task('clean', 'Deletes transpiled code.', ['install'],
  () => execNpmCommand('clean'));
const cleanFiles = () => execNpmCommand('clean');

gulp.task('clean.all', 'Deletes all files added by targets', ['clean']);
const clean = gulp.series(install, cleanFiles);

const cleanAll = gulp.parallel(clean);

/**
 * Transpiles TypeScript files in src/ and test/ to JavaScript according to the settings
 * found in tsconfig.json.
 */
gulp.task('compile', 'Transpiles src/ and test/.', () => execNpmCommand('compile'));
const compile = () => execNpmCommand('compile');

/**
 * Transpiles src/ and test/, and then runs all tests.
 */
gulp.task('test', 'Runs all tests.', () => {
const runTests = () => {
  if (semver.satisfies(process.version, ">=6")) {
    return gulp.src(`${outDir}/test/**/*.js`)
        .pipe(mocha({reporter: 'mocha-jenkins-reporter',
@@ -72,4 +67,15 @@ gulp.task('test', 'Runs all tests.', () => {
    console.log(`Skipping proto-loader tests for Node ${process.version}`);
    return Promise.resolve(null);
  }
});
}

const test = gulp.series(install, runTests);

export {
  install,
  lint,
  clean,
  cleanAll,
  compile,
  test
}

@@ -1,6 +1,6 @@
{
  "name": "@grpc/proto-loader",
  "version": "0.4.0",
  "version": "0.5.0",
  "author": "Google Inc.",
  "contributors": [
    {

@@ -15,33 +15,34 @@
 * limitations under the License.
 *
 */
import * as Protobuf from 'protobufjs';
import * as descriptor from 'protobufjs/ext/descriptor';
import * as fs from 'fs';
import * as path from 'path';
import * as Protobuf from 'protobufjs';
import * as descriptor from 'protobufjs/ext/descriptor';

import camelCase = require('lodash.camelcase');

declare module 'protobufjs' {
  interface Type {
    toDescriptor(protoVersion: string): Protobuf.Message<descriptor.IDescriptorProto> & descriptor.IDescriptorProto;
    toDescriptor(protoVersion: string): Protobuf
        .Message<descriptor.IDescriptorProto>&descriptor.IDescriptorProto;
  }

  interface Root {
    toDescriptor(protoVersion: string): Protobuf.Message<descriptor.IFileDescriptorSet> & descriptor.IFileDescriptorSet;
    toDescriptor(protoVersion: string): Protobuf
        .Message<descriptor.IFileDescriptorSet>&descriptor.IFileDescriptorSet;
  }

  interface Enum {
    toDescriptor(protoVersion: string): Protobuf.Message<descriptor.IEnumDescriptorProto> & descriptor.IEnumDescriptorProto;
    toDescriptor(protoVersion: string):
        Protobuf.Message<descriptor.IEnumDescriptorProto>&
        descriptor.IEnumDescriptorProto;
  }
}

export interface Serialize<T> {
  (value: T): Buffer;
}
export interface Serialize<T> { (value: T): Buffer; }

export interface Deserialize<T> {
  (bytes: Buffer): T;
}
export interface Deserialize<T> { (bytes: Buffer): T; }

export interface ProtobufTypeDefinition {
  format: string;
@@ -74,13 +75,12 @@ export interface ServiceDefinition {
  [index: string]: MethodDefinition<object, object>;
}

export type AnyDefinition = ServiceDefinition | MessageTypeDefinition | EnumTypeDefinition;
export type AnyDefinition =
    ServiceDefinition|MessageTypeDefinition|EnumTypeDefinition;

export interface PackageDefinition {
  [index: string]: AnyDefinition;
}
export interface PackageDefinition { [index: string]: AnyDefinition; }

export type Options = Protobuf.IParseOptions & Protobuf.IConversionOptions & {
export type Options = Protobuf.IParseOptions&Protobuf.IConversionOptions&{
  includeDirs?: string[];
};
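
For reference, the Options type above is what load() and loadSync() accept: Protobuf.js parse and conversion options plus the proto-loader-specific includeDirs. A small usage sketch with placeholder file paths:

    import * as protoLoader from '@grpc/proto-loader';

    // 'example.proto' and './protos' are placeholders.
    const packageDefinition = protoLoader.loadSync('example.proto', {
      keepCase: true,             // parse option (Protobuf.IParseOptions)
      longs: String,              // conversion option (Protobuf.IConversionOptions)
      defaults: true,
      includeDirs: ['./protos'],  // proto-loader extension defined above
    });
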
@@ -101,31 +101,41 @@ function joinName(baseName: string, name: string): string {
  }
}

type HandledReflectionObject = Protobuf.Service | Protobuf.Type | Protobuf.Enum;
type HandledReflectionObject = Protobuf.Service|Protobuf.Type|Protobuf.Enum;

function isHandledReflectionObject(obj: Protobuf.ReflectionObject): obj is HandledReflectionObject {
  return obj instanceof Protobuf.Service || obj instanceof Protobuf.Type || obj instanceof Protobuf.Enum;
function isHandledReflectionObject(obj: Protobuf.ReflectionObject):
    obj is HandledReflectionObject {
  return obj instanceof Protobuf.Service || obj instanceof Protobuf.Type ||
      obj instanceof Protobuf.Enum;
}

function isNamespaceBase(obj: Protobuf.ReflectionObject): obj is Protobuf.NamespaceBase {
function isNamespaceBase(obj: Protobuf.ReflectionObject):
    obj is Protobuf.NamespaceBase {
  return obj instanceof Protobuf.Namespace || obj instanceof Protobuf.Root;
}

function getAllHandledReflectionObjects(obj: Protobuf.ReflectionObject, parentName: string): Array<[string, HandledReflectionObject]> {
function getAllHandledReflectionObjects(
    obj: Protobuf.ReflectionObject,
    parentName: string): Array<[string, HandledReflectionObject]> {
  const objName = joinName(parentName, obj.name);
  if (isHandledReflectionObject(obj)) {
    return [[objName, obj]];
  } else {
    if (isNamespaceBase(obj) && typeof obj.nested !== undefined) {
      return Object.keys(obj.nested!).map((name) => {
        return getAllHandledReflectionObjects(obj.nested![name], objName);
      }).reduce((accumulator, currentValue) => accumulator.concat(currentValue), []);
      return Object.keys(obj.nested!)
          .map((name) => {
            return getAllHandledReflectionObjects(obj.nested![name], objName);
          })
          .reduce(
              (accumulator, currentValue) => accumulator.concat(currentValue),
              []);
    }
  }
  return [];
}

function createDeserializer(cls: Protobuf.Type, options: Options): Deserialize<object> {
function createDeserializer(
    cls: Protobuf.Type, options: Options): Deserialize<object> {
  return function deserialize(argBuf: Buffer): object {
    return cls.toObject(cls.decode(argBuf), options);
  };
@@ -138,7 +148,9 @@ function createSerializer(cls: Protobuf.Type): Serialize<object> {
  };
}

function createMethodDefinition(method: Protobuf.Method, serviceName: string, options: Options): MethodDefinition<object, object> {
function createMethodDefinition(
    method: Protobuf.Method, serviceName: string,
    options: Options): MethodDefinition<object, object> {
  /* This is only ever called after the corresponding root.resolveAll(), so we
   * can assume that the resolved request and response types are non-null */
  const requestType: Protobuf.Type = method.resolvedRequestType!;
@@ -158,7 +170,9 @@ function createMethodDefinition(method: Protobuf.Method, serviceName: string, op
  };
}

function createServiceDefinition(service: Protobuf.Service, name: string, options: Options): ServiceDefinition {
function createServiceDefinition(
    service: Protobuf.Service, name: string,
    options: Options): ServiceDefinition {
  const def: ServiceDefinition = {};
  for (const method of service.methodsArray) {
    def[method.name] = createMethodDefinition(method, name, options);
@@ -166,29 +180,37 @@ function createServiceDefinition(service: Protobuf.Service, name: string, option
  return def;
}

const fileDescriptorCache: Map<Protobuf.Root, Buffer[]> = new Map<Protobuf.Root, Buffer[]>();
const fileDescriptorCache: Map<Protobuf.Root, Buffer[]> =
    new Map<Protobuf.Root, Buffer[]>();
function getFileDescriptors(root: Protobuf.Root): Buffer[] {
  if (fileDescriptorCache.has(root)) {
    return fileDescriptorCache.get(root)!;
  } else {
    const descriptorList: descriptor.IFileDescriptorProto[] = root.toDescriptor('proto3').file;
    const bufferList: Buffer[] = descriptorList.map(value => Buffer.from(descriptor.FileDescriptorProto.encode(value).finish()));
    const descriptorList: descriptor.IFileDescriptorProto[] =
        root.toDescriptor('proto3').file;
    const bufferList: Buffer[] = descriptorList.map(
        value =>
            Buffer.from(descriptor.FileDescriptorProto.encode(value).finish()));
    fileDescriptorCache.set(root, bufferList);
    return bufferList;
  }
}

function createMessageDefinition(message: Protobuf.Type): MessageTypeDefinition {
  const messageDescriptor: protobuf.Message<descriptor.IDescriptorProto> = message.toDescriptor('proto3');
function createMessageDefinition(message: Protobuf.Type):
    MessageTypeDefinition {
  const messageDescriptor: protobuf.Message<descriptor.IDescriptorProto> =
      message.toDescriptor('proto3');
  return {
    format: 'Protocol Buffer 3 DescriptorProto',
    type: messageDescriptor.$type.toObject(messageDescriptor, descriptorOptions),
    type:
        messageDescriptor.$type.toObject(messageDescriptor, descriptorOptions),
    fileDescriptorProtos: getFileDescriptors(message.root)
  };
}

function createEnumDefinition(enumType: Protobuf.Enum): EnumTypeDefinition {
  const enumDescriptor: protobuf.Message<descriptor.IEnumDescriptorProto> = enumType.toDescriptor('proto3');
  const enumDescriptor: protobuf.Message<descriptor.IEnumDescriptorProto> =
      enumType.toDescriptor('proto3');
  return {
    format: 'Protocol Buffer 3 EnumDescriptorProto',
    type: enumDescriptor.$type.toObject(enumDescriptor, descriptorOptions),
@@ -197,11 +219,15 @@ function createEnumDefinition(enumType: Protobuf.Enum): EnumTypeDefinition {
}

/**
 * function createDefinition(obj: Protobuf.Service, name: string, options: Options): ServiceDefinition;
 * function createDefinition(obj: Protobuf.Type, name: string, options: Options): MessageTypeDefinition;
 * function createDefinition(obj: Protobuf.Enum, name: string, options: Options): EnumTypeDefinition;
 * function createDefinition(obj: Protobuf.Service, name: string, options:
 * Options): ServiceDefinition; function createDefinition(obj: Protobuf.Type,
 * name: string, options: Options): MessageTypeDefinition; function
 * createDefinition(obj: Protobuf.Enum, name: string, options: Options):
 * EnumTypeDefinition;
 */
function createDefinition(obj: HandledReflectionObject, name: string, options: Options): AnyDefinition {
function createDefinition(
    obj: HandledReflectionObject, name: string,
    options: Options): AnyDefinition {
  if (obj instanceof Protobuf.Service) {
    return createServiceDefinition(obj, name, options);
  } else if (obj instanceof Protobuf.Type) {
@@ -213,7 +239,8 @@ function createDefinition(obj: HandledReflectionObject, name: string, options: O
  }
}

function createPackageDefinition(root: Protobuf.Root, options: Options): PackageDefinition {
function createPackageDefinition(
    root: Protobuf.Root, options: Options): PackageDefinition {
  const def: PackageDefinition = {};
  root.resolveAll();
  for (const [name, obj] of getAllHandledReflectionObjects(root, '')) {
@@ -265,12 +292,14 @@ function addIncludePathResolver(root: Protobuf.Root, includePaths: string[]) {
 * name
 * @param options.includeDirs Paths to search for imported `.proto` files.
 */
export function load(filename: string | string[], options?: Options): Promise<PackageDefinition> {
export function load(
    filename: string | string[], options?: Options): Promise<PackageDefinition> {
  const root: Protobuf.Root = new Protobuf.Root();
  options = options || {};
  if (!!options.includeDirs) {
    if (!(Array.isArray(options.includeDirs))) {
      return Promise.reject(new Error('The includeDirs option must be an array'));
      return Promise.reject(
          new Error('The includeDirs option must be an array'));
    }
    addIncludePathResolver(root, options.includeDirs as string[]);
  }
@@ -280,7 +309,8 @@ export function load(filename: string | string[], options?: Options): Promise<Pa
  });
}

export function loadSync(filename: string | string[], options?: Options): PackageDefinition {
export function loadSync(
    filename: string | string[], options?: Options): PackageDefinition {
  const root: Protobuf.Root = new Protobuf.Root();
  options = options || {};
  if (!!options.includeDirs) {
@@ -293,3 +323,20 @@ export function loadSync(filename: string | string[], options?: Options): Packag
  loadedRoot.resolveAll();
  return createPackageDefinition(root, options!);
}

// Load Google's well-known proto files that aren't exposed by Protobuf.js.
{
  // Protobuf.js exposes: any, duration, empty, field_mask, struct, timestamp,
  // and wrappers. compiler/plugin is excluded in Protobuf.js and here.
  const wellKnownProtos = ['api', 'descriptor', 'source_context', 'type'];
  const sourceDir = path.join(
      path.dirname(require.resolve('protobufjs')), 'google', 'protobuf');

  for (const proto of wellKnownProtos) {
    const file = path.join(sourceDir, `${proto}.proto`);
    const descriptor = Protobuf.loadSync(file).toJSON();

    // @ts-ignore
    Protobuf.common(proto, descriptor.nested.google.nested);
  }
}
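
Note: Protobuf.common() registers a JSON descriptor under a well-known file name so that later imports of google/protobuf/<name>.proto resolve from memory rather than the filesystem. With the block above, a .proto that imports descriptor.proto can now be loaded; a sketch with a placeholder file name:

    import * as protoLoader from '@grpc/proto-loader';

    // 'options.proto' is a placeholder file that imports
    // "google/protobuf/descriptor.proto" to declare a custom option; before
    // this change that import failed to resolve.
    const definition = protoLoader.loadSync('options.proto');
    console.log(Object.keys(definition));
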
@@ -22,7 +22,8 @@ import * as proto_loader from '../src/index';
// Relative path from build output directory to test_protos directory
const TEST_PROTO_DIR = `${__dirname}/../../test_protos/`;

type TypeDefinition = proto_loader.EnumTypeDefinition | proto_loader.MessageTypeDefinition;
type TypeDefinition =
    proto_loader.EnumTypeDefinition|proto_loader.MessageTypeDefinition;

function isTypeObject(obj: proto_loader.AnyDefinition): obj is TypeDefinition {
  return 'format' in obj;
@@ -30,41 +31,65 @@ function isTypeObject(obj: proto_loader.AnyDefinition): obj is TypeDefinition {

describe('Descriptor types', () => {
  it('Should be output for each enum', (done) => {
    proto_loader.load(`${TEST_PROTO_DIR}/enums.proto`).then((packageDefinition) => {
      assert('Enum1' in packageDefinition);
      assert(isTypeObject(packageDefinition.Enum1));
      // Need additional check because compiler doesn't understand asserts
      if(isTypeObject(packageDefinition.Enum1)) {
        const enum1Def: TypeDefinition = packageDefinition.Enum1;
        assert.strictEqual(enum1Def.format, 'Protocol Buffer 3 EnumDescriptorProto');
      }
    proto_loader.load(`${TEST_PROTO_DIR}/enums.proto`)
        .then(
            (packageDefinition) => {
              assert('Enum1' in packageDefinition);
              assert(isTypeObject(packageDefinition.Enum1));
              // Need additional check because compiler doesn't understand
              // asserts
              if (isTypeObject(packageDefinition.Enum1)) {
                const enum1Def: TypeDefinition = packageDefinition.Enum1;
                assert.strictEqual(
                    enum1Def.format, 'Protocol Buffer 3 EnumDescriptorProto');
              }

      assert('Enum2' in packageDefinition);
      assert(isTypeObject(packageDefinition.Enum2));
      // Need additional check because compiler doesn't understand asserts
      if(isTypeObject(packageDefinition.Enum2)) {
        const enum2Def: TypeDefinition = packageDefinition.Enum2;
        assert.strictEqual(enum2Def.format, 'Protocol Buffer 3 EnumDescriptorProto');
      }
      done();
    }, (error) => {done(error);});
              assert('Enum2' in packageDefinition);
              assert(isTypeObject(packageDefinition.Enum2));
              // Need additional check because compiler doesn't understand
              // asserts
              if (isTypeObject(packageDefinition.Enum2)) {
                const enum2Def: TypeDefinition = packageDefinition.Enum2;
                assert.strictEqual(
                    enum2Def.format, 'Protocol Buffer 3 EnumDescriptorProto');
              }
              done();
            },
            (error) => {
              done(error);
            });
  });
  it('Should be output for each message', (done) => {
    proto_loader.load(`${TEST_PROTO_DIR}/messages.proto`).then((packageDefinition) => {
      assert('LongValues' in packageDefinition);
      assert(isTypeObject(packageDefinition.LongValues));
      if(isTypeObject(packageDefinition.LongValues)) {
        const longValuesDef: TypeDefinition = packageDefinition.LongValues;
        assert.strictEqual(longValuesDef.format, 'Protocol Buffer 3 DescriptorProto');
      }
    proto_loader.load(`${TEST_PROTO_DIR}/messages.proto`)
        .then(
            (packageDefinition) => {
              assert('LongValues' in packageDefinition);
              assert(isTypeObject(packageDefinition.LongValues));
              if (isTypeObject(packageDefinition.LongValues)) {
                const longValuesDef: TypeDefinition =
                    packageDefinition.LongValues;
                assert.strictEqual(
                    longValuesDef.format, 'Protocol Buffer 3 DescriptorProto');
              }

      assert('SequenceValues' in packageDefinition);
      assert(isTypeObject(packageDefinition.SequenceValues));
      if(isTypeObject(packageDefinition.SequenceValues)) {
        const sequenceValuesDef: TypeDefinition = packageDefinition.SequenceValues;
        assert.strictEqual(sequenceValuesDef.format, 'Protocol Buffer 3 DescriptorProto');
      }
      done();
    }, (error) => {done(error);});
              assert('SequenceValues' in packageDefinition);
              assert(isTypeObject(packageDefinition.SequenceValues));
              if (isTypeObject(packageDefinition.SequenceValues)) {
                const sequenceValuesDef: TypeDefinition =
                    packageDefinition.SequenceValues;
                assert.strictEqual(
                    sequenceValuesDef.format,
                    'Protocol Buffer 3 DescriptorProto');
              }
              done();
            },
            (error) => {
              done(error);
            });
  });
});

  it('Can use well known Google protos', () => {
    // This will throw if the well known protos are not available.
    proto_loader.loadSync(`${TEST_PROTO_DIR}/well_known.proto`);
  });
});

@@ -0,0 +1,21 @@
// Copyright 2019 gRPC authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

import "google/protobuf/descriptor.proto";

extend google.protobuf.FieldOptions {
  bool redact = 52000;
}

@@ -38,7 +38,7 @@ call npm install || goto :error
SET JUNIT_REPORT_STACK=1
SET FAILED=0

for %%v in (6 7 8 9 10 11) do (
for %%v in (6 7 8 9 10 11 12) do (
  call nvm install %%v
  call nvm use %%v
  if "%%v"=="4" (
@@ -53,8 +53,8 @@ for %%v in (6 7 8 9 10 11) do (

  node -e "process.exit(process.version.startsWith('v%%v') ? 0 : -1)" || goto :error

  call .\node_modules\.bin\gulp clean.all || SET FAILED=1
  call .\node_modules\.bin\gulp setup.windows || SET FAILED=1
  call .\node_modules\.bin\gulp cleanAll || SET FAILED=1
  call .\node_modules\.bin\gulp setupWindows || SET FAILED=1
  call .\node_modules\.bin\gulp test || SET FAILED=1
)

@@ -26,7 +26,7 @@ set -ex
cd $ROOT

if [ ! -n "$node_versions" ] ; then
  node_versions="6 7 8 9 10 11"
  node_versions="6 7 8 9 10 11 12"
fi

set +ex
@@ -68,7 +68,7 @@ do
  node -e 'process.exit(process.version.startsWith("v'$version'") ? 0 : -1)'

  # Install dependencies and link packages together.
  ./node_modules/.bin/gulp clean.all
  ./node_modules/.bin/gulp cleanAll
  ./node_modules/.bin/gulp setup

  # npm test calls nyc gulp test

@@ -0,0 +1,18 @@
#!/bin/sh
# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

npm install -g node-gyp gulp
npm install
gulp setup

@@ -0,0 +1,18 @@
#!/bin/sh
# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

npm install -g gulp
npm install
gulp setupPureJSInterop

@@ -1,5 +1,5 @@
/*
 * Copyright 2017 gRPC authors.
 * Copyright 2019 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -15,33 +15,28 @@
 *
 */

const _gulp = require('gulp');
const help = require('gulp-help');
const mocha = require('gulp-mocha');
const execa = require('execa');
const path = require('path');
const del = require('del');
const semver = require('semver');
const linkSync = require('../util').linkSync;

// gulp-help monkeypatches tasks to have an additional description parameter
const gulp = help(_gulp);
import * as gulp from 'gulp';
import * as mocha from 'gulp-mocha';
import * as execa from 'execa';
import * as path from 'path';
import * as del from 'del';
import * as semver from 'semver';

const testDir = __dirname;
const apiTestDir = path.resolve(testDir, 'api');

gulp.task('install', 'Install test dependencies', () => {
const install = () => {
  return execa('npm', ['install'], {cwd: testDir, stdio: 'inherit'});
});
};

gulp.task('clean.all', 'Delete all files created by tasks', () => {});
const cleanAll = () => Promise.resolve();

gulp.task('test', 'Run API-level tests', () => {
const test = () => {
  // run mocha tests matching a glob with a pre-required fixture,
  // returning the associated gulp stream
  if (!semver.satisfies(process.version, '>=9.4')) {
  if (!semver.satisfies(process.version, '>=10.10.0')) {
    console.log(`Skipping cross-implementation tests for Node ${process.version}`);
    return;
    return Promise.resolve();
  }
  const apiTestGlob = `${apiTestDir}/*.js`;
  const runTestsWithFixture = (server, client) => new Promise((resolve, reject) => {
@@ -50,14 +45,14 @@ gulp.task('test', 'Run API-level tests', () => {
    gulp.src(apiTestGlob)
        .pipe(mocha({
          reporter: 'mocha-jenkins-reporter',
          require: `${testDir}/fixtures/${fixture}.js`
          require: [`${testDir}/fixtures/${fixture}.js`]
        }))
        .resume() // put the stream in flowing mode
        .on('end', resolve)
        .on('error', reject);
  });
  var runTestsArgPairs;
  if (semver.satisfies(process.version, '^ 8.11.2 || >=9.4')) {
  if (semver.satisfies(process.version, '^8.13.0 || >=10.10.0')) {
    runTestsArgPairs = [
      ['native', 'native'],
      ['native', 'js'],
@@ -72,4 +67,10 @@ gulp.task('test', 'Run API-level tests', () => {
  return runTestsArgPairs.reduce((previousPromise, argPair) => {
    return previousPromise.then(runTestsWithFixture.bind(null, argPair[0], argPair[1]));
  }, Promise.resolve());
});
};

export {
  install,
  cleanAll,
  test
};
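
One behavioral detail in the converted task: the gulp-mocha `require` option is now passed as an array of module paths rather than a single string, matching the newer gulp-mocha typings. A small sketch of the same pattern, with a placeholder fixture path:

    import * as gulp from 'gulp';
    import * as mocha from 'gulp-mocha';

    // './fixtures/example_fixture.js' stands in for an actual fixture module.
    const apiTest = () =>
        gulp.src('api/*.js')
            .pipe(mocha({
              reporter: 'spec',
              require: ['./fixtures/example_fixture.js'],
            }));

    export {apiTest};
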
@@ -1,6 +1,8 @@
FROM debian:jessie

RUN echo "deb http://ftp.debian.org/debian jessie-backports main" > /etc/apt/sources.list.d/backports.list
RUN echo "deb http://archive.debian.org/debian jessie-backports main" > /etc/apt/sources.list.d/backports.list
RUN echo 'Acquire::Check-Valid-Until "false";' > /etc/apt/apt.conf
RUN sed -i '/deb http:\/\/deb.debian.org\/debian jessie-updates main/d' /etc/apt/sources.list
RUN apt-get update
RUN apt-get -t jessie-backports install -y cmake
RUN apt-get install -y curl build-essential python libc6-dev-i386 lib32stdc++-4.9-dev jq