mirror of https://github.com/grpc/grpc-node.git
Merge branch 'master' into metadata_plugin_test_fix
commit 83da9b4b12
@@ -15,26 +15,52 @@
 *
 */

const _gulp = require('gulp');
const help = require('gulp-help');
import * as _gulp from 'gulp';
import * as help from 'gulp-help';

// gulp-help monkeypatches tasks to have an additional description parameter
const gulp = help(_gulp);

var runSequence = require('run-sequence');
const runSequence = require('run-sequence');

require('./packages/grpc-health-check/gulpfile');
require('./packages/grpc-js/gulpfile');
require('./packages/grpc-js-core/gulpfile');
require('./packages/grpc-native/gulpfile');
require('./packages/grpc-native-core/gulpfile');
require('./packages/grpc-surface/gulpfile');
require('./test/gulpfile');
/**
 * Require a module at the given path with a patched gulp object that prepends
 * the given prefix to each task name.
 * @param path The path to require.
 * @param prefix The string to use as a prefix. This will be prepended to a task
 * name with a '.' separator.
 */
function loadGulpTasksWithPrefix(path: string, prefix: string) {
  const gulpTask = gulp.task;
  gulp.task = ((taskName: string, ...args: any[]) => {
    // Don't create a task for ${prefix}.help
    if (taskName === 'help') {
      return;
    }
    // The only array passed to gulp.task must be a list of dependent tasks.
    const newArgs = args.map(arg => Array.isArray(arg) ?
        arg.map(dep => `${prefix}.${dep}`) : arg);
    gulpTask(`${prefix}.${taskName}`, ...newArgs);
  });
  const result = require(path);
  gulp.task = gulpTask;
  return result;
}

[
  ['./packages/grpc-health-check/gulpfile', 'health-check'],
  ['./packages/grpc-js/gulpfile', 'js'],
  ['./packages/grpc-js-core/gulpfile', 'js.core'],
  ['./packages/grpc-native/gulpfile', 'native'],
  ['./packages/grpc-native-core/gulpfile', 'native.core'],
  ['./packages/grpc-surface/gulpfile', 'surface'],
  ['./test/gulpfile', 'internal.test']
].forEach((args) => loadGulpTasksWithPrefix(args[0], args[1]));
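As an illustrative sketch (the subpackage task below is hypothetical, not part of this commit), a task defined inside one of these gulpfiles is re-registered under its prefix, with its dependency list prefixed the same way:

// In packages/grpc-js/gulpfile.ts, loaded above with the prefix 'js':
gulp.task('compile', 'Transpile sources', ['install'], () => { /* ... */ });
// The patched gulp.task registers this as 'js.compile', depending on 'js.install',
// so `gulp js.compile` works from the repository root.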
const root = __dirname;

gulp.task('install.all', 'Install dependencies for all subdirectory packages',
    ['js.core.install', 'native.core.install', 'surface.install', 'health-check.install', 'internal.test.install']);
    ['js.install', 'js.core.install', 'native.core.install', 'surface.install', 'health-check.install', 'internal.test.install']);

gulp.task('install.all.windows', 'Install dependencies for all subdirectory packages for MS Windows',
    ['js.core.install', 'native.core.install.windows', 'surface.install', 'health-check.install', 'internal.test.install']);

@@ -42,22 +68,19 @@ gulp.task('install.all.windows', 'Install dependencies for all subdirectory pack
gulp.task('lint', 'Emit linting errors in source and test files',
    ['js.core.lint', 'native.core.lint']);

gulp.task('build', 'Build packages', ['js.core.compile', 'native.core.build']);
gulp.task('build', 'Build packages', ['js.compile', 'js.core.compile', 'native.core.build']);

gulp.task('core.link', 'Add links to core packages without rebuilding',
gulp.task('link.core', 'Add links to core packages without rebuilding',
    ['js.link.add', 'native.link.add']);

gulp.task('surface.link', 'Link to surface packages',
gulp.task('link.surface', 'Link to surface packages',
    ['health-check.link.add']);

gulp.task('link', 'Link together packages', (callback) => {
  /* Currently, the target 'surface.link.create' doesn't work properly, and it
   * is also not needed for the existing tests. The comment indicates where it
   * belongs in the sequence. See npm/npm#18835 for the primary problem with it.
   * This also means that 'core.link' is not needed, and the item
   * 'native.core.link.create' should actually be 'core.link.create'
  /**
   * We use workarounds for linking in some modules. See npm/npm#18835
   */
  runSequence('core.link', 'surface.link',
  runSequence('link.core', 'link.surface',
              callback);
});

@@ -9,7 +9,13 @@
  },
  "license": "Apache-2.0",
  "devDependencies": {
    "@types/execa": "^0.8.0",
    "@types/gulp": "^4.0.5",
    "@types/gulp-help": "0.0.34",
    "@types/gulp-mocha": "0.0.31",
    "@types/ncp": "^2.0.1",
    "@types/node": "^8.0.32",
    "@types/pify": "^3.0.0",
    "del": "^3.0.0",
    "execa": "^0.8.0",
    "gulp": "^3.9.1",

@@ -27,7 +33,10 @@
    "merge2": "^1.1.0",
    "mocha": "^3.5.3",
    "mocha-jenkins-reporter": "^0.3.9",
    "ncp": "^2.0.0",
    "pify": "^3.0.0",
    "through2": "^2.0.3",
    "ts-node": "^3.3.0",
    "tslint": "^5.5.0",
    "typescript": "^2.5.1",
    "xml2js": "^0.4.19"

@@ -29,22 +29,22 @@ const healthCheckDir = __dirname;
const baseDir = path.resolve(healthCheckDir, '..', '..');
const testDir = path.resolve(healthCheckDir, 'test');

gulp.task('health-check.clean.links', 'Delete npm links', () => {
gulp.task('clean.links', 'Delete npm links', () => {
  return del(path.resolve(healthCheckDir, 'node_modules/grpc'));
});

gulp.task('health-check.clean.all', 'Delete all code created by tasks',
    ['health-check.clean.links']);
gulp.task('clean.all', 'Delete all code created by tasks',
    ['clean.links']);

gulp.task('health-check.install', 'Install health check dependencies', () => {
gulp.task('install', 'Install health check dependencies', () => {
  return execa('npm', ['install', '--unsafe-perm'], {cwd: healthCheckDir, stdio: 'inherit'});
});

gulp.task('health-check.link.add', 'Link local copy of grpc', () => {
gulp.task('link.add', 'Link local copy of grpc', () => {
  linkSync(healthCheckDir, './node_modules/grpc', '../grpc-native-core');
});

gulp.task('health-check.test', 'Run health check tests',
gulp.task('test', 'Run health check tests',
    () => {
      return gulp.src(`${testDir}/*.js`).pipe(mocha({reporter: 'mocha-jenkins-reporter'}));
    });
@@ -1,197 +0,0 @@
/*
 * Copyright 2017 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

'use strict';

const _gulp = require('gulp');
const help = require('gulp-help');

// gulp-help monkeypatches tasks to have an additional description parameter
const gulp = help(_gulp);

const del = require('del');
const mocha = require('gulp-mocha');
const sourcemaps = require('gulp-sourcemaps');
const tslint = require('gulp-tslint');
const typescript = require('gulp-typescript');
const util = require('gulp-util');
const merge2 = require('merge2');
const path = require('path');
const through = require('through2');
const execa = require('execa');

Error.stackTraceLimit = Infinity;

const jsCoreDir = __dirname;
const tslintPath = path.resolve(jsCoreDir, 'node_modules/google-ts-style/tslint.json');
const tsconfigPath = path.resolve(jsCoreDir, 'tsconfig.json');
const outDir = path.resolve(jsCoreDir, 'build');
const srcDir = path.resolve(jsCoreDir, 'src');
const testDir = path.resolve(jsCoreDir, 'test');

function onError() {}

// Coalesces all specified --file parameters into a single array
const files = !util.env.file ? [] :
    Array.isArray(util.env.file) ? util.env.file : [util.env.file];

// If --dev is passed, override certain ts config options
let tsDevOptions = {};
if (util.env.dev) {
  tsDevOptions = {
    allowUnreachableCode: true,
    noUnusedParameters: false,
    noImplicitAny: false,
    noImplicitThis: false,
    noEmitOnError: false
  };
}

/**
 * Helper function that creates a gulp task function that opens files in a
 * directory that match a certain glob pattern, transpiles them, and writes them
 * to an output directory.
 * @param {Object} globs
 * @param {string=} globs.transpile The glob pattern for files to transpile.
 * Defaults to match all *.ts files in baseDir (incl. subdirectories).
 * @param {string=} globs.copy The glob pattern for files to transpile.
 * Defaults to match all but *.ts files in baseDir (incl. subdirectories).
 * @return A gulp task function.
 */
function makeCompileFn(globs) {
  const transpileGlob = globs.transpile || `${srcDir}/**/*.ts`;
  const copyGlob = globs.copy || '!(**/*)';
  return () => {
    const tsProject = typescript.createProject(tsconfigPath, tsDevOptions)();
    const data = gulp.src(transpileGlob, { base: jsCoreDir })
        .pipe(sourcemaps.init())
        .pipe(tsProject)
        .on('error', onError);
    const dts = data.dts;
    const js = data.js;
    const jsmap = js.pipe(sourcemaps.write('.', {
      includeContent: false,
      sourceRoot: '..'
    }));
    const copy = gulp.src(copyGlob, { base: jsCoreDir });
    return merge2([
      js.pipe(gulp.dest(`${outDir}`)),
      dts.pipe(gulp.dest(`${outDir}/types`)),
      jsmap.pipe(gulp.dest(`${outDir}`)),
      copy.pipe(gulp.dest(`${outDir}`))
    ]);
  };
}
gulp.task('js.core.install', 'Install native core dependencies', () => {
  return execa('npm', ['install', '--unsafe-perm'], {cwd: jsCoreDir, stdio: 'inherit'});
});

/**
 * Runs tslint on files in src/, with linting rules defined in tslint.json.
 */
gulp.task('js.core.lint', 'Emits linting errors found in src/ and test/.', () => {
  const program = require('tslint').Linter.createProgram(tsconfigPath);
  gulp.src([`${srcDir}/**/*.ts`, `${testDir}/**/*.ts`])
      .pipe(tslint({
        configuration: tslintPath,
        formatter: 'codeFrame',
        program
      }))
      .pipe(tslint.report())
      .on('warning', onError);
});

gulp.task('js.core.clean', 'Deletes transpiled code.', () => {
  return del(outDir);
});

gulp.task('js.core.clean.all', 'Deletes all files added by targets',
    ['js.core.clean']);

/**
 * Transpiles TypeScript files in src/ to JavaScript according to the settings
 * found in tsconfig.json.
 * Currently, all errors are emitted twice. This is being tracked here:
 * https://github.com/ivogabe/gulp-typescript/issues/438
 */
gulp.task('js.core.compile', 'Transpiles src/.',
    makeCompileFn({ transpile: [`${srcDir}/**/*.ts`] }));

/**
 * Transpiles TypeScript files in both src/ and test/.
 */
gulp.task('js.core.test.compile', 'After dep tasks, transpiles test/.', ['js.core.compile'],
    makeCompileFn({ transpile: [`${testDir}/**/*.ts`], copy: `${testDir}/**/!(*.ts)` }));

/**
 * Transpiles src/ and test/, and then runs all tests.
 */
gulp.task('js.core.test', 'After dep tasks, runs all tests.',
    ['js.core.test.compile'], () => {
      return gulp.src(`${outDir}/test/**/*.js`)
          .pipe(mocha({reporter: 'mocha-jenkins-reporter'}));
    }
);

/**
 * Transpiles individual files, specified by the --file flag.
 */
gulp.task('js.core.compile.single', 'Transpiles individual files specified by --file.',
    makeCompileFn({
      transpile: files.map(f => path.relative('.', f))
    })
);

/**
 * Run individual tests, specified by their pre-transpiled source path (as
 * supplied through the '--file' flag). This is intended to be used as part of a
 * VS Code "Gulp task" launch configuration; setting the "args" field to
 * ["test.single", "--file", "${file}"] makes it possible for one to debug the
 * currently open TS mocha test file in one step.
 */
gulp.task('js.core.test.single', 'After dep tasks, runs individual files specified ' +
    'by --file.', ['js.core.compile', 'js.core.compile.single'], () => {
      // util.env contains CLI arguments for the gulp task.
      // Determine the path to the transpiled version of this TS file.
      const getTranspiledPath = (file) => {
        const dir = path.dirname(path.relative(jsCoreDir, file));
        const basename = path.basename(file, '.ts');
        const result = `${outDir}/${dir}/${basename}.js`;
        console.log(result);
        return result;
      };
      // Construct an instance of Mocha's runner API and feed it the path to the
      // transpiled source.
      return gulp.src(files.map(getTranspiledPath))
          .pipe(through.obj((file, enc, cb) => {
            // Construct a new Mocha runner instance.
            const Mocha = require('mocha');
            const runner = new Mocha();
            // Add the path to the test file to debug.
            runner.addFile(file.path);
            // Run the test suite.
            runner.run((failures) => {
              if (failures > 0) {
                cb(new Error(`Mocha: ${failures} failures in ${file.path}]`));
              } else {
                cb(null);
              }
            });
          }));
    }
);
@@ -0,0 +1,76 @@
/*
 * Copyright 2017 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

import * as _gulp from 'gulp';
import * as help from 'gulp-help';

import * as fs from 'fs';
import * as mocha from 'gulp-mocha';
import * as path from 'path';
import * as execa from 'execa';
import * as pify from 'pify';
import { ncp } from 'ncp';

// gulp-help monkeypatches tasks to have an additional description parameter
const gulp = help(_gulp);

const ncpP = pify(ncp);

Error.stackTraceLimit = Infinity;

const jsCoreDir = __dirname;
const tslintPath = path.resolve(jsCoreDir, 'node_modules/google-ts-style/tslint.json');
const tsconfigPath = path.resolve(jsCoreDir, 'tsconfig.json');
const outDir = path.resolve(jsCoreDir, 'build');
const srcDir = path.resolve(jsCoreDir, 'src');
const testDir = path.resolve(jsCoreDir, 'test');

const execNpmVerb = (verb: string, ...args: string[]) =>
    execa('npm', [verb, ...args], {cwd: jsCoreDir, stdio: 'inherit'});
const execNpmCommand = execNpmVerb.bind(null, 'run');
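In effect these helpers just shell out to npm in the package directory; as a rough illustration:

// execNpmVerb('install', '--unsafe-perm')  ~ runs `npm install --unsafe-perm` in jsCoreDir
// execNpmCommand('check')                  ~ runs `npm run check` in jsCoreDir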
gulp.task('install', 'Install native core dependencies', () =>
    execNpmVerb('install', '--unsafe-perm'));

/**
 * Runs tslint on files in src/, with linting rules defined in tslint.json.
 */
gulp.task('lint', 'Emits linting errors found in src/ and test/.', () =>
    execNpmCommand('check'));

gulp.task('clean', 'Deletes transpiled code.', ['install'],
    () => execNpmCommand('clean'));

gulp.task('clean.all', 'Deletes all files added by targets', ['clean']);

/**
 * Transpiles TypeScript files in src/ to JavaScript according to the settings
 * found in tsconfig.json.
 */
gulp.task('compile', 'Transpiles src/.', () => execNpmCommand('compile'));

gulp.task('copy-test-fixtures', 'Copy test fixtures.', () => {
  return ncpP(`${jsCoreDir}/test/fixtures`, `${outDir}/test/fixtures`);
});

/**
 * Transpiles src/ and test/, and then runs all tests.
 */
gulp.task('test', 'Runs all tests.', ['copy-test-fixtures'], () => {
  return gulp.src(`${outDir}/test/**/*.js`)
      .pipe(mocha({reporter: 'mocha-jenkins-reporter'}));
});

@@ -11,14 +11,15 @@
  "author": {
    "name": "Google Inc."
  },
  "types": "src/index.ts",
  "types": "build/src/index.d.ts",
  "license": "Apache-2.0",
  "devDependencies": {
    "@types/lodash": "^4.14.77",
    "@types/mocha": "^2.2.43",
    "@types/node": "^8.0.34",
    "@types/node": "^8.0.55",
    "clang-format": "^1.0.55",
    "google-ts-style": "^0.2.0"
    "gts": "^0.5.1",
    "typescript": "^2.6.1"
  },
  "contributors": [
    {

@@ -28,12 +29,16 @@
  "_id": "@grpc/js-core@0.1.0",
  "scripts": {
    "build": "npm run compile",
    "clean": "gulp clean",
    "compile": "gulp js.core.compile",
    "clean": "gts clean",
    "compile": "tsc -p .",
    "format": "clang-format -i -style=\"{Language: JavaScript, BasedOnStyle: Google, ColumnLimit: 80}\" src/*.ts test/*.ts",
    "lint": "tslint -c node_modules/google-ts-style/tslint.json -p . -t codeFrame --type-check",
    "prepare": "npm run build",
    "test": "gulp test"
    "prepare": "npm run compile",
    "test": "gulp test",
    "check": "gts check",
    "fix": "gts fix",
    "pretest": "npm run compile",
    "posttest": "npm run check"
  },
  "dependencies": {
    "lodash": "^4.17.4"

@@ -13,7 +13,7 @@ export class CallCredentialsFilter extends BaseFilter implements Filter {

  async sendMetadata(metadata: Promise<Metadata>): Promise<Metadata> {
    // TODO(murgatroid99): pass real options to generateMetadata
    let credsMetadata = this.credentials.generateMetadata.bind({});
    let credsMetadata = this.credentials.generateMetadata({});
    let resultMetadata = await metadata;
    resultMetadata.merge(await credsMetadata);
    return resultMetadata;

@@ -3,7 +3,7 @@ import {map, reduce} from 'lodash';
import {Metadata} from './metadata';

export type CallMetadataGenerator =
    (options: Object, cb: (err: Error|null, metadata?: Metadata) => void) =>
    (options: {}, cb: (err: Error|null, metadata?: Metadata) => void) =>
        void;
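For illustration, a callback-style generator matching this signature could look like the following (the header name and token value are made up):

const addAuthHeader: CallMetadataGenerator = (options, cb) => {
  const metadata = new Metadata();
  metadata.add('authorization', 'Bearer fake-token');  // made-up credential
  cb(null, metadata);
};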
/**

@@ -15,7 +15,7 @@ export interface CallCredentials {
   * Asynchronously generates a new Metadata object.
   * @param options Options used in generating the Metadata object.
   */
  generateMetadata(options: Object): Promise<Metadata>;
  generateMetadata(options: {}): Promise<Metadata>;
  /**
   * Creates a new CallCredentials object from properties of both this and
   * another CallCredentials object. This object's metadata generator will be

@@ -28,7 +28,7 @@ export interface CallCredentials {
class ComposedCallCredentials implements CallCredentials {
  constructor(private creds: CallCredentials[]) {}

  async generateMetadata(options: Object): Promise<Metadata> {
  async generateMetadata(options: {}): Promise<Metadata> {
    let base: Metadata = new Metadata();
    let generated: Metadata[] = await Promise.all(
        map(this.creds, (cred) => cred.generateMetadata(options)));

@@ -46,7 +46,7 @@ class ComposedCallCredentials implements CallCredentials {
class SingleCallCredentials implements CallCredentials {
  constructor(private metadataGenerator: CallMetadataGenerator) {}

  async generateMetadata(options: Object): Promise<Metadata> {
  generateMetadata(options: {}): Promise<Metadata> {
    return new Promise<Metadata>((resolve, reject) => {
      this.metadataGenerator(options, (err, metadata) => {
        if (metadata !== undefined) {

@@ -64,8 +64,8 @@ class SingleCallCredentials implements CallCredentials {
}

class EmptyCallCredentials implements CallCredentials {
  async generateMetadata(options: Object): Promise<Metadata> {
    return new Metadata();
  generateMetadata(options: {}): Promise<Metadata> {
    return Promise.resolve(new Metadata());
  }

  compose(other: CallCredentials): CallCredentials {

@@ -3,6 +3,7 @@ import {Duplex} from 'stream';

import {CallCredentials} from './call-credentials';
import {Status} from './constants';
import {EmitterAugmentation1} from './events';
import {Filter} from './filter';
import {FilterStackFactory} from './filter-stack';
import {Metadata} from './metadata';

@@ -34,7 +35,7 @@ export interface WriteObject {
/**
 * This interface represents a duplex stream associated with a single gRPC call.
 */
export interface CallStream extends ObjectDuplex<WriteObject, Buffer> {
export type CallStream = {
  cancelWithStatus(status: Status, details: string): void;
  getPeer(): string;

@@ -43,37 +44,9 @@ export interface CallStream extends ObjectDuplex<WriteObject, Buffer> {
  /* If the return value is null, the call has not ended yet. Otherwise, it has
   * ended with the specified status */
  getStatus(): StatusObject|null;

  addListener(event: string, listener: Function): this;
  emit(event: string|symbol, ...args: any[]): boolean;
  on(event: string, listener: Function): this;
  once(event: string, listener: Function): this;
  prependListener(event: string, listener: Function): this;
  prependOnceListener(event: string, listener: Function): this;
  removeListener(event: string, listener: Function): this;

  addListener(event: 'metadata', listener: (metadata: Metadata) => void): this;
  emit(event: 'metadata', metadata: Metadata): boolean;
  on(event: 'metadata', listener: (metadata: Metadata) => void): this;
  once(event: 'metadata', listener: (metadata: Metadata) => void): this;
  prependListener(event: 'metadata', listener: (metadata: Metadata) => void):
      this;
  prependOnceListener(
      event: 'metadata', listener: (metadata: Metadata) => void): this;
  removeListener(event: 'metadata', listener: (metadata: Metadata) => void):
      this;

  addListener(event: 'status', listener: (status: StatusObject) => void): this;
  emit(event: 'status', status: StatusObject): boolean;
  on(event: 'status', listener: (status: StatusObject) => void): this;
  once(event: 'status', listener: (status: StatusObject) => void): this;
  prependListener(event: 'status', listener: (status: StatusObject) => void):
      this;
  prependOnceListener(
      event: 'status', listener: (status: StatusObject) => void): this;
  removeListener(event: 'status', listener: (status: StatusObject) => void):
      this;
}
} & EmitterAugmentation1<'metadata', Metadata>
  & EmitterAugmentation1<'status', StatusObject>
  & ObjectDuplex<WriteObject, Buffer>;
enum ReadState {
  NO_DATA,

@@ -105,6 +78,13 @@ export class Http2CallStream extends Duplex implements CallStream {
  // Status code mapped from :status. To be used if grpc-status is not received
  private mappedStatusCode: Status = Status.UNKNOWN;

  // Promise objects that are re-assigned to resolving promises when headers
  // or trailers received. Processing headers/trailers is asynchronous, so we
  // can use these objects to await their completion. This helps us establish
  // order of precedence when obtaining the status of the call.
  private handlingHeaders = Promise.resolve();
  private handlingTrailers = Promise.resolve();

  // This is populated (non-null) if and only if the call has ended
  private finalStatus: StatusObject|null = null;

@@ -116,6 +96,11 @@ export class Http2CallStream extends Duplex implements CallStream {
    this.filterStack = filterStackFactory.createFilter(this);
  }

  /**
   * On first call, emits a 'status' event with the given StatusObject.
   * Subsequent calls are no-ops.
   * @param status The status of the call.
   */
  private endCall(status: StatusObject): void {
    if (this.finalStatus === null) {
      this.finalStatus = status;

@@ -135,12 +120,46 @@ export class Http2CallStream extends Duplex implements CallStream {
    return canPush;
  }

  private handleTrailers(headers: http2.IncomingHttpHeaders) {
    let code: Status = this.mappedStatusCode;
    let details = '';
    let metadata: Metadata;
    try {
      metadata = Metadata.fromHttp2Headers(headers);
    } catch (e) {
      metadata = new Metadata();
    }
    let status: StatusObject = {code, details, metadata};
    this.handlingTrailers = (async () => {
      let finalStatus;
      try {
        // Attempt to assign final status.
        finalStatus = await this.filterStack.receiveTrailers(Promise.resolve(status));
      } catch (error) {
        await this.handlingHeaders;
        // This is a no-op if the call was already ended when handling headers.
        this.endCall({
          code: Status.INTERNAL,
          details: 'Failed to process received status',
          metadata: new Metadata()
        });
        return;
      }
      // It's possible that headers were received but not fully handled yet.
      // Give the headers handler an opportunity to end the call first,
      // if an error occurred.
      await this.handlingHeaders;
      // This is a no-op if the call was already ended when handling headers.
      this.endCall(finalStatus);
    })();
  }
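The handlingHeaders/handlingTrailers fields implement a simple ordering latch: each asynchronous handler re-assigns the promise field, and later handlers await it so earlier work gets to finish (or end the call) first. A minimal sketch of that pattern, with illustrative names not taken from this file:

class OrderedWork {
  private pending: Promise<void> = Promise.resolve();
  start(work: () => Promise<void>) {
    // Anyone awaiting `pending` afterwards also waits for this work to settle.
    this.pending = work();
  }
  async afterPending(action: () => void) {
    await this.pending;  // earlier work runs to completion first
    action();
  }
}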
  attachHttp2Stream(stream: http2.ClientHttp2Stream): void {
    if (this.finalStatus !== null) {
      stream.rstWithCancel();
    } else {
      this.http2Stream = stream;
      stream.on('response', (headers) => {
      stream.on('response', (headers, flags) => {
        switch (headers[HTTP2_HEADER_STATUS]) {
          // TODO(murgatroid99): handle 100 and 101
          case '400':

@@ -166,57 +185,27 @@ export class Http2CallStream extends Duplex implements CallStream {
        }
        delete headers[HTTP2_HEADER_STATUS];
        delete headers[HTTP2_HEADER_CONTENT_TYPE];
        let metadata: Metadata;
        try {
          metadata = Metadata.fromHttp2Headers(headers);
        } catch (e) {
          this.cancelWithStatus(Status.UNKNOWN, e.message);
          return;
        }
        this.filterStack.receiveMetadata(Promise.resolve(metadata))
            .then(
                (finalMetadata) => {
                  this.emit('metadata', finalMetadata);
                },
                (error) => {
                  this.cancelWithStatus(Status.UNKNOWN, error.message);
                });
      });
      stream.on('trailers', (headers) => {
        let code: Status = this.mappedStatusCode;
        if (headers.hasOwnProperty('grpc-status')) {
          let receivedCode = Number(headers['grpc-status']);
          if (receivedCode in Status) {
            code = receivedCode;
          } else {
            code = Status.UNKNOWN;
        if (flags & http2.constants.NGHTTP2_FLAG_END_STREAM) {
          this.handleTrailers(headers);
        } else {
          let metadata: Metadata;
          try {
            metadata = Metadata.fromHttp2Headers(headers);
          } catch (error) {
            this.endCall({code: Status.UNKNOWN, details: error.message, metadata: new Metadata()});
            return;
          }
          delete headers['grpc-status'];
          this.handlingHeaders =
              this.filterStack.receiveMetadata(Promise.resolve(metadata))
                  .then((finalMetadata) => {
                    this.emit('metadata', finalMetadata);
                  }).catch((error) => {
                    this.destroyHttp2Stream();
                    this.endCall({code: Status.UNKNOWN, details: error.message, metadata: new Metadata()});
                  });
        }
        let details = '';
        if (headers.hasOwnProperty('grpc-message')) {
          details = decodeURI(headers['grpc-message']);
        }
        let metadata: Metadata;
        try {
          metadata = Metadata.fromHttp2Headers(headers);
        } catch (e) {
          metadata = new Metadata();
        }
        let status: StatusObject = {code, details, metadata};
        this.filterStack.receiveTrailers(Promise.resolve(status))
            .then(
                (finalStatus) => {
                  this.endCall(finalStatus);
                },
                (error) => {
                  this.endCall({
                    code: Status.INTERNAL,
                    details: 'Failed to process received status',
                    metadata: new Metadata()
                  });
                });
      });
      stream.on('trailers', this.handleTrailers.bind(this));
      stream.on('data', (data) => {
        let readHead = 0;
        let canPush = true;
@@ -278,10 +267,13 @@ export class Http2CallStream extends Duplex implements CallStream {
          this.unpushedReadMessages.push(null);
        }
      });
      stream.on('streamClosed', (errorCode) => {
      stream.on('close', async (errorCode) => {
        let code: Status;
        let details = '';
        switch (errorCode) {
          case http2.constants.NGHTTP2_NO_ERROR:
            code = Status.OK;
            break;
          case http2.constants.NGHTTP2_REFUSED_STREAM:
            code = Status.UNAVAILABLE;
            break;

@@ -299,9 +291,16 @@
          default:
            code = Status.INTERNAL;
        }
        // This guarantees that if trailers were received, the value of the
        // 'grpc-status' header takes precedence for emitted status data.
        await this.handlingTrailers;
        // This is a no-op if trailers were received at all.
        // This is OK, because status codes emitted here correspond to more
        // catastrophic issues that prevent us from receiving trailers in the
        // first place.
        this.endCall({code: code, details: details, metadata: new Metadata()});
      });
      stream.on('error', () => {
      stream.on('error', (err: Error) => {
        this.endCall({
          code: Status.INTERNAL,
          details: 'Internal HTTP2 error',

@@ -323,15 +322,26 @@ export class Http2CallStream extends Duplex implements CallStream {
    }
  }

  cancelWithStatus(status: Status, details: string): void {
    this.endCall({code: status, details: details, metadata: new Metadata()});
    if (this.http2Stream !== null) {
  private destroyHttp2Stream() {
    // The http2 stream could already have been destroyed if cancelWithStatus
    // is called in response to an internal http2 error.
    if (this.http2Stream !== null && !this.http2Stream.destroyed) {
      /* TODO(murgatroid99): Determine if we want to send different RST_STREAM
       * codes based on the status code */
      this.http2Stream.rstWithCancel();
    }
  }

  cancelWithStatus(status: Status, details: string): void {
    this.destroyHttp2Stream();
    (async () => {
      // If trailers are currently being processed, the call should be ended
      // by handleTrailers instead.
      await this.handlingTrailers;
      this.endCall({code: status, details: details, metadata: new Metadata()});
    })();
  }

  getDeadline(): Deadline {
    return this.options.deadline;
  }
@@ -1,4 +1,5 @@
import {EventEmitter} from 'events';
import {EmitterAugmentation1} from './events';
import {Duplex, Readable, Writable} from 'stream';

import {CallStream, StatusObject, WriteObject} from './call-stream';

@@ -16,38 +17,35 @@ export class ServiceErrorImpl extends Error implements ServiceError {
  metadata?: Metadata;
}

export interface Call extends EventEmitter {
export type Call = {
  cancel(): void;
  getPeer(): string;
} & EmitterAugmentation1<'metadata', Metadata>
  & EmitterAugmentation1<'status', StatusObject>
  & EventEmitter;

  addListener(event: string, listener: Function): this;
  emit(event: string|symbol, ...args: any[]): boolean;
  on(event: string, listener: Function): this;
  once(event: string, listener: Function): this;
  prependListener(event: string, listener: Function): this;
  prependOnceListener(event: string, listener: Function): this;
  removeListener(event: string, listener: Function): this;
export type ClientUnaryCall = Call;

  addListener(event: 'metadata', listener: (metadata: Metadata) => void): this;
  emit(event: 'metadata', metadata: Metadata): boolean;
  on(event: 'metadata', listener: (metadata: Metadata) => void): this;
  once(event: 'metadata', listener: (metadata: Metadata) => void): this;
  prependListener(event: 'metadata', listener: (metadata: Metadata) => void):
      this;
  prependOnceListener(
      event: 'metadata', listener: (metadata: Metadata) => void): this;
  removeListener(event: 'metadata', listener: (metadata: Metadata) => void):
      this;
}
export type ClientReadableStream<ResponseType> = {
  deserialize: (chunk: Buffer) => ResponseType;
} & Call & ObjectReadable<ResponseType>;

export interface ClientUnaryCall extends Call {}
export type ClientWritableStream<RequestType> = {
  serialize: (value: RequestType) => Buffer;
} & Call & ObjectWritable<RequestType>;

export class ClientUnaryCallImpl extends EventEmitter implements Call {
export type ClientDuplexStream<RequestType, ResponseType> =
    ClientWritableStream<RequestType> & ClientReadableStream<ResponseType>;

export class ClientUnaryCallImpl extends EventEmitter implements ClientUnaryCall {
  constructor(private readonly call: CallStream) {
    super();
    call.on('metadata', (metadata: Metadata) => {
      this.emit('metadata', metadata);
    });
    call.on('status', (status: StatusObject) => {
      this.emit('status', status);
    });
  }

  cancel(): void {

@@ -59,54 +57,6 @@ export class ClientUnaryCallImpl extends EventEmitter implements Call {
  }
}
export interface ClientReadableStream<ResponseType> extends
    Call, ObjectReadable<ResponseType> {
  deserialize: (chunk: Buffer) => ResponseType;

  addListener(event: string, listener: Function): this;
  emit(event: string|symbol, ...args: any[]): boolean;
  on(event: string, listener: Function): this;
  once(event: string, listener: Function): this;
  prependListener(event: string, listener: Function): this;
  prependOnceListener(event: string, listener: Function): this;
  removeListener(event: string, listener: Function): this;

  addListener(event: 'status', listener: (status: StatusObject) => void): this;
  emit(event: 'status', status: StatusObject): boolean;
  on(event: 'status', listener: (status: StatusObject) => void): this;
  once(event: 'status', listener: (status: StatusObject) => void): this;
  prependListener(event: 'status', listener: (status: StatusObject) => void):
      this;
  prependOnceListener(
      event: 'status', listener: (status: StatusObject) => void): this;
  removeListener(event: 'status', listener: (status: StatusObject) => void):
      this;
}

export interface ClientWritableStream<RequestType> extends
    Call, ObjectWritable<RequestType> {
  serialize: (value: RequestType) => Buffer;

  addListener(event: string, listener: Function): this;
  emit(event: string|symbol, ...args: any[]): boolean;
  on(event: string, listener: Function): this;
  once(event: string, listener: Function): this;
  prependListener(event: string, listener: Function): this;
  prependOnceListener(event: string, listener: Function): this;
  removeListener(event: string, listener: Function): this;
}

export interface ClientDuplexStream<RequestType, ResponseType> extends
    ClientWritableStream<RequestType>, ClientReadableStream<ResponseType> {
  addListener(event: string, listener: Function): this;
  emit(event: string|symbol, ...args: any[]): boolean;
  on(event: string, listener: Function): this;
  once(event: string, listener: Function): this;
  prependListener(event: string, listener: Function): this;
  prependOnceListener(event: string, listener: Function): this;
  removeListener(event: string, listener: Function): this;
}

function setUpReadableStream<ResponseType>(
    stream: ClientReadableStream<ResponseType>, call: CallStream,
    deserialize: (chunk: Buffer) => ResponseType): void {

@@ -190,6 +140,9 @@ export class ClientWritableStreamImpl<RequestType> extends Writable implements
    call.on('metadata', (metadata: Metadata) => {
      this.emit('metadata', metadata);
    });
    call.on('status', (status: StatusObject) => {
      this.emit('status', status);
    });
  }

  cancel(): void {

@@ -79,7 +79,14 @@ class SecureChannelCredentialsImpl extends ChannelCredentialsImpl {
  }
}

function verifyIsBufferOrNull(obj: any, friendlyName: string): void {
  if (obj && !(obj instanceof Buffer)) {
    throw new TypeError(`${friendlyName}, if provided, must be a Buffer.`);
  }
}
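A rough sketch of the validation this adds (the certificate file name is a placeholder):

// ChannelCredentials.createSsl('not-a-buffer' as any);
//   -> throws TypeError: "Root certificate, if provided, must be a Buffer."
// ChannelCredentials.createSsl(fs.readFileSync('ca.pem'));  // Buffers (or omitted arguments) are accepted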
export namespace ChannelCredentials {

  /**
   * Return a new ChannelCredentials instance with a given set of credentials.
   * The resulting instance can be used to construct a Channel that communicates

@@ -91,6 +98,9 @@ export namespace ChannelCredentials {
  export function createSsl(
      rootCerts?: Buffer|null, privateKey?: Buffer|null,
      certChain?: Buffer|null): ChannelCredentials {
    verifyIsBufferOrNull(rootCerts, 'Root certificate');
    verifyIsBufferOrNull(privateKey, 'Private key');
    verifyIsBufferOrNull(certChain, 'Certificate chain');
    if (privateKey && !certChain) {
      throw new Error(
          'Private key must be given with accompanying certificate chain');

@@ -1,6 +1,6 @@
import {EventEmitter} from 'events';
import * as http2 from 'http2';
import {SecureContext} from 'tls';
import {checkServerIdentity, SecureContext, PeerCertificate} from 'tls';
import * as url from 'url';

import {CallCredentials} from './call-credentials';

@@ -12,6 +12,7 @@ import {Status} from './constants';
import {DeadlineFilterFactory} from './deadline-filter';
import {FilterStackFactory} from './filter-stack';
import {Metadata, MetadataObject} from './metadata';
import { MetadataStatusFilterFactory } from './metadata-status-filter';

const IDLE_TIMEOUT_MS = 300000;

@@ -57,7 +58,7 @@ function uniformRandom(min:number, max: number) {
export interface Channel extends EventEmitter {
  createStream(methodName: string, metadata: Metadata, options: CallOptions):
      CallStream;
  connect(callback: () => void): void;
  connect(): Promise<void>;
  getConnectivityState(): ConnectivityState;
  close(): void;

@@ -71,6 +72,7 @@ export interface Channel extends EventEmitter {
}

export class Http2Channel extends EventEmitter implements Channel {
  private readonly authority: url.URL;
  private connectivityState: ConnectivityState = ConnectivityState.IDLE;
  /* For now, we have up to one subchannel, which will exist as long as we are
   * connecting or trying to connect */

@@ -109,7 +111,7 @@ export class Http2Channel extends EventEmitter implements Channel {
        break;
      case ConnectivityState.IDLE:
      case ConnectivityState.SHUTDOWN:
        if (this.subChannel !== null) {
        if (this.subChannel) {
          this.subChannel.shutdown({graceful: true});
          this.subChannel.removeListener('connect', this.subChannelConnectCallback);
          this.subChannel.removeListener('close', this.subChannelCloseCallback);

@@ -134,9 +136,21 @@ export class Http2Channel extends EventEmitter implements Channel {
    let subChannel: http2.ClientHttp2Session;
    let secureContext = this.credentials.getSecureContext();
    if (secureContext === null) {
      subChannel = http2.connect(this.address);
      subChannel = http2.connect(this.authority);
    } else {
      subChannel = http2.connect(this.address, {secureContext});
      const connectionOptions: http2.SecureClientSessionOptions = {
        secureContext,
      }
      // If provided, the value of grpc.ssl_target_name_override should be used
      // to override the target hostname when checking server identity.
      // This option is used for testing only.
      if (this.options['grpc.ssl_target_name_override']) {
        const sslTargetNameOverride = this.options['grpc.ssl_target_name_override'] as string;
        connectionOptions.checkServerIdentity = (host: string, cert: PeerCertificate): Error | undefined => {
          return checkServerIdentity(sslTargetNameOverride, cert);
        }
      }
      subChannel = http2.connect(this.authority, connectionOptions);
    }
    this.subChannel = subChannel;
    let now = new Date();
@@ -165,18 +179,20 @@
  }

  constructor(
      private readonly address: url.URL,
      address: string,
      public readonly credentials: ChannelCredentials,
      private readonly options: ChannelOptions) {
    super();
    if (credentials.getSecureContext() === null) {
      address.protocol = 'http';
      this.authority = new url.URL(`http://${address}`);
    } else {
      address.protocol = 'https';
      this.authority = new url.URL(`https://${address}`);
    }
    this.filterStackFactory = new FilterStackFactory([
      new CompressionFilterFactory(this),
      new CallCredentialsFilterFactory(this), new DeadlineFilterFactory(this)
      new CallCredentialsFilterFactory(this),
      new DeadlineFilterFactory(this),
      new MetadataStatusFilterFactory(this)
    ]);
    this.currentBackoffDeadline = new Date();
    /* The only purpose of these lines is to ensure that this.backoffTimerId has

@@ -189,34 +205,35 @@
      methodName: string, stream: Http2CallStream, metadata: Metadata) {
    let finalMetadata: Promise<Metadata> =
        stream.filterStack.sendMetadata(Promise.resolve(metadata));
    this.connect(() => {
      finalMetadata.then(
          (metadataValue) => {
            let headers = metadataValue.toHttp2Headers();
            headers[HTTP2_HEADER_AUTHORITY] = this.address.hostname;
            headers[HTTP2_HEADER_CONTENT_TYPE] = 'application/grpc';
            headers[HTTP2_HEADER_METHOD] = 'POST';
            headers[HTTP2_HEADER_PATH] = methodName;
            headers[HTTP2_HEADER_TE] = 'trailers';
            if (stream.getStatus() === null) {
              if (this.connectivityState === ConnectivityState.READY) {
                let session: http2.ClientHttp2Session =
                    (this.subChannel as http2.ClientHttp2Session);
                stream.attachHttp2Stream(session.request(headers));
              } else {
                /* In this case, we lost the connection while finalizing
                 * metadata. That should be very unusual */
                setImmediate(() => {
                  this.startHttp2Stream(methodName, stream, metadata);
                });
              }
            }
          },
          (error) => {
            stream.cancelWithStatus(
                Status.UNKNOWN, 'Failed to generate metadata');
          });
    });
    Promise.all([finalMetadata, this.connect()])
        .then(([metadataValue]) => {
          let headers = metadataValue.toHttp2Headers();
          headers[HTTP2_HEADER_AUTHORITY] = this.authority.hostname;
          headers[HTTP2_HEADER_CONTENT_TYPE] = 'application/grpc';
          headers[HTTP2_HEADER_METHOD] = 'POST';
          headers[HTTP2_HEADER_PATH] = methodName;
          headers[HTTP2_HEADER_TE] = 'trailers';
          if (stream.getStatus() === null) {
            if (this.connectivityState === ConnectivityState.READY) {
              const session: http2.ClientHttp2Session = this.subChannel!;
              // Prevent the HTTP/2 session from keeping the process alive.
              // TODO(kjin): Monitor nodejs/node#17620, which adds unref
              // directly to the Http2Session object.
              session.socket.unref();
              stream.attachHttp2Stream(session.request(headers));
            } else {
              /* In this case, we lost the connection while finalizing
               * metadata. That should be very unusual */
              setImmediate(() => {
                this.startHttp2Stream(methodName, stream, metadata);
              });
            }
          }
        }).catch((error: Error & { code: number }) => {
          // We assume the error code isn't 0 (Status.OK)
          stream.cancelWithStatus(error.code || Status.UNKNOWN,
              `Getting metadata from plugin failed with error: ${error.message}`);
        });
  }

  createStream(methodName: string, metadata: Metadata, options: CallOptions):

@@ -235,13 +252,15 @@
    return stream;
  }

  connect(callback: () => void): void {
    this.transitionToState([ConnectivityState.IDLE], ConnectivityState.CONNECTING);
    if (this.connectivityState === ConnectivityState.READY) {
      setImmediate(callback);
    } else {
      this.once('connect', callback);
    }
  connect(): Promise<void> {
    return new Promise((resolve) => {
      this.transitionToState([ConnectivityState.IDLE], ConnectivityState.CONNECTING);
      if (this.connectivityState === ConnectivityState.READY) {
        setImmediate(resolve);
      } else {
        this.once('connect', resolve);
      }
    });
  }
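With connect() now returning a promise, call sites can simply await readiness; a sketch of usage (the constructor arguments are placeholders):

// const channel = new Http2Channel('localhost:50051', credentials, {});
// await channel.connect();                        // resolves once the channel emits 'connect'
// channel.connect().then(() => { /* READY */ });  // callback style, as used in client.ts below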
  getConnectivityState(): ConnectivityState {

@@ -24,7 +24,7 @@ export class Client {
    }
    // TODO(murgatroid99): Figure out how to get version number
    // options['grpc.primary_user_agent'] += 'grpc-node/' + version;
    this.channel = new Http2Channel(new URL(address), credentials, options);
    this.channel = new Http2Channel(address, credentials, options);
  }

  close(): void {

@@ -35,7 +35,7 @@ export class Client {
      void {
    let cb: (error: Error|null) => void = once(callback);
    let callbackCalled = false;
    this.channel.connect(() => {
    this.channel.connect().then(() => {
      cb(null);
    });
    if (deadline !== Infinity) {

@@ -135,7 +135,6 @@
      method: string, serialize: (value: RequestType) => Buffer,
      deserialize: (value: Buffer) => ResponseType, argument: RequestType,
      callback: UnaryCallback<ResponseType>): ClientUnaryCall;

  makeUnaryRequest<RequestType, ResponseType>(
      method: string, serialize: (value: RequestType) => Buffer,
      deserialize: (value: Buffer) => ResponseType, argument: RequestType,

@@ -147,14 +146,13 @@
        metadata, options, callback));
    const call: CallStream =
        this.channel.createStream(method, metadata, options);
    const emitter: ClientUnaryCall = new ClientUnaryCallImpl(call);
    const message: Buffer = serialize(argument);
    const writeObj: WriteObject = {message: message};
    writeObj.flags = options.flags;
    call.write(writeObj);
    call.end();
    this.handleUnaryResponse<ResponseType>(call, deserialize, callback);
    return emitter;
    return new ClientUnaryCallImpl(call);
  }

  makeClientStreamRequest<RequestType, ResponseType>(

@@ -174,7 +172,6 @@
      method: string, serialize: (value: RequestType) => Buffer,
      deserialize: (value: Buffer) => ResponseType,
      callback: UnaryCallback<ResponseType>): ClientWritableStream<RequestType>;

  makeClientStreamRequest<RequestType, ResponseType>(
      method: string, serialize: (value: RequestType) => Buffer,
      deserialize: (value: Buffer) => ResponseType,

@@ -187,10 +184,8 @@
        metadata, options, callback));
    const call: CallStream =
        this.channel.createStream(method, metadata, options);
    const stream: ClientWritableStream<RequestType> =
        new ClientWritableStreamImpl<RequestType>(call, serialize);
    this.handleUnaryResponse<ResponseType>(call, deserialize, callback);
    return stream;
    return new ClientWritableStreamImpl<RequestType>(call, serialize);
  }

  private checkMetadataAndOptions(

@@ -233,14 +228,12 @@
    ({metadata, options} = this.checkMetadataAndOptions(metadata, options));
    const call: CallStream =
        this.channel.createStream(method, metadata, options);
    const stream: ClientReadableStream<ResponseType> =
        new ClientReadableStreamImpl<ResponseType>(call, deserialize);
    const message: Buffer = serialize(argument);
    const writeObj: WriteObject = {message: message};
    writeObj.flags = options.flags;
    call.write(writeObj);
    call.end();
    return stream;
    return new ClientReadableStreamImpl<ResponseType>(call, deserialize);
  }

  makeBidiStreamRequest<RequestType, ResponseType>(

@@ -259,9 +252,7 @@
    ({metadata, options} = this.checkMetadataAndOptions(metadata, options));
    const call: CallStream =
        this.channel.createStream(method, metadata, options);
    const stream: ClientDuplexStream<RequestType, ResponseType> =
        new ClientDuplexStreamImpl<RequestType, ResponseType>(
            call, serialize, deserialize);
    return stream;
    return new ClientDuplexStreamImpl<RequestType, ResponseType>(
        call, serialize, deserialize);
  }
}

@@ -7,6 +7,18 @@ import {Metadata} from './metadata';
const units: [string, number][] =
    [['m', 1], ['S', 1000], ['M', 60 * 1000], ['H', 60 * 60 * 1000]];

function getDeadline(deadline: number) {
  let now = (new Date()).getTime();
  let timeoutMs = Math.max(deadline - now, 0);
  for (let [unit, factor] of units) {
    let amount = timeoutMs / factor;
    if (amount < 1e8) {
      return String(Math.ceil(amount)) + unit;
    }
  }
  throw new Error('Deadline is too far in the future');
}
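For example, a deadline a few seconds away produces a millisecond-unit timeout; only very distant deadlines fall through to the coarser units:

// getDeadline(Date.now() + 3000)  -> roughly '3000m' (a count in milliseconds)
// Deadlines more than ~1e8 ms away are expressed with the 'S', 'M', or 'H' units instead.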
export class DeadlineFilter extends BaseFilter implements Filter {
  private deadline: number;
  constructor(

@@ -36,22 +48,10 @@ export class DeadlineFilter extends BaseFilter implements Filter {
    if (this.deadline === Infinity) {
      return await metadata;
    }
    let timeoutString: Promise<string> =
        new Promise<string>((resolve, reject) => {
          this.channel.connect(() => {
            let now = (new Date()).getTime();
            let timeoutMs = this.deadline - now;
            for (let [unit, factor] of units) {
              let amount = timeoutMs / factor;
              if (amount < 1e8) {
                resolve(String(Math.ceil(amount)) + unit);
                return;
              }
            }
          });
        });
    let finalMetadata = await metadata;
    finalMetadata.set('grpc-timeout', await timeoutString);
    await this.channel.connect();
    const timeoutString = getDeadline(this.deadline);
    const finalMetadata = await metadata;
    finalMetadata.set('grpc-timeout', timeoutString);
    return finalMetadata;
  }
}

@@ -0,0 +1,29 @@
export interface EmitterAugmentation0<Name extends string|symbol> {
  addListener(event: Name, listener: () => void): this;
  emit(event: Name): boolean;
  on(event: Name, listener: () => void): this;
  once(event: Name, listener: () => void): this;
  prependListener(event: Name, listener: () => void): this;
  prependOnceListener(event: Name, listener: () => void): this;
  removeListener(event: Name, listener: () => void): this;
}

export interface EmitterAugmentation1<Name extends string|symbol, Arg> {
  addListener(event: Name, listener: (arg1: Arg) => void): this;
  emit(event: Name, arg1: Arg): boolean;
  on(event: Name, listener: (arg1: Arg) => void): this;
  once(event: Name, listener: (arg1: Arg) => void): this;
  prependListener(event: Name, listener: (arg1: Arg) => void): this;
  prependOnceListener(event: Name, listener: (arg1: Arg) => void): this;
  removeListener(event: Name, listener: (arg1: Arg) => void): this;
}

export interface EmitterAugmentation2<Name extends string|symbol, Arg1, Arg2> {
  addListener(event: Name, listener: (arg1: Arg1, arg2: Arg2) => void): this;
  emit(event: Name, arg1: Arg1, arg2: Arg2): boolean;
  on(event: Name, listener: (arg1: Arg1, arg2: Arg2) => void): this;
  once(event: Name, listener: (arg1: Arg1, arg2: Arg2) => void): this;
  prependListener(event: Name, listener: (arg1: Arg1, arg2: Arg2) => void): this;
  prependOnceListener(event: Name, listener: (arg1: Arg1, arg2: Arg2) => void): this;
  removeListener(event: Name, listener: (arg1: Arg1, arg2: Arg2) => void): this;
}
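These interfaces are intersected with an emitter type to give individual events typed listeners, as the new CallStream and Call types in this commit do; a minimal sketch with a made-up event name:

// Assumes: import {EventEmitter} from 'events';
type ProgressEmitter = EventEmitter & EmitterAugmentation1<'progress', number>;
declare const progress: ProgressEmitter;
progress.on('progress', (percent) => percent.toFixed(1));  // the listener argument is typed as number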
@@ -0,0 +1,36 @@
import {CallStream} from './call-stream';
import {Channel} from './channel';
import {BaseFilter, Filter, FilterFactory} from './filter';
import {StatusObject} from './call-stream';
import {Status} from './constants';

export class MetadataStatusFilter extends BaseFilter implements Filter {
  async receiveTrailers(status: Promise<StatusObject>): Promise<StatusObject> {
    let { code, details, metadata } = await status;
    if (code !== Status.UNKNOWN) {
      // we already have a known status, so don't assign a new one.
      return { code, details, metadata };
    }
    const metadataMap = metadata.getMap();
    if (typeof metadataMap['grpc-status'] === 'string') {
      let receivedCode = Number(metadataMap['grpc-status']);
      if (receivedCode in Status) {
        code = receivedCode;
      }
      metadata.remove('grpc-status');
    }
    if (typeof metadataMap['grpc-message'] === 'string') {
      details = decodeURI(metadataMap['grpc-message'] as string);
      metadata.remove('grpc-message');
    }
    return { code, details, metadata };
  }
}

export class MetadataStatusFilterFactory implements
    FilterFactory<MetadataStatusFilter> {
  constructor(private readonly channel: Channel) {}
  createFilter(callStream: CallStream): MetadataStatusFilter {
    return new MetadataStatusFilter();
  }
}
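Roughly, given trailers carrying the standard gRPC status keys (the message text below is made up), the filter fills in the status and strips those keys:

// Trailer metadata: grpc-status: '5', grpc-message: 'not%20found'; incoming code: UNKNOWN
// receiveTrailers resolves to { code: 5 /* NOT_FOUND */, details: 'not found', metadata }
// with 'grpc-status' and 'grpc-message' removed from the metadata.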
@@ -26,7 +26,7 @@ function isLegalKey(key: string): boolean {
}

function isLegalNonBinaryValue(value: string): boolean {
  return !!value.match(/^[ -~]+$/);
  return !!value.match(/^[ -~]*$/);
}
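Changing the quantifier from '+' to '*' makes the empty string a legal non-binary value:

// isLegalNonBinaryValue('')            // true after this change (previously false)
// isLegalNonBinaryValue('some value')  // true before and after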
|
||||
|
||||
function isBinaryKey(key: string): boolean {
|
||||
|
|
@@ -166,6 +167,7 @@ export class Metadata {
   * Creates an OutgoingHttpHeaders object that can be used with the http2 API.
   */
  toHttp2Headers(): http2.OutgoingHttpHeaders {
    // NOTE: Node <8.9 formats http2 headers incorrectly.
    const result: http2.OutgoingHttpHeaders = {};
    forOwn(this.internalRepr, (values, key) => {
      // We assume that the user's interaction with this object is limited to
@@ -194,7 +195,7 @@ export class Metadata {
          values.forEach((value) => {
            result.add(key, Buffer.from(value, 'base64'));
          });
        } else {
        } else if (values !== undefined) {
          result.add(key, Buffer.from(values, 'base64'));
        }
      } else {
@@ -202,7 +203,7 @@ export class Metadata {
          values.forEach((value) => {
            result.add(key, value);
          });
        } else {
        } else if (values !== undefined) {
          result.add(key, values);
        }
      }
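The two `} else if (values !== undefined) {` guards above reflect the fact that header maps coming from Node's http2 module are typed with possibly-undefined values. A standalone sketch of that pattern (not the actual Metadata code):

import * as http2 from 'http2';

function describeHeaders(headers: http2.IncomingHttpHeaders): string[] {
  const lines: string[] = [];
  Object.keys(headers).forEach((key) => {
    const values = headers[key];
    if (Array.isArray(values)) {
      values.forEach((value) => lines.push(`${key}: ${value}`));
    } else if (values !== undefined) {
      // Without this check, a plain `else` branch could operate on undefined.
      lines.push(`${key}: ${values}`);
    }
  });
  return lines;
}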
@@ -1,28 +1,14 @@
import {Duplex, Readable, Writable} from 'stream';
import {EmitterAugmentation1} from './events';

export interface IntermediateObjectReadable<T> extends Readable {
  read(size?: number): any&T;
}

export interface ObjectReadable<T> extends IntermediateObjectReadable<T> {
export type ObjectReadable<T> = {
  read(size?: number): T;

  addListener(event: string, listener: Function): this;
  emit(event: string|symbol, ...args: any[]): boolean;
  on(event: string, listener: Function): this;
  once(event: string, listener: Function): this;
  prependListener(event: string, listener: Function): this;
  prependOnceListener(event: string, listener: Function): this;
  removeListener(event: string, listener: Function): this;

  addListener(event: 'data', listener: (chunk: T) => void): this;
  emit(event: 'data', chunk: T): boolean;
  on(event: 'data', listener: (chunk: T) => void): this;
  once(event: 'data', listener: (chunk: T) => void): this;
  prependListener(event: 'data', listener: (chunk: T) => void): this;
  prependOnceListener(event: 'data', listener: (chunk: T) => void): this;
  removeListener(event: 'data', listener: (chunk: T) => void): this;
}
} & EmitterAugmentation1<'data', T>
  & IntermediateObjectReadable<T>;

export interface IntermediateObjectWritable<T> extends Writable {
  _write(chunk: any&T, encoding: string, callback: Function): void;
@@ -44,8 +30,7 @@ export interface ObjectWritable<T> extends IntermediateObjectWritable<T> {
  end(chunk: T, encoding?: any, cb?: Function): void;
}

export interface ObjectDuplex<T, U> extends Duplex, ObjectWritable<T>,
    ObjectReadable<U> {
export type ObjectDuplex<T, U> = {
  read(size?: number): U;

  _write(chunk: T, encoding: string, callback: Function): void;
@@ -54,13 +39,4 @@ export interface ObjectDuplex<T, U> extends Duplex, ObjectWritable<T>,
  end(): void;
  end(chunk: T, cb?: Function): void;
  end(chunk: T, encoding?: any, cb?: Function): void;


  addListener(event: string, listener: Function): this;
  emit(event: string|symbol, ...args: any[]): boolean;
  on(event: string, listener: Function): this;
  once(event: string, listener: Function): this;
  prependListener(event: string, listener: Function): this;
  prependOnceListener(event: string, listener: Function): this;
  removeListener(event: string, listener: Function): this;
}
} & Duplex & ObjectWritable<T> & ObjectReadable<U>;
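With ObjectReadable<T> rewritten as an intersection that includes EmitterAugmentation1<'data', T>, consumers get a typed chunk in 'data' listeners. A small sketch, assuming the './object-stream' path for the types above and an illustrative message shape:

import {ObjectReadable} from './object-stream';

interface EchoResponse {
  message: string;
}

function logResponses(stream: ObjectReadable<EchoResponse>): void {
  stream.on('data', (response) => {
    // `response` is typed as EchoResponse rather than any.
    console.log(response.message);
  });
  stream.on('end', () => {
    console.log('stream finished');
  });
}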
@@ -1,13 +1,13 @@
{
  "extends": "./node_modules/google-ts-style/tsconfig-google.json",
  "extends": "./node_modules/gts/tsconfig-google.json",
  "compilerOptions": {
    "lib": [ "es6" ],
    "typeRoots": [
      "node_modules/h2-types", "node_modules/@types"
    ]
    "rootDir": ".",
    "outDir": "build"
  },
  "include": [
    "src/*.ts",
    "src/**/*.ts",
    "test/*.ts",
    "test/**/*.ts"
  ],
  "exclude": [
@@ -28,19 +28,26 @@ const linkSync = require('../../util').linkSync;

const jsDir = __dirname;

gulp.task('js.clean.links', 'Delete npm links', () => {
const execNpmVerb = (verb: string, ...args: string[]) =>
    execa('npm', [verb, ...args], {cwd: jsDir, stdio: 'inherit'});
const execNpmCommand = execNpmVerb.bind(null, 'run');

gulp.task('clean.links', 'Delete npm links', () => {
  return del([path.resolve(jsDir, 'node_modules/@grpc/js-core'),
              path.resolve(jsDir, 'node_modules/@grpc/surface')]);
});

gulp.task('js.clean.all', 'Delete all files created by tasks',
          ['js.clean.links']);
gulp.task('clean.all', 'Delete all files created by tasks', ['clean.links']);

gulp.task('js.install', 'Install dependencies', () => {
  return execa('npm', ['install', '--unsafe-perm'], {cwd: jsDir, stdio: 'inherit'});
});
/**
 * Transpiles TypeScript files in src/ to JavaScript according to the settings
 * found in tsconfig.json.
 */
gulp.task('compile', 'Transpiles src/.', () => execNpmCommand('compile'));

gulp.task('js.link.add', 'Link local copies of dependencies', () => {
gulp.task('install', 'Install dependencies', () => execNpmVerb('install'));

gulp.task('link.add', 'Link local copies of dependencies', () => {
  linkSync(jsDir, './node_modules/@grpc/js-core', '../grpc-js-core');
  linkSync(jsDir, './node_modules/@grpc/surface', '../grpc-surface');
});
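For readers skimming the task list, the two helpers introduced above expand roughly as follows (a comment-only note; the npm scripts referenced come from the package.json change below):

// execNpmVerb('install')     -> spawns `npm install` in jsDir (the package directory)
// execNpmCommand('compile')  -> spawns `npm run compile`, i.e. `tsc -p .`
//                               per the "compile" script added below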
@@ -2,9 +2,16 @@
  "name": "@grpc/js",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "main": "build/src/index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
    "compile": "tsc -p .",
    "test": "echo \"Error: no test specified\" && exit 1",
    "check": "gts check",
    "clean": "gts clean",
    "fix": "gts fix",
    "prepare": "npm run compile",
    "pretest": "npm run compile",
    "posttest": "npm run check"
  },
  "repository": {
    "type": "git",
@@ -19,5 +26,9 @@
  "dependencies": {
    "@grpc/js-core": "^0.1.0",
    "@grpc/surface": "^0.1.0"
  },
  "devDependencies": {
    "gts": "^0.5.1",
    "typescript": "^2.6.1"
  }
}
@@ -0,0 +1,16 @@
{
  "extends": "./node_modules/gts/tsconfig-google.json",
  "compilerOptions": {
    "rootDir": ".",
    "outDir": "build"
  },
  "include": [
    "src/*.ts",
    "src/**/*.ts",
    "test/*.ts",
    "test/**/*.ts"
  ],
  "exclude": [
    "node_modules"
  ]
}
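A side note on the compiler settings above: with "rootDir": "." and "outDir": "build", tsc mirrors the source layout under build/, which is consistent with the "main": "build/src/index.js" entry in the package.json change above (the file names below are illustrative):

// src/index.ts       -> build/src/index.js
// test/some_test.ts  -> build/test/some_test.js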
@@ -63,7 +63,6 @@
        '-Wno-long-long',
        '-Wno-unused-parameter',
        '-DOSATOMIC_USE_INLINED=1',
        '-Ithird_party/abseil-cpp',
      ],
      'ldflags': [
        '-g',
@@ -186,7 +185,6 @@
            '-Wno-long-long',
            '-Wno-unused-parameter',
            '-DOSATOMIC_USE_INLINED=1',
            '-Ithird_party/abseil-cpp',
          ],
          'OTHER_CPLUSPLUSFLAGS': [
            '-g',
@@ -196,7 +194,6 @@
            '-Wno-long-long',
            '-Wno-unused-parameter',
            '-DOSATOMIC_USE_INLINED=1',
            '-Ithird_party/abseil-cpp',
            '-stdlib=libc++',
            '-std=c++11',
            '-Wno-error=deprecated-declarations'
@@ -213,7 +210,7 @@
      'product_prefix': 'lib',
      'type': 'static_library',
      'cflags': [
        '-Wimplicit-fallthrough=0'
        '-Wno-implicit-fallthrough'
      ],
      'dependencies': [
      ],
@@ -615,6 +612,7 @@
        'deps/grpc/src/core/lib/support/env_linux.cc',
        'deps/grpc/src/core/lib/support/env_posix.cc',
        'deps/grpc/src/core/lib/support/env_windows.cc',
        'deps/grpc/src/core/lib/support/fork.cc',
        'deps/grpc/src/core/lib/support/histogram.cc',
        'deps/grpc/src/core/lib/support/host_port.cc',
        'deps/grpc/src/core/lib/support/log.cc',
@@ -624,7 +622,6 @@
        'deps/grpc/src/core/lib/support/log_windows.cc',
        'deps/grpc/src/core/lib/support/mpscq.cc',
        'deps/grpc/src/core/lib/support/murmur_hash.cc',
        'deps/grpc/src/core/lib/support/stack_lockfree.cc',
        'deps/grpc/src/core/lib/support/string.cc',
        'deps/grpc/src/core/lib/support/string_posix.cc',
        'deps/grpc/src/core/lib/support/string_util_windows.cc',
@@ -683,7 +680,6 @@
        'deps/grpc/src/core/lib/http/httpcli.cc',
        'deps/grpc/src/core/lib/http/parser.cc',
        'deps/grpc/src/core/lib/iomgr/call_combiner.cc',
        'deps/grpc/src/core/lib/iomgr/closure.cc',
        'deps/grpc/src/core/lib/iomgr/combiner.cc',
        'deps/grpc/src/core/lib/iomgr/endpoint.cc',
        'deps/grpc/src/core/lib/iomgr/endpoint_pair_posix.cc',
@@ -698,6 +694,8 @@
        'deps/grpc/src/core/lib/iomgr/ev_windows.cc',
        'deps/grpc/src/core/lib/iomgr/exec_ctx.cc',
        'deps/grpc/src/core/lib/iomgr/executor.cc',
        'deps/grpc/src/core/lib/iomgr/fork_posix.cc',
        'deps/grpc/src/core/lib/iomgr/fork_windows.cc',
        'deps/grpc/src/core/lib/iomgr/gethostname_fallback.cc',
        'deps/grpc/src/core/lib/iomgr/gethostname_host_name_max.cc',
        'deps/grpc/src/core/lib/iomgr/gethostname_sysconf.cc',
@@ -64,7 +64,6 @@ grpc_completion_queue *GetCompletionQueue() { return queue; }

void CompletionQueueNext() {
  if (pending_batches == 0) {
    GPR_ASSERT(!uv_is_active((uv_handle_t *)&prepare));
    uv_prepare_start(&prepare, drain_completion_queue);
  }
  pending_batches++;
@@ -28,9 +28,7 @@
#include "grpc/support/time.h"

// TODO(murgatroid99): Remove this when the endpoint API becomes public
extern "C" {
#include "src/core/lib/iomgr/pollset_uv.h"
}

#include "call.h"
#include "call_credentials.h"
@@ -35,20 +35,20 @@ const testDir = path.resolve(nativeCoreDir, 'test');
const pkg = require('./package');
const jshintConfig = pkg.jshintConfig;

gulp.task('native.core.clean', 'Delete generated files', () => {
gulp.task('clean', 'Delete generated files', () => {
  return del([path.resolve(nativeCoreDir, 'build'),
              path.resolve(nativeCoreDir, 'ext/node')]);
});

gulp.task('native.core.clean.all', 'Delete all files created by tasks',
          ['native.core.clean']);
gulp.task('clean.all', 'Delete all files created by tasks',
          ['clean']);

gulp.task('native.core.install', 'Install native core dependencies', () => {
gulp.task('install', 'Install native core dependencies', () => {
  return execa('npm', ['install', '--build-from-source', '--unsafe-perm'],
               {cwd: nativeCoreDir, stdio: 'inherit'});
});

gulp.task('native.core.install.windows', 'Install native core dependencies for MS Windows', () => {
gulp.task('install.windows', 'Install native core dependencies for MS Windows', () => {
  return execa('npm', ['install', '--build-from-source'],
               {cwd: nativeCoreDir, stdio: 'inherit'}).catch(() =>
del(path.resolve(process.env.USERPROFILE, '.node-gyp', process.versions.node, 'include/node/openssl'), { force: true }).then(() =>
@@ -57,21 +57,21 @@ execa('npm', ['install', '--build-from-source'],
))
});

gulp.task('native.core.lint', 'Emits linting errors', () => {
gulp.task('lint', 'Emits linting errors', () => {
  return gulp.src([`${nativeCoreDir}/index.js`, `${srcDir}/*.js`, `${testDir}/*.js`])
      .pipe(jshint(pkg.jshintConfig))
      .pipe(jshint.reporter('default'));
});

gulp.task('native.core.build', 'Build native package', () => {
gulp.task('build', 'Build native package', () => {
  return execa('npm', ['run', 'build'], {cwd: nativeCoreDir, stdio: 'inherit'});
});

gulp.task('native.core.test', 'Run all tests', ['native.core.build'], () => {
gulp.task('test', 'Run all tests', ['build'], () => {
  return gulp.src(`${testDir}/*.js`).pipe(mocha({reporter: 'mocha-jenkins-reporter'}));
});

gulp.task('native.core.doc.gen', 'Generate docs', (cb) => {
gulp.task('doc.gen', 'Generate docs', (cb) => {
  var config = require('./jsdoc_conf.json');
  gulp.src([`${nativeCoreDir}/README.md`, `${nativeCoreDir}/index.js`, `${srcDir}/*.js`], {read: false})
      .pipe(jsdoc(config, cb));
@@ -1,6 +1,6 @@
{
  "name": "grpc",
  "version": "1.8.0-dev",
  "version": "1.9.0-dev",
  "author": "Google Inc.",
  "description": "gRPC Library for Node",
  "homepage": "https://grpc.io/",
@@ -16,7 +16,7 @@
    }
  ],
  "directories": {
    "lib": "src/node/src"
    "lib": "src"
  },
  "scripts": {
    "build": "./node_modules/.bin/node-pre-gyp build",
@@ -53,6 +53,7 @@
    "README.md",
    "deps/grpc/etc/",
    "index.js",
    "index.d.ts",
    "src/*.js",
    "ext/*.{cc,h}",
    "deps/grpc/include/grpc/**/*.h",
@@ -67,7 +68,7 @@
    "binding.gyp"
  ],
  "main": "index.js",
  "typings": "src/index.d.ts",
  "typings": "index.d.ts",
  "license": "Apache-2.0",
  "jshintConfig": {
    "bitwise": true,
@@ -27,6 +27,26 @@ var binary = require('node-pre-gyp/lib/pre-binding');
var path = require('path');
var binding_path =
    binary.find(path.resolve(path.join(__dirname, '../package.json')));
var binding = require(binding_path);
var binding;
try {
  binding = require(binding_path);
} catch (e) {
  var fs = require('fs');
  var searchPath = path.dirname(path.dirname(binding_path));
  var searchName = path.basename(path.dirname(binding_path));
  var foundNames = fs.readdirSync(searchPath);
  if (foundNames.indexOf(searchName) === -1) {
    var message = `Failed to load gRPC binary module because it was not installed for the current system
Expected directory: ${searchName}
Found: [${foundNames.join(', ')}]
This problem can often be fixed by running "npm rebuild" on the current system
Original error: ${e.message}`;
    var error = new Error(message);
    error.code = e.code;
    throw error;
  } else {
    throw e;
  }
}

module.exports = binding;
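When the precompiled binary directory is missing, the new error reads roughly as follows (the directory names and the original error shown here are illustrative, not taken from the diff):

// Failed to load gRPC binary module because it was not installed for the current system
// Expected directory: node-v57-linux-x64-glibc            (hypothetical)
// Found: [node-v48-linux-x64-glibc]                       (hypothetical)
// This problem can often be fixed by running "npm rebuild" on the current system
// Original error: Cannot find module '.../grpc_node.node' (hypothetical)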
@@ -195,7 +195,7 @@
      'product_prefix': 'lib',
      'type': 'static_library',
      'cflags': [
        '-Wimplicit-fallthrough=0'
        '-Wno-implicit-fallthrough'
      ],
      'dependencies': [
        % for dep in getattr(lib, 'deps', []):
@@ -18,7 +18,7 @@
    }
  ],
  "directories": {
    "lib": "src/node/src"
    "lib": "src"
  },
  "scripts": {
    "build": "./node_modules/.bin/node-pre-gyp build",
@@ -33,7 +33,7 @@
    "arguejs": "^0.2.3",
    "lodash": "^4.15.0",
    "nan": "^2.0.0",
    "node-pre-gyp": "^0.6.35",
    "node-pre-gyp": "^0.6.39",
    "protobufjs": "^5.0.0"
  },
  "devDependencies": {
@@ -55,6 +55,7 @@
    "README.md",
    "deps/grpc/etc/",
    "index.js",
    "index.d.ts",
    "src/*.js",
    "ext/*.{cc,h}",
    "deps/grpc/include/grpc/**/*.h",
@@ -69,7 +70,7 @@
    "binding.gyp"
  ],
  "main": "index.js",
  "typings": "src/index.d.ts",
  "typings": "index.d.ts",
  "license": "Apache-2.0",
  "jshintConfig": {
    "bitwise": true,
@@ -132,8 +132,7 @@ describe('channel', function() {
                   grpc.connectivityState.IDLE);
    });
  });
  // This suite test appears to be triggering grpc/grpc#12932; skipping for now
  describe.skip('watchConnectivityState', function() {
  describe('watchConnectivityState', function() {
    var channel;
    beforeEach(function() {
      channel = new grpc.Channel('localhost', insecureCreds, {});
@@ -0,0 +1,2 @@
FROM node:8-alpine
RUN apk add --no-cache python curl bash build-base
@@ -0,0 +1,38 @@
#!/bin/bash
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

source ~/.nvm/nvm.sh

nvm install 8
set -ex

cd $(dirname $0)
tool_dir=$(pwd)
cd $tool_dir/../../..
base_dir=$(pwd)

export ARTIFACTS_OUT=$base_dir/artifacts

rm -rf build || true

mkdir -p "${ARTIFACTS_OUT}"

docker build -t alpine_node_artifact $base_dir/tools/docker/alpine_artifact

$tool_dir/build_artifact_node.sh

$tool_dir/build_artifact_node_arm.sh

docker run -e ARTIFACTS_OUT=/var/grpc/artifacts -v $base_dir:/var/grpc alpine_node_artifact bash -c /var/grpc/tools/run_tests/artifacts/build_artifact_node.sh --with-alpine
@@ -0,0 +1,52 @@
@rem Copyright 2016 gRPC authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem     http://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.

set arch_list=ia32 x64

set node_versions=4.0.0 5.0.0 6.0.0 7.0.0 8.0.0 9.0.0

set electron_versions=1.0.0 1.1.0 1.2.0 1.3.0 1.4.0 1.5.0 1.6.0 1.7.0

set PATH=%PATH%;C:\Program Files\nodejs\;%APPDATA%\npm

del /f /q BUILD || rmdir build /s /q

call npm update || goto :error

mkdir -p %ARTIFACTS_OUT%

for %%a in (%arch_list%) do (
  for %%v in (%node_versions%) do (
    call .\node_modules\.bin\node-pre-gyp.cmd configure build --target=%%v --target_arch=%%a

    @rem Try again after removing openssl headers
    rmdir "%USERPROFILE%\.node-gyp\%%v\include\node\openssl" /S /Q
    rmdir "%USERPROFILE%\.node-gyp\iojs-%%v\include\node\openssl" /S /Q
    call .\node_modules\.bin\node-pre-gyp.cmd build package --target=%%v --target_arch=%%a || goto :error

    xcopy /Y /I /S build\stage\* %ARTIFACTS_OUT%\ || goto :error
  )

  for %%v in (%electron_versions%) do (
    cmd /V /C "set "HOME=%USERPROFILE%\electron-gyp" && call .\node_modules\.bin\node-pre-gyp.cmd configure rebuild package --runtime=electron --target=%%v --target_arch=%%a --disturl=https://atom.io/download/electron" || goto :error

    xcopy /Y /I /S build\stage\* %ARTIFACTS_OUT%\ || goto :error
  )
)
if %errorlevel% neq 0 exit /b %errorlevel%

goto :EOF

:error
exit /b 1
@@ -0,0 +1,66 @@
#!/bin/bash
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

NODE_ALPINE_BUILD=false

while true ; do
  case $1 in
    --with-alpine)
      NODE_ALPINE_BUILD=true
      ;;
    "")
      ;;
    *)
      echo "Unknown parameter: $1"
      exit 1
      ;;
  esac
  shift || break
done

NODE_ALPINE_BUILD=$1

umask 022

cd $(dirname $0)/../../..

rm -rf build || true

mkdir -p "${ARTIFACTS_OUT}"

npm update

arch_list=( ia32 x64 )

node_versions=( 4.0.0 5.0.0 6.0.0 7.0.0 8.0.0 9.0.0 )

electron_versions=( 1.0.0 1.1.0 1.2.0 1.3.0 1.4.0 1.5.0 1.6.0 1.7.0 )

for arch in ${arch_list[@]}
do
  for version in ${node_versions[@]}
  do
    ./node_modules/.bin/node-pre-gyp configure rebuild package --target=$version --target_arch=$arch --grpc_alpine=$NODE_ALPINE_BUILD
    cp -r build/stage/* "${ARTIFACTS_OUT}"/
  done

  for version in ${electron_versions[@]}
  do
    HOME=~/.electron-gyp ./node_modules/.bin/node-pre-gyp configure rebuild package --runtime=electron --target=$version --target_arch=$arch --disturl=https://atom.io/download/electron
    cp -r build/stage/* "${ARTIFACTS_OUT}"/
  done
done

rm -rf build || true
@@ -0,0 +1,36 @@
#!/bin/bash
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

source ~/.nvm/nvm.sh

nvm use 8
set -ex

cd $(dirname $0)/../../..

rm -rf build || true

mkdir -p "${ARTIFACTS_OUT}"

npm update

node_versions=( 4.0.0 5.0.0 6.0.0 7.0.0 8.0.0 9.0.0 )

for version in ${node_versions[@]}
do
  # Cross compile for ARM on x64
  CC=arm-linux-gnueabihf-gcc CXX=arm-linux-gnueabihf-g++ LD=arm-linux-gnueabihf-g++ ./node_modules/.bin/node-pre-gyp configure rebuild package testpackage --target=$version --target_arch=arm
  cp -r build/stage/* "${ARTIFACTS_OUT}"/
done
@@ -0,0 +1,84 @@
#!/bin/bash
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

source ~/.nvm/nvm.sh

nvm use 8
set -ex

cd $(dirname $0)/../../..

base=$(pwd)

artifacts=$base/artifacts

mkdir -p $artifacts
cp -r $EXTERNAL_GIT_ROOT/platform={windows,linux,macos}/artifacts/node_ext_*/* $artifacts/ || true

npm update
npm pack

cp grpc-*.tgz $artifacts/grpc.tgz

mkdir -p bin

cd $base/src/node/health_check
npm pack
cp grpc-health-check-*.tgz $artifacts/

cd $base/src/node/tools
npm update
npm pack
cp grpc-tools-*.tgz $artifacts/
tools_version=$(npm list | grep -oP '(?<=grpc-tools@)\S+')

output_dir=$artifacts/grpc-precompiled-binaries/node/grpc-tools/v$tools_version
mkdir -p $output_dir

well_known_protos=( any api compiler/plugin descriptor duration empty field_mask source_context struct timestamp type wrappers )

for arch in {x86,x64}; do
  case $arch in
    x86)
      node_arch=ia32
      ;;
    *)
      node_arch=$arch
      ;;
  esac
  for plat in {windows,linux,macos}; do
    case $plat in
      windows)
        node_plat=win32
        ;;
      macos)
        node_plat=darwin
        ;;
      *)
        node_plat=$plat
        ;;
    esac
    rm -r bin/*
    input_dir="$EXTERNAL_GIT_ROOT/platform=${plat}/artifacts/protoc_${plat}_${arch}"
    cp $input_dir/protoc* bin/
    cp $input_dir/grpc_node_plugin* bin/
    mkdir -p bin/google/protobuf
    mkdir -p bin/google/protobuf/compiler # needed for plugin.proto
    for proto in "${well_known_protos[@]}"; do
      cp $base/third_party/protobuf/src/google/protobuf/$proto.proto bin/google/protobuf/$proto.proto
    done
    tar -czf $output_dir/$node_plat-$node_arch.tar.gz bin/
  done
done
@@ -28,19 +28,19 @@ const linkSync = require('../../util').linkSync;

const nativeDir = __dirname;

gulp.task('native.clean.links', 'Delete npm links', () => {
gulp.task('clean.links', 'Delete npm links', () => {
  return del([path.resolve(nativeDir, 'node_modules/grpc'),
              path.resolve(nativeDir, 'node_modules/@grpc/surface')]);
});

gulp.task('native.clean.all', 'Delete all files created by tasks',
          ['native.clean.links']);
gulp.task('clean.all', 'Delete all files created by tasks',
          ['clean.links']);

gulp.task('native.install', 'Install dependencies', () => {
gulp.task('install', 'Install dependencies', () => {
  return execa('npm', ['install', '--unsafe-perm'], {cwd: nativeDir, stdio: 'inherit'});
});

gulp.task('native.link.add', 'Link local copies of dependencies', () => {
gulp.task('link.add', 'Link local copies of dependencies', () => {
  linkSync(nativeDir, './node_modules/grpc', '../grpc-native-core');
  linkSync(nativeDir, './node_modules/@grpc/surface', '../grpc-surface');
});
@@ -25,6 +25,6 @@ const execa = require('execa');

const surfaceDir = __dirname;

gulp.task('surface.install', 'Install surface dependencies', () => {
gulp.task('install', 'Install surface dependencies', () => {
  return execa('npm', ['install', '--unsafe-perm'], {cwd: surfaceDir, stdio: 'inherit'});
});
@@ -29,13 +29,13 @@ const gulp = help(_gulp);
const testDir = __dirname;
const apiTestDir = path.resolve(testDir, 'api');

gulp.task('internal.test.install', 'Install test dependencies', () => {
gulp.task('install', 'Install test dependencies', () => {
  return execa('npm', ['install'], {cwd: testDir, stdio: 'inherit'});
});

gulp.task('internal.test.clean.all', 'Delete all files created by tasks', () => {});
gulp.task('clean.all', 'Delete all files created by tasks', () => {});

gulp.task('internal.test.test', 'Run API-level tests', () => {
gulp.task('test', 'Run API-level tests', () => {
  // run mocha tests matching a glob with a pre-required fixture,
  // returning the associated gulp stream
  const apiTestGlob = `${apiTestDir}/*.js`;
@@ -0,0 +1,7 @@
{
  "compilerOptions": {
    "lib": [
      "es2015"
    ]
  }
}