Lint Python code for undefined names (#1721)
* Lint Python code for undefined names
* Lint Python code for undefined names
* Exclude tfdv.py to work around an overzealous pytest
* Fixup for tfdv.py
* Fixup for tfdv.py
* Fixup for tfdv.py
This commit is contained in:
parent 23993486c5
commit 8e1e823139
@@ -94,5 +94,9 @@ matrix:
   - language: python
     python: "3.7"
     env: TOXENV=py37
     dist: xenial # required for Python >= 3.7
     script: *1
+  - name: "Lint Python code with flake8"
+    language: python
+    python: "3.7"
+    install: pip install flake8
+    script: flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
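Background on the selected checks (not stated in the diff itself, but these are standard flake8/pyflakes codes): E9 covers syntax and indentation errors, F63 invalid comparisons and assertions, F7 misplaced statements such as break outside a loop, and F82 undefined names, so the job fails the build only on outright errors rather than style nits. A minimal sketch of the kind of bug this gate catches (the file and function below are illustrative, not from the repo):

# lint_demo.py -- illustrative only; a misspelled name is valid syntax but
# raises NameError at run time, and pyflakes reports it statically as F821.
def greet(name):
    return "Hello, " + nmae  # F821: undefined name 'nmae' (typo for 'name')

# Running the same command as the new CI job reports the error and exits non-zero:
#   flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics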
@@ -12,6 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+# flake8: noqa TODO
+
 import json
 from pathlib import Path
 from bokeh.layouts import row
@@ -34,7 +36,7 @@ from tensorflow.python.lib.io import file_io
 # trueclass
 # true_score_column

-if "is_generated" is not in variables or variables["is_generated"] is False:
+if not variables.get("is_generated"):
     # Create data from specified csv file(s).
     # The schema file provides column names for the csv file that will be used
     # to generate the roc curve.
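Worth noting about this hunk (my reading, not spelled out in the commit message): `"is_generated" is not in variables` is not even parseable, since `is not` cannot be followed by `in`, so flake8's E9 syntax-error check fails on it; the replacement uses `dict.get`, which returns None for a missing key, so a single falsiness test covers both "key absent" and "flag explicitly False". A short sketch of the two cases, assuming `variables` is a plain dict:

variables = {}                         # stand-in for the injected mapping

if not variables.get("is_generated"):  # key missing -> None -> branch taken
    print("generating data from csv")

variables["is_generated"] = True
if not variables.get("is_generated"):  # flag set -> branch skipped
    print("never reached")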
@@ -76,7 +76,13 @@ snapshots['TestExporterMethods::test_create_cell_from_args_with_one_arg 1'] = ''

 '''

-snapshots['TestExporterMethods::test_create_cell_from_file 1'] = '''# Copyright 2019 Google LLC
+snapshots['TestExporterMethods::test_create_cell_from_file 1'] = '''"""
+test.py is used for test_server.py as a way to test the tornado web server
+without having a reliance on the validity of visualizations. It does not serve
+as a valid visualization and is only used for testing purposes.
+"""
+
+# Copyright 2019 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -90,17 +96,8 @@ snapshots['TestExporterMethods::test_create_cell_from_file 1'] = '''# Copyright
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import tensorflow_data_validation as tfdv
-
-# The following variables are provided through dependency injection. These
-# variables come from the specified input path and arguments provided by the
-# API post request.
-#
-# source
-
-train_stats = tfdv.generate_statistics_from_csv(data_location=source)
-
-tfdv.visualize_statistics(train_stats)
+x = 2
+print(x)
 '''

 snapshots['TestExporterMethods::test_generate_html_from_notebook 1'] = '''
@@ -58,7 +58,7 @@ class TestExporterMethods(snapshottest.TestCase):

   def test_create_cell_from_file(self):
     self.maxDiff = None
-    cell = self.exporter.create_cell_from_file("tfdv.py")
+    cell = self.exporter.create_cell_from_file("test.py")
     self.assertMatchSnapshot(cell.source)

   def test_generate_html_from_notebook(self):
@@ -20,6 +20,6 @@ import tensorflow_data_validation as tfdv
 #
 # source

-train_stats = tfdv.generate_statistics_from_csv(data_location=source)
+train_stats = tfdv.generate_statistics_from_csv(data_location=source)  # noqa: F821

 tfdv.visualize_statistics(train_stats)
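For context on the `# noqa: F821` here (my interpretation of the change, not text from the commit): `source` is never assigned in this snippet because the serving layer injects it before execution, so pyflakes would otherwise report it as an undefined name; the trailing comment suppresses only F821 on that one line and leaves every other check active. A tiny sketch of the pattern with a hypothetical injected name:

# fragment.py -- illustrative template fragment; `injected_path` is expected to
# be defined by the host process before this code runs, so it is deliberately
# undefined here and would raise NameError if executed standalone.
print(injected_path)   # noqa: F821  -- silence "undefined name" on this line only
print(injected_pth)    # a genuine typo elsewhere is still reported as F821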
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+# flake8: noqa TODO

 import kfp.dsl as dsl
 import datetime
@@ -17,6 +17,12 @@ from pathlib2 import Path

 from common import _utils

+try:
+  unicode
+except NameError:
+  unicode = str
+
+
 def main(argv=None):
   parser = argparse.ArgumentParser(description='Create EMR Cluster')
   parser.add_argument('--region', type=str, help='EMR Cluster region.')
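The `try: unicode` block repeated across these component scripts is the usual Python 2/3 compatibility shim: Python 3 removed the `unicode` builtin, so later calls such as `unicode(...)` in these files would both fail at run time and be flagged by flake8 as F821; binding the name once up front satisfies the interpreter and the linter. A standalone sketch of the idiom:

# py2/py3 shim, shown in isolation (the component scripts add the same pattern).
try:
    unicode                  # exists as a builtin on Python 2
except NameError:            # on Python 3 the lookup fails...
    unicode = str            # ...so alias the name to str

print(unicode(42))           # prints "42" on either interpreter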
@@ -29,6 +29,11 @@ from pathlib2 import Path

 from common import _utils

+try:
+  unicode
+except NameError:
+  unicode = str
+

 def main(argv=None):
   parser = argparse.ArgumentParser(description='Submit PySpark Job')
@@ -30,6 +30,11 @@ from pathlib2 import Path

 from common import _utils

+try:
+  unicode
+except NameError:
+  unicode = str
+

 def main(argv=None):
   parser = argparse.ArgumentParser(description='Submit Spark Job')
@@ -16,6 +16,12 @@ from pathlib2 import Path

 from common import _utils

+try:
+  unicode
+except NameError:
+  unicode = str
+
+
 def main(argv=None):
   parser = argparse.ArgumentParser(description='SageMaker Batch Transformation Job')
   parser.add_argument('--region', type=str.strip, required=True, help='The region where the cluster launches.')
@@ -21,6 +21,11 @@ import logging
 import re
 import requests

+try:
+  unicode
+except NameError:
+  unicode = str
+
 def urls_for_zone(zone, location_to_urls_map):
   """Returns list of potential proxy URLs for a given zone.

@@ -136,4 +136,4 @@ def ground_truth_test(region='us-west-2',
     ).apply(use_aws_secret('aws-secret', 'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY'))

 if __name__ == '__main__':
-    kfp.compiler.Compiler().compile(hpo_test, __file__ + '.zip')
+    kfp.compiler.Compiler().compile(hpo_test, __file__ + '.zip') # noqa: F821 TODO
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+# flake8: noqa TODO

 import kfp.dsl as dsl
 import kfp.gcp as gcp
@@ -30,7 +30,7 @@ def create_function_from_parameters(func: Callable[[Mapping[str, Any]], Any], pa
   new_signature = Signature(parameters) # Checks the parameter consistency

   def pass_locals():
-    return dict_func(locals())
+    return dict_func(locals()) # noqa: F821 TODO

   code = pass_locals.__code__
   mod_co_argcount = len(parameters)
@@ -243,7 +243,7 @@ class Pipeline():
     Args:
       metadata (ComponentMeta): component metadata
     '''
-    if not isinstance(metadata, PipelineMeta):
+    if not isinstance(metadata, PipelineMeta): # noqa: F821 TODO
       raise ValueError('_set_medata is expecting PipelineMeta.')
     self._metadata = metadata

@@ -63,7 +63,7 @@ class TestPythonComponent(unittest.TestCase):

   @component
   def b_op(field_x: {'customized_type': {'property_a': 'value_a', 'property_b': 'value_b'}},
-           field_y: 'GcsUri',
+           field_y: 'GcsUri', # noqa: F821 TODO
           field_z: GCSPath()) -> {'output_model_uri': 'GcsUri'}:
     return ContainerOp(
       name = 'operator b',