--- theano/compile/mode.py.orig	2019-01-15 14:13:57.000000000 -0700
+++ theano/compile/mode.py	2019-08-22 13:25:25.024334947 -0600
@@ -261,7 +261,7 @@ class Mode(object):
     def __init__(self, linker=None, optimizer='default'):
         if linker is None:
             linker = config.linker
-        if optimizer is 'default':
+        if optimizer == 'default':
             optimizer = config.optimizer
         Mode.__setstate__(self, (linker, optimizer))
 
--- theano/gof/opt.py.orig	2019-01-15 14:13:57.000000000 -0700
+++ theano/gof/opt.py	2019-08-22 14:06:43.820896086 -0600
@@ -1284,7 +1284,7 @@ def local_optimizer(tracks, inplace=Fals
 
         """
         if tracks is not None:
-            if len(tracks) is 0:
+            if len(tracks) == 0:
                 raise ValueError("Use None instead of an empty list to apply to all nodes.", f.__module__, f.__name__)
             for t in tracks:
                 if not (isinstance(t, op.Op) or issubclass(t, op.PureOp)):
--- theano/gof/tests/test_link.py.orig	2019-01-15 14:13:57.000000000 -0700
+++ theano/gof/tests/test_link.py	2019-08-22 16:29:02.294513027 -0600
@@ -113,7 +113,7 @@ class TestPerformLinker(unittest.TestCas
     def test_input_output_same(self):
         x, y, z = inputs()
         fn = perform_linker(FunctionGraph([x], [x])).make_function()
-        assert 1.0 is fn(1.0)
+        assert 1.0 == fn(1.0)
 
     def test_input_dependency0(self):
         x, y, z = inputs()
--- theano/tensor/nnet/bn.py.orig	2019-01-15 14:13:57.000000000 -0700
+++ theano/tensor/nnet/bn.py	2019-08-22 13:35:49.109305914 -0600
@@ -642,7 +642,7 @@ class AbstractBatchNormTrainGrad(Op):
         # some inputs should be disconnected
         results = [g_wrt_x, g_wrt_dy, g_wrt_scale, g_wrt_x_mean, g_wrt_x_invstd,
                    theano.gradient.DisconnectedType()()]
-        return [theano.gradient.DisconnectedType()() if r is 0 else r
+        return [theano.gradient.DisconnectedType()() if r == 0 else r
                 for r in results]
 
     def connection_pattern(self, node):
--- theano/tensor/nnet/tests/test_conv.py.orig	2019-01-15 14:13:57.000000000 -0700
+++ theano/tensor/nnet/tests/test_conv.py	2019-08-22 16:29:51.149656121 -0600
@@ -95,7 +95,7 @@ class TestConv2D(utt.InferShapeTester):
         # REFERENCE IMPLEMENTATION
         s = 1.
         orig_image_data = image_data
-        if border_mode is not 'full':
+        if border_mode != 'full':
            s = -1.
        out_shape2d = np.array(N_image_shape[-2:]) +\
            s * np.array(N_filter_shape[-2:]) - s
--- theano/tests/test_determinism.py.orig	2019-01-15 14:13:57.000000000 -0700
+++ theano/tests/test_determinism.py	2019-08-22 16:31:03.119393791 -0600
@@ -57,7 +57,7 @@ def test_determinism_1():
             updates.append((s, val))
 
         for var in theano.gof.graph.ancestors(update for _, update in updates):
-            if var.name is not None and var.name is not 'b':
+            if var.name is not None and var.name != 'b':
                 if var.name[0] != 's' or len(var.name) != 2:
                     var.name = None