tensorflow: dynamic_rnn broken on master?

The following code works on TF 0.10 but fails on master:

import tensorflow as tf

# Batch of 1, an unknown number of time steps, 20 features per step.
i = tf.placeholder(tf.float32, shape=[1, None, 20])
cell = tf.nn.rnn_cell.GRUCell(30)
o = tf.nn.dynamic_rnn(cell, i, dtype=tf.float32)

The autogenerated documentation for master does not mention any change that could explain this.

The failure is the following exception:

ValueError: Time steps is not the same for all the elements in the input in a batch.

raised on line 915 of rnn.py
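For what it's worth, the failure appears to be specific to the time dimension being unknown: with a fully defined placeholder shape, the same graph builds without complaint. A minimal sketch, assuming the check only trips on an unknown time dimension (the fixed length 50 is arbitrary, chosen only for illustration):

import tensorflow as tf

# Identical repro, but with a static time dimension; construction succeeds.
i = tf.placeholder(tf.float32, shape=[1, 50, 20])
cell = tf.nn.rnn_cell.GRUCell(30)
outputs, state = tf.nn.dynamic_rnn(cell, i, dtype=tf.float32)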

I could not find any issues in the issue tracker that address this.

About this issue

  • State: closed
  • Created 8 years ago
  • Comments: 20 (16 by maintainers)

Most upvoted comments

Hello, I am running this code:

from pyspark.ml.image import ImageSchema  # assumption: Spark 2.3's built-in image reader
from sparkdl import DeepImagePredictor

image_df = ImageSchema.readImages("flower_photos/sample/")

predictor = DeepImagePredictor(inputCol="image", outputCol="predicted_labels",
                               modelName="InceptionV3", decodePredictions=True,
                               topK=10)
predictions_df = predictor.transform(image_df)

and I get this error:


ValueError                                Traceback (most recent call last)
<ipython-input-10-d7bde844670d> in <module>()
      4
      5 predictor = DeepImagePredictor(inputCol="image", outputCol="predicted_labels", modelName="InceptionV3", decodePredictions=True, topK=10)
----> 6 predictions_df = predictor.transform(image_df)

~/anaconda3/lib/python3.6/site-packages/pyspark/ml/base.py in transform(self, dataset, params)
    171                 return self.copy(params)._transform(dataset)
    172             else:
--> 173                 return self._transform(dataset)
    174         else:
    175             raise ValueError("Params must be a param map but got %s." % type(params))

/tmp/spark-ad4dff5c-2eda-49cc-b5a8-b81ede3fac4b/userFiles-d4fb97fa-84ed-4aaf-8036-d6d38363201e/databricks_spark-deep-learning-1.0.0-spark2.3-s_2.11.jar/sparkdl/transformers/named_image.py in _transform(self, dataset)
     94                                         modelName=self.getModelName(),
     95                                         featurize=False)
---> 96         transformed = transformer.transform(dataset)
     97         if self.getOrDefault(self.decodePredictions):
     98             return self._decodeOutputAsPredictions(transformed)

~/anaconda3/lib/python3.6/site-packages/pyspark/ml/base.py in transform(self, dataset, params)
    171                 return self.copy(params)._transform(dataset)
    172             else:
--> 173                 return self._transform(dataset)
    174         else:
    175             raise ValueError("Params must be a param map but got %s." % type(params))

/tmp/spark-ad4dff5c-2eda-49cc-b5a8-b81ede3fac4b/userFiles-d4fb97fa-84ed-4aaf-8036-d6d38363201e/databricks_spark-deep-learning-1.0.0-spark2.3-s_2.11.jar/sparkdl/transformers/named_image.py in _transform(self, dataset)
    327                                         outputMode=modelGraphSpec["outputMode"])
    328         resizeUdf = createResizeImageUDF(modelGraphSpec["inputTensorSize"])
--> 329         result = tfTransformer.transform(dataset.withColumn(resizedCol, resizeUdf(inputCol)))
    330         return result.drop(resizedCol)
    331

~/anaconda3/lib/python3.6/site-packages/pyspark/ml/base.py in transform(self, dataset, params)
    171                 return self.copy(params)._transform(dataset)
    172             else:
--> 173                 return self._transform(dataset)
    174         else:
    175             raise ValueError("Params must be a param map but got %s." % type(params))

/tmp/spark-ad4dff5c-2eda-49cc-b5a8-b81ede3fac4b/userFiles-d4fb97fa-84ed-4aaf-8036-d6d38363201e/databricks_spark-deep-learning-1.0.0-spark2.3-s_2.11.jar/sparkdl/transformers/tf_image.py in _transform(self, dataset)
    145                          "width": "__sdl_image_width",
    146                          "num_channels": "__sdl_image_nchannels",
--> 147                          "image_buffer": "__sdl_image_data"})
    148             .drop("__sdl_image_height", "__sdl_image_width", "__sdl_image_nchannels",
    149                   "__sdl_image_data")

/tmp/spark-ad4dff5c-2eda-49cc-b5a8-b81ede3fac4b/userFiles-d4fb97fa-84ed-4aaf-8036-d6d38363201e/databricks_tensorframes-0.3.0-s_2.11.jar/tensorframes/core.py in map_rows(fetches, dframe, feed_dict, initial_variables)
    262     if isinstance(dframe, pd.DataFrame):
    263         return _map_pd(fetches, dframe, feed_dict, block=False, trim=None, initial_variables=initial_variables)
--> 264     return _map(fetches, dframe, feed_dict, block=False, trim=None, initial_variables=initial_variables)
    265
    266 def map_blocks(fetches, dframe, feed_dict=None, trim=False, initial_variables=_initial_variables_default):

/tmp/spark-ad4dff5c-2eda-49cc-b5a8-b81ede3fac4b/userFiles-d4fb97fa-84ed-4aaf-8036-d6d38363201e/databricks_tensorframes-0.3.0-s_2.11.jar/tensorframes/core.py in _map(fetches, dframe, feed_dict, block, trim, initial_variables)
    150     builder = _java_api().map_rows(dframe._jdf)
    151     _add_graph(graph, builder)
--> 152     ph_names = _add_shapes(graph, builder, fetches)
    153     _add_inputs(builder, feed_dict, ph_names)
    154     jdf = builder.buildDF()

/tmp/spark-ad4dff5c-2eda-49cc-b5a8-b81ede3fac4b/userFiles-d4fb97fa-84ed-4aaf-8036-d6d38363201e/databricks_tensorframes-0.3.0-s_2.11.jar/tensorframes/core.py in _add_shapes(graph, builder, fetches)
     83         t = graph.get_tensor_by_name(op_name + ":0")
     84         ph_names.append(t.name)
---> 85         ph_shapes.append(_get_shape(t))
     86     logger.info("fetches: %s %s", str(names), str(shapes))
     87     logger.info("inputs: %s %s", str(ph_names), str(ph_shapes))

/tmp/spark-ad4dff5c-2eda-49cc-b5a8-b81ede3fac4b/userFiles-d4fb97fa-84ed-4aaf-8036-d6d38363201e/databricks_tensorframes-0.3.0-s_2.11.jar/tensorframes/core.py in _get_shape(node)
     36
     37 def _get_shape(node):
---> 38     l = node.get_shape().as_list()
     39     return [-1 if x is None else x for x in l]
     40

~/anaconda3/lib/python3.6/site-packages/tensorflow/python/framework/tensor_shape.py in as_list(self)
    898         """
    899         if self._dims is None:
--> 900             raise ValueError("as_list() is not defined on an unknown TensorShape.")
    901         return [dim.value for dim in self._dims]
    902

ValueError: as_list() is not defined on an unknown TensorShape.

Any help, please! Thanks.
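The bottom frames show where this error comes from: tensorframes' _get_shape helper calls node.get_shape().as_list(), and TensorFlow raises this ValueError whenever a tensor's shape is completely unknown (unknown rank, not merely unknown dimensions). A minimal sketch that reproduces the root cause in plain TensorFlow 1.x; the shape=None placeholder and the dims guard below are illustrative, not tensorframes' actual fix:

import tensorflow as tf

# A placeholder with shape=None has a completely unknown shape (unknown rank).
t = tf.placeholder(tf.float32, shape=None)
shape = t.get_shape()
print(shape)  # <unknown>

# This is the call that fails inside tensorframes' _get_shape:
try:
    shape.as_list()
except ValueError as e:
    print(e)  # as_list() is not defined on an unknown TensorShape.

# Illustrative guard: only call as_list() when the rank is known.
dims = shape.as_list() if shape.dims is not None else None
print(dims)  # None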