tensorboardX: RuntimeError: Only tuples, lists and Variables supported as JIT inputs, but got numpy.ndarray

I am pretty sure that the inputs are lists. I am not sure whether I am using tensorboardX correctly.

from tensorboardX import SummaryWriter

for epoch in range(epochs):
    batch_loss_list = []

    batch_list = seqHelper.gen_batch_list_of_lists(train_list, batch_size, (random_seed + epoch))

    # run through training minibatches
    for counter, train_batch in enumerate(batch_list):
        x_atom, x_bonds, x_atom_index, x_bond_index, x_mask, y_val = seqHelper.get_info_with_smiles_list(
                train_batch,
                smiles_to_measurement, smiles_to_atom_info, smiles_to_bond_info,
                smiles_to_atom_neighbors, smiles_to_bond_neighbors, smiles_to_atom_mask)

        atoms_prediction, mol_prediction = model(x_atom, x_bonds, x_atom_index, x_bond_index, x_mask)
        with SummaryWriter(comment='Fingerprint') as w:
            w.add_graph(model, (x_atom, x_bonds, x_atom_index, x_bond_index, x_mask))

It returns:

RuntimeError                              Traceback (most recent call last)
<ipython-input-6-fd2c7c65a5f0> in <module>()
      1 with SummaryWriter(comment='Fingerprint') as w:
----> 2     w.add_graph(model, (x_atom,x_bonds,x_atom_index,x_bond_index,x_mask))
      3 

~/anaconda3/envs/deepchem/lib/python3.5/site-packages/tensorboardX/writer.py in add_graph(self, model, input_to_model, verbose)
    398                 print('add_graph() only supports PyTorch v0.2.')
    399                 return
--> 400         self.file_writer.add_graph(graph(model, input_to_model, verbose))
    401 
    402     @staticmethod

~/anaconda3/envs/deepchem/lib/python3.5/site-packages/tensorboardX/graph.py in graph(model, args, verbose)
     50     if LooseVersion(torch.__version__) >= LooseVersion("0.4"):
     51         with torch.onnx.set_training(model, False):
---> 52             trace, _ = torch.jit.get_trace_graph(model, args)
     53         torch.onnx._optimize_trace(trace, False)
     54     else:

~/anaconda3/envs/deepchem/lib/python3.5/site-packages/torch/jit/__init__.py in get_trace_graph(f, args, kwargs, nderivs)
    251     if not isinstance(args, tuple):
    252         args = (args,)
--> 253     return LegacyTracedModule(f, nderivs=nderivs)(*args, **kwargs)
    254 
    255 

~/anaconda3/envs/deepchem/lib/python3.5/site-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
    369             result = self._slow_forward(*input, **kwargs)
    370         else:
--> 371             result = self.forward(*input, **kwargs)
    372         for hook in self._forward_hooks.values():
    373             hook_result = hook(self, input, result)

~/anaconda3/envs/deepchem/lib/python3.5/site-packages/torch/jit/__init__.py in forward(self, *args)
    277     def forward(self, *args):
    278         global _tracing
--> 279         in_vars, in_desc = _flatten(args)
    280         # NOTE: use full state, because we need it for BatchNorm export
    281         # This differs from the compiler path, which doesn't support it at the moment.

RuntimeError: Only tuples, lists and Variables supported as JIT inputs, but got numpy.ndarray

About this issue

  • State: closed
  • Created 6 years ago
  • Comments: 22 (9 by maintainers)

Most upvoted comments

For future reference, it is the use of constants that causes this issue, and you can get around it by wrapping them in scalar tensors, e.g.

y = 1. - x  #breaks
y = torch.FloatTensor([1.]) - x # works fine
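For a fuller picture, here is a minimal sketch of that workaround inside a module's forward pass (ToyModel is an illustrative name, not from the code above):

import torch
import torch.nn as nn

class ToyModel(nn.Module):
    # Illustrative module: wrapping the Python constant in a tensor keeps
    # the JIT tracer happy, since it only handles tensor values.
    def forward(self, x):
        one = torch.FloatTensor([1.])  # constant wrapped in a scalar tensor
        return one - x                 # instead of: return 1. - x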

It works, but looks weird. I still need some time to figure out what the point of the generated graph is.

You need something like x_atom = torch.FloatTensor(x_atom).
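Applied to the code from the question, that means converting each numpy array to a tensor before calling add_graph. A minimal sketch, assuming the five inputs are plain numpy arrays (the index arrays may need a different dtype such as .long(), depending on the model):

import torch
from tensorboardX import SummaryWriter

# Convert the numpy arrays to torch tensors so the JIT tracer accepts them.
# torch.from_numpy shares memory with the underlying array.
inputs = tuple(torch.from_numpy(a)
               for a in (x_atom, x_bonds, x_atom_index, x_bond_index, x_mask))

with SummaryWriter(comment='Fingerprint') as w:
    w.add_graph(model, inputs)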