nni: ImportError: Cannot use a path to identify something from __main__.

Describe the issue:

Hi,

I was able to run the demo scripts. Now I am trying it with my own architecture, and I run into this error when calling the experiment.run command:

"ImportError: Cannot use a path to identify something from main.

During handling of the above exception, another exception occurred: . . . TypeError: cannot pickle ‘_io.BufferedReader’ object."

Full log message:

ImportError                               Traceback (most recent call last)
File ~/anaconda3/envs/tpot/lib/python3.10/site-packages/nni/common/serializer.py:791, in get_hybrid_cls_or_func_name(cls_or_func, pickle_size_limit)
    790 try:
--> 791     name = _get_cls_or_func_name(cls_or_func)
    792     # import success, use a path format

File ~/anaconda3/envs/tpot/lib/python3.10/site-packages/nni/common/serializer.py:770, in _get_cls_or_func_name(cls_or_func)
    769 if module_name == '__main__':
--> 770     raise ImportError('Cannot use a path to identify something from __main__.')
    771 full_name = module_name + '.' + cls_or_func.__name__

ImportError: Cannot use a path to identify something from __main__.

During handling of the above exception, another exception occurred:

TypeError                                 Traceback (most recent call last)
Cell In[11], line 1
----> 1 exp.run(exp_config, 8081)

File ~/anaconda3/envs/tpot/lib/python3.10/site-packages/nni/nas/experiment/pytorch.py:298, in RetiariiExperiment.run(self, config, port, debug)
    291 if self._action == 'create':
    292     base_model_ir, self.applied_mutators = preprocess_model(
    293         self.base_model, self.evaluator, self.applied_mutators,
    294         full_ir=not isinstance(canoni_conf.execution_engine, (PyEngineConfig, BenchmarkEngineConfig)),
    295         dummy_input=canoni_conf.execution_engine.dummy_input
    296         if isinstance(canoni_conf.execution_engine, (BaseEngineConfig, CgoEngineConfig)) else None
    297     )
--> 298 self._save_experiment_checkpoint(base_model_ir, self.applied_mutators, self.strategy,
    299                                  canoni_conf.experiment_working_directory)
    300 elif self._action == 'resume':
    301     base_model_ir, self.applied_mutators, self.strategy = self._load_experiment_checkpoint(
    302         canoni_conf.experiment_working_directory)

File ~/anaconda3/envs/tpot/lib/python3.10/site-packages/nni/nas/experiment/pytorch.py:226, in RetiariiExperiment._save_experiment_checkpoint(self, base_model_ir, applied_mutators, strategy, exp_work_dir)
    224 ckp_path = os.path.join(exp_work_dir, self.id, 'checkpoint')
    225 with open(os.path.join(ckp_path, 'nas_model'), 'w') as fp:
--> 226     dump(base_model_ir._dump(), fp, pickle_size_limit=int(os.getenv('PICKLE_SIZE_LIMIT', 64 * 1024)))
    227 with open(os.path.join(ckp_path, 'applied_mutators'), 'w') as fp:
    228     dump(applied_mutators, fp)

File ~/anaconda3/envs/tpot/lib/python3.10/site-packages/nni/common/serializer.py:341, in dump(obj, fp, use_trace, pickle_size_limit, allow_nan, **json_tricks_kwargs)
    339 if json_tricks_kwargs.get('compression') is not None:
    340     raise ValueError('If you meant to compress the dumped payload, please use dump_bytes.')
--> 341 result = _dump(
    342     obj=obj,
    343     fp=fp,
    344     use_trace=use_trace,
    345     pickle_size_limit=pickle_size_limit,
    346     allow_nan=allow_nan,
    347     **json_tricks_kwargs)
    348 return cast(str, result)

File ~/anaconda3/envs/tpot/lib/python3.10/site-packages/nni/common/serializer.py:390, in _dump(obj, fp, use_trace, pickle_size_limit, allow_nan, **json_tricks_kwargs)
    387 json_tricks_kwargs['allow_nan'] = allow_nan
    389 if fp is not None:
--> 390     return json_tricks.dump(obj, fp, obj_encoders=encoders, **json_tricks_kwargs)
    391 else:
    392     return json_tricks.dumps(obj, obj_encoders=encoders, **json_tricks_kwargs)

File ~/anaconda3/envs/tpot/lib/python3.10/site-packages/json_tricks/nonp.py:151, in dump(obj, fp, sort_keys, cls, obj_encoders, extra_obj_encoders, primitives, compression, force_flush, allow_nan, conv_str_byte, fallback_encoders, properties, **jsonkwargs)
    149 if (isinstance(obj, str_type) or hasattr(obj, 'write')) and isinstance(fp, (list, dict)):
    150     raise ValueError('json-tricks dump arguments are in the wrong order: provide the data to be serialized before file handle')
--> 151 txt = dumps(obj, sort_keys=sort_keys, cls=cls, obj_encoders=obj_encoders, extra_obj_encoders=extra_obj_encoders,
    152             primitives=primitives, compression=compression, allow_nan=allow_nan, conv_str_byte=conv_str_byte,
    153             fallback_encoders=fallback_encoders, properties=properties, **jsonkwargs)
    154 if isinstance(fp, str_type):
    155     if compression:

File ~/anaconda3/envs/tpot/lib/python3.10/site-packages/json_tricks/nonp.py:125, in dumps(obj, sort_keys, cls, obj_encoders, extra_obj_encoders, primitives, compression, allow_nan, conv_str_byte, fallback_encoders, properties, **jsonkwargs)
    121     cls = TricksEncoder
    122 combined_encoder = cls(sort_keys=sort_keys, obj_encoders=encoders, allow_nan=allow_nan,
    123                        primitives=primitives, fallback_encoders=fallback_encoders,
    124                        properties=properties, **jsonkwargs)
--> 125 txt = combined_encoder.encode(obj)
    126 if not is_py3 and isinstance(txt, str):
    127     txt = unicode(txt, ENCODING)

File ~/anaconda3/envs/tpot/lib/python3.10/json/encoder.py:199, in JSONEncoder.encode(self, o)
    195     return encode_basestring(o)
    196 # This doesn't pass the iterator directly to ''.join() because the
    197 # exceptions aren't as detailed.  The list call should be roughly
    198 # equivalent to the PySequence_Fast that ''.join() would do.
--> 199 chunks = self.iterencode(o, _one_shot=True)
    200 if not isinstance(chunks, (list, tuple)):
    201     chunks = list(chunks)

File ~/anaconda3/envs/tpot/lib/python3.10/json/encoder.py:257, in JSONEncoder.iterencode(self, o, _one_shot)
    252 else:
    253     _iterencode = _make_iterencode(
    254         markers, self.default, _encoder, self.indent, floatstr,
    255         self.key_separator, self.item_separator, self.sort_keys,
    256         self.skipkeys, _one_shot)
--> 257 return _iterencode(o, 0)

File ~/anaconda3/envs/tpot/lib/python3.10/site-packages/json_tricks/encoders.py:77, in TricksEncoder.default(self, obj, *args, **kwargs)
     75 prev_id = id(obj)
     76 for encoder in self.obj_encoders:
---> 77     obj = encoder(obj, primitives=self.primitives, is_changed=id(obj) != prev_id, properties=self.properties)
     78 if id(obj) == prev_id:
     79     raise TypeError(('Object of type {0:} could not be encoded by {1:} using encoders [{2:s}]. '
     80         'You can add an encoders for this type using `extra_obj_encoders`. If you want to \'skip\' this '
     81         'object, consider using `fallback_encoders` like `str` or `lambda o: None`.').format(
     82         type(obj), self.__class__.__name__, ', '.join(str(encoder) for encoder in self.obj_encoders)))

File ~/anaconda3/envs/tpot/lib/python3.10/site-packages/json_tricks/utils.py:66, in filtered_wrapper.<locals>.wrapper(*args, **kwargs)
     65 def wrapper(*args, **kwargs):
---> 66     return encoder(*args, **{k: v for k, v in kwargs.items() if k in names})

File ~/anaconda3/envs/tpot/lib/python3.10/site-packages/nni/common/serializer.py:818, in _json_tricks_func_or_cls_encode(cls_or_func, primitives, pickle_size_limit)
    813 if not isinstance(cls_or_func, type) and not _is_function(cls_or_func):
    814     # not a function or class, continue
    815     return cls_or_func
    817 return {
--> 818     'nni_type': get_hybrid_cls_or_func_name(cls_or_func, pickle_size_limit)
    819 }

File ~/anaconda3/envs/tpot/lib/python3.10/site-packages/nni/common/serializer.py:795, in get_hybrid_cls_or_func_name(cls_or_func, pickle_size_limit)
    793     return 'path:' + name
    794 except (ImportError, AttributeError):
--> 795     b = cloudpickle.dumps(cls_or_func)
    796     if len(b) > pickle_size_limit:
    797         raise ValueError(f'Pickle too large when trying to dump {cls_or_func}. '
    798                          'Please try to raise pickle_size_limit if you insist.')

File ~/anaconda3/envs/tpot/lib/python3.10/site-packages/cloudpickle/cloudpickle_fast.py:73, in dumps(obj, protocol, buffer_callback)
     69 with io.BytesIO() as file:
     70     cp = CloudPickler(
     71         file, protocol=protocol, buffer_callback=buffer_callback
     72     )
---> 73     cp.dump(obj)
     74 return file.getvalue()

File ~/anaconda3/envs/tpot/lib/python3.10/site-packages/cloudpickle/cloudpickle_fast.py:632, in CloudPickler.dump(self, obj)
    630 def dump(self, obj):
    631     try:
--> 632         return Pickler.dump(self, obj)
    633     except RuntimeError as e:
    634         if "recursion" in e.args[0]:

TypeError: cannot pickle '_io.BufferedReader' object


Any ideas on what might be the problem? Thanks.

About this issue

  • State: closed
  • Created a year ago
  • Comments: 17 (6 by maintainers)

Most upvoted comments

Is your train dataloader a global variable, or something captured from the surrounding context? In my experience, a variable must fall into one of the following cases to work well:

  1. Initialized as a local variable inside the serialized / evaluator function (recommended).
  2. As an explicit parameter of the function. Only in this case does putting nni.trace on the loader make a difference (see the sketch after this list).
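
A rough sketch of both cases, assuming a PyTorch DataLoader; make_dataset, evaluate_model, and the batch size are hypothetical stand-ins, not code from this issue:

import nni
import torch
from torch.utils.data import DataLoader, TensorDataset

def make_dataset():  # hypothetical stand-in dataset
    return TensorDataset(torch.zeros(8, 2))

# Case 1 (recommended): the loader is a local variable of the evaluator
# function, so it never has to be serialized along with the function.
def evaluate_model(model_cls):
    dataloader = DataLoader(make_dataset(), batch_size=4)
    for batch in dataloader:
        pass  # training / evaluation would go here

# Case 2: the loader is an explicit parameter. Tracing matters here,
# because NNI can then serialize the recorded constructor call
# (class plus arguments) instead of the live loader object.
traced_loader = nni.trace(DataLoader)(make_dataset(), batch_size=4)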

Making the dataloader an implicit dependency of the serialized function is not recommended. For example, you can try the following case; it probably fails:

import nni

dataloader = ...  # no matter whether traced or not

def foo():
    return dataloader  # implicit dependency, captured from the enclosing scope

nni.dump(foo)
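
When nni.dump has to fall back to cloudpickle for something defined in __main__ (as the traceback above shows it doing), the captured global dataloader gets pickled by value together with foo, and any open file handle inside it is what tends to surface as the "cannot pickle '_io.BufferedReader'" error.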

But if you dump the dataloader directly with nni.dump(dataloader), I would guess that having nni.trace on it does make a difference.
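
A minimal sketch of that comparison; the TensorDataset is just a stand-in, and whether the untraced dump actually fails depends on what the real loader holds:

import nni
import torch
from torch.utils.data import DataLoader, TensorDataset

dataset = TensorDataset(torch.zeros(8, 2))

# Traced: nni.dump serializes the recorded constructor call, so only the
# constructor arguments need to be picklable.
traced_loader = nni.trace(DataLoader)(dataset, batch_size=4)
print(nni.dump(traced_loader))

# Untraced: nni.dump falls back to pickling the live object, which can
# fail if the loader (or its dataset) holds something unpicklable, such
# as an open file handle (_io.BufferedReader).
plain_loader = DataLoader(dataset, batch_size=4)
print(nni.dump(plain_loader))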