First of all, thank you so much for providing the example notebooks - I have been slowly working through them and they are immensely helpful!
I am working through the segmentation notebook, but I am stuck on the U-Net segmentation step. (I also wanted to ask whether there is an example notebook for Cellpose segmentation within big-fish, since I believe that might be more appropriate for my case?)
My issue is the following:
All the deep learning packages are installed; the versions are:
2.6.2
0.14.0
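(For completeness, this is how I checked those numbers. I am assuming they correspond to TensorFlow and tensorflow-addons, since those are the deep learning dependencies I installed - please correct me if the versions refer to something else.)

```python
import tensorflow as tf
import tensorflow_addons as tfa  # assumption: these are the two packages the versions above refer to

print(tf.__version__)   # 2.6.2
print(tfa.__version__)  # 0.14.0
```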
When I run:
```python
# load pretrained model
model_nuc = segmentation.unet_3_classes_nuc()
model_nuc.summary()
```
I get the following error, which suggests to me that loading the models requires an internet connection. I run the Jupyter notebooks on our computing cluster, which has no internet access - the "no route to host" error is by now quite familiar to me. I haven't been able to figure out how to work around this, or how to download the models from a terminal where I do have internet access and then load them locally within the Jupyter notebook.
```
downloading model weights...
---------------------------------------------------------------------------
OSError Traceback (most recent call last)
~/miniconda3/envs/bigfish_env/lib/python3.6/urllib/request.py in do_open(self, http_class, req, **http_conn_args)
1348 h.request(req.get_method(), req.selector, req.data, headers,
-> 1349 encode_chunked=req.has_header('Transfer-encoding'))
1350 except OSError as err: # timeout error
~/miniconda3/envs/bigfish_env/lib/python3.6/http/client.py in request(self, method, url, body, headers, encode_chunked)
1286 """Send a complete request to the server."""
-> 1287 self._send_request(method, url, body, headers, encode_chunked)
1288
~/miniconda3/envs/bigfish_env/lib/python3.6/http/client.py in _send_request(self, method, url, body, headers, encode_chunked)
1332 body = _encode(body, 'body')
-> 1333 self.endheaders(body, encode_chunked=encode_chunked)
1334
~/miniconda3/envs/bigfish_env/lib/python3.6/http/client.py in endheaders(self, message_body, encode_chunked)
1281 raise CannotSendHeader()
-> 1282 self._send_output(message_body, encode_chunked=encode_chunked)
1283
~/miniconda3/envs/bigfish_env/lib/python3.6/http/client.py in _send_output(self, message_body, encode_chunked)
1041 del self._buffer[:]
-> 1042 self.send(msg)
1043
~/miniconda3/envs/bigfish_env/lib/python3.6/http/client.py in send(self, data)
979 if self.auto_open:
--> 980 self.connect()
981 else:
~/miniconda3/envs/bigfish_env/lib/python3.6/http/client.py in connect(self)
1439
-> 1440 super().connect()
1441
~/miniconda3/envs/bigfish_env/lib/python3.6/http/client.py in connect(self)
951 self.sock = self._create_connection(
--> 952 (self.host,self.port), self.timeout, self.source_address)
953 self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
~/miniconda3/envs/bigfish_env/lib/python3.6/socket.py in create_connection(address, timeout, source_address)
723 if err is not None:
--> 724 raise err
725 else:
~/miniconda3/envs/bigfish_env/lib/python3.6/socket.py in create_connection(address, timeout, source_address)
712 sock.bind(source_address)
--> 713 sock.connect(sa)
714 # Break explicitly a reference cycle
OSError: [Errno 113] No route to host
During handling of the above exception, another exception occurred:
URLError Traceback (most recent call last)
<ipython-input-3-b4ffa3482168> in <module>
1 # load pretrained model
----> 2 model_nuc = segmentation.unet_3_classes_nuc()
3 model_nuc.summary()
~/miniconda3/envs/bigfish_env/lib/python3.6/site-packages/bigfish/segmentation/nuc_segmentation.py in unet_3_classes_nuc()
34
35 # load model
---> 36 model = dl.load_pretrained_model("nuc", "3_classes")
37
38 return model
~/miniconda3/envs/bigfish_env/lib/python3.6/site-packages/bigfish/deep_learning/models_segmentation.py in load_pretrained_model(channel, model_name)
63
64 # load weights
---> 65 path_pretrained_directory = check_pretrained_weights(channel, model_name)
66 path_checkpoint = os.path.join(
67 path_pretrained_directory, "checkpoint")
~/miniconda3/envs/bigfish_env/lib/python3.6/site-packages/bigfish/deep_learning/models_segmentation.py in check_pretrained_weights(channel, model_name)
181 stack.load_and_save_url(
182 remote_url=url_zip_file,
--> 183 directory=path_weights_directory)
184
185 # unzip
~/miniconda3/envs/bigfish_env/lib/python3.6/site-packages/bigfish/stack/utils.py in load_and_save_url(remote_url, directory, filename)
338
339 # download and save data
--> 340 urlretrieve(remote_url, path)
341
342 return path
~/miniconda3/envs/bigfish_env/lib/python3.6/urllib/request.py in urlretrieve(url, filename, reporthook, data)
246 url_type, path = splittype(url)
247
--> 248 with contextlib.closing(urlopen(url, data)) as fp:
249 headers = fp.info()
250
~/miniconda3/envs/bigfish_env/lib/python3.6/urllib/request.py in urlopen(url, data, timeout, cafile, capath, cadefault, context)
221 else:
222 opener = _opener
--> 223 return opener.open(url, data, timeout)
224
225 def install_opener(opener):
~/miniconda3/envs/bigfish_env/lib/python3.6/urllib/request.py in open(self, fullurl, data, timeout)
524 req = meth(req)
525
--> 526 response = self._open(req, data)
527
528 # post-process response
~/miniconda3/envs/bigfish_env/lib/python3.6/urllib/request.py in _open(self, req, data)
542 protocol = req.type
543 result = self._call_chain(self.handle_open, protocol, protocol +
--> 544 '_open', req)
545 if result:
546 return result
~/miniconda3/envs/bigfish_env/lib/python3.6/urllib/request.py in _call_chain(self, chain, kind, meth_name, *args)
502 for handler in handlers:
503 func = getattr(handler, meth_name)
--> 504 result = func(*args)
505 if result is not None:
506 return result
~/miniconda3/envs/bigfish_env/lib/python3.6/urllib/request.py in https_open(self, req)
1390 def https_open(self, req):
1391 return self.do_open(http.client.HTTPSConnection, req,
-> 1392 context=self._context, check_hostname=self._check_hostname)
1393
1394 https_request = AbstractHTTPHandler.do_request_
~/miniconda3/envs/bigfish_env/lib/python3.6/urllib/request.py in do_open(self, http_class, req, **http_conn_args)
1349 encode_chunked=req.has_header('Transfer-encoding'))
1350 except OSError as err: # timeout error
-> 1351 raise URLError(err)
1352 r = h.getresponse()
1353 except:
URLError: <urlopen error [Errno 113] No route to host>
```
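For reference, the workaround I had in mind looks roughly like this. It is only a sketch: I am assuming that calling the loader once on a machine with internet access makes check_pretrained_weights() cache the weights in some local directory, and that copying that directory to the same location on the cluster is enough for the offline run - I don't actually know where big-fish stores them.

```python
# On a machine WITH internet access: trigger the download once so the
# pretrained weights get cached on disk by check_pretrained_weights().
from bigfish import segmentation

model_nuc = segmentation.unet_3_classes_nuc()  # downloads and caches the weights

# Assumption: the cached weights directory can then be copied (scp/rsync) to the
# same path on the cluster; if the files are already present there,
# check_pretrained_weights() should skip the download and load them locally.
```

Is something like this the intended way to use the pretrained models offline, or is there a supported option (e.g. an explicit path or environment variable) that I am missing?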