Skip to content

Commit

Permalink
Merge pull request #190 from UtrechtUniversity/develop
Browse files Browse the repository at this point in the history
New minor release
  • Loading branch information
chStaiger authored Jun 7, 2024
2 parents 1a948af + a286e9a commit 5c67d53
Show file tree
Hide file tree
Showing 2 changed files with 12 additions and 1 deletion.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ To simply upload or download data you do not need to write full python program,

### Beginners tutorials
- [Setup client configuration](tutorials/01-Setup-and-connect.ipynb)
- [iRODS Paths](tutorials/02-iRODS_paths.ipynb)
- [iRODS Paths](tutorials/02-iRODS-paths.ipynb)
- [Working with data](tutorials/03-Working-with-data.ipynb)
- [Metadata](tutorials/04-Metadata.ipynb)
- [Sharing data](tutorials/05-Data-Sharing.ipynb)
Expand Down
11 changes: 11 additions & 0 deletions ibridges/data_operations.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
import irods.data_object
import irods.exception
import irods.keywords as kw
from irods import DEFAULT_CONNECTION_TIMEOUT
from tqdm import tqdm

from ibridges.path import CachedIrodsPath, IrodsPath
Expand Down Expand Up @@ -375,6 +376,15 @@ def perform_operations(session: Session, operations: dict, ignore_err: bool=Fals
down_sizes = [ipath.size for ipath, _ in operations["download"]]
pbar = tqdm(total=sum(up_sizes) + sum(down_sizes), unit="B", unit_scale=True, unit_divisor=1024)

# For large files, the checksum computation might take too long, which can result in a timeout.
# This is why we increase the timeout for file sizes > 1 GB.
# This might still result in a timeout if your server is very busy or underpowered.
max_size = max([*up_sizes, *down_sizes])
original_timeout = session.irods_session.pool.connection_timeout
if max_size > 1e9 and original_timeout == DEFAULT_CONNECTION_TIMEOUT:
session.irods_session.pool.connection_timeout = int(
DEFAULT_CONNECTION_TIMEOUT*(max_size/1e9)+0.5)

for col in operations["create_collection"]:
IrodsPath.create_collection(session, col)
for curdir in operations["create_dir"]:
Expand All @@ -394,6 +404,7 @@ def perform_operations(session: Session, operations: dict, ignore_err: bool=Fals
_obj_get(session, ipath, lpath, overwrite=True, ignore_err=ignore_err, options=options,
resc_name=resc_name)
pbar.update(size)
session.irods_session.pool.connection_timeout = original_timeout


def sync(session: Session,
Expand Down

0 comments on commit 5c67d53

Please sign in to comment.