Bug fixes, lib rename, hubconf prepared
andreaconti committed Nov 8, 2022
1 parent 94cc740 commit 9b777f5
Showing 41 changed files with 704 additions and 59 deletions.
46 changes: 45 additions & 1 deletion README.md
@@ -3,7 +3,7 @@
_[Matteo Poggi](https://mattpoggi.github.io/)\*, [Andrea Conti](https://andreaconti.github.io/)\*, [Stefano Mattoccia](http://vision.deis.unibo.it/~smatt/Site/Home.html) *joint first authorship_


[[arxiv]]()
[[arxiv]](https://arxiv.org/pdf/2210.11467v1.pdf)
[[project page]](https://andreaconti.github.io/projects/multiview_guided_multiview_stereo/)

This is the official source code of Multi-View Guided Multi-View Stereo presented at [IEEE/RSJ International Conference on Intelligent Robots and Systems](https://iros2022.org/)
@@ -20,6 +20,50 @@
}
```

## Load pretrained models and evaluate

We release many of the MVS networks tested in the paper, trained either on Blended-MVG alone or on Blended-MVG and then fine-tuned on DTU, with and without sparse depth points. These pretrained models can be loaded simply through the `torch.hub` API.

```python
model = torch.hub.load(
"andreaconti/multi-view-guided-multi-view-stereo",
"mvsnet", # mvsnet | ucsnet | d2hc_rmvsnet | patchmatchnet | cas_mvsnet
pretrained=True,
dataset="blended_mvg", # blended_mvg | dtu_yao_blended_mvg
hints="not_guided", # mvguided_filtered | not_guided | guided | mvguided
)
```
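
If needed, the available entrypoints and their documentation can also be inspected directly with the standard `torch.hub` utilities (a minimal sketch using only stock `torch.hub` calls):

```python
import torch

# List the entrypoints exposed by this repository's hubconf.py
print(torch.hub.list("andreaconti/multi-view-guided-multi-view-stereo"))

# Print the docstring of a single entrypoint, e.g. "mvsnet"
print(torch.hub.help("andreaconti/multi-view-guided-multi-view-stereo", "mvsnet"))
```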

Once loaded, each model exposes the same interface shown below; moreover, each pretrained model provides its training parameters under the `train_params` attribute.

```python
depth = model(
images, # B x N x 3 x H x W
intrinsics, # B x N x 3 x 3
extrinsics, # B x N x 4 x 4
depth_values, # B x D (128 usually)
hints, # B x 1 x H x W (optional)
)
```
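
For orientation, a shape-only sketch of a forward pass is shown below, continuing from the model loaded above. The tensors are random placeholders (real images and calibrated cameras, e.g. from the data modules below, are needed for meaningful output), the resolution and depth range are arbitrary, and the optional `hints` argument is omitted.

```python
import torch

print(model.train_params)  # training configuration stored with the pretrained checkpoint

# Placeholder shapes: batch 1, 5 views, 512x640 images, 128 depth hypotheses
B, N, H, W, D = 1, 5, 512, 640, 128
with torch.no_grad():
    depth = model(
        torch.rand(B, N, 3, H, W),           # images
        torch.rand(B, N, 3, 3),              # intrinsics
        torch.rand(B, N, 4, 4),              # extrinsics
        torch.linspace(0.5, 10.0, D)[None],  # depth_values, B x D
    )
```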

Finally, we also provide an interface over the datasets used, shown below. In this case PyTorch Lightning is required as a dependency and the dataset must be stored locally.

```python
dm = torch.hub.load(
"andreaconti/multi-view-guided-multi-view-stereo",
"blended-mvg", # blended_mvg | blended_mvs | dtu
root="data/blended-mvg",
hints="not_guided", # mvguided_filtered | not_guided | guided | mvguided
hints_density=0.03,
)
dm.prepare_data()
dm.setup()
dl = dm.train_dataloader()

```
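
As a quick sanity check, one might peek at a single training batch; this is only a sketch, since the exact batch structure is not documented here and the dictionary assumption below is illustrative:

```python
# Illustrative only: keys and types of a batch depend on the dataset implementation
batch = next(iter(dl))
if isinstance(batch, dict):
    print({k: getattr(v, "shape", type(v)) for k, v in batch.items()})
else:
    print(type(batch))
```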

In [results.ipynb](/~https://github.com/andreaconti/multi-view-guided-multi-view/blob/main/results.ipynb) there is an example of how to reproduce some of the results shown in the paper through the `torch.hub` API.

## Installation

Install the dependencies using Conda or [Mamba](/~https://github.com/mamba-org/mamba):
10 changes: 5 additions & 5 deletions eval.py
@@ -23,11 +23,11 @@
from torch.utils.data import DataLoader
from tqdm import tqdm

import lib.models as models
from lib.datasets import find_dataset_def, find_scans
from lib.datasets.dtu_utils import read_pfm, save_pfm
from lib.datasets.sample_preprocess import MVSSampleTransform
from lib.utils import *
import guided_mvs_lib.models as models
from guided_mvs_lib.datasets import find_dataset_def, find_scans
from guided_mvs_lib.datasets.dtu_utils import read_pfm, save_pfm
from guided_mvs_lib.datasets.sample_preprocess import MVSSampleTransform
from guided_mvs_lib.utils import *


def main():
1 change: 1 addition & 0 deletions guided_mvs_lib/__init__.py
@@ -0,0 +1 @@
__version__ = "0.1.0"
@@ -18,6 +18,7 @@ def __init__(
name: Literal["dtu_yao", "blended_mvs", "blended_mvg"],
# args for the dataloader
batch_size: int = 1,
num_workers: int = multiprocessing.cpu_count() // 2,
# args for the dataset
**kwargs,
):
@@ -27,6 +28,7 @@ def __init__(

# dataloader args
self.batch_size = batch_size
self.num_workers = num_workers

def setup(self, stage: Optional[str] = None):
if stage in ("fit", None):
@@ -40,23 +42,23 @@ def train_dataloader(self):
self.mvs_train,
batch_size=self.batch_size,
shuffle=True,
num_workers=multiprocessing.cpu_count() // 2,
num_workers=self.num_workers,
)

def val_dataloader(self):
return DataLoader(
self.mvs_val,
batch_size=1,
shuffle=False,
num_workers=multiprocessing.cpu_count() // 2,
num_workers=self.num_workers,
)

def test_dataloader(self):
return DataLoader(
self.mvs_test,
batch_size=1,
shuffle=False,
num_workers=multiprocessing.cpu_count() // 2,
num_workers=self.num_workers,
)


File renamed without changes.
File renamed without changes.
@@ -11,9 +11,9 @@
from plyfile import PlyData
from torch.utils.data import Dataset

import lib.datasets.blended_mvg_utils as mvg_utils
import lib.datasets.blended_mvs_utils as mvs_utils
import lib.datasets.dtu_utils as dtu_utils
import guided_mvs_lib.datasets.blended_mvg_utils as mvg_utils
import guided_mvs_lib.datasets.blended_mvs_utils as mvs_utils
import guided_mvs_lib.datasets.dtu_utils as dtu_utils


class MVSSample(TypedDict):
@@ -39,7 +39,7 @@ def _identity_fn(x: MVSSample) -> Dict:
class MVSDataset(Dataset):
def __init__(
self,
name: Literal["dtu_yao", "blended_mvs", "blended_mvg", "eth3d"],
name: Literal["dtu_yao", "blended_mvs", "blended_mvg"],
datapath: str,
mode: Literal["train", "val", "test"],
nviews: int = 5,
File renamed without changes.
@@ -7,7 +7,7 @@
import torchvision.transforms.functional as F
from PIL import Image

from lib.datasets.dtu_blended_mvs import MVSSample
from guided_mvs_lib.datasets.dtu_blended_mvs import MVSSample

__all__ = ["MVSSampleTransform"]

File renamed without changes. (repeated for 23 files)