We have uploaded their GitHub repository and our datasets to our university's filecloud. The following link opens a ZIP file that you can download:
# Use environment.yml to create a conda environment for GANmapper:
conda env create -f environment.yml
# Activate the environment created from environment.yml above.
conda activate ganmapper_project
# Run inference on the LA test set with the pretrained Exp3 checkpoint
# (reads input tiles from datasets/test/LA/Source; uses the GPU by default).
python predict.py --dataroot datasets/test/LA/Source --checkpoints_dir checkpoints/Exp3 --name LA
# or without a gpu (--gpu_ids -1 forces CPU-only inference)
python predict.py --dataroot datasets/test/LA/Source --checkpoints_dir checkpoints/Exp3 --name LA --gpu_ids -1
# locally — presumably only tqdm is missing from the conda environment,
# since the other packages come from environment.yml (TODO confirm).
pip install tqdm
# colab — the leading '!' is IPython/Colab shell-escape syntax; drop it if
# running these commands in a plain terminal.
!pip install torch
!pip install numpy
!pip install Pillow
!pip install torchvision
!pip install dominate
!pip install visdom
# cd to your ganmapper_project folder
conda activate ganmapper_project
# All three experiments share the same pix2pix configuration:
# 256x256 crops from 260px-resized tiles, ResNet-9-blocks generator,
# 100 epochs at constant LR + 100 epochs of LR decay. They differ only in
# the dataset (exp_high / exp_low / exp_medium — presumably different
# density levels; verify against the dataset documentation).
# first experiment
python train.py --dataroot ./datasets/exp_high/ --name exp_high --model pix2pix --direction AtoB --crop_size 256 --load_size 260 --n_epochs 100 --n_epochs_decay 100 --netG resnet_9blocks
# second experiment
python train.py --dataroot ./datasets/exp_low/ --name exp_low --model pix2pix --direction AtoB --crop_size 256 --load_size 260 --n_epochs 100 --n_epochs_decay 100 --netG resnet_9blocks
# third experiment
python train.py --dataroot ./datasets/exp_medium/ --name exp_medium --model pix2pix --direction AtoB --crop_size 256 --load_size 260 --n_epochs 100 --n_epochs_decay 100 --netG resnet_9blocks