diff options
| author | StevenLiuWen <liuwen@shanghaitech.edu.cn> | 2018-03-13 10:00:48 -0400 |
|---|---|---|
| committer | StevenLiuWen <liuwen@shanghaitech.edu.cn> | 2018-03-13 10:00:48 -0400 |
| commit | c6c714d2abc5c36fed7e710f833c86db16d42b78 (patch) | |
| tree | 91e0575c6ba30a861179aef5646e3d8ec07b5d48 | |
| parent | ce291c80235a8cc115b7699701943f14194e2af3 (diff) | |
update
| -rw-r--r-- | Codes/checkpoints/download_pretrains.sh | 13 | ||||
| -rw-r--r-- | Data/avenue.sh | 13 | ||||
| -rw-r--r-- | Data/ped1.sh | 14 | ||||
| -rw-r--r-- | Data/ped2.sh | 13 | ||||
| -rw-r--r-- | Data/shanghaitech.sh | 13 | ||||
| -rw-r--r-- | README.md | 6 |
6 files changed, 67 insertions, 5 deletions
diff --git a/Codes/checkpoints/download_pretrains.sh b/Codes/checkpoints/download_pretrains.sh index d315f6b..89ed9f4 100644 --- a/Codes/checkpoints/download_pretrains.sh +++ b/Codes/checkpoints/download_pretrains.sh @@ -2,8 +2,19 @@ echo "Downloading trained models on ped1, ped2 and avenue datasets ....." -wget "https://ofhz9a.bn.files.1drv.com/y4mdc9K-lh3tfqBXiG-rSR04JARM20at-t2AtTo-7fUC-fadMB_x255o35v0J-YV4bnMW9m9XscVOKkITHI2bZLrgxZJJKXI4QEVDsi61KvsLQxI42elYWmm01F2kjI94onmbRrFYai7CkVNUspBgscY2vvEfd2c3qE2A_bcTW-Cp_6hBKpPEQClmwlT2QqTy-UwuzCmjyFfOrHqKeGkqtadQ/pretrains.tar.gz" +wget "https://ofhz9a.bn.files.1drv.com/y4m5lC_SnkDiTcKjKEiue7uKKHX_jM7LojjlsjpurNHC8gkOj0MjgqdKrj6YJwLNFMAb649j07rheaBeS-B8JmYwGc3wy6Zb7T0ICYBzz9PdheTGxHWGsLCxJ7MpaA4Rj6V0KmtAyoUYbdeNQVWEAPAZtVn1ikrdslLVVvKB1doyWRaTnIKjCiXIybbXG-6VtZ4uw10H_PrBFTEq6cBeqr2CQ/pretrains.tar.gz?download&psid=1" +mv "pretrains.tar.gz?download&psid=1" pretrains.tar.gz tar -xvf pretrains.tar.gz rm pretrains.tar.gz echo "Download pretrains successfully..." + +echo "If you encounter any ERRORS (I guess the download link in the shell script is not permanent), +please manually download pretrains.tar.gz from https://onedrive.live.com/?authkey=%21AMqh2fTSemfrokE&id=3705E349C336415F%215109&cid=3705E349C336415F +and run the following commands: + +tar -xvf pretrains.tar.gz +rm pretrains.tar.gz + +make sure pretrains is under the directory Codes/checkpoints. +"
\ No newline at end of file diff --git a/Data/avenue.sh b/Data/avenue.sh index b206960..798966c 100644 --- a/Data/avenue.sh +++ b/Data/avenue.sh @@ -2,8 +2,19 @@ echo "Downloading CUHK-Avenue dataset....." -wget "https://ofhz9a.bn.files.1drv.com/y4mFu9sFG5p90urg6SmkVLwfpZjwIAfa32TinJLVv-11ygKXKXlDyA96nHpXWTsxT52m8RxlR5kFp03uU-_AmepLnmcLW4trLTu9IJejBuVahvNlbTbD7fA4fvq1fzdDL9s83uOX5JFKwN8P2e3X7TjNbQbKl0_HNU5FzIQy4QM31t-WDBMz29pfH2Ens5jXP0-bYDBVxUdNQnSSX5T9Dk_ew/avenue.tar.gz" +wget "https://ofhz9a.bn.files.1drv.com/y4m4fpDJfxvClUUg4yfbH22DpWmnN8smMTSoK0tPyEB2VUQmsD0oUkURguUYhQABYcDkdXvpseAe2G4gxjdnssPWERMbyGA8z6tk-pU6V4fNvRjZBdH3P6joeAEbOPXcK0ZhQCRqDVROdbZQ0vMZjoXiRf2Kvs_o175MW1xLKvfOmIMcw3ZhtF6iOmvIvMfmP8RcZJNbp8CSOwySQgONpkODQ/avenue.tar.gz?download&psid=1" +mv "avenue.tar.gz?download&psid=1" avenue.tar.gz tar -xvf avenue.tar.gz rm avenue.tar.gz echo "Download CUHK-Avenue successfully..." + +echo "If you encounter any ERRORS (I guess the download link in the shell script is not permanent), +please manually download avenue.tar.gz from https://onedrive.live.com/?authkey=%21AMqh2fTSemfrokE&id=3705E349C336415F%215109&cid=3705E349C336415F
+and run the following commands: + +tar -xvf avenue.tar.gz +rm avenue.tar.gz + +make sure the avenue dataset is under the directory Data. +" diff --git a/Data/ped1.sh b/Data/ped1.sh index 30d2b87..683ff19 100644 --- a/Data/ped1.sh +++ b/Data/ped1.sh @@ -2,8 +2,20 @@ echo "Downloading UCSD-Ped1 dataset....." 
-wget "https://ofhz9a.bn.files.1drv.com/y4mYp-s_RSs8flFeaThqT-Zwa_mSIgALD0g5M_ioQF20lFFnLYjYD8aaPR7pqVav4U_xxvRxMERy4Z0o_Fw0T8ysEHGwmaKuz135ajAwofSunZZpNV4e2E_IHW3mXwEy8-NMK7OF9U-Ntm1Pe9bxG-OH9acwL9Qg7EMa4vx-yGF_JRU3pTg-BPkIpuQaV8jhyAldwniIn-F1dGbiTLw08RZPg/ped1.tar.gz" +wget "https://ofhz9a.bn.files.1drv.com/y4mP5HrUYe3m0KnhIA3KbOaqlFEKpvCmqepz-C9UDoIUgO4i0WuW9Dm-J-98qYXivCdniC-_mYHq9r4t25im6XogBz-INqqktYE2Rc38vkKKwM1iFZ_uWxoGon4QniumO2gNLscP9N9wNw6fWD8GqIYqOUVe_UO9svbF0RpeRpAbSe82uHJ9qqmN2q-mZ9prbrScwsolPEv_IxprXqgjG5Plw/ped1.tar.gz?download&psid=1" +mv "ped1.tar.gz?download&psid=1" ped1.tar.gz tar -xvf ped1.tar.gz rm ped1.tar.gz echo "Download UCSD-Ped1 successfully..." + +echo "If you encounter any ERRORS (I guess the download link in the shell script is not permanent), +please manually download ped1.tar.gz from https://onedrive.live.com/?authkey=%21AMqh2fTSemfrokE&id=3705E349C336415F%215109&cid=3705E349C336415F +and run the following commands: + +tar -xvf ped1.tar.gz +rm ped1.tar.gz + +make sure the ped1 dataset is under the directory Data. +" + diff --git a/Data/ped2.sh b/Data/ped2.sh index 23d9991..f96f795 100644 --- a/Data/ped2.sh +++ b/Data/ped2.sh @@ -2,8 +2,19 @@ echo "Downloading UCSD-Ped2 dataset....." -wget "https://ofhz9a.bn.files.1drv.com/y4mtXuyxi4Pb4MMZvlhLAJG34RmxFnpbCNPe9a9RK-Vl9QOlBWv3AEhXvGJVFb0RDFD_IOndEUNgRdlgTB5bGnJAVKEkYnm2CLSwCBD0WyB8UCzc-rHKg6XO6hQcggNTu4S1PhvYKeMuqHPlwKoa5tK8FxJbYWdP4ZGjeWTWeKS2z0qIlACqGYnq5K-VqUk5R5PcnqiTZaXQaBgKvteBjDXjA/ped2.tar.gz" +wget "https://ofhz9a.bn.files.1drv.com/y4mFzDLdy1ZKsJawXtABkPGQsYhoZzeVYofrv5cKtvNS85CyUJcqwL0-P_PnzNvwrfEkIlQA9mQhld7CK9ohIa_lFvovPgNOZ3Z7BVnY-0sKA97Bv3OrnSU2Vkh9fl5ceDBo8PuCVoc_XHJN03Zj-v8q31cswu9RliBzujx_mLW4PxPi0cxui2j_n9xFp-S1Px_6H5a4_SGQBr_8EP8qsz3fA/ped2.tar.gz?download&psid=1" +mv "ped2.tar.gz?download&psid=1" ped2.tar.gz tar -xvf ped2.tar.gz rm ped2.tar.gz echo "Download UCSD-Ped2 successfully..." 
+ +echo "If you encounter any ERRORS (I guess the download link in the shell script is not permanent), +please manually download ped2.tar.gz from https://onedrive.live.com/?authkey=%21AMqh2fTSemfrokE&id=3705E349C336415F%215109&cid=3705E349C336415F +and run the following commands: + +tar -xvf ped2.tar.gz +rm ped2.tar.gz + +make sure the ped2 dataset is under the directory Data. +" diff --git a/Data/shanghaitech.sh b/Data/shanghaitech.sh index 669f6a0..d3f6f32 100644 --- a/Data/shanghaitech.sh +++ b/Data/shanghaitech.sh @@ -1,8 +1,19 @@ #!/bin/bash echo "download ShanghaiTech-Campus dataset....." -wget "https://ofhz9a.bn.files.1drv.com/y4mb9ehfPcEc2QJOWHHdhumdqt5jxTobXbtJQY6Q-5HWfoTWSSpDdFEN7Mkx861al_K9Tt3zQdzEIS7i3gRUVi0lfOsKrLaKV2sV6qHaTKmWPMUvOfMhMh2lScycBEmp4NasUkKJR2eftgbZ5XzHui03_LVL875RK1Z5sTtPADPD2TuPwHG8_hGhtfQOtJUWbqMvh0XmGZq-qYP9YIQKbe9Lw/shanghaitech.tar.gz" +wget "https://ofhz9a.bn.files.1drv.com/y4mZ-bxF_FckWxjvJKGdcIkCr4PZOK3JQIbVqcv_1IE8QnAvQzlCnIqAWiWI6l_NVpBcfizN_6EOYc01NMPCiEj_YCFOyBVK1ZjczoTHClYXry87x5DrzcimwVXttkPtHBytzj43XKWFoOIFyZqpJDUL5o5GoZnfp5g3i1tthSsuIy4YnMMOup1tebJ8jb_Kqb09kksykw2YE-C-0pD5ovsVQ/shanghaitech.tar.gz?download&psid=1" +mv "shanghaitech.tar.gz?download&psid=1" shanghaitech.tar.gz tar -xvf shanghaitech.tar.gz rm shanghaitech.tar.gz echo "download ShanghaiTech-Campus successfully..." + +echo "If you encounter any ERRORS (I guess the download link in the shell script is not permanent), +please manually download shanghaitech.tar.gz from https://onedrive.live.com/?authkey=%21AMqh2fTSemfrokE&id=3705E349C336415F%215109&cid=3705E349C336415F +and run the following commands: + +tar -xvf shanghaitech.tar.gz +rm shanghaitech.tar.gz + +make sure the shanghaitech dataset is under the directory Data. +"
\ No newline at end of file @@ -38,12 +38,18 @@ cd Data ./shanghaitech.sh ``` +If the download shell does not work (I guess the download link in shell script is not permanent), please manually download all datasets from [ped1.tar.gz, ped2.tar.gz, avenue.tar.gz and shanghaitech.tar.gz](https://onedrive.live.com/?authkey=%21AMqh2fTSemfrokE&id=3705E349C336415F%215109&cid=3705E349C336415F) +and tar each tar.gz file, and move them into **Data** folder. + ## 3. Testing on saved models * Download the trained models (There are the pretrained FlowNet and the trained models of the papers, such as ped1, ped2 and avenue). ```shell cd checkpoints ./download_pretrains.sh ``` +If the download shell does not work (I guess the download link in shell script is not permanent), please manually download pretrained models from [pretrains.tar.gz](https://onedrive.live.com/?authkey=%21AMqh2fTSemfrokE&id=3705E349C336415F%215109&cid=3705E349C336415F) +and tar -xvf pretrains.tar.gz, and move pretrains into **Codes/checkpoints** folder. + * Running the script (as ped2 and avenue datasets for examples) and cd into **Codes** folder at first. ```shell python inference.py --dataset ped2 \ |
