The dataset provides:

- Optical flow fields
- Person trajectories (up to 1451)
- Dense pixel trajectories
@inproceedings{TUBCrowdFlow2018,
  author    = {Schr{\"o}der, Gregory and Senst, Tobias and Bochinski, Erik and Sikora, Thomas},
  title     = {Optical Flow Dataset and Benchmark for Visual Crowd Analysis},
  booktitle = {IEEE International Conference on Advanced Video and Signal Based Surveillance},
  year      = {2018},
}
sudo apt-get install unrar
unrar x TUBCrowdFlow
sudo apt-get install python3-dev python3-virtualenv virtualenv
virtualenv -p python3 crowdflow_env
source crowdflow_env/bin/activate
pip3 install numpy progressbar2 opencv-contrib-python
To evaluate your optical flow method:

- Create a new directory in the /TUBCrowdFlow/estimate directory.
- Compute the flow fields and save them in the .flo file format, mirroring the directory structure of /TUBCrowdFlow/images. For example, the optical flow result for the image pair /TUBCrowdFlow/images/IM01/frame_0000.png and /TUBCrowdFlow/images/IM01/frame_0001.png must be stored as `/estimate/[mymethod]/images/IM01/frame_0000.flo`.
- Run opticalflow_evaluate.py to compute the EPE and R2 short-term metrics.
- Run trajectory_evaluate.py to compute the tracking-accuracy long-term metrics.
source crowdflow_env/bin/activate
python3 opticalflow_estimate.py TUBCrowdFlow/ dual farneback plk
source crowdflow_env/bin/activate
python3 opticalflow_evaluate.py TUBCrowdFlow/ dual plk farneback
source crowdflow_env/bin/activate
python3 trajectory_evaluate.py TUBCrowdFlow/ dual plk farneback
|
|
|
|
|
|
|
|
|
|
|
|
|
|
||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
---|---|---|---|---|---|---|---|---|---|---|---|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
---|---|---|---|---|---|---|---|---|---|---|---|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@inproceedings{Bailer2015,
  author    = {Bailer, Christian and Taetz, Bertram and Stricker, Didier},
  title     = {Flow Fields: Dense Correspondence Fields for Highly Accurate Large Displacement Optical Flow Estimation},
  booktitle = {International Conference on Computer Vision},
  pages     = {4015--4023},
  year      = {2015},
}
@inproceedings{Hu2017,
  author    = {Hu, Yinlin and Li, Yunsong and Song, Rui},
  title     = {Robust Interpolation of Correspondences for Large Displacement Optical Flow},
  booktitle = {Conference on Computer Vision and Pattern Recognition},
  pages     = {4791--4799},
  year      = {2017},
}
@article{Li2018,
  author  = {Li, Y. and Hu, Y. and Song, R. and Rao, P. and Wang, Y.},
  title   = {Coarse-to-Fine {PatchMatch} for Dense Correspondence},
  journal = {IEEE Transactions on Circuits and Systems for Video Technology},
  volume  = {28},
  number  = {9},
  pages   = {2233--2245},
  year    = {2018},
}
@inproceedings{Weinzaepfel2013,
  author    = {Weinzaepfel, Philippe and Revaud, Jerome and Harchaoui, Zaid and Schmid, Cordelia},
  title     = {{DeepFlow}: Large Displacement Optical Flow with Deep Matching},
  booktitle = {International Conference on Computer Vision},
  year      = {2013},
}
@inproceedings{Geistert2016,
  author    = {Geistert, Jonas and Senst, Tobias and Sikora, Thomas},
  title     = {Robust Local Optical Flow: Dense Motion Vector Field Interpolation},
  booktitle = {Picture Coding Symposium},
  pages     = {1--5},
  year      = {2016},
}
@inproceedings{Kroeger2016,
  author    = {Kroeger, Till and Timofte, Radu and Dai, Dengxin and Van Gool, Luc},
  title     = {Fast Optical Flow using Dense Inverse Search},
  booktitle = {European Conference on Computer Vision},
  year      = {2016},
}
@inproceedings{Farneback2003,
  author    = {Farneb{\"a}ck, Gunnar},
  title     = {Two-Frame Motion Estimation Based on Polynomial Expansion},
  booktitle = {Proceedings of the 13th Scandinavian Conference on Image Analysis},
  pages     = {363--370},
  year      = {2003},
}
@techreport{Bouguet2000,
  author      = {Bouguet, Jean-Yves},
  title       = {Pyramidal Implementation of the {Lucas} {Kanade} Feature Tracker},
  institution = {Intel Corporation Microprocessor Research Labs},
  type        = {Technical Report},
  year        = {2000},
}
@article{IDREES2014,
  author  = {Idrees, Haroon and Warner, Nolan and Shah, Mubarak},
  title   = {Tracking in Dense Crowds Using Prominence and Neighborhood Motion Concurrence},
  journal = {Image and Vision Computing},
  volume  = {32},
  number  = {1},
  pages   = {14--26},
  year    = {2014},
}