OpenGM  2.3.x
Discrete Graphical Model Library
fusion_based_inf.hxx
Go to the documentation of this file.
1 #pragma once
2 #ifndef OPENGM_FUSION_BASED_INF_HXX
3 #define OPENGM_FUSION_BASED_INF_HXX
4 
5 #include <vector>
6 #include <string>
7 #include <iostream>
8 
9 #include "opengm/opengm.hxx"
13 
14 // Fusion Move Solver (they solve binary problems)
19 
20 #ifdef WITH_CPLEX
22 #endif
23 #ifdef WITH_QPBO
24 #include "QPBO.h"
27 #endif
28 #ifdef WITH_AD3
30 #endif
31 
32 #include <stdlib.h> /* srand, rand */
33 
34 
36 
37 // fusion move model generator
39 
40 
41 
42 
43 
44 
45 
46 
47 
48 namespace opengm
49 {
50 
51 
52 
53 
54 
55 
56 
57 
58 
59 
60 
61 namespace proposal_gen{
62 
63 
64 
65 
 // Alpha-expansion proposal generator: proposal round t offers the current
 // "alpha" label to every variable whose label space contains it and keeps
 // the current label elsewhere; alpha cycles over 0..maxLabel_-1.
 // NOTE(review): the class-name line ("class AlphaExpansionGen") was lost in
 // extraction; the constructor name below confirms the identifier.
 template<class GM, class ACC>
 {
 public:
 typedef ACC AccumulationType;
 typedef GM GraphicalModelType;
 // empty parameter struct -- this generator is configuration free
 struct Parameter
 {
 };
 // store the model and precompute the largest label count over all variables
 AlphaExpansionGen(const GM &gm, const Parameter &param)
 : gm_(gm),
 param_(param),
 currentAlpha_(0)
 {
 maxLabel_ =0;
 for(size_t i=0; i<gm.numberOfVariables();++i){
 if(gm.numberOfLabels(i)>maxLabel_){
 maxLabel_ = gm.numberOfLabels(i);
 }
 }
 }
 // restart the alpha sweep at label 0
 void reset()
 {
 currentAlpha_ = 0;
 }

 // one full sweep over all labels is the natural stopping window
 size_t defaultNumStopIt() {return maxLabel_;}

 // Write the expansion proposal for the current alpha into `proposal`.
 // `proposal` must already be sized to numberOfVariables() by the caller.
 void getProposal(const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
 {
 for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi)
 {
 if (gm_.numberOfLabels(vi) > currentAlpha_ )
 {
 proposal[vi] = currentAlpha_;
 }
 else
 {
 proposal[vi] = current[vi];
 }
 }
 ++currentAlpha_;
 if(currentAlpha_>=maxLabel_){
 currentAlpha_ = 0;
 }
 }
 LabelType currentAlpha(){return currentAlpha_;}
 private:
 const GM &gm_;
 Parameter param_;
 LabelType maxLabel_;     // largest numberOfLabels over all variables
 LabelType currentAlpha_; // label used for the next proposal
 };
121 
122 
123 
124 
125 
 // Up/down proposal generator: every variable proposes its current label
 // incremented or decremented by one, following a per-variable direction
 // that bounces off the ends of the label range.
 template<class GM, class ACC>
 class UpDownGen
 {
 public:
 typedef ACC AccumulationType;
 typedef GM GraphicalModelType;
 // NOTE(review): the "Parameter(" constructor-name line was lost in
 // extraction; startDirection must be "up", "down" or "random".
 struct Parameter
 {
 const std::string startDirection = std::string("up")
 )
 : startDirection_(startDirection)
 {

 }
 std::string startDirection_;
 };
 UpDownGen(const GM &gm, const Parameter &param)
 : gm_(gm),
 param_(param),
 argBuffer_(gm.numberOfVariables(),0),
 direction_(gm.numberOfVariables())
 {
 this->reset();
 }
 // (re-)initialize the per-variable directions from the parameter
 void reset()
 {
 if(param_.startDirection_== std::string("random")){
 for(size_t i=0; i<gm_.numberOfVariables();++i){
 direction_[i]=rand()%2 == 0 ? -1:1;
 }
 }
 else if(param_.startDirection_== std::string("up")){
 for(size_t i=0; i<gm_.numberOfVariables();++i){
 direction_[i]=1;
 }
 }
 else if(param_.startDirection_== std::string("down")){
 for(size_t i=0; i<gm_.numberOfVariables();++i){
 direction_[i]=-1;
 }
 }
 else{
 throw opengm::RuntimeError("wrong starting direction for UpDownGen");
 }
 }

 size_t defaultNumStopIt() {return 2;}

 void getProposal(const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
 {
 for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi)
 {
 const size_t numL = gm_.numberOfLabels(vi);

 const LabelType ol = argBuffer_[vi];
 const LabelType cl = current[vi];

 // NOTE(review): copying the whole `current` vector inside the
 // per-variable loop is O(n^2) and makes ol==cl for every vi>0;
 // this looks like it was meant to run once after the loop —
 // confirm against the unmangled original before changing.
 std::copy(current.begin(), current.end(), argBuffer_.begin());

 // flip direction?
 if(ol == cl){
 direction_[vi]*=-1;
 }
 const LabelType d = direction_[vi];
 if(d==1){

 if(cl+1<numL){
 proposal[vi] = cl +1;
 }
 else{
 // hit the upper end of the label range: bounce downwards
 direction_[vi] = -1;
 proposal[vi] = cl - 1 ;
 }
 }
 else{
 if(cl>=1){
 proposal[vi] = cl - 1;
 }
 else{
 // hit label 0: bounce upwards
 direction_[vi] = 1;
 proposal[vi] = cl + 1 ;
 }
 }
 }
 }
 private:
 const GM &gm_;
 Parameter param_;
 std::vector<LabelType> argBuffer_;  // labeling seen in the previous round
 std::vector<LabelType> direction_;  // +1 (up) or -1 (down) per variable
 std::vector<LabelType> jumpSize_;   // NOTE(review): never used in visible code
 };
220 
221 
 // Alpha-beta-swap proposal generator: enumerates ordered label pairs
 // (alpha, beta) with a ShapeWalker over a 2 x maxLabel_ grid and proposes
 // swapping the two labels wherever they occur.
 // NOTE(review): the class-name line ("class AlphaBetaSwapGen") was lost in
 // extraction; the constructor name below confirms the identifier.
 template<class GM, class ACC>
 {
 public:
 typedef ACC AccumulationType;
 typedef GM GraphicalModelType;
 // empty parameter struct -- this generator is configuration free
 struct Parameter
 {
 };
 private:
 // largest numberOfLabels over all variables (used to size the pair walker)
 static size_t getMaxLabel(const GM &gm){
 size_t maxLabel = 0;
 for(size_t i=0; i<gm.numberOfVariables();++i){
 if(gm.numberOfLabels(i)>maxLabel ){
 maxLabel = gm.numberOfLabels(i);
 }
 }
 return maxLabel;
 }
 public:
 AlphaBetaSwapGen(const GM &gm, const Parameter &param)
 : gm_(gm),
 param_(param),
 maxLabel_(getMaxLabel(gm)),
 abShape_(2, maxLabel_),
 abWalker_(abShape_.begin(), 2)
 {
 // ++abWalker_;
 }
 void reset()
 {
 abWalker_.reset();
 }

 // number of unordered label pairs with alpha != beta
 size_t defaultNumStopIt() {return (maxLabel_*maxLabel_-maxLabel_)/2;}

 void getProposal(const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
 {
 // with fewer than two labels there is nothing to swap
 if( maxLabel_<2){
 for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi)
 proposal[vi] = current[vi];
 }else{
 ++abWalker_;
 if(currentAlpha()+1 == maxLabel_ && currentBeta()+1== maxLabel_){
 reset();
 }
 // skip the diagonal (alpha == beta swaps are no-ops)
 while (abWalker_.coordinateTuple()[0] == abWalker_.coordinateTuple()[1])
 {
 ++abWalker_;
 }

 const LabelType alpha = abWalker_.coordinateTuple()[0];
 const LabelType beta = abWalker_.coordinateTuple()[1];

 // exchange alpha and beta wherever the target label is feasible
 for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi)
 {
 if ( current[vi] == alpha && gm_.numberOfLabels(vi) > beta )
 {
 proposal[vi] = beta;
 }
 else if ( current[vi] == beta && gm_.numberOfLabels(vi) > alpha )
 {
 proposal[vi] = alpha;
 }
 else
 {
 proposal[vi] = current[vi];
 }
 }
 }
 }

 // NOTE(review): the signature lines of the two accessors below
 // (presumably "LabelType currentAlpha()" and "LabelType currentBeta()")
 // were lost in extraction.
 {
 return abWalker_.coordinateTuple()[0];
 }
 {
 return abWalker_.coordinateTuple()[1];
 }
 private:

 const GM &gm_;
 Parameter param_;
 LabelType maxLabel_;
 std::vector<LabelType> abShape_; // shape {maxLabel_, maxLabel_} of the pair grid
 ShapeWalker<typename std::vector<LabelType>::const_iterator> abWalker_;

 };
313 
 // Uniform random proposal generator: every round draws an independent
 // uniformly random label per variable, seeded by the round counter.
 template<class GM, class ACC>
 class RandomGen
 {
 public:
 typedef ACC AccumulationType;
 typedef GM GraphicalModelType;
 // empty parameter struct -- this generator is configuration free
 struct Parameter
 {
 };
 RandomGen(const GM &gm, const Parameter &param)
 : gm_(gm),
 param_(param),
 currentStep_(0)
 {
 }
 void reset()
 {
 currentStep_ = 0;
 }
 size_t defaultNumStopIt() {return 10;}
 void getProposal(const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
 {
 for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi){
 // draw label
 // seed currentStep_+vi makes the draw deterministic per (round, variable)
 opengm::RandomUniform<size_t> randomLabel(0, gm_.numberOfLabels(vi),currentStep_+vi);
 proposal[vi] = randomLabel();
 }
 ++currentStep_;
 }
 private:
 const GM &gm_;
 Parameter param_;
 LabelType currentStep_; // round counter, also used as RNG seed offset
 };
350 
351 
 // Binary random proposal generator: every variable gets label 0 or 1
 // (label 1 with probability 2/3, since the draw is from {0,1,2} clamped
 // to at most 1). Intended for models where the first two labels suffice.
 // NOTE(review): the class-name line ("class Random2Gen") was lost in
 // extraction; the constructor name below confirms the identifier.
 template<class GM, class ACC>
 {
 public:
 typedef ACC AccumulationType;
 typedef GM GraphicalModelType;
 // empty parameter struct -- this generator is configuration free
 struct Parameter
 {
 };
 Random2Gen(const GM &gm, const Parameter &param)
 : gm_(gm),
 param_(param),
 currentStep_(0)
 {
 }
 void reset()
 {
 currentStep_ = 0;
 }
 size_t defaultNumStopIt() {return 4;}
 void getProposal(const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
 {
 for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi){
 // draw label
 opengm::RandomUniform<size_t> randomLabel(0,3,currentStep_+vi);
 proposal[vi] = std::min(randomLabel(),size_t(1));
 }
 ++currentStep_;
 }
 private:
 const GM &gm_;
 Parameter param_;
 LabelType currentStep_; // round counter, also used as RNG seed offset
 };
388 
389 
390 
 // Random + lazy-flipper proposal generator: draws a uniformly random
 // labeling and then locally improves it with a depth-1 LazyFlipper run
 // before handing it to the fusion step.
 // NOTE(review): the class-name line ("class RandomLFGen") was lost in
 // extraction; the constructor name below confirms the identifier.
 template<class GM, class ACC>
 {
 public:
 typedef ACC AccumulationType;
 typedef GM GraphicalModelType;
 // empty parameter struct -- this generator is configuration free
 struct Parameter
 {
 };
 RandomLFGen(const GM &gm, const Parameter &param)
 : gm_(gm),
 param_(param),
 currentStep_(0)
 {
 }
 void reset()
 {
 currentStep_ = 0;
 }
 size_t defaultNumStopIt() {return 10;}
 void getProposal(const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
 {
 for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi){
 // draw label
 opengm::RandomUniform<size_t> randomLabel(0, gm_.numberOfLabels(vi),currentStep_+vi);
 proposal[vi] = randomLabel();
 }
 // polish the random labeling: depth-1 lazy flipping started from `proposal`
 typename opengm::LazyFlipper<GM,ACC>::Parameter para(1,proposal.begin(),proposal.end());
 opengm::LazyFlipper<GM,ACC> lf(gm_,para);
 lf.infer();
 lf.arg(proposal);
 ++currentStep_;
 }
 private:
 const GM &gm_;
 Parameter param_;
 LabelType currentStep_; // round counter, also used as RNG seed offset
 };
431 
432 
 // Non-uniform random proposal generator: samples each variable's label
 // from a Gibbs-like distribution derived from its unary factor,
 // weight(l) = exp(-temp * (unary(l) - min unary)). Variables without a
 // unary factor fall back to a uniform distribution.
 // NOTE(review): the class-name line ("class NonUniformRandomGen") was lost
 // in extraction; the constructor name below confirms the identifier.
 template<class GM, class ACC>
 {
 public:
 typedef ACC AccumulationType;
 typedef GM GraphicalModelType;
 struct Parameter
 {
 Parameter(const float temp=1.0)
 : temp_(temp){
 }
 float temp_; // inverse-temperature of the sampling distribution
 };

 NonUniformRandomGen(const GM &gm, const Parameter &param)
 : gm_(gm),
 param_(param),
 currentStep_(0),
 randomGens_(gm.numberOfVariables())
 {
 std::vector<bool> hasUnary(gm.numberOfVariables(),false);

 // build one weighted sampler per variable from its first-order factor
 for(IndexType fi=0; fi<gm_.numberOfFactors(); ++fi){

 if(gm_[fi].numberOfVariables()==1){

 const IndexType vi = gm_[fi].variableIndex(0);
 const LabelType numLabels = gm_.numberOfLabels(vi);
 std::vector<ValueType> weights(numLabels);
 gm_[fi].copyValues(&weights[0]);
 // shift so the smallest value maps to weight exp(0)=1
 const ValueType minValue = *std::min_element(weights.begin(),weights.end());
 for(LabelType l=0; l<numLabels; ++l){
 weights[l]-= minValue;
 }
 for(LabelType l=0; l<numLabels; ++l){
 //OPENGM_CHECK_OP(weights[l],>=,0.0, "NonUniformRandomGen allows only positive unaries");
 weights[l]=std::exp(-1.0*param_.temp_*weights[l]);
 }
 randomGens_[vi]=GenType(weights.begin(),weights.end());
 hasUnary[vi]=true;
 }
 }
 // uniform fallback for variables without a unary factor
 for(IndexType vi=0 ;vi<gm_.numberOfVariables(); ++vi){
 if(!hasUnary[vi]){
 const LabelType numLabels = gm_.numberOfLabels(vi);
 std::vector<ValueType> weights(numLabels,1.0);
 randomGens_[vi]=GenType(weights.begin(),weights.end());
 }
 }

 }

 void reset()
 {
 currentStep_ = 0;
 }

 size_t defaultNumStopIt() {
 return 10;
 }
 void getProposal(const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
 {
 for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi){
 proposal[vi]=randomGens_[vi]();
 }
 ++currentStep_;
 }
 private:
 const GM &gm_;
 Parameter param_;
 LabelType currentStep_;

 typedef RandomDiscreteWeighted<LabelType,ValueType> GenType;

 std::vector < RandomDiscreteWeighted<LabelType,ValueType> > randomGens_;
 };
510 
511 
512  template<class GM, class ACC>
513  class BlurGen
514  {
515  public:
516  typedef ACC AccumulationType;
517  typedef GM GraphicalModelType;
519  struct Parameter
520  {
521  Parameter(double sigma = 20.0) : sigma_(sigma)
522  {
523  }
524  double sigma_;
525  };
526  BlurGen(const GM &gm, const Parameter &param)
527  : gm_(gm),
528  param_(param),
529  currentStep_(0)
530  {
531  const double pi = 3.1416;
532  const double oneOverSqrt2PiSigmaSquared = 1.0 / (std::sqrt(2.0 * pi) * param_.sigma_);
533  const double oneOverTwoSigmaSquared = 1.0 / (2.0* param_.sigma_ * param_.sigma_);
534  const size_t kradius = std::ceil(3*param_.sigma_);
535  kernel_.resize(2*kradius + 1);
536  double sum = 0;
537  for(double i = 0; i <= kradius ; ++i) {
538  double value = oneOverSqrt2PiSigmaSquared * std::exp(-(i*i)*oneOverTwoSigmaSquared);
539  kernel_[kradius+i] = value;
540  kernel_[kradius-i] = value;
541  sum += 2*value;
542  }
543  for(double i = 0; i <= kradius ; ++i) {
544  kernel_[kradius+i] /= sum;
545  kernel_[kradius-i] /= sum;
546  }
547 
548  size_t N = gm_.numberOfFactors(0);
549  for(size_t i=1; i<gm_.numberOfVariables(); ++i){
550  if(N==gm_.numberOfFactors(i)){
551  height_ = i+1;
552  break;
553  }
554  }
555 
556  width_ = gm_.numberOfVariables()/height_;
557 
558  OPENGM_ASSERT(height_*width_ == gm_.numberOfVariables());
559 
560  //Generate blured label
561  bluredLabel_.resize(gm_.numberOfVariables(),0);
562  std::vector<double> temp(gm_.numberOfVariables(),0.0);
563  std::vector<LabelType> localLabel(gm_.numberOfVariables(),0);
564  for (size_t i=0; i<gm_.numberOfVariables(); ++i){
565  for(typename GM::ConstFactorIterator it=gm_.factorsOfVariableBegin(i); it!=gm_.factorsOfVariableEnd(i);++it){
566  if(gm_[*it].numberOfVariables() == 1){
567  ValueType v;
568  ACC::neutral(v);
569  for(LabelType l=0; l<gm_.numberOfLabels(i); ++l){
570  if(ACC::bop(gm_[*it](&l),v)){
571  v=gm_[*it](&l);
572  localLabel[i]=l;
573  }
574  }
575  continue;
576  }
577  }
578  }
579  const int radius = (kernel_.size()-1)/2;
580  const int h = height_-1;
581  const int w = width_ -1;
582  for (int i = 0; i < height_; ++i) {
583  for (int j = 0; j < width_; ++j) {
584  double val = 0.0;
585  for (int k = 0; k < 2*radius+1; ++k) {
586  int i2 = std::min( h,std::max(0,i-radius+k));
587  val += kernel_[k] * localLabel[ind(i2,j)];
588  }
589  temp[ind(i,j)] = val;
590  }
591  }
592  for (int i = 0; i < height_; ++i) {
593  for (int j = 0; j < width_; ++j) {
594  double val = 0.0;
595  for (int k = 0; k < 2*radius+1; ++k) {
596  int j2 = std::min(w,std::max(0,i-radius+k));
597  val += kernel_[k] * temp[ind(i, j2)];
598  }
599  bluredLabel_[ind(i,j)] = std::min(double(gm_.numberOfLabels(ind(i,j))),(std::max(0.0,val)));
600  }
601  }
602  }
603 
604  void reset(){}
605  size_t defaultNumStopIt() {return 10;}
606 
607  void getProposal(const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
608  {
609  if ((currentStep_ % 2) == 0){
610  for (int i = 0; i < height_; ++i) {
611  for (int j = 0; j < width_; ++j) {
612  const size_t var = ind(i,j);
613  opengm::RandomUniform<size_t> randomLabel(0, gm_.numberOfLabels(var),currentStep_+i+j);
614  proposal[var] = (LabelType)(randomLabel());
615  }
616  }
617  }else{
618  proposal.resize(gm_.numberOfVariables(),0.0);
619  opengm::RandomUniform<double> randomLabel(-param_.sigma_*1.5, param_.sigma_*1.5,currentStep_);
620  for(size_t i=0; i<proposal.size();++i){
621  proposal[i] = std::min(gm_.numberOfLabels(i), (LabelType)(std::max(0.0,bluredLabel_[i] + randomLabel())));
622  }
623  }
624  ++currentStep_;
625  }
626  private:
627  size_t ind(int i, int j){ return i+j*height_;}
628  const GM &gm_;
629  Parameter param_;
630  size_t height_;
631  size_t width_;
632  std::vector<double> kernel_;
633  std::vector<double> bluredLabel_;
634  LabelType currentStep_;
635  };
636 
637 
638  template<class GM, class ACC>
640  {
641  public:
642  typedef ACC AccumulationType;
643  typedef GM GraphicalModelType;
645  struct Parameter
646  {
647  Parameter(double sigma = 20.0, bool useLocalMargs = false, double temp=1) : sigma_(sigma), useLocalMargs_(useLocalMargs), temp_(temp)
648  {
649  }
650  double sigma_;
652  double temp_;
653 
654  };
655  EnergyBlurGen(const GM &gm, const Parameter &param)
656  : gm_(gm),
657  param_(param),
658  currentStep_(0)
659  {
660  const double pi = 3.1416;
661  const double oneOverSqrt2PiSigmaSquared = 1.0 / (std::sqrt(2.0 * pi) * param_.sigma_);
662  const double oneOverTwoSigmaSquared = 1.0 / (2.0* param_.sigma_ * param_.sigma_);
663  const size_t kradius = std::ceil(3*param_.sigma_);
664  std::vector<double> kernel;
665  kernel.resize(2*kradius + 1);
666  double sum = 0;
667  for(double i = 0; i <= kradius ; ++i) {
668  double value = oneOverSqrt2PiSigmaSquared * std::exp(-(i*i)*oneOverTwoSigmaSquared);
669  kernel[kradius+i] = value;
670  kernel[kradius-i] = value;
671  sum += 2*value;
672  }
673  for(double i = 0; i <= kradius ; ++i) {
674  kernel[kradius+i] /= sum;
675  kernel[kradius-i] /= sum;
676  }
677 
678  size_t N = gm_.numberOfFactors(0);
679  for(size_t i=1; i<gm_.numberOfVariables(); ++i){
680  if(N==gm_.numberOfFactors(i)){
681  height_ = i+1;
682  break;
683  }
684  }
685 
686  width_ = gm_.numberOfVariables()/height_;
687 
688  OPENGM_ASSERT(height_*width_ == gm_.numberOfVariables());
689 
690  //Generate energy-blured label
691  size_t numLabels =gm_.numberOfLabels(0);
692  std::vector<double> temp(gm_.numberOfVariables(),0.0);
693  std::vector<double> bluredEnergy(gm_.numberOfVariables(),1000000000000.0);
694  std::vector<double> bluredOpt(gm_.numberOfVariables(),0);
695  std::vector<double> energy(gm_.numberOfVariables(),0.0);
696  std::vector<IndexType> unaries(gm_.numberOfVariables());
697  std::vector<std::vector<double> > margs;;
698  if(param_.useLocalMargs_)
699  margs.resize(gm_.numberOfVariables(),std::vector<double>(numLabels));
700 
701  for (size_t i=0; i<gm_.numberOfVariables(); ++i){
702  bool found = false;
703  for(typename GM::ConstFactorIterator it=gm_.factorsOfVariableBegin(i); it!=gm_.factorsOfVariableEnd(i);++it){
704  if(gm_[*it].numberOfVariables() == 1){
705  unaries[i] = *it;
706  found = true;
707  if(gm_[*it].numberOfLabels(0) != numLabels)
708  throw RuntimeError("number of labels are not equal for all variables");
709  continue;
710  }
711  }
712  if(!found)
713  throw RuntimeError("missing unary");
714  }
715 
716 
717  for(size_t l=0; l<numLabels; ++l){
718  for (int i = 0; i < height_; ++i) {
719  for (int j = 0; j < width_; ++j) {
720  const size_t var = ind(i, j);
721  energy[var] =gm_[unaries[ind(i, j)]](&l);
722  }
723  }
724 
725  const int radius = (kernel.size()-1)/2;
726  const int h = height_-1;
727  const int w = width_ -1;
728  for (int i = 0; i < height_; ++i) {
729  for (int j = 0; j < width_; ++j) {
730  double val = 0.0;
731  const size_t var = ind(i, j);
732  for (int k = 0; k < 2*radius+1; ++k) {
733  int i2 = std::min( h,std::max(0,i-radius+k));
734  val += kernel[k] * energy[ind(i2,j)];
735  }
736  temp[var] = val;
737  }
738  }
739  for (int i = 0; i < height_; ++i) {
740  for (int j = 0; j < width_; ++j) {
741  double val = 0.0;
742  const size_t var = ind(i, j);
743  for (int k = 0; k < 2*radius+1; ++k) {
744  int j2 = std::min(w,std::max(0,i-radius+k));
745  val += kernel[k] * temp[ind(i, j2)];
746  }
747  if(param_.useLocalMargs_){
748  margs[var][l]=val;
749  }else{
750  if(val < bluredEnergy[var]){
751  bluredEnergy[var] = val;
752  bluredOpt[var] = l;
753  }
754  }
755  }
756  }
757  }
758  if(param_.useLocalMargs_){
759  localMargGens_.reserve(bluredOpt.size());
760  for(size_t var=0 ; var<bluredOpt.size(); ++var){
761  const ValueType minValue = *std::min_element(margs[var].begin(),margs[var].end());
762  for(LabelType l=0; l<numLabels; ++l){
763  margs[var][l]-= minValue;
764  }
765  for(LabelType l=0; l<numLabels; ++l){
766  margs[var][l]=std::exp(-1.0*param_.temp_*margs[var][l]);
767  }
768  localMargGens_[var]=opengm::RandomDiscreteWeighted<LabelType,ValueType>(margs[var].begin(),margs[var].end(),var);
769  }
770  }else{
771  uniformGens_.reserve(bluredOpt.size());
772  for(size_t var=0 ; var<bluredOpt.size(); ++var){
773  LabelType minVal = (LabelType)(std::max((double)(0) , bluredOpt[var]-param_.sigma_*1.5));
774  LabelType maxVal = (LabelType)(std::min((double)(numLabels) , bluredOpt[var]+param_.sigma_*1.5));
775  uniformGens_[var] = opengm::RandomUniform<LabelType>(minVal, maxVal+1, var);
776  }
777  }
778  }
779 
780  void reset(){}
781  size_t defaultNumStopIt() {return 10;}
782 
783  void getProposal(const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
784  {
785  proposal.resize(gm_.numberOfVariables());
786  if(param_.useLocalMargs_){
787  for(size_t i=0; i<proposal.size();++i){
788  proposal[i] = localMargGens_[i]();
789  }
790  }
791  else{
792  opengm::RandomUniform<LabelType> randomLabel(0, gm_.numberOfLabels(0),currentStep_);
793  if ((currentStep_ % 2) == 0){
794  for(size_t i=0; i<proposal.size();++i){
795  proposal[i] = randomLabel();
796  }
797  }else{
798  for(size_t i=0; i<proposal.size();++i){
799  proposal[i] = uniformGens_[i]();
800  }
801  }
802  }
803  ++currentStep_;
804  }
805  private:
806  size_t ind(int i, int j){ return i+j*height_;}
807  const GM &gm_;
808  Parameter param_;
809  size_t height_;
810  size_t width_;
811  LabelType currentStep_;
812 
813  // Random Generators
814  std::vector<opengm::RandomDiscreteWeighted<LabelType,ValueType> > localMargGens_;
815  std::vector<opengm::RandomUniform<LabelType> > uniformGens_;
816  };
817 
818 
 // Runtime-selectable proposal generator: dispatches reset() /
 // defaultNumStopIt() / getProposal() to one of the concrete generators
 // above according to param_.gen_.
 // NOTE(review): extraction lost several lines here -- the body of the
 // generator-type enum (only its closing "};" survives below), the
 // Parameter member holding gen_, and any code that allocates the
 // generator pointers. As visible, the pointers are never initialized
 // before being dereferenced; confirm against the unmangled original.
 template<class GM, class ACC>
 class DynamincGen{
 public:
 typedef ACC AccumulationType;
 typedef GM GraphicalModelType;
 };

 struct Parameter{
 };

 DynamincGen(const GM & gm, const Parameter & param)
 :
 gm_(gm),
 param_(param){
 }

 // forward reset() to the selected generator
 void reset(){
 if(param_.gen_ == AlphaExpansion)
 alphaExpansionGen_->reset();
 else if(param_.gen_ == AlphaBetaSwap)
 alphaBetaSwapGen_->reset();
 else if(param_.gen_ == UpDown)
 upDownGen_->reset();
 else if(param_.gen_ == Random)
 randomGen_->reset();
 else if(param_.gen_ == RandomLF)
 randomLFGen_->reset();
 else if(param_.gen_ == NonUniformRandom)
 nonUniformRandomGen_->reset();
 else if(param_.gen_ == Blur)
 blurGen_->reset();
 else if(param_.gen_ == EnergyBlur)
 energyBlurGen_->reset();
 else{
 throw RuntimeError("unknown generator type");
 }
 }
 // forward defaultNumStopIt() to the selected generator
 size_t defaultNumStopIt() {
 if(param_.gen_ == AlphaExpansion)
 return alphaExpansionGen_->defaultNumStopIt();
 else if(param_.gen_ == AlphaBetaSwap)
 return alphaBetaSwapGen_->defaultNumStopIt();
 else if(param_.gen_ == UpDown)
 return upDownGen_->defaultNumStopIt();
 else if(param_.gen_ == Random)
 return randomGen_->defaultNumStopIt();
 else if(param_.gen_ == RandomLF)
 return randomLFGen_->defaultNumStopIt();
 else if(param_.gen_ == NonUniformRandom)
 return nonUniformRandomGen_->defaultNumStopIt();
 else if(param_.gen_ == Blur)
 return blurGen_->defaultNumStopIt();
 else if(param_.gen_ == EnergyBlur)
 return energyBlurGen_->defaultNumStopIt();
 else{
 throw RuntimeError("unknown generator type");
 }
 }
 // forward getProposal() to the selected generator
 void getProposal(const std::vector<LabelType> &current , std::vector<LabelType> &proposal){
 if(param_.gen_ == AlphaExpansion)
 return alphaExpansionGen_->getProposal(current, proposal);
 else if(param_.gen_ == AlphaBetaSwap)
 return alphaBetaSwapGen_->getProposal(current, proposal);
 else if(param_.gen_ == UpDown)
 return upDownGen_->getProposal(current, proposal);
 else if(param_.gen_ == Random)
 return randomGen_->getProposal(current, proposal);
 else if(param_.gen_ == RandomLF)
 return randomLFGen_->getProposal(current, proposal);
 else if(param_.gen_ == NonUniformRandom)
 return nonUniformRandomGen_->getProposal(current, proposal);
 else if(param_.gen_ == Blur)
 return blurGen_->getProposal(current, proposal);
 else if(param_.gen_ == EnergyBlur)
 return energyBlurGen_->getProposal(current, proposal);
 else{
 throw RuntimeError("unknown generator type");
 }
 }
 private:
 const GM & gm_;
 Parameter param_;

 // generators
 AlphaExpansionGen<GM, ACC> * alphaExpansionGen_;
 AlphaBetaSwapGen <GM, ACC> * alphaBetaSwapGen_;
 UpDownGen<GM, ACC> * upDownGen_;
 RandomGen<GM, ACC> * randomGen_;
 RandomLFGen<GM, ACC> * randomLFGen_;
 NonUniformRandomGen<GM, ACC> * nonUniformRandomGen_;
 BlurGen<GM, ACC> * blurGen_;
 EnergyBlurGen<GM, ACC> * energyBlurGen_;
 };
923 
924 
925 }
926 
927 
// Fusion-based inference: repeatedly generates proposal labelings with
// PROPOSAL_GEN and fuses each proposal with the current best labeling via
// a binary fusion move (HlFusionMover), keeping the better result.
// NOTE(review): extraction lost several typedef lines here (LabelType /
// ValueType typedefs, visitor typedefs, and the FusionMoverType /
// FusionParameter typedefs referenced below) as well as the
// "Parameter(" constructor-name line inside class Parameter.
template<class GM, class PROPOSAL_GEN>
class FusionBasedInf : public Inference<GM, typename PROPOSAL_GEN::AccumulationType>
{
public:
 typedef PROPOSAL_GEN ProposalGen;
 typedef typename ProposalGen::AccumulationType AccumulationType;
 typedef AccumulationType ACC;
 typedef GM GraphicalModelType;


 typedef typename ProposalGen::Parameter ProposalParameter;


 class Parameter
 {
 public:
 const ProposalParameter & proposalParam = ProposalParameter(),
 const FusionParameter & fusionParam = FusionParameter(),
 const size_t numIt=1000,
 const size_t numStopIt = 0
 )
 : proposalParam_(proposalParam),
 fusionParam_(fusionParam),
 numIt_(numIt),
 numStopIt_(numStopIt)
 {

 }
 ProposalParameter proposalParam_;  // forwarded to the proposal generator
 FusionParameter fusionParam_;      // forwarded to the fusion mover
 size_t numIt_;                     // maximum number of fusion rounds
 size_t numStopIt_;                 // rounds without improvement before stopping
                                    // (0 = use the generator's default)

 };


 FusionBasedInf(const GraphicalModelType &, const Parameter & = Parameter() );
 ~FusionBasedInf();
 std::string name() const;
 const GraphicalModelType &graphicalModel() const;
 void reset();
 template<class VisitorType>
 InferenceTermination infer(VisitorType &);
 void setStartingPoint(typename std::vector<LabelType>::const_iterator);
 virtual InferenceTermination arg(std::vector<LabelType> &, const size_t = 1) const ;
 virtual ValueType value()const {return bestValue_;}
private:


 const GraphicalModelType &gm_;
 Parameter param_;

 FusionMoverType * fusionMover_;   // owned; allocated in the ctor, freed in the dtor

 PROPOSAL_GEN proposalGen_;
 ValueType bestValue_;             // energy of bestArg_
 std::vector<LabelType> bestArg_;  // best labeling found so far
 size_t maxOrder_;                 // maximal factor order of gm_
};
1001 
1002 
1003 
1004 
// Constructor: stores the model, allocates the fusion mover and seeds the
// search with the per-variable best unary labels (variables without a
// unary factor keep label 0).
// NOTE(review): the qualified constructor-name line was lost in extraction.
template<class GM, class PROPOSAL_GEN>
 const GraphicalModelType &gm,
 const Parameter &parameter
)
 : gm_(gm),
 param_(parameter),
 fusionMover_(NULL),
 proposalGen_(gm_, parameter.proposalParam_),
 bestValue_(),
 bestArg_(gm_.numberOfVariables(), 0),
 maxOrder_(gm.factorOrder())
{
 ACC::neutral(bestValue_);
 fusionMover_ = new FusionMoverType(gm_,parameter.fusionParam_);
 //set default starting point
 // pick, for every variable, the label optimizing its unary factor
 std::vector<LabelType> conf(gm_.numberOfVariables(),0);
 for (size_t i=0; i<gm_.numberOfVariables(); ++i){
 for(typename GM::ConstFactorIterator it=gm_.factorsOfVariableBegin(i); it!=gm_.factorsOfVariableEnd(i);++it){
 if(gm_[*it].numberOfVariables() == 1){
 ValueType v;
 ACC::neutral(v);
 for(LabelType l=0; l<gm_.numberOfLabels(i); ++l){
 if(ACC::bop(gm_[*it](&l),v)){
 v=gm_[*it](&l);
 conf[i]=l;
 }
 }
 continue;
 }
 }
 }
 setStartingPoint(conf.begin());
}
// Destructor: releases the fusion mover allocated in the constructor.
// NOTE(review): the qualified destructor-name line was lost in extraction.
template<class GM, class PROPOSAL_GEN>
{
 delete fusionMover_;
}
1045 
1046 
// reset() is declared by the Inference interface but not supported here.
// NOTE(review): the qualified function-name line was lost in extraction.
template<class GM, class PROPOSAL_GEN>
inline void
{
 throw RuntimeError("not implemented yet");
}
1053 
// Copy a caller-supplied labeling (numberOfVariables() labels starting at
// `begin`) into bestArg_ and cache its energy in bestValue_.
// NOTE(review): the qualified function-name line was lost in extraction.
template<class GM, class PROPOSAL_GEN>
inline void
 typename std::vector<typename FusionBasedInf<GM, PROPOSAL_GEN>::LabelType>::const_iterator begin
)
{
 std::copy(begin, begin + gm_.numberOfVariables(), bestArg_.begin());
 bestValue_ = gm_.evaluate(bestArg_.begin());
}
1064 
// Human-readable algorithm name (Inference interface).
// NOTE(review): the qualified function-name line was lost in extraction.
template<class GM, class PROPOSAL_GEN>
inline std::string
{
 return "FusionBasedInf";
}
1071 
// Access to the model being optimized (Inference interface).
// NOTE(review): the qualified function-name/return-type lines were lost in extraction.
template<class GM, class PROPOSAL_GEN>
{
 return gm_;
}
1078 
// Convenience overload: run inference with an empty (no-op) visitor.
// NOTE(review): the qualified function-name line was lost in extraction.
template<class GM, class PROPOSAL_GEN>
inline InferenceTermination
{
 EmptyVisitorType v;
 return infer(v);
}
1086 
1087 
// Main optimization loop: up to numIt_ rounds of propose-and-fuse.
// Each round asks the generator for a proposal, evaluates it, and fuses it
// with the current best labeling; stops early after numStopIt_ consecutive
// rounds without improvement or when the visitor requests termination.
// NOTE(review): the qualified function-name line was lost in extraction.
template<class GM, class PROPOSAL_GEN>
template<class VisitorType>
 VisitorType &visitor
)
{
 // evaluate the current best state
 bestValue_ = gm_.evaluate(bestArg_.begin());

 visitor.begin(*this);


 // 0 means "let the proposal generator pick its natural stopping window"
 if(param_.numStopIt_ == 0){
 param_.numStopIt_ = proposalGen_.defaultNumStopIt();
 }

 std::vector<LabelType> proposedState(gm_.numberOfVariables());
 std::vector<LabelType> fusedState(gm_.numberOfVariables());

 size_t countRoundsWithNoImprovement = 0;

 for(size_t iteration=0; iteration<param_.numIt_; ++iteration){
 // store initial value before one proposal round
 const ValueType valueBeforeRound = bestValue_;

 proposalGen_.getProposal(bestArg_,proposedState);

 // this might be to expensive
 ValueType proposalValue = gm_.evaluate(proposedState);
 //ValueType proposalValue = 100000000000000000000000.0;

 // fuse current best with the proposal; bestValue_ is updated in place
 const bool anyVar = fusionMover_->fuse(bestArg_,proposedState, fusedState,
 bestValue_, proposalValue, bestValue_);


 if(anyVar){
 if( !ACC::bop(bestValue_, valueBeforeRound)){
 ++countRoundsWithNoImprovement;
 }
 else{
 // Improvement
 countRoundsWithNoImprovement = 0;
 bestArg_ = fusedState;
 }
 // a non-zero visitor return value requests early termination
 if(visitor(*this)!=0){
 break;
 }
 }
 else{
 // fusion had no free variables -- counts as a stale round
 ++countRoundsWithNoImprovement;
 }
 // check if converged or done
 if(countRoundsWithNoImprovement==param_.numStopIt_ && param_.numStopIt_ !=0 )
 break;
 }
 visitor.end(*this);
 return NORMAL;
}
1147 
1148 
1149 
1150 
// Output the best labeling found (Inference interface). Only the first
// solution (N == 1) is available; other ranks return UNKNOWN.
// NOTE(review): the qualified function-name line was lost in extraction.
template<class GM, class PROPOSAL_GEN>
inline InferenceTermination
 std::vector<LabelType> &x,
 const size_t N
) const
{
 if (N == 1)
 {
 x.resize(gm_.numberOfVariables());
 for (size_t j = 0; j < x.size(); ++j)
 {
 x[j] = bestArg_[j];
 }
 return NORMAL;
 }
 else
 {
 return UNKNOWN;
 }
}
1173 
1174 } // namespace opengm
1175 
1176 #endif // #ifndef OPENGM_FUSION_BASED_INF_HXX
RandomGen(const GM &gm, const Parameter &param)
AlphaBetaSwapGen(const GM &gm, const Parameter &param)
EnergyBlurGen(const GM &gm, const Parameter &param)
The OpenGM namespace.
Definition: config.hxx:43
virtual ValueType value() const
return the solution (value)
opengm::visitors::VerboseVisitor< FusionBasedInf< GM, PROPOSAL_GEN > > VerboseVisitorType
Parameter(const ProposalParameter &proposalParam=ProposalParameter(), const FusionParameter &fusionParam=FusionParameter(), const size_t numIt=1000, const size_t numStopIt=0)
std::string name() const
UpDownGen(const GM &gm, const Parameter &param)
const GraphicalModelType & graphicalModel() const
#define OPENGM_ASSERT(expression)
Definition: opengm.hxx:77
ProposalGen::AccumulationType AccumulationType
AlphaExpansionGen(const GM &gm, const Parameter &param)
opengm::visitors::EmptyVisitor< FusionBasedInf< GM, PROPOSAL_GEN > > EmptyVisitorType
RandomLFGen(const GM &gm, const Parameter &param)
NonUniformRandomGen(const GM &gm, const Parameter &param)
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
Alpha-Beta-Swap Algorithm.
FusionMoverType::Parameter FusionParameter
Inference algorithm interface.
Definition: inference.hxx:34
void setStartingPoint(typename std::vector< LabelType >::const_iterator)
set initial labeling
opengm::visitors::TimingVisitor< FusionBasedInf< GM, PROPOSAL_GEN > > TimingVisitorType
DynamincGen(const GM &gm, const Parameter &param)
Alpha-Expansion Algorithm.
HlFusionMover< GraphicalModelType, AccumulationType > FusionMover
Random2Gen(const GM &gm, const Parameter &param)
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
Parameter(double sigma=20.0, bool useLocalMargs=false, double temp=1)
InferenceTermination infer()
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
BlurGen(const GM &gm, const Parameter &param)
HlFusionMover< GraphicalModelType, AccumulationType > FusionMoverType
InferenceTermination arg(std::vector< LabelType > &, const size_t=1) const
output a solution
Parameter(const std::string startDirection=std::string("up"))
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
A generalization of ICM. B. Andres, J. H. Kappes, U. Koethe and F. A. Hamprecht, "The Lazy Flipper: Efficient Depth-Limited Exhaustive Search in Discrete Graphical Models", ECCV 2012.
FusionBasedInf(const GraphicalModelType &, const Parameter &=Parameter())
ProposalGen::Parameter ProposalParameter
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
OpenGM runtime error.
Definition: opengm.hxx:100
virtual InferenceTermination arg(std::vector< LabelType > &, const size_t=1) const
output a solution
InferenceTermination infer()
start the algorithm
InferenceTermination
Definition: inference.hxx:24
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)