import java.io.*;
import java.util.*;

import java.math.BigInteger;

//! version 0.8, 29 juli 2008


//! -------- KSet code is stabilised---------------------------------------------------
//! -----------------------------------------------------------------------------------
//! Ksets begin with index 1

//! Jul 15 2008: KSets can now also have parents and children....

//! A set of leaves (integers 1..leafMax) backed by a boolean membership array.
//! KSets can additionally be linked into a tree via parent/children pointers;
//! this is how the SN-tree is represented (each node's KSet equals the set of
//! leaves underneath it).
class KSet
	{
	private int leafMax;		//! capacity: valid leaf labels are 1..leafMax
	private boolean array[];	//! array[i] == true  <=>  leaf (i+1) is in the set
	private int leafCount;		//! number of leaves currently in the set

	private int id;
	private KSet parent;
	private Vector children;	//! Vector of KSet

	//! Tri-state cache used by computeCandidateTBRs(): is the subtree rooted
	//! here a candidate TBR?
	private int isCTBR;
	private final static int UNKNOWN = 0;
	private final static int NO = -1;
	private final static int YES = 1;

	public void setID( int idval )
		{
		this.id = idval;
		}

	public int getID()
		{
		return id;
		}

	public void setParent( KSet p )
		{
		this.parent = p;
		}

	public KSet getParent()
		{
		return parent;
		}

	//! Returns a new, empty KSet with the same capacity as this one.
	public KSet emptyClone()
		{
		return new KSet( this.leafMax );
		}

	//! Note: does not check uniqueness; callers must not add the same child twice.
	public void addChild( KSet p )
		{
		children.add(p);
		}

	public int countChildren()
		{
		return children.size();
		}

	public Vector getChildren()
		{
		return children;
		}

	//! Counts the cherries (internal nodes whose two children are both
	//! single leaves) in the subtree rooted here.
	//! Only call this when the SN-tree rooted at this. is binary: every
	//! node must have exactly 0 or 2 children, otherwise we bail out hard.
	public int countCherries()
		{
		int numC = this.children.size();

		if( numC == 0 ) return 0;

		if( numC != 2 )
			{
			System.out.println("THIS SHOULD NEVER HAPPEN!");
			System.exit(0);
			}

		KSet ch1 = (KSet) children.elementAt(0);
		KSet ch2 = (KSet) children.elementAt(1);

		int tot = ch1.countCherries() + ch2.countCherries();

		if( (ch1.leafCount == 1) && (ch2.leafCount == 1) ) tot++;

		return tot;
		}

	//! Creates an empty set able to hold leaves 1..leaves.
	public KSet( int leaves )
		{
		this.leafMax = leaves;
		array = new boolean[ leafMax ];
		children = new Vector();
		}

	//! Adds leaf l (1-based); adding an element twice has no effect.
	public void addLeaf( int l )
		{
		if( array[l-1] == false ) leafCount++;
		array[l-1] = true;
		}

	//! Removes leaf l (1-based); removing an absent element has no effect.
	public void removeLeaf( int l )
		{
		if( array[l-1] == true ) leafCount--;
		array[l-1] = false;
		}

	//! Membership test; out-of-range labels are simply "not contained".
	//! FIX: also guard l < 1 -- previously only l > leafMax was checked,
	//! so containsLeaf(0) threw ArrayIndexOutOfBoundsException.
	public boolean containsLeaf( int l )
		{
		if( (l < 1) || (l > leafMax) ) return false;
		return array[l-1];
		}

	//! Returns true if this is a subset of 'two'.
	//! NOTE(review): assumes two.array is at least as long as this.array;
	//! callers appear to always use equal capacities -- confirm.
	public boolean isSubsetOf( KSet two )
		{
		if( this.leafCount > two.leafCount ) return false;

		for( int x=0; x<array.length; x++ )
			{
			if( (array[x] == true) && (two.array[x] == false) ) return false;
			}
		return true;
		}

	public int size()
		{
		return leafCount;
		}

	public boolean empty()
		{
		return( leafCount==0 );
		}

	//! Returns the smallest leaf in the set, or -1 (with an error message)
	//! if the set is empty.
	public int getFirstElement()
		{
		for( int x=0; x<array.length; x++ )
			{
			if( array[x] == true ) return (x+1);
			}

		System.out.println("ERROR#1: Set was empty!");
		return -1;
		}

	//! Returns the two smallest leaves in the set, or null if size < 2.
	public int[] getFirstTwoElements()
		{
		if( leafCount < 2 ) return null;

		int vec[] = new int[2];
		int at=0;

		for( int x=0; x<array.length; x++ )
			{
			if( array[x] )
				{
				vec[at++] = (x+1);
				if( at == 2 ) return vec;
				}
			}
		return null;
		}

	//! Debug print of the set contents; only active in (double-)debug mode.
	public void dump()
		{
		if( Simplistic.DEBUG || Simplistic.DOUBLEDEBUG )
			{
			System.out.print("[");
			for(int x=0; x<leafMax; x++ )
				{
				if(array[x] == true) System.out.print((x+1)+" ");
				}
			System.out.print("]");
			}
		}

	//! Set equality that tolerates different capacities: two KSets are the
	//! same iff they contain exactly the same leaves.
	public boolean sameAs( KSet second )
		{
		int smaller, bigger;
		boolean longer[] = null;

		if( this.leafCount != second.leafCount ) return false;

		if( leafMax < second.leafMax )
			{
			smaller = leafMax;
			bigger = second.leafMax;
			longer = second.array;
			}
		else
			{
			smaller = second.leafMax;
			bigger = leafMax;
			longer = this.array;
			}

		//! Compare the common prefix...
		for( int x=0; x<smaller; x++ )
			{
			if( array[x] != second.array[x] ) return false;
			}

		//! ...and make sure the longer array holds no extra leaves.
		for( int x=smaller; x<bigger; x++ )
			{
			if( longer[x] ) return false;
			}

		return true;
		}

	public int getMaxSize()
		{
		return leafMax;
		}

	//! After this has been done, the isCTBR flags for the
	//! KSets in the SN-tree will be correct. The beautiful thing
	//! is that the structure of the SN-tree under a CTBR exactly
	//! represents the tree that it builds. Also nice: each
	//! KSet is equal to the set of leaves underneath it.

	//! Returns the set of CTBRs in the Vector ctbrs, which is assumed
	//! to be empty at the beginning.

	//! Note that it DOES NOT return the empty CTBR, this has to be
	//! handled explicitly as a special case.
	public void computeCandidateTBRs( Vector ctbrs )
		{
		//! Guard against double invocation: the isCTBR cache is only valid once.
		if( isCTBR != UNKNOWN )
			{
			System.out.println("Error: computeCandidateTBRs has been called more than once!");
			System.exit(0);
			}

		int numKids = children.size();

		//! A leaf is always a CTBR.
		if( numKids == 0 )
			{
			isCTBR = YES;
			ctbrs.addElement( this );
			return;
			}

		Enumeration e = children.elements();

		boolean kidsAreGood = true;

		while( e.hasMoreElements() )
			{
			KSet kid = (KSet) e.nextElement();

			kid.computeCandidateTBRs( ctbrs );

			if( kid.isCTBR == NO ) kidsAreGood = false;
			}

		//! An internal node is a CTBR iff it has exactly two children
		//! and both of them are CTBRs.
		if( numKids != 2 )
			{
			isCTBR = NO;
			return;
			}

		isCTBR = kidsAreGood ? YES : NO;

		if( isCTBR == YES )
			{
			ctbrs.addElement(this);
			}
		}

	//! Returns the complement of this set with respect to 1..leafMax.
	public KSet negateKSet()
		{
		KSet neg = new KSet(leafMax);

		for( int x=1; x<=array.length; x++ )
			{
			if( this.containsLeaf(x) )
				{
				neg.removeLeaf(x);
				}
			else
				{
				neg.addLeaf(x);
				}
			}
		return neg;
		}

	//! Builds the (binary) tree represented by this CTBR's subtree.
	//! Returns a dummy biDAG when this is the empty CTBR.
	public biDAG buildTreeFromCTBR()
		{
		if( leafCount == 0 )
			{
			//! This is an empty TBR, return a dummy biDAG
			biDAG b = new biDAG();
			b.data = 0;
			b.isDummy = true;

			return b;
			}

		biDAG node = new biDAG();

		int kids = children.size();

		if( (kids != 0) && (kids != 2) )
			{
			System.out.println("Something went very very wrong with the CTBRs.");
			}
		if( kids == 2 )
			{
			KSet ch1 = (KSet) children.elementAt(0);
			KSet ch2 = (KSet) children.elementAt(1);

			biDAG lchild = ch1.buildTreeFromCTBR();
			biDAG rchild = ch2.buildTreeFromCTBR();

			node.child1 = lchild;
			node.child2 = rchild;

			lchild.parent = node;
			rchild.parent = node;
			}
		else
			{
			//! a leaf...
			node.data = this.getFirstElement();
			}

		return node;
		}
	}


//! --------------------------------------------------------
//! This part of the code is now ready.

//! An undirected graph on vertices 1..n, stored as an adjacency matrix.
//! Used to decide whether the vertex set splits into two disjoint cliques.
class Graph
{
private int leaves;	//! number of vertices

//! Colours used by getDoubleClique()/visit(); 0 means "not yet visited".
public static int NOT_VISITED = 0;
public static int LEFT = 1;
public static int RIGHT = 2;

boolean adj[][];	//! adjacency matrix (0-indexed internally)
int deg[];		//! per-vertex degree (0-indexed internally)

public Graph(int n)
	{
	leaves = n;
	adj = new boolean[n][n];
	deg = new int[n];
	}

//! Vertices are 1-based in the public interface.
public boolean containsEdge(int a, int b)
	{
	return( adj[a-1][b-1] );
	}

//! Adds the undirected edge {a,b}; adding the same edge twice has no effect.
public void addEdge(int a, int b)
	{
	if( adj[a-1][b-1] == false )
		{
		deg[a-1]++;
		deg[b-1]++;
		}

	//! These things are always added in unison...
	adj[a-1][b-1] = adj[b-1][a-1] = true;
	}

public int degree(int a)
	{
	return deg[a-1];
	}

//! Inspects if the vertices form two disjoint cliques and, if so,
//! returns the partition (LEFT/RIGHT) in the return array;
//! otherwise returns null.
//! The returned array is indexed on [1...leaves].
public int[] getDoubleClique()
	{
	//! Strategy: the vertices form two disjoint cliques exactly when the
	//! COMPLEMENT graph is connected and complete bipartite. So we first
	//! 2-colour the complement, then check that every LEFT vertex is
	//! adjacent to all other LEFT vertices (degree == lClique-1), and
	//! likewise for RIGHT.

	//! FIX: guard the degenerate empty graph (previously visit() indexed
	//! out of bounds); also dropped the unused local 'start'.
	if( leaves == 0 ) return null;

	int visited[] = new int[leaves+1];

	visited[0] = -1000;	//! slot 0 is unused; poison it

	//! 2-colour the complement graph, starting from vertex 1.
	boolean success = visit( 1, LEFT, visited );

	if( !success ) return null;

	int lClique = 0;
	int rClique = 0;

	for(int scan=1;scan<=leaves; scan++ )
		{
		if( visited[scan] == NOT_VISITED ) return null;	//! complement not connected
		if( visited[scan] == LEFT ) lClique++;
		if( visited[scan] == RIGHT ) rClique++;
		}

	//! So if we've got here we know how many guys are in the
	//! left clique, and in the right...

	for( int scan=1; scan<=leaves; scan++ )
		{
		if( visited[scan] == LEFT )
			{
			if( degree(scan) != (lClique-1) ) return null;
			}
		else
			{
			if( degree(scan) != (rClique-1) ) return null;
			}
		}

	return visited;
	}

//! Recursive 2-colouring of the COMPLEMENT graph. Returns false as soon
//! as a colour clash is found (i.e. the complement is not bipartite).
private boolean visit( int vertex, int colourWith, int state[] )
	{
	if( state[vertex] != NOT_VISITED )
		{
		//! Already coloured: succeed only if the colour agrees.
		return ( state[vertex] == colourWith );
		}

	state[vertex] = colourWith;

	int childCol = ( colourWith == LEFT ) ? RIGHT : LEFT;

	//! Now, try all COMPLEMENT children...
	for( int scan=1; scan<=leaves; scan++ )
		{
		if( vertex==scan ) continue;
		if( !containsEdge( vertex, scan ) )
			{
			if( !visit( scan, childCol, state ) ) return false;
			}
		}

	return true;
	}
}


//! convention: ab|c where a<b
//! Assumes that leaves are numbered 1...n
//! !!!Assumes that there are no missing leaves!!!

class TripletSet
	{
	private Vector tripVec;
	private int numLeaves;
	private boolean finalised;
	private boolean lookup[][][];

	//! Creates an empty, unfinalised triplet set.
	public TripletSet()
		{
		this.finalised = false;
		this.numLeaves = 0;
		this.tripVec = new Vector();
		}

	//! Enumerates the stored triplets; each element is an int[3] {a,b,c}.
	public Enumeration elements()
		{
		return this.tripVec.elements();
		}

	//! Ugly....don't use except in dire circumstances
	//! Returns the highest leaf label seen so far.
	public int tellLeaves()
		{
		return this.numLeaves;
		}

	//! Checks to see if all leaves on 1...n are used where n = numLeaves.
	//! On failure the first unused label is written into errorTip[0].
	//! This is currently very slow...
	public boolean isClosed(int errorTip[])
		{
		boolean used[] = new boolean[ numLeaves+1 ];

		for( int i=0; i<tripVec.size(); i++ )
			{
			int t[] = (int []) tripVec.elementAt(i);
			used[ t[0] ] = true;
			used[ t[1] ] = true;
			used[ t[2] ] = true;
			}

		for( int leaf=1; leaf<used.length; leaf++ )
			{
			if( used[leaf] == false )
				{
				errorTip[0] = leaf;
				return false;
				}
			}

		return true;
		}

	//! Checks that every trio of distinct leaves is covered by at least one
	//! stored triplet. On failure the offending trio is written into errorTrip.
	//! This is currently very slooow.
	public boolean isDense(int errorTrip[])
		{
		for( int x=1; x<=numLeaves; x++ )
			{
			for( int y=1; y<=numLeaves; y++ )
				{
				for( int z=1; z<=numLeaves; z++ )
					{
					boolean distinct = (x!=y) && (x!=z) && (y!=z);
					if( !distinct ) continue;

					boolean covered = containsTriplet(x,y,z) || containsTriplet(x,z,y) || containsTriplet(z,y,x);
					if( covered ) continue;

					errorTrip[0] = x;
					errorTrip[1] = y;
					errorTrip[2] = z;
					return false;
					}
				}
			}
		return true;
		}


	//! Adds the triplet ab|c (stored with a<b). Duplicates are ignored and
	//! numLeaves is bumped to cover the highest label seen.
	//! Must not be called after finaliseAndOptimise() -- the lookup table
	//! would go stale, so we abort hard.
	public void addTriplet(int a, int b, int c)
		{
		if( finalised == true )
			{
			System.out.println("ERROR#2: Major error: tried adding triplet to finalised triplet set.");
			System.exit(0);
			}

		//! Normalise to the convention ab|c with a<b.
		if( a>b )
			{
			int swap = a;
			a = b;
			b = swap;
			}

		if( this.containsTriplet(a, b, c) ) return;

		//! What is the highest leaf seen so far?
		//! FIX: use Math.max -- the old pairwise comparisons fell through to c
		//! when a == b, under-counting numLeaves for degenerate input.
		int highest = Math.max( a, Math.max(b, c) );

		if( highest > numLeaves )
			{
			numLeaves = highest;
			}

		int myTriplet[] = new int[3];
		myTriplet[0] = a;
		myTriplet[1] = b;
		myTriplet[2] = c;

		tripVec.add( myTriplet );
		}

	//! Number of triplets currently stored.
	public int countTriplets()
		{
		return this.tripVec.size();
		}


	//! Membership test for the triplet ab|c (order of a and b is irrelevant).
	//! Fast path via the lookup table once finaliseAndOptimise() has run,
	//! linear scan otherwise.
	public boolean containsTriplet(int a, int b, int c)
		{
		if( finalised == true )
			{
			//! The table stores both (a,b) orderings; probe both to be safe.
			return ( lookup[a][b][c] || lookup[b][a][c] );
			}

		//! Normalise to a < b, matching the stored convention.
		if( a>b )
			{
			int tmp = a;
			a = b;
			b = tmp;
			}

		for( int i=0; i<tripVec.size(); i++ )
			{
			int t[] = (int []) tripVec.elementAt(i);
			if( (t[0] == a) && (t[1] == b) && (t[2] == c) ) return true;
			}

		return false;
		}


	//! Debug dump: prints the set size and then every triplet as "a,b|c".
	public void dumpTriplets()
		{
		System.out.println(tripVec.size()+" elements on "+numLeaves+" leaves.");

		for( int i=0; i<tripVec.size(); i++ )
			{
			int triplet[] = (int []) tripVec.elementAt(i);
			System.out.println(triplet[0]+","+triplet[1]+"|"+triplet[2]);
			}
		}

	//! After calling this, look-up is miles faster, BUT you can't change the set anymore!!!
	//! Builds a dense 3-D lookup table indexed [a][b][c], storing both
	//! orderings of the first two leaves.
	public void finaliseAndOptimise()
		{
		finalised = true;

		lookup = new boolean[numLeaves+1][numLeaves+1][numLeaves+1];

		for( int i=0; i<tripVec.size(); i++ )
			{
			int t[] = (int []) tripVec.elementAt(i);

			lookup[ t[0] ][ t[1] ][ t[2] ] = true;
			lookup[ t[1] ][ t[0] ][ t[2] ] = true;
			}
		}


	//! Computes the SN-set generated by leaves x and y: the closure of {x,y}
	//! under the rule "if a is already in, and some triplet ac|z or zc|a ties
	//! an outside leaf c to the set, then c joins too".
	//! X = leaves already processed, Z = the frontier still to process;
	//! at any one iteration, X cup Z is the current SN set.
	//! Not sure whether this is a correct implementation, try and verify the
	//! correctness of this...I think it should be approximately cubic time...
	public KSet FastComputeSN( int x, int y )
		{
		int n = numLeaves;

		KSet X = new KSet(n);
		KSet Z = new KSet(n);

		X.addLeaf(x);
		Z.addLeaf(y);

		while( Z.empty() == false )
			{
			//! Take the smallest frontier element.
			int z = Z.getFirstElement();

			for( int a=1; a<=n; a++ )
				{
				if( X.containsLeaf(a) == false ) continue;

				for( int c=1; c<=n; c++ )
					{
					//! Only consider leaves outside X cup Z.
					if( X.containsLeaf(c) || Z.containsLeaf(c) ) continue;

					if( this.containsTriplet(a, c, z) || this.containsTriplet(z, c, a) )
						{
						Z.addLeaf(c);

						//! I think their algorithm mistakenly implies that we can 'break' here
						}

					}
				//! NOTE(review): this move of z from Z to X sits INSIDE the a-loop;
				//! it looks like it belongs after it. The calls are idempotent, so
				//! the result appears unaffected -- confirm before relocating.
				X.addLeaf(z);
				Z.removeLeaf(z);
				}

			}
		return X;
		}

	//! Returns a vector of KSets, one singleton KSet per leaf 1..numLeaves.
	//! This to retain compatibility with the computeCandidateTBRs routine
	//! from KSet.
	public Vector getLeaves()
		{
		Vector result = new Vector();

		for( int leaf=1; leaf<=this.numLeaves; leaf++ )
			{
			KSet singleton = new KSet(this.numLeaves);
			singleton.addLeaf(leaf);
			result.addElement(singleton);
			}

		return result;
		}


	//! ------------------------------------------------------
	//! Builds a tree of KSets out of the SN-sets.
	//! Specifically, returns the KSet at the root (the set of all leaves),
	//! or null when there are no leaves at all. Parent/child pointers of
	//! the returned KSets encode the SN-tree.

	public KSet buildSNTree()
		{
		Simplistic.report("Constructing SN-sets.");

		if( this.numLeaves == 0 ) return null;	//! there are no SN-sets at all!
		
		Vector sn = this.getSNSets();

		//! Ok, so now we have a vector containing all SN-sets.
		//! We need to build an intelligent data-structure out of them, to make
		//! everything nice and fast.

		Simplistic.report("Ranking the SN-sets in terms of size.");

		//! Bucket the SN-sets by cardinality.
		//! snrank begins at [1], goes up until [this.numLeaves]

		Vector snrank[] = new Vector [ this.numLeaves+1 ];
		for( int x=1; x<snrank.length; x++ )
			{
			snrank[x] = new Vector();
			}

		//! This makes a ranking based on the number of leaves...
		for( int x=0; x<sn.size(); x++ )
			{
			KSet k = (KSet) sn.elementAt(x);
			int l = k.size();
			snrank[l].add( k );
			}		

		Simplistic.report("Finishing ranking, now putting in linear order...");

		//! ---------------------------------------------
		//! This creates an SN-set array which has
		//! elements of increasing size...

		KSet ordered[] = new KSet[ sn.size() ];

		int at = 0;
		for( int x=1; x<snrank.length; x++ )
			{
			Enumeration e = snrank[x].elements();
			while( e.hasMoreElements() )
				{
				ordered[at++] = (KSet) e.nextElement();
				}
			}

		//! The largest SN-set is the trivial one (all leaves) -- it becomes the root.
		//! Hopefully this is correct...
		KSet trivial = ordered[ordered.length-1];

		//! -----------------------------------------------

		Simplistic.report("Constructing SN-tree: first the parent relationship");

		//! For each SN-set, its parent is the FIRST strictly-larger superset in
		//! the size-ordered array (i.e. the smallest superset). The labelled
		//! 'doom' continue jumps to the next l as soon as a parent is found.
		doom: for( int l=0; l<ordered.length-1; l++ )		//! the trivial SN-set is not a subset of anything
			for( int r=(l+1); r<ordered.length; r++ )
				{
				KSet s = (KSet) ordered[l];
				KSet t = (KSet) ordered[r];

				if( s.size() == t.size() ) continue;	//! equal size can't be a strict superset

				if( s.isSubsetOf(t) )
					{
					s.setParent(t);
					continue doom;
					}
				}
				
		//! -----------------------------------------------
		//! Now we can build the child relationship...

		Simplistic.report("Building the SN child relationship...");

		for( int x=0; x< sn.size(); x++ )
			{
			KSet k = (KSet) sn.elementAt(x);

			KSet p = k.getParent();

			if( p == null ) continue;	//! only the root has no parent
	
			p.addChild( k );
			}

		return trivial;
		}

	//! ----------------------------------------------------------------------------

	//! Tags for StackObject.type: a stack frame either enumerates candidate
	//! TBRs (TBR_OBJECT) or pairs of network edges (EDGE_OBJECT).
	private final static int TBR_OBJECT = 0;
	private final static int EDGE_OBJECT = 1;

	//! One frame of the explicit DFS stack used by buildSimpleNetworks().
	//! A frame is either a TBR_OBJECT (enumerating candidate TBRs to remove)
	//! or an EDGE_OBJECT (enumerating pairs of edges to hang a TBR back on).
	class StackObject
		{	
		//! for both TBRs and edges
		Vector list;		//! the candidates being enumerated
		int busyWith;		//! index of the current candidate in 'list'
		int type;		//! TBR_OBJECT or EDGE_OBJECT

		//! int createdMustHits;

		//! for only TBRs
		TripletSet base;	//! the triplet set at this level
		KSet builtTBR;		//! The TBR that brought us here, labels are correct?
		int bmap[];		//! backmap: remaining-leaf label -> original label

		//! for edges only...
		dagExplore netBase;
		int alsoBusyWith;	//! index of the second edge of the pair
		
		public StackObject()
			{
			this.busyWith = -1;	//! pre-increment pattern: the first ++ lands on 0
			this.alsoBusyWith = 0;
			//! this.createdMustHits = 0;
			}
		}

	//! --------------------------------------------------------------------------
	//! Here we go!! Here we go!! Here we go!!
	//! We assume that we specify the upper limit on the level with the 'k' parameter...
	//! It would perhaps be more clever to remove the k parameter but first things first...
	//! So many potential boundary errors...

	//! this returns null if there were no strict simple level-k solutions...

	public Vector buildSimpleNetworks( int k )
		{
		Vector solutions = null;

		int numTrips = this.tripVec.size();
		float maxpercentage = (float) 0.0;
	
		Stack stack = new Stack();

		Vector leaves = this.getLeaves();	//! Is a vector of KSets, each containing a single leaf...

		StackObject so = new StackObject();
		so.list = leaves;
		so.type = TBR_OBJECT;
		so.base = this;		//! Triplets...
		so.builtTBR = null;	//! we didn't remove anything to get here...
		so.bmap = null;		//! likewise

		stack.push(so);

		fallback: while( stack.size() != 0 )
			{
			Simplistic.doublereport("Stack size="+stack.size());

			StackObject top = (StackObject) stack.peek();		
			
				if( top.type == TBR_OBJECT )
					{
					top.busyWith++;

	                                if( top.busyWith >= top.list.size() )
	                                        {
	                                        //! Then we've done everything we need to here, we drop down the stack
	                                        Simplistic.doublereport("Popping stack.");
	                                        stack.pop();
						continue fallback;			
	                                        }

					//! get the object 'busyWith', do something with it
					//! unless we are really high up...

					KSet ctbr = (KSet) top.list.elementAt( top.busyWith );

					//! TODO: count the number of cherries in the ctbr...

					Simplistic.doublereport("Preparing to remove this CTBR");
					ctbr.dump();

					TripletSet trip = top.base;
					
					//! So we're gonna rrrrip ctbr out of trip...
					
					int leavesLeft = trip.numLeaves - ctbr.size();

					int backmap[] = new int[ leavesLeft + 1 ];
					//! backmap will tell me what the numbers of the remaining leaves, used to be

					TripletSet nextLevel = null;

					//! int TBRmustHits = 0;					

					if( ctbr.size() == 0 )
						{
						//! We can put some optimisation in here because nothing changes at all
					
						nextLevel = trip;	//! I think this is safe....
						for( int f=1; f<backmap.length; f++ ) backmap[f] = f;

						//! TBRmustHits = 1;			//! dummy leaf adds one mustHit
						}
					else	{			
						//! System.out.println("cbtr has "+ctbr.size()+" leaves.");
						//! TBRmustHits = ctbr.countCherries();
						nextLevel = trip.subtractCTBR( ctbr, backmap );
						nextLevel.finaliseAndOptimise();	//! for speed
						}

					//! System.out.println("// TBR has "+TBRmustHits+" mustHits.");

					if( stack.size() < k )
						{
						StackObject penthouse = new StackObject();
	
						Simplistic.doublereport("Preparing penthouse.");
		
						Vector cand = null;

						if( ctbr.size() != 0 )
							{
							KSet freshTree = nextLevel.buildSNTree();
							cand = new Vector();

							if( freshTree != null ) freshTree.computeCandidateTBRs( cand );

							/*
							if( cand.size() == 0 )
								{
								//! There should always at least be leaves...or should there???!? CHECK THIS
								System.out.println("We have a problem, there were no CTBRs...");
								System.exit(0);
								}
							*/

							//! Add the empty CTBR
							//! the cloning thing here is just to make sure that the maxLeaf thing is the right size

							KSet emptyCTBR = null;

							if( cand.size() != 0 ) emptyCTBR = ((KSet) cand.elementAt(0)).emptyClone();
							else	{
								emptyCTBR = new KSet(nextLevel.tellLeaves());	//! not sure whether this makes sense...
								}

							//! System.out.println("Adding the empty CTBR");
							cand.addElement(emptyCTBR);
							}
						else	{
							cand = top.list;	//! Same set of CTBRs!!
							//! System.out.println("// Empty CBTR, bypassing...");
							}

						penthouse.base = nextLevel;
						penthouse.list = cand;
						penthouse.type = TBR_OBJECT;
						penthouse.builtTBR = ctbr;		//! Is a KSet / CTBR
						penthouse.bmap = backmap;

						//! penthouse.createdMustHits = top.createdMustHits + TBRmustHits;
						//! System.out.println("MustHits was"+top.createdMustHits+", is now "+penthouse.createdMustHits+" (ctbr had "+ctbr.size()+" leaves");

						stack.push( penthouse );

						}
					else
						{
						if( stack.size() > k )
							{
							//! The stack should be exactly k-hoog at this point
							System.out.println("Houston, we have a problem...");
							System.exit(0);
							}
						//! So stack.size == k
						//! So now we have to build the tree and switch into
						//! edge stack objects, zuuucht
	
						Simplistic.doublereport("Time to try building the tree...");

						biDAG mytree = null;

						if( leavesLeft > 2 )
							{
							Simplistic.doublereport("tree leaves > 2");
							mytree = nextLevel.buildTree();
							}
						else if( leavesLeft == 2 )
							{
							Simplistic.doublereport("tree leaves = 2");
							mytree = biDAG.cherry12();
							}
						else if( leavesLeft == 1 )
							{
							Simplistic.doublereport("tree leaves = 1");
							//! network consisting of one leaf...hmmm
							mytree = new biDAG();
							mytree.data = 1;
							}
						else	{
							//! dummy node
							Simplistic.doublereport("tree leaves = 0");
							mytree = new biDAG();
							mytree.isDummy = true;	//! it's a dummy leaf!!!
							}

						//! need to consider what we will do if we get 'small' trees

						if( mytree != null )
							{
							//! So we got a tree............

							//! System.out.println("We got a tree!");

							//! add the fake root

							biDAG dummyRoot = new biDAG();
							dummyRoot.child1 = mytree;	//! note that child2 stays null
							mytree.parent = dummyRoot;

							//! get all the edges and shove them onto the stack

							dagExplore dexp = new dagExplore( dummyRoot );

							//! We need to hit all the mustHit clusters. Each
							//! TBR can hit 2, but each TBR might add some back in as
							//! well....

							//! int lbTBRcontribution = top.createdMustHits + TBRmustHits;

							//! System.out.println("lower bound TBR contribution: "+lbTBRcontribution);

							Vector edges = dexp.getEdges();

							StackObject edgeob = new StackObject();
							edgeob.type = EDGE_OBJECT;
							edgeob.list = edges;
							edgeob.netBase = dexp;

							//! I think we need this too...
							edgeob.builtTBR = ctbr;
							edgeob.bmap = backmap;

							//! edgeob.createdMustHits = lbTBRcontribution;

							stack.push( edgeob );	
							}

						}

					}
				else
					{				
					//! top.type == EDGE_OBJECT

					//! So. We need to consider all pairs of edges, including pairs
					//! that are the same.

					Simplistic.doublereport("Top of stack is an EDGE_OBJECT, top.list.size() is "+top.list.size());
			
					boolean popped = false;

					top.busyWith++;
					if( top.busyWith >= top.list.size() )
						{
						top.alsoBusyWith++;

						if( top.alsoBusyWith >= top.list.size() )
							{
							Simplistic.doublereport("Popping EDGE_OBJECT");
							stack.pop();
							popped = true;
							}
						top.busyWith = top.alsoBusyWith;	//! IF EVERYTHING BREAKS SET TO 0
						}

					if( !popped )
						{
						Simplistic.doublereport("busyWith, alsoBusyWith = "+top.busyWith+","+top.alsoBusyWith);
						//! Let's hang some TBRs!!

						dagExplore netBase = top.netBase;
						Vector edges = top.list;	//! should be the same as netBase's list!

						biDAG firstEdge[] = (biDAG []) edges.elementAt( top.busyWith );
						biDAG secondEdge[] = (biDAG []) edges.elementAt( top.alsoBusyWith );

						if( (firstEdge[1].isDummy) && (firstEdge[0].parent != null) && (firstEdge[0].secondParent == null) )
							{
							Simplistic.doublereport("Skipping, no need to subdivide already subdivided dummy edge [first edge]");
							continue fallback;
							}

						if( (secondEdge[1].isDummy) && (secondEdge[0].parent != null) && (secondEdge[0].secondParent == null) )
							{
							Simplistic.doublereport("Skipping, no need to subdivide already subdivided dummy edge [second edge]");
							continue fallback;
							}

						int height = stack.size();
						int index = (2*(k+1))-height-1; //! -1 because the stack indexes from 0

						int tripIndex = index - 1;	//! This is where the triplets are that we will
										//! compare with;

						
						//! --------------------------------------------------------------
						//! This bit of code can be removed if it doesn't help

						//! int TBRaccum = (2*k) - height + 1;
						//! int TBRaccumIndex = TBRaccum - 1;
						//! StackObject Accum = (StackObject) stack.elementAt(TBRaccumIndex);
						//! int lbHits = Accum.createdMustHits;
						//! --------------------------------------------------------------


						StackObject lookback = (StackObject) stack.elementAt(index);

						TripletSet oldTrips = ((StackObject) stack.elementAt(tripIndex)).base;

						KSet findTheTree = lookback.builtTBR;

						int switchback[] = lookback.bmap;	//! lets us relabel stuff

						//! Need to clone the biDAG. This is a very annoying
						//! thing to do, I must improve this later.
	
						//! This should simultaneously relabel the leaves too using the backmap...
						biDAG newNodes[] = netBase.clonebiDAG(switchback);	//! So we clone the netBase...

						biDAG newDAG = newNodes[0];

						biDAG edge1tail = newNodes[firstEdge[0].nodeNum];
						biDAG edge1head = newNodes[firstEdge[1].nodeNum];

						biDAG edge2tail = newNodes[secondEdge[0].nodeNum];
						biDAG edge2head = newNodes[secondEdge[1].nodeNum]; 

						//! buildTreeFromCTBR returns a dummy leaf if it was an emptyCTBR...

						//! this following line needs to be optimised...is sloow
						biDAG treeToHang = findTheTree.buildTreeFromCTBR();	//! labels should be OK

						//! if( treeToHang.isDummy ) System.out.println("Hanging a CTBR.");

						biDAG leave1 = null;
						biDAG leave2 = null;

						if( (edge1tail == edge2tail) && (edge1head == edge2head) )
							{
							//! System.out.println("Subdividing a single edge.");

							//! Then we are subdividing one edge
							leave1 = biDAG.subdivide( edge1tail, edge1head );

							leave2 = biDAG.subdivide( edge1tail, leave1 );	//! experimental, check it works!
							}
						else
							{
							//! We are subdividing two edges

							leave1 = biDAG.subdivide( edge1tail, edge1head );

							leave2 = biDAG.subdivide( edge2tail, edge2head );
							}

						biDAG recomb = new biDAG();
						leave1.child2 = recomb;
						leave2.child2 = recomb;
						recomb.parent = leave1;
						recomb.secondParent = leave2;

						recomb.child1 = treeToHang;
						treeToHang.parent = recomb;

						//! So the root of the new network is newDAG. I haven't removed
						//! dummy nodes, I will do this only at the end...

						dagExplore exploreAgain = null;

						if( stack.size() == (2*k) )
							{
							//! If we've reached a height of 2k, we're ready to check the triplets!

							//! newDAG has a fake root so we need to move down a little

							newDAG = newDAG.child1;		
							newDAG.parent = null;		//! I think this should work...

							exploreAgain = new dagExplore( newDAG );

							boolean ok = true;

							Vector dummies = exploreAgain.getDummies();
							
							//! We loop through the dummy nodes, if there is a dummy node whose parent is
							//! a recomb node we know that it is not a valid solution. Otherwise we
							//! delete the leaf and suppress its parent. If suppress causes multi-edges
							//! we also abort.

							for( int d=0; d < dummies.size(); d++ )
								{
								biDAG dummyLeaf = (biDAG) dummies.elementAt(d);
								if( (dummyLeaf.child1 != null) || (dummyLeaf.child2 != null) || (dummyLeaf.secondParent != null) )
									{
									System.out.println("Something went wrong with dummy suppression.");
									System.exit(0);
									}
								
								biDAG p = dummyLeaf.parent;
								if( (p.parent != null) && (p.secondParent != null) )
									{
									ok = false;	//! dummy leaf with a recombination parent, doomed!
									//! System.out.println("Suppressed a recombination leaf, forget it.");
									break;
									}

								biDAG grandp = p.parent;

								biDAG sibling = null;

								if( p.child1 == dummyLeaf )	
									{
									sibling = p.child2;
									}
								else
								if( p.child2 == dummyLeaf )
									{
									sibling = p.child1;
									}
								else	{
									System.out.println("Big error.");
									System.exit(0);
									}
															
								biDAG parentSibling = null;

								if( grandp.child1 == p )
									{
									parentSibling = grandp.child2;
									}
								else
								if( grandp.child2 == p )
									{
									parentSibling = grandp.child1;
									}
								else	{
									System.out.println("More error...");
									System.exit(0);
									}

								if( parentSibling == sibling )
									{
									//! multiedge !!
									//! System.out.println("Created a multi-edge, aborting...");
									ok = false;
									break;
									}

								if( sibling.parent == p )
									{
									sibling.parent = grandp;
									}
								else
								if( sibling.secondParent == p )
									{
									sibling.secondParent = grandp;
									}
								else	{
									System.out.println("Not again...");
									System.exit(0);
									}

								if( grandp.child1 == p )
									{
									grandp.child1 = sibling;
									}
								else
								if( grandp.child2 == p )
									{
									grandp.child2 = sibling;
									}
								else	{
									System.out.println("Again it goes wrong.");
									System.exit(0);
									}

								//! Done!

								}
							
							if( ok )
							{
							if( newDAG.isBiconnected() )
								{
								boolean isConsistent = true;

								//! I need a new explorer because i removed all the
								//! dummy nodes

								exploreAgain = new dagExplore( newDAG );

								int numCon = 0;

								for( int t=0; t<tripVec.size(); t++ )
									{
									int trip[] = (int []) tripVec.elementAt(t);
									if( ! exploreAgain.consistent( trip[0], trip[1], trip[2] ) )
										{
										isConsistent = false;
										if(Simplistic.PERCENTAGE == false ) break;
										}
									else numCon++;
									}
                 						
								if( isConsistent )
									{
									if( Simplistic.REFLECT == false )
										{
										//! System.out.println("// We found a simple level-"+k+" network consistent with all the triplets!");
										if( solutions == null ) solutions = new Vector();
										solutions.addElement( newDAG );
										if( Simplistic.BUILD_ALL == false ) return solutions;
										}
									else
										{
										//! Check reflectivity...
										int lscan = this.tellLeaves();
										boolean isReflective = true;

										for( int x=1; x<=lscan; x++ )
										for( int y=(x+1); y <= lscan; y++ )
										for( int z=1; z<=lscan; z++ )
											{
											if( (z==x) || (z==y) ) continue;
											if( !exploreAgain.consistent(x,y,z) ) continue;

											if( !this.containsTriplet(x,y,z) )
												{
												isReflective = false;
												break;
												}
											}
										if( isReflective )
											{
											//! System.out.println("// We found a simple level-"+k+" network that reflects the triplets!");
											if( solutions == null ) solutions = new Vector();
											solutions.addElement( newDAG );
											if( Simplistic.BUILD_ALL == false ) return solutions;
											}

										} //! end reflectivity
									}
								else
									{
									if( Simplistic.PERCENTAGE )
										{
										float competitor = ((float)numCon)/((float)numTrips);
										if( competitor > maxpercentage )
											{
											maxpercentage = competitor;
											int bignum = (int) Math.round( competitor*10000 );
											float scale = ((float)bignum) / ((float)100.0);
											System.out.println("// We found a network consistent with "+(scale)+"% (rounded to 2 decimal places) of the triplets.");
											}
										}
									}
										
								}
							} //! end if ok							

							
							}	//! end stack.size == (2*l)
						else
							{
							//! push it onto the stack...
						
							exploreAgain = new dagExplore( newDAG );

							//! int stillToHit = exploreAgain.num_mustHits;
							//! int stillToAdd = (2*k)-stack.size();	//! check this

							//! Check whether exploreAgain is consistent with oldTrips.
							//! Does consistency checking work with dummy nodes present?
							//! I think so.... :o

							boolean proceed = true;

							Vector zoom = oldTrips.tripVec;

							if( Simplistic.PERCENTAGE == false )
								{
								for( int spin=0; spin<zoom.size(); spin++ )
									{
									int trip[] = (int []) zoom.elementAt(spin);
									if( !exploreAgain.consistent(trip[0],trip[1],trip[2]) )
										{
										proceed = false;
										//! System.out.println("Premature departure: early non-consistency detected.");
										break;
										}
									}	
								}						

							if( proceed )
								{
								StackObject pushnetwork = new StackObject();
								pushnetwork.type = EDGE_OBJECT;
								pushnetwork.list = exploreAgain.getEdges();
								pushnetwork.netBase = exploreAgain;

								Simplistic.doublereport("About to push a new EDGE_OBJ onto the stack.");
								stack.push( pushnetwork );
								}

							}


						} // end if(!popped)

					} //! end else

			}


		return solutions;	//! null if there are no solutions
		}


	//! Eliminates duplicates...			
	//! Returns also the trivial set I think...
	//! Gives every SN-set a unique index, starting at 0

	private Vector getSNSets()
		{
		//! Collects the distinct SN-sets of this triplet set: first every
		//! singleton SN(x), then every pairwise SN(x,y) not already seen.
		//! Each surviving set is assigned a unique id, numbered from 0.
		Vector found = new Vector();
		int nextID = 0;

		//! Singleton SN-sets are pairwise disjoint, so no duplicate
		//! check is needed for them.
		for( int leaf=1; leaf<=numLeaves; leaf++ )
			{
			KSet single = new KSet(numLeaves);

			//! SNSet indices begin at 0!
			single.setID( nextID );
			single.addLeaf(leaf);
			found.add( single );

			if(Simplistic.DEBUG)
				{
				System.out.println("SN set "+nextID);
				single.dump();
				}

			nextID++;
			}

		//! Now the sets of the form SN(x,y); these can coincide with
		//! earlier sets, so compare against everything found so far.
		for( int x=1; x<=numLeaves; x++ )
			{
			for( int y=(x+1); y<=numLeaves; y++ )
				{
				KSet candidate = FastComputeSN(x,y);

				boolean duplicate = false;

				for( int scan=0; scan<found.size(); scan++ )
					{
					if( candidate.sameAs( (KSet) found.elementAt(scan) ) )
						{
						duplicate = true;
						break;
						}
					}

				if( duplicate ) continue;

				candidate.setID( nextID );
				found.add( candidate );

				if(Simplistic.DEBUG)
					{
					System.out.println("SN set "+nextID);
					candidate.dump();
					}

				nextID++;
				}
			}

		return found;
		}


	//! Hmmm, this will be interesting. I've hardcoded this (instead of using
	//! the standard internal routines) for speed...be careful if elsewhere internal
	//! subroutines of TripletSet are changed!!

	//! remember: we assume that this. uses every leaf 1 <= leaf <= numLeaves

	//! somehow I am unhappy about this routine, keep an eye on it for bugs...

	public TripletSet subtractCTBR( KSet ctbr, int backmap[] )
		{
		//! Removes the leaves of 'ctbr' from this triplet set and renumbers
		//! the surviving leaves down to the contiguous range [1...remains].
		//! backmap[] (1-based, length remains+1) receives, for each new
		//! label, the original label it replaces. Exits on malformed input.

		if( ctbr.getMaxSize() != numLeaves )
			{
			System.out.println("The CTBR and TripletSet do not match.");
			System.exit(0);
			}

		int remains = numLeaves - ctbr.size();

		if( backmap.length != remains+1 )
			{
			System.out.println("Backmap[] is broken.");
			System.exit(0);
			}

		//! forwardmap: old label -> new label (0 for removed leaves).
		//! numLeaves+1 slots so we can index directly by leaf number.
		int forwardmap[] = new int [numLeaves+1];

		int next = 1;
		for( int leaf=1; leaf<=numLeaves; leaf++ )
			{
			if( ctbr.containsLeaf(leaf) ) continue;

			backmap[next] = leaf;
			forwardmap[leaf] = next;
			next++;
			}

		if( next != remains+1 )
			{
			System.out.println("Something went very wrong.");
			System.exit(0);
			}

		TripletSet t = new TripletSet();

		//! Keep only triplets that avoid the CTBR entirely, relabelled via
		//! forwardmap. Because x < y implies forwardmap[x] < forwardmap[y],
		//! the internal ordering of each triplet is preserved, so the raw
		//! arrays can be inserted directly into tripVec.
		for( int scan=0; scan<tripVec.size(); scan++ )
			{
			int trip[] = (int []) tripVec.elementAt(scan);

			if( ctbr.containsLeaf(trip[0]) || ctbr.containsLeaf(trip[1]) || ctbr.containsLeaf(trip[2]) ) continue;

			int fall[] = new int[3];
			fall[0] = forwardmap[trip[0]];
			fall[1] = forwardmap[trip[1]];
			fall[2] = forwardmap[trip[2]];

			t.tripVec.addElement(fall);
			}

		t.numLeaves = remains;

		return t;
		}


	private Graph buildAhoGraph()
		{
		//! Aho graph: one node per leaf, with an edge {x,y} added for
		//! every triplet xy|z currently in the set.
		Graph aho = new Graph( numLeaves );

		for( int scan=0; scan<tripVec.size(); scan++ )
			{
			int t[] = (int []) tripVec.elementAt(scan);
			aho.addEdge( t[0], t[1] );
			}

		return aho;
		}


	//! private constructor
	//! builds a new TripletSet equal to the current tripletset, with the
	//! difference that all triplets containing leaf s are suppressed;
	//! can be optimised, if needed.

	//! All leaves with an index bigger than s drop their index by 1!!!

	private TripletSet( TripletSet ts, int s )
		{
		//! Clones ts, except that every triplet mentioning leaf s is
		//! dropped, and every leaf with an index larger than s has its
		//! index reduced by one to keep the leaf range contiguous.
		tripVec = new Vector();
		numLeaves = 0;

		for( int scan=0; scan<ts.tripVec.size(); scan++ )
			{
			int v[] = (int []) ts.tripVec.elementAt(scan);

			//! Throw away anything that touches s...
			if( (v[0] == s) || (v[1] == s) || (v[2] == s) ) continue;

			//! ...and re-insert the rest with adjusted labels.
			int ap = (v[0] < s) ? v[0] : v[0]-1;
			int bp = (v[1] < s) ? v[1] : v[1]-1;
			int cp = (v[2] < s) ? v[2] : v[2]-1;

			addTriplet(ap,bp,cp);
			}
		}


	//! Aho-style recursive tree construction: partition the Aho graph into
	//! two cliques, solve each side recursively, and hang the two subtrees
	//! under a fresh root. Returns null if the triplets admit no tree
	//! (no double-clique partition, or a recursive call fails).
	public biDAG buildTree()
		{
		biDAG papa = new biDAG();

		//! Build the Aho graph...
		Graph g = buildAhoGraph();

		//! Split into two cliques...

		int partition[] = g.getDoubleClique();
		
		//! No such partition => no consistent tree at this level.
		if( partition == null ) return null;

		/*
		if(Simplistic.DEBUG)
			{
			System.out.println("Had the double-clique structure:");
			for(int k=1; k<partition.length; k++ )
				{
				System.out.print(partition[k]+" ");
				}
			System.out.println();
			}
		*/

		//! The partition will have only values LEFT and RIGHT...

		//! At some point we have to stop the recursion. We finish once
		//! one of the two cliques has size <= 2;

		int lCount = 0;
		int rCount = 0;

		//! leftList/rightList: new leaf number -> original leaf number.
		//! The 0 element of these guys will not be used...
		int leftList[] = new int[partition.length];
		int rightList[] = new int[partition.length];

		//! leftBackMap/rightBackMap: original leaf number -> new leaf number.
		//! The 0 element of these guys will not be used...
		int leftBackMap[] = new int[partition.length];
		int rightBackMap[] = new int[partition.length];

		//! Note here that we begin at 1
		for( int scan=1; scan<partition.length; scan++ )
			{
			if( partition[scan] == Graph.LEFT )
				{
				lCount++;

				leftList[lCount] = scan;

				//! This is the new leaf numbering of leaf scan...

				leftBackMap[scan] = lCount;
				}
			else
			if( partition[scan] == Graph.RIGHT )
				{
				rCount++;
				rightList[rCount] = scan;
				rightBackMap[scan] = rCount;
				}
			else
			System.out.println("ERROR#23");
			} 

		//! -------------------------

		biDAG leftChild = null;
		biDAG rightChild = null;

		//! Base cases: a side of size 1 becomes a leaf, size 2 becomes a
		//! cherry; anything larger recurses on the induced triplet set.

		if( lCount==1 )
			{
			//! In this case it's just a simple leaf...

			leftChild = new biDAG();

			leftChild.data = leftList[1];
		
			/*
			if(Simplistic.DEBUG)
				{
				System.out.println("Setting only data as "+leftList[1]);
				}
			*/

			}
		else
		if( lCount==2 )
			{
			leftChild = new biDAG();
		
			//! Here it's a cherry...
			biDAG gchild1 = new biDAG();
			biDAG gchild2 = new biDAG();

			leftChild.child1 = gchild1;
			leftChild.child2 = gchild2;

			gchild1.data = leftList[1];
			gchild2.data = leftList[2];

			/*
			if(Simplistic.DEBUG)
				{
				System.out.println("Setting gchild1 as "+leftList[1]);
				System.out.println("Setting gchild2 as "+leftList[2]);
				}
			*/

			gchild1.parent = leftChild;
			gchild2.parent = leftChild;
			
			}
		else
			{
			/*
			if( Simplistic.DEBUG )
				{
				System.out.println("Recursing left...");
				}
			*/

			//! and here we recurse...
			//! get all the triplets in the LEFT partition...
			//! and continue...

			TripletSet leftBranch = new TripletSet();

			Enumeration e = tripVec.elements();
			while( e.hasMoreElements() )
				{
				int v[] = (int []) e.nextElement();

				//! We want all 3 of its elements to be in the left
				//! partition...so for each element check whether it

				if( (partition[v[0]] == Graph.RIGHT) || (partition[v[1]] == Graph.RIGHT) || (partition[v[2]] == Graph.RIGHT) )
					continue;

				//! the backscan variable ensures that the indices are correct...i.e. in
				//! the range [1...leftCount]

				leftBranch.addTriplet( leftBackMap[v[0]], leftBackMap[v[1]], leftBackMap[v[2]] );
				}

			leftChild = leftBranch.buildTree();

			//! Propagate failure upward.
			if( leftChild == null ) return null;
		
			/*
			if( Simplistic.DEBUG ) System.out.println("Past left recursion");
			*/

			//! Translate the recursion's local leaf numbers back to the
			//! original labels via leftList.
			//! Fix labelling in the tree...very crude but zucht let's get it working first...
			leftChild.treeFixLeaves( leftList );
			}		

		//! and now the right branch (mirror image of the left case)...

		if( rCount==1 )
			{
			//! In this case it's just a simple leaf...

			rightChild = new biDAG();
			rightChild.data = rightList[1];

			/*
			if(Simplistic.DEBUG)
				{
				System.out.println("Setting only data as"+rightList[1]);
				}
			*/

			}
		else
		if( rCount==2 )
			{
			rightChild = new biDAG();
		
			//! Here it's a cherry...
			biDAG gchild1 = new biDAG();
			biDAG gchild2 = new biDAG();

			rightChild.child1 = gchild1;
			rightChild.child2 = gchild2;

			gchild1.data = rightList[1];
			gchild2.data = rightList[2];

			gchild1.parent = rightChild;
			gchild2.parent = rightChild;

			/*
			if(Simplistic.DEBUG)
				{
				System.out.println("Setting gchild1 as "+rightList[1]);
				System.out.println("Setting gchild2 as "+rightList[2]);
				}
			*/

			}
		else
			{

			/*
			if( Simplistic.DEBUG )
				{
				System.out.println("Recursing...");
				}
			*/

			//! and here we recurse...
			//! get all the triplets in the RIGHT partition...
			//! and continue...

			TripletSet rightBranch = new TripletSet();

			Enumeration e = tripVec.elements();
			while( e.hasMoreElements() )
				{
				int v[] = (int []) e.nextElement();

				//! We want all 3 of its elements to be in the left
				//! partition...so for each element check whether it
				//! is in the LEFT set...

				if( (partition[v[0]] == Graph.LEFT) || (partition[v[1]] == Graph.LEFT) || (partition[v[2]] == Graph.LEFT) )
					continue;

				//! the backscan variable ensures that the indices are correct...i.e. in
				//! the range [1...rightCount]

				rightBranch.addTriplet( rightBackMap[v[0]], rightBackMap[v[1]], rightBackMap[v[2]] );
				}

			rightChild = rightBranch.buildTree();

			if( rightChild == null ) return null;

			/*
			if( Simplistic.DEBUG )
				{
				System.out.println("Past recursive step!");
				}
			*/	

			//! And now fix the leaf numbering...
			rightChild.treeFixLeaves(rightList);
			}		

		//! Join the two finished subtrees under the new root.
		papa.child1 = leftChild;
		papa.child2 = rightChild;
		leftChild.parent = papa;
		rightChild.parent = papa;

		return papa;
		}


	//! Top-level heuristic network construction. Builds the SN-tree, collapses
	//! the maximum SN-sets to single leaves, searches for a simple level-k
	//! network on the collapsed instance (optionally splitting maximal SN-sets
	//! when Simplistic.SN_TO_SPLIT > 0), then recursively expands each
	//! collapsed leaf back into a subnetwork and grafts it in place.
	//! 'depth' is only used in progress messages and the recursive call.
	//! Returns null on failure.
	public biDAG buildNetwork(int depth)
		{
		Simplistic.report("Entering buildNetwork, recursion level "+depth);

		Simplistic.report("Constructing the maximum SN-sets...");

		KSet sntree = this.buildSNTree();

		Vector msn = sntree.getChildren();	//! The children of the SN-tree root should be the maximum SN-sets...check this...

		//! Is there a problem if msn is 'small' ?

		Simplistic.report("...done. There are "+msn.size()+" maximum SN-sets.");

		//! for( int x=0; x<msn.size(); x++ ) ((KSet) msn.elementAt(x)).dump();

		biDAG root = null;
		Vector mstar = null;	//! the SN-set partition actually used for the collapsed instance

		boolean success = false;

		if( msn.size() == 2 )
			{
			Simplistic.report("There were only 2 maximum SN-sets.");
			root = biDAG.cherry12();
			mstar = msn;
			success = true;
			}		

		else	//! there are at least 3 maximum SN-sets
			{
			mstar = msn;

			//! Collapse each maximum SN-set to a single leaf...
			TripletSet treal = this.buildTPrime( mstar );
			treal.finaliseAndOptimise();
			
			int nprime = treal.tellLeaves();
			int toplev;

			//! toplev = max(Simplistic.MAXLEV, nprime-1); matches the --maxlev documentation.
			if( Simplistic.MAXLEV > (nprime-1) ) toplev = Simplistic.MAXLEV; else toplev = nprime-1;


			System.out.println("// There are "+nprime+" maximum SN-sets at recursion level "+depth+".");
			for( int b=0; b<mstar.size(); b++ ) ((KSet) mstar.elementAt(b)).dump();

			//! Try increasing levels; 'chains' is broken out of as soon as
			//! a (possibly split-based) simple network is found.
			chains: for( int level=Simplistic.BEGIN_LEV; level <= toplev; level++ )
				{
				mstar = msn;

				Vector result = treal.buildSimpleNetworks(level);

				if( result != null )
					{
					success = true;
					System.out.println("// Found a"+(Simplistic.REFLECT == true ? " reflecting " : " consistent ") +"simple level-"+level+" network!");
					root = (biDAG) result.elementAt(0);
					break;
					}
				else
					{
					System.out.println("// Couldn't find a simple level-"+level+" network.");
					if( Simplistic.SN_TO_SPLIT > 0 )
						{
						System.out.println("// Attempting to split maximal SN-sets before moving on to level "+(level+1)+".");
						//! Let's try splitting some SN-sets.....

						for( int split=1; split <= Simplistic.SN_TO_SPLIT; split++ )
							{
							mstar = msn; 

							//! Build a set of all size-split subsets of
							//! the maximum subsets, try each one (zucht...)

							if( split > mstar.size() ) break;
	
							System.out.println("// Trying to split all combinations of "+split+" maximum SN-sets.");

				                        CombinationGenerator cg = new CombinationGenerator( mstar.size(), split );
                         
				                        nextcom: while( cg.hasMore() )
				                                {
								mstar = msn;
							
				                                int gc[] = cg.getNext();

								//! Singleton SN-sets cannot be split; skip this combination.
								for( int x=0; x<gc.length;x++)
									{
									if( ((KSet) mstar.elementAt(gc[x])).size() == 1 ) continue nextcom;	// can't be split...
									}
									
								for( int x=0; x<gc.length;x++)
									{
									Simplistic.report(gc[x]+",");
									}
								Simplistic.report("\n");

								Vector newVec = new Vector();

								//! First add the unsplit SN-sets in...

								outerfor: for( int roll=0; roll<mstar.size(); roll++ )
									{
									for( int p=0; p<gc.length; p++ )
										{
										if(gc[p]==roll)
											{
											continue outerfor;
											}
										}
									newVec.addElement( mstar.elementAt(roll) );
									}								

								//! ...then replace each chosen set by its children in the SN-tree.
								for( int x=0; x<gc.length; x++ )
									{
									KSet p = (KSet) mstar.elementAt(gc[x]);
									Vector c = p.getChildren();
									for( int y=0; y<c.size(); y++ )
										{
										newVec.addElement( (KSet) c.elementAt(y) );
										}
									}

								
								Simplistic.report("Split SN-sets are now: ");
								for( int l=0; l<newVec.size(); l++ )
									{
									KSet p = (KSet) newVec.elementAt(l);
									p.dump();
									}

								mstar = newVec;

								//! Retry at the same level on the refined partition.
								TripletSet tsplit = this.buildTPrime(mstar);
								tsplit.finaliseAndOptimise();

								int nsplit = tsplit.tellLeaves();
				
                        					System.out.println("// After splitting there are "+nsplit+" maximum SN-sets");

	        			                        Vector splitresult = tsplit.buildSimpleNetworks(level);
			        	                        if( splitresult != null )
									{
                                        				success = true;
				                                        System.out.println("// Found a simple level-"+level+" network!");
				                                        root = (biDAG) splitresult.elementAt(0);
                                        				break chains;
									}						
								}

							//! Don't forget to adjust mstar if it works...
							}
						System.out.println("// Couldn't find a simple level-"+level+" subnetwork, even after splitting.");

						} //! end SN_TO_SPLIT > 0

					}
				}				
			}

		if( !success ) return null;	//! This should never need to happen...

		//! Now do the funky stuff....
		//! root will contain a network with as many leaves as maximum SN-sets...
		//! i.e. the size of mstar...

		biDAG leafToNode[] = new biDAG[ mstar.size() + 1 ];

		//! This assumes that 'visited' is clean...

		root.resetVisited();

		root.getDAGLeafMap( leafToNode );

		//! Now leadToNode[1] points to the biDAG containing element 1, and so on...
		//! ---------------------------------------------------------------------------
		//! Not sure if we need to do this, the only thing that could get hurt is 
		//! the printing out of the DAG, but I think it's important to do it anyway...
		//! ---------------------------------------------------------------------------

		root.resetVisited();
		
		//! Expand each collapsed leaf xl back into the SN-set it stands for.
		//! xl -> expand leaf...
		for( int xl = 1; xl <= mstar.size(); xl++ )
			{
			KSet subLeaves = (KSet) mstar.elementAt(xl-1);
			int numSubLeaves = subLeaves.size();
			
			int backMap[] = new int[numSubLeaves+1];

			biDAG subroot = null;

			Simplistic.report("Looking inside maxsn set "+xl);

			if( numSubLeaves > 2 )
				{
				//! 3+ leaves: recurse on the induced triplet set.
				Simplistic.report("Ah, this has more than 3 leaves...");
				TripletSet recurse = this.induceTripletSet( subLeaves, backMap );

				recurse.finaliseAndOptimise();

				subroot = recurse.buildNetwork(depth+1);
				if( subroot == null ) return null;
				}
			else
				{
				if( numSubLeaves == 1 )
					{
					Simplistic.report("Ah, this is a single leaf...");
					//! Simply fix what's already there.
					biDAG tweak = leafToNode[xl];
					int leafNum = subLeaves.getFirstElement();
					Simplistic.report("Replacing leaf "+tweak.data+" with "+leafNum);
					tweak.data = leafNum;
					}
				else

					{
					//! numSubLeaves == 2: splice a cherry in place of the leaf.

					Simplistic.report("Ah, this has two leaves.");
					Simplistic.report("Entering danger zone");

					biDAG tweak = leafToNode[xl];

					int pick[] = subLeaves.getFirstTwoElements();
					
					biDAG branch = biDAG.cherry12();
	
					branch.child1.data = pick[0];
					branch.child2.data = pick[1];

					branch.parent = tweak.parent;

					if( tweak.parent == null ) Simplistic.report("Null pointer discovered...");
					
					if( tweak.parent.child1 == tweak ) tweak.parent.child1 = branch;
					else
					if( tweak.parent.child2 == tweak ) tweak.parent.child2 = branch;
					else 	{
						System.out.println("Mega problem, please report to programmer");
						System.exit(0);
						}

					Simplistic.report("Leaving danger zone?");

					}
				continue;	//! no need to do the rest...
				}

			//! That subbranch worked, so fix the leaves and then graft back

			Simplistic.report("Still at recursion depth "+depth);

			subroot.dagFixLeaves(backMap);

			//! Is this necessary??? Might it be dangerous even???
			subroot.resetFixLeaves();	

			//! Graft the recursively-built subnetwork where the collapsed leaf was.
			biDAG knoop = leafToNode[xl];

			subroot.parent = knoop.parent;

			if( knoop.parent.child1 == knoop ) knoop.parent.child1 = subroot;
			else
			if( knoop.parent.child2 == knoop ) knoop.parent.child2 = subroot;
			else	{
				System.out.println("BL2: That really shouldn't have happened");
				System.exit(0);
				}

			Simplistic.report("Ending iteration.");
			}


		return root;
		}

	public TripletSet buildTPrime( Vector maxSN )
		{
		//! Collapses each maximum SN-set to a single new leaf (indexed
		//! 1..n' in the order they appear in maxSN) and induces the
		//! corresponding triplet set: a triplet survives only if its three
		//! leaves land in three distinct SN-sets.
		if( maxSN.size() == 2 ) return null;

		int nprime = maxSN.size();		

		//! Lookup table: maps each original leaf in {1..n} to the index
		//! in {1..n'} of the SN-set that contains it.
		int map[] = new int[ this.numLeaves+1 ];

		for( int index=1; index<=nprime; index++ )
			{
			KSet sn = (KSet) maxSN.elementAt(index-1);

			for( int scan=1; scan<=numLeaves; scan++ )
				{
				if( sn.containsLeaf(scan) ) map[scan] = index;
				}
			}

		//! Now project every triplet of 'this' through the map, discarding
		//! the ones that collapse onto fewer than 3 distinct new leaves.

		TripletSet tprime = new TripletSet();

		for( int scan=0; scan<tripVec.size(); scan++ )
			{
			int t[] = (int []) tripVec.elementAt(scan);

			int a = map[t[0]];
			int b = map[t[1]];
			int c = map[t[2]];

			if( (b == a) || (c == a) || (c == b) ) continue;

			tprime.addTriplet( a, b, c );
			}

		return tprime;
		}

	//! Give it an array of which leaves you want to do the inducing
	//! backMap says what the original labellings of the new leaves were...

	public TripletSet induceTripletSet( KSet k, int backMap[] )
		{
		//! Restricts this triplet set to the leaves of k, renumbering them
		//! contiguously 1..|k|; backMap[] (1-based) records the original
		//! label of each new leaf.
		TripletSet ts = new TripletSet();

		//! in[x] is true iff original leaf x belongs to k.
		boolean in[] = new boolean[numLeaves+1];

		//! leafMap maps {1...numLeaves} -> {1...size(KSet)}
		int leafMap[] = new int[numLeaves+1];
		int mapCount = 1;

		for( int leaf=1; leaf<=numLeaves; leaf++ )
			{
			if( ! k.containsLeaf(leaf) ) continue;

			in[leaf] = true;
			leafMap[leaf] = mapCount;
			backMap[mapCount] = leaf;
			mapCount++;
			}

		//! Keep exactly the triplets fully contained in k, relabelled.
		for( int scan=0; scan<tripVec.size(); scan++ )
			{
			int t[] = (int []) tripVec.elementAt(scan);

			if( in[t[0]] && in[t[1]] && in[t[2]] )
				{
				ts.addTriplet( leafMap[t[0]], leafMap[t[1]], leafMap[t[2]] );
				}
			}

		return ts;
		}


	}


//! ---------------------------------------------------------------------------------------
//! ---------------------------------------------------------------------------------------
//! ---------------------------------------------------------------------------------------
//! ---------------------------------------------------------------------------------------
//! ---------------------------------------------------------------------------------------


//! Command-line entry point. Parses the triplet (or DOT) input file and the
//! user options, validates the triplet set, and drives the simple-network
//! search (--simple) or the full heuristic network construction.
public class Simplistic
	{
	//! Compile-time debug switches; report()/doublereport() are gated on these.
	public static final boolean DEBUG = false;
	public static final boolean DOUBLEDEBUG = false;

	//! --all: enumerate every simple network (only meaningful with --simple)
	public static boolean BUILD_ALL = false;
	//! --simple: only look for simple networks
	public static boolean SIMPLE = false;

	//! --enewick: output networks in eNewick format instead of DOT dumps
	public static boolean NEWICK = false;

	//! --beginlev: first level to try when searching for simple networks
	public static int BEGIN_LEV = 1;

	public static int SN_TO_SPLIT = 0;	//! how many SN-sets should be split

	//! --percentage: report progress towards 100% triplet consistency
	public static boolean PERCENTAGE = false;

	//! --reflect: check reflectivity instead of consistency
	public static boolean REFLECT = false;

	//! --maxlev: continue until level max(n-1, MAXLEV); -1 means "use n-1"
	public static int MAXLEV = -1;

	//! Prints the usage message and terminates the program.
	public static final void printHelp()
		{
		//! FIX: usage previously said "java Heuristic"; the entry point is Simplistic.
		System.out.println("Usage: java Simplistic file [options]");
		System.out.println("Options are:");
		System.out.println("--simple : forces the program to only look for simple networks.");
		//! FIX: option was misspelt "--snplit" here; the parser accepts "--snsplit".
		System.out.println("--snsplit <num> : try splitting (if necessary) all subsets of size <num> of the maximum SN-sets. <num> should be greater than or equal to 0");
		System.out.println("--all : builds all simple networks consistent with the input (only works in conjunction with --simple).");
		System.out.println("--stop : does not try higher levels once a network has been found (only works in conjunction with --simple).");
		System.out.println("--enewick : outputs network(s) in eNewick format.");
		System.out.println("--beginlev <num> : when looking for simple networks, always start at level <num>. <num> should be greater than or equal to 1. (Should only really be used in conjunction with --simple.)");
		System.out.println("--alltriplets : this assumes that 'file' is not a triplet file, but a (stripped) DOT file (see documentation), and generates all the triplets consistent with the network described by the DOT file."); 
		System.out.println("--percentage : reports progress on how close to 100% triplet consistency has been obtained at a given level of the recursion.");
		System.out.println("--reflect : instead of checking consistency, check reflectivity.");
		System.out.println("--maxlev <num>: normally the program doesn't go further than level-(n-1) but with this option it continues until level max(n-1, <num>). (Should only really be used with --simple.)");
		System.out.println("--help : prints this message.");
		System.exit(0);
		}

	//! Debug logging, active only when DEBUG is set.
	public static final void report( String text )
		{
		if(DEBUG) System.out.println(text);
		}

	//! Verbose debug logging, active only when DOUBLEDEBUG is set.
	public static final void doublereport( String text )
		{
		if(DOUBLEDEBUG) System.out.println("// "+text);
		}

	public static void main( String[] args )
		{
		//! Need to read the triplets in from somewhere...
		TripletSet t = new TripletSet();

		if( args.length == 0 )
			{
			System.out.println("ERROR: No input file specified.");
			System.exit(0);
			}
		else
			{
			//! --alltriplets is only honoured as the FIRST option: it
			//! re-interprets the input file as a (stripped) DOT network
			//! and fills t with every triplet the network displays.
			if( args.length >= 2 )
				{
				String nick = args[1];
				if( nick.equals("--alltriplets") )
					{
					biDAG r = biDAG.buildDAGfromDOT( args[0] );

					if( r != null )
						{
						dagExplore dexp = new dagExplore( r );

						System.out.println("// "+dexp.num_leaves+" leaves.");
						System.out.println("// "+dexp.num_nodes+" nodes.");

						//! Enumerate all candidate triplets xy|z with x<y
						//! and z distinct, keeping the consistent ones.
						for( int x=1; x<=dexp.num_leaves; x++ )
						for( int y=(x+1); y<=dexp.num_leaves; y++ )
						for( int z=1; z<=dexp.num_leaves; z++ )
							{
							if( (x==z) || (y==z) ) continue;
							if(dexp.consistent(x,y,z))
								{
								t.addTriplet(x,y,z);
								}
							}
						}
					else
						{
						System.out.println("ERROR: Couldn't parse the DOT file and/or build a DAG.");
						System.exit(0);
						}
					}
				}

			String fileName = args[0];
			int recCount = 0;

			//! Parse the triplet file, unless --alltriplets already filled t.
			//! Each line: three distinct space-separated integers >= 1, or a
			//! comment starting with //.
			if( t.countTriplets() == 0 )
			try 	{
				FileReader fr = new FileReader(fileName);
				BufferedReader br = new BufferedReader(fr);

				String record;
				while ((record = br.readLine()) != null)
					{
					recCount++;
					String[] tripData = record.split(" ");

					if( tripData[0] != null )
						{
						if( tripData[0].equals("//") ) continue;
						}

					if( tripData.length != 3 )
						{
						System.out.println("Read line: "+record);
						System.out.println("ERROR: Malformed line: each line should consist of three space-delimited integers, each greater than or equal to 1. Comment lines that begin with // are also permitted.");
						throw new IOException();
						}

					int a = Integer.parseInt(tripData[0]);
					int b = Integer.parseInt(tripData[1]);
					int c = Integer.parseInt(tripData[2]);

					if( (a==b) || (b==c) || (a==c) )
						{
						System.out.println("Read line: "+record);
						System.out.println("ERROR: Each line should consist of three DISTINCT integers.");
						throw new IOException();
						}

					if( (a<=0) || (b<=0) || (c<=0) )
						{
						System.out.println("Read line: "+record);
						System.out.println("ERROR: All leaves should be 1 or greater.");
						throw new IOException();
						}

					t.addTriplet(a,b,c);
					}

				//! FIX: the reader was previously never closed.
				br.close();
				}
			catch (IOException e)
				{
				//! "--help" as the file name is the help request, not an error.
				if( fileName.equals("--help") )
					{
					Simplistic.printHelp();
					}
				else
					{
					System.out.println("Problem reading file "+fileName);

					System.exit(0);
					}
				}
			}

		boolean stopsuccess=false;

		Simplistic.report("Finished reading input file.");

		//! Process the remaining command-line options.
		if( args.length > 1 )
			{
			for( int scan=1; scan<args.length; scan++ )
				{
				String nick = args[scan];

				if( nick.equals("--enewick") )
					{
					NEWICK = true;
					System.out.println("// USER OPTION: Switching output to eNewick format.");
					}
				else
				if( nick.equals("--simple") )
					{
					Simplistic.SIMPLE = true;
					System.out.println("// USER OPTION: Will only try and build simple networks.");
					}
				else
				if( nick.equals("--all") )
					{
					Simplistic.BUILD_ALL = true;
					System.out.println("// USER OPTION: Building all networks (only has effect with --simple)");
					}
				else
				if( nick.equals("--snsplit") )
					{
					//! The numeric argument is args[scan+1]; any parse or
					//! bounds failure falls through to the catch and the default.
					try 	{
						Simplistic.SN_TO_SPLIT = Integer.parseInt(args[scan+1]);

						if( Simplistic.SN_TO_SPLIT < 0 )
							{
							System.out.println("ERROR: argument to --snsplit must be 0 or greater.");
							System.exit(0);
							}

						System.out.println("// USER OPTION: Will try and split at most "+Simplistic.SN_TO_SPLIT+" maximal SN-sets.");
						}
						catch( Exception e )
						{
						Simplistic.SN_TO_SPLIT = 0;
						System.out.println("// Malformed argument to --snsplit, defaulting to 0");
						}
					}
				else
				if( nick.equals("--stop") )
					{
					stopsuccess = true;
					System.out.println("// USER OPTION: Will not go to a higher level than necessary (only has effect with --simple)");
					}
				else
				if( nick.equals("--beginlev") )
					{
					try 	{
						Simplistic.BEGIN_LEV = Integer.parseInt(args[scan+1]);
						if( Simplistic.BEGIN_LEV < 1 )
							{
							System.out.println("ERROR: argument to --beginlev must be 1 or greater.");
							System.exit(0);
							}

						System.out.println("// USER OPTION: Will always start at level "+Simplistic.BEGIN_LEV+". (Should only really be used in conjunction with --simple.)");
						}
						catch( Exception e )
						{
						Simplistic.BEGIN_LEV = 1;
						System.out.println("// Malformed argument to --beginlev, defaulting to 1");
						}

					}
				else
				if( nick.equals("--help") )
					{
					Simplistic.printHelp();
					}
				else
				if( nick.equals("--percentage") )
					{
					Simplistic.PERCENTAGE = true;
					System.out.println("// USER OPTION: Will print consistency percentages.");
					}
				else
				if( nick.equals("--reflect") )
					{
					Simplistic.REFLECT = true;
					System.out.println("// USER OPTION: Switching from consistency checking to reflectivity checking.");
					}
				else
				//! FIX: the 'else' above was missing, which broke the else-if
				//! chain; behaviour is unchanged since the option strings differ.
				if( nick.equals("--maxlev") )
					{
					try 	{
						Simplistic.MAXLEV = Integer.parseInt(args[scan+1]);
						if( Simplistic.MAXLEV < 1 )
							{
							System.out.println("ERROR: argument to --maxlev must be 1 or greater.");
							System.exit(0);
							}

						System.out.println("// USER OPTION: Will continue until level max(n-1, "+Simplistic.MAXLEV+"). (Should only really be used in conjunction with --simple.)");
						}
						catch( Exception e )
						{
						Simplistic.MAXLEV = -1;
						System.out.println("// Malformed argument to --maxlev, defaulting to n-1");
						}

					}

				}

			}

		t.finaliseAndOptimise();

		//! The input must use exactly the leaf range [1...n]...
		int errorTrip[] = new int[3];

		if( t.isClosed(errorTrip) == false )
			{
			System.out.println("ERROR: there are leaves in the triplet set that are unused;");
			System.out.println("Leaf "+errorTrip[0]+" does not appear in any triple (and maybe this is true for other leaves.)");
			System.out.println("Please ensure that the leaf range is precisely of the form [1...n] where all leaves in that interval appear in some triplet.");
			System.exit(0);
			}

		Simplistic.report("Leaf interval is closed 1..n, good.");

		//! ...and must be dense (contain a triplet for every 3 leaves).
		if( t.isDense(errorTrip) == false )
			{
			System.out.println("ERROR: Not a dense input set.");
			System.out.println("{"+errorTrip[0]+","+errorTrip[1]+","+errorTrip[2]+"} is missing (and maybe more.)");
			System.exit(0);
			}

		Simplistic.report("Triplets are dense, good.");

		Simplistic.report("Attempting to build HEURISTIC network.");

		int at = 0;

		//! Highest level to try: max(n-1, MAXLEV).
		int max = t.tellLeaves() - 1;

		if( Simplistic.MAXLEV > max ) max = Simplistic.MAXLEV;

		if( Simplistic.SIMPLE )
			{
			//! Simple-network mode: try each level in turn and dump every
			//! solution found.
			for( int level=Simplistic.BEGIN_LEV ; level <= max ; level++ )
				{
				System.out.println("// Trying to build simple level "+level+" networks.");
				Vector sol = t.buildSimpleNetworks(level);
				if( sol != null )
					{
					Enumeration e = sol.elements();
					while( e.hasMoreElements() )
						{
						biDAG b = (biDAG) e.nextElement();
						b.resetVisited();	//! does this work if we have ripped nodes out???
						if( !Simplistic.NEWICK ) b.dumpDAG(at++);
						else b.newickDump();
						}
					if( Simplistic.BUILD_ALL ) 
						{
						if( sol.size() == 1 ) System.out.println("// ENUM: There was thus only one solution");
						else System.out.println("// ENUM: There were thus multiple (maybe isomorphic) solutions, "+sol.size()+" in total.");
						}
					if( stopsuccess ) System.exit(0);
					}
				else System.out.println("// There were none.");
				}
			}
		else	{
			//! Heuristic mode: build one full network recursively.
			biDAG bignet = t.buildNetwork(0);
			if( bignet != null )
				{
				bignet.resetVisited();
				if( !Simplistic.NEWICK ) bignet.dumpDAG(0);
				else bignet.newickDump();
				}
			else System.out.println("// No network found, not even a heuristic network :(");
			}

		}
	}


//! ---------------------------------------------------------------------------------------
//! ---------------------------------------------------------------------------------------
//! ---------------------------------------------------------------------------------------
//! ---------------------------------------------------------------------------------------
//! ---------------------------------------------------------------------------------------

//! Question: how does dagExplore behave if there are dummy leaves?
//! We have to be very careful with how this behaves.

class dagExplore
{
//! We need these constants for the dynamic programming...
//! (Java zero-fills int arrays, so every memo table starts out as UNKNOWN = 0.)
public static final int UNKNOWN = 0;
public static final int YES = 1;
public static final int NO = -1;

public int num_mustHits;	//! the number of edge-disjoint edge subsets that
				//! simply have to be subdivided. cherries + dummy leaves
public int num_leaves;
public int num_nodes;
public int num_edges;

public Vector nodes;		//! all biDAG nodes, in post order (see scan)
public Vector edges;		//! biDAG[2] pairs {tail, head}
public Vector leaves;		//! non-dummy leaf nodes only
public Vector dummy_nodes;	//! nodes flagged isDummy

public biDAG nodearray[];	//! maps nodeNums -> biDAGs
public int leafarray[];		//! maps leafNums -> nodeNums (filled in initiateExploration)

public boolean adjmatrix[][];	//! DIRECTED adjacency matrix over nodeNums

//! for the algorithm of pawel
public int join[][];		//! memo for isJoin:  UNKNOWN / YES / NO
public int cons[][][];		//! memo for con:     UNKNOWN / YES / NO

public int desc[][];		//! memo for isDescendant: UNKNOWN / YES / NO

public biDAG root;

//! ------------ methods --------------

//! Explores the network rooted at 'network': assigns topological node
//! numbers, gathers nodes/edges/leaves/dummies and allocates the memo tables.
public dagExplore( biDAG network )
	{
	this.root = network;

	//! If this is not necessary then it will return
	//! immediately without wasting time.

	root.resetVisited();

	initiateExploration();
	}

//! ---------------------------------------------------------------------------

//! Using the information contained in the dagExplore, this clones the
//! biDAG that it represents.
//! changeLabels is a backmap that adjusts the labels (to make room for the ctbr that will be inserted)
//! Returns the full array of cloned nodes; index 0 is the (cloned) root,
//! since the root is required to carry nodeNum 0.

public biDAG[] clonebiDAG(int changeLabels[])
	{
	biDAG newNodes[] = new biDAG[num_nodes];

	if( root.nodeNum != 0 )
		{
		System.out.println("I expected the root to have number 0...");
		System.exit(0);
		}

	//! First allocate all the clones so that pointer fields can be
	//! translated via nodeNum in the second pass.
	for( int x=0; x < num_nodes; x++ )
		{
		newNodes[x] = new biDAG();
		}

	for( int x=0; x < num_nodes; x++ )
		{
		biDAG oldGuy = nodearray[x];
		biDAG newGuy = newNodes[x];		

		//! Only copy over necessar stuff...BE CAREFUL HERE

		newGuy.data = oldGuy.data;
		newGuy.isDummy = oldGuy.isDummy;
		newGuy.nodeNum = oldGuy.nodeNum;

		if( oldGuy.parent != null )
			{
			newGuy.parent = newNodes[ oldGuy.parent.nodeNum ];
			}
		if( oldGuy.child1 != null )
			{
			newGuy.child1 = newNodes[ oldGuy.child1.nodeNum ];
			}
		if( oldGuy.child2 != null )
			{
			newGuy.child2 = newNodes[ oldGuy.child2.nodeNum ];
			}
		if( oldGuy.secondParent != null )
			{
			newGuy.secondParent = newNodes[ oldGuy.secondParent.nodeNum ];
			}

		//! This simultaneously relabels the leaves toooo....
		if( oldGuy.data != 0 )
			{
			newGuy.data = changeLabels[oldGuy.data];
			//! System.out.println("Mapped leaf "+oldGuy.data+" to "+newGuy.data);
			}

		//! That should be enough...
		}

	 
	//! This should be the root......
	return newNodes;
	}


public Vector getEdges()
	{
	return edges;
	}

public Vector getDummies()
	{
	return dummy_nodes;
	}

//! Single scan over the network that fills every lookup structure:
//! node numbering (reverse post order = topological sort), nodearray,
//! leafarray, the adjacency matrix, num_mustHits, and the memo tables.
private void initiateExploration()
	{
	nodes = new Vector();
	edges = new Vector();
	leaves = new Vector();
	dummy_nodes = new Vector();

	dagExplore.scan( root, nodes, edges, leaves, dummy_nodes );

	num_nodes = nodes.size();
	num_edges = edges.size();
	num_leaves = leaves.size();

	nodearray = new biDAG[ num_nodes ];	

	//! leafarray maps leaf numbers to node numbers...
	//! and leaves start at 1, hence the +1

	leafarray = new int[ num_leaves+1 ];

	//! this is a DIRECTED adjacency matrix...
	adjmatrix = new boolean[ num_nodes ][ num_nodes ];

	//! Here we use -1 because we want node numberings to start at 0
	int id = num_nodes - 1;

	for( int x=0; x<nodes.size(); x++ )
		{
		biDAG b = (biDAG) nodes.elementAt(x);

		//! reverse postorder numbering: topological sort!
		b.nodeNum = id--;

		//! System.out.println("Assigning number "+b.nodeNum+" to biDAG"+b);

		//! NOTE(review): the (b.parent != null) clause means a cherry whose
		//! top node is the root is not counted here — confirm this is intended.
		if( (b.secondParent == null) && (b.parent != null) && (b.child1 != null) && (b.child2 != null))
			{
			biDAG c1 = b.child1;
			biDAG c2 = b.child2;
			
			//! See if this is the root of a cherry...
			if( c1.isLeafNode() && (!c1.isDummy) && c2.isLeafNode() && (!c2.isDummy) ) num_mustHits++;
			}
		else
		if( b.isLeafNode() && b.isDummy )
			{
			biDAG p = b.parent;

			//! This is a dummy leaf that has a recomb node as parent, it has to be subdivided...
			if( (p.parent != null) && (p.secondParent != null) ) num_mustHits++;
			}		

		//! nodearray maps nodeNums to biDAGs...
		nodearray[ b.nodeNum ] = b;
		}

	for( int x=0; x<edges.size(); x++ )
		{
		biDAG e[] = (biDAG []) edges.elementAt(x);

		//! System.out.println("dagExplore found edge "+e[0]+" -> "+e[1]);

		//! Sanity check: the recorded head must actually point back at the tail.
		if( (e[1].parent != e[0]) && (e[1].secondParent != e[0]) )
			{
			System.out.println("Catastrophic error in dagEncode.");
			System.exit(0);
			}

		int tail = e[0].nodeNum;
		int head = e[1].nodeNum;

		adjmatrix[tail][head] = true;
		}

	for( int x=0; x < leaves.size(); x++ )
		{
		biDAG b = (biDAG) leaves.elementAt(x);
		leafarray[b.data] = b.nodeNum;
		}

	//! Memo tables for con / isJoin / isDescendant; all entries start UNKNOWN.
	cons = new int [num_nodes][num_nodes][num_nodes];
	join = new int [num_nodes][num_nodes];
	desc = new int [num_nodes][num_nodes];

	//! if necessary we can add all kinds of other look-up matrices, Vectors and so on...
	}

//! x and y are nodes, not leaves
//! asks: "is x a descendant of y?"
//! again uses memoisation
//! A node counts as a descendant of itself (x == y returns true).

public boolean isDescendant( int x, int y )
	{
	if( x == y ) desc[x][y] = YES;

	if( desc[x][y] == YES ) return true;
	if( desc[x][y] == NO ) return false;

	//! So descendancy relation is not yet known for x, y
	//! Let's compute it and store the answer...

	biDAG X = nodearray[ x ];

	biDAG p1 = X.parent;

	if( p1 == null )
		{
		//! x is the root, y is not equal to x,
		//! so x cannot be a descendant of y

		desc[x][y] = NO;
		return false;
		}

	int xprime = p1.nodeNum;
	
	//! Recurse upwards through the first parent...
	if( isDescendant( xprime, y ) )
		{
		desc[x][y] = YES;
		return true;
		}

	biDAG p2 = X.secondParent;

	if( p2 == null )
		{
		desc[x][y] = NO;
		return false;
		}

	xprime = p2.nodeNum;

	//! ...and, for recombination nodes, through the second parent too.
	if( isDescendant( xprime, y ) )
		{
		desc[x][y] = YES;
		return true;
		}
	
	desc[x][y] = NO;
	return false;
	}


//! join is only internally used, so we assume the ints that you
//! give it refer to nodes, not leaves.

// "where join(x, z) is a predicate stating that N contains t != x and internally vertex-disjoint
// paths t → x and t → z."

public boolean isJoin( int x, int z )
	{
	if( x == z ) return false;	//! I think that's OK...or not? need to think about that...

	if( join[x][z] == YES ) return true;
	if( join[x][z] == NO ) return false;

	//! So it is not yet defined. Let's compute it...
	//! this requires some thinking...

	if( isDescendant( x, z ) )
		{
		join[x][z] = YES;
		return true;
		}

	//! So x is not a descendant of z. In this case, the only chance of a join is if there
	//! is an explicit ^ shape with t distinct from both x and z

	biDAG Z = nodearray[ z ];

	biDAG p1 = Z.parent;

	if( p1 == null )
		{
		//! then z is the root. In this case x is not a descendant of z, and there
		//! can be no 't' higher than z, so the answer is FALSE.

		join[x][z] = NO;
		return false;
		}

	int zprime = p1.nodeNum;

	//! Otherwise the join witness, if any, lies above z: recurse via z's parents.
	if( isJoin( x, zprime ) )
		{
		join[x][z] = YES;
		return true;
		}

	biDAG p2 = Z.secondParent;

	if( p2 == null )
		{
		join[x][z] = NO;
		return false;
		}

	zprime = p2.nodeNum;

	if( isJoin( x, zprime ))
		{
		join[x][z] = YES;
		return true;
		}

	join[x][z] = NO;
	return false;
	}

//! this is the internal consistency checker: it
//! assumes that x, y, z refer to nodes not leaves...
//! Decides whether the triplet xy|z is consistent with the network,
//! recursing on whichever of x, y, z is topologically last (largest nodeNum).

private boolean con( int x, int y, int z )
	{
	if( (x==y) || (x==z) || (y==z ) )
		{
		cons[x][y][z] = NO;
		return false;
		}

	if( cons[x][y][z] == NO ) return false;
	if( cons[x][y][z] == YES ) return true;

	//! So cons[x][y][z] is UNKNOWN, we need to compute it!

	//! Remember, the node numberings are also the position
	//! in the topological sort. T'sort begins at 0.

	//! Case 1: z is topologically last -> climb from z.
	if( (x < z) && (y < z) )
		{
		biDAG Z = nodearray[z];

		biDAG p1 = Z.parent;

		if( p1 == null )
			{
			cons[x][y][z] = NO;
			return false;
			}

		int zprime = p1.nodeNum;

		if( con( x,y,zprime) )
			{	
			cons[x][y][z] = YES;
			return true;
			}

		biDAG p2 = Z.secondParent;

		if( p2 == null )
			{
			cons[x][y][z] = NO;
			return false;
			}

		zprime = p2.nodeNum;

		if( con( x,y,zprime ) )
			{
			cons[x][y][z] = YES;
			return true;
			}

		cons[x][y][z] = NO;
		return false;
		}

	
	//! Case 2: y is topologically last -> either x hangs directly under y
	//! with a join to z, or we climb from y.
	if( (x<y) && (z<y) )
		{
		if( adjmatrix[x][y] && isJoin(x,z) )
			{
			cons[x][y][z] = YES;
			return true;
			}

		biDAG Y = nodearray[y];

		biDAG p1 = Y.parent;

		if( p1 == null )
			{
			//! Is this correct?

			cons[x][y][z] = NO;
			return false;
			}

		int yprime = p1.nodeNum;

		if( (yprime != x) && (yprime !=z) )
			{
			if( con(x,yprime,z) )
				{
				cons[x][y][z] = YES;
				return true;
				}

			}

		biDAG p2 = Y.secondParent;

		if( p2 == null )
			{
			cons[x][y][z] = NO;
			return false;
			}
	
		yprime = p2.nodeNum;

		if( (yprime != x) && (yprime !=z) )
			{
			if( con(x,yprime,z) )
				{
				cons[x][y][z] = YES;
				return true;
				}
			}

		cons[x][y][z] = NO;
		return false;
		}

	//! Case 3: x is topologically last — symmetric to case 2 with x and y swapped.
	if( (y<x) && (z<x) )
		{
		if( adjmatrix[y][x] && isJoin(y,z) )
			{
			cons[x][y][z] = YES;
			return true;
			}

		biDAG X = nodearray[x];

		biDAG p1 = X.parent;

		if( p1 == null )
			{
			//! Is this correct?

			cons[x][y][z] = NO;
			return false;
			}

		int xprime = p1.nodeNum;

		if( (xprime != z) && (xprime != y) )
			{
			if( con(xprime,y,z) )
				{
				cons[x][y][z] = YES;
				return true;
				}

			}

		biDAG p2 = X.secondParent;

		if( p2 == null )
			{
			cons[x][y][z] = NO;
			return false;
			}
	
		xprime = p2.nodeNum;

		if( (xprime != z) && (xprime !=y) )
			{
			if( con(xprime,y,z) )
				{
				cons[x][y][z] = YES;
				return true;
				}
			}

		cons[x][y][z] = NO;
		return false;
		}

	System.out.println("Shouldn't actually get here...");

	cons[x][y][z] = NO;
	return false;
	}

//! this is the external consistency checker:
//! x, y and z refer to LEAVES !!
//! Translates leaf numbers to node numbers via leafarray and delegates to con().

public boolean consistent( int x, int y, int z )
	{
	if( (x==0) || (y==0) || (z==0) )
		{
		System.out.println("Leaf with 0 number?");
		System.exit(0);
		}

	if( (x==y) || (x==z) || (z==y) ) return false;

	//! System.out.println("Internal: "+leafarray[x] + " " +leafarray[y] + "|" + leafarray[z] );

	return( con( leafarray[x], leafarray[y], leafarray[z] ) );
	}

//! DFS that appends every reachable node to 'nodes' in POST order, records
//! every directed edge as a biDAG[2] pair, and sorts nodes into
//! leaves (non-dummy leaf nodes) and dummy_nodes. Uses/sets b.visited,
//! so callers must reset the visited flags first.
public static void scan( biDAG b, Vector nodes, Vector edges, Vector leaves, Vector dummy_nodes  )
	{
	if( b.visited == true ) return;

	Simplistic.doublereport("Visiting node "+b);

	b.visited = true;

	if( b.child1 != null )
		{
		biDAG myedge[] = new biDAG[2];
		myedge[0] = b;
		myedge[1] = b.child1;

		if( (myedge[1].parent != myedge[0]) && (myedge[1].secondParent != myedge[0]) )
			{
			System.out.println("I DON'T UNDERSTAND!");
			System.exit(0);
			}


		edges.addElement( myedge );

		dagExplore.scan( b.child1, nodes, edges, leaves, dummy_nodes );
		}
	
	if( b.child2 != null )
		{
		biDAG myedge[] = new biDAG[2];
		myedge[0] = b;
		myedge[1] = b.child2;

		if( (myedge[1].parent != myedge[0]) && (myedge[1].secondParent != myedge[0]) )
			{
			System.out.println("I DON'T UNDERSTAND EITHER!");
			System.exit(0);
			}

		edges.addElement( myedge );

		dagExplore.scan( b.child2, nodes, edges, leaves, dummy_nodes );
		}

	//! so it does not get added as a leaf...REMEMBER THIS!!!

	if( b.isLeafNode() && (!b.isDummy) ) leaves.addElement( b );

	if( b.isDummy ) dummy_nodes.addElement( b );

	//! post order...
	nodes.addElement( b );
	}

}











//! ----------------------
//! The root has no parent;
//! Leaves have no children, but do have data;
//! Nothing more than that...very general...can have cycles...
//! Is considered a leaf if it has no children (child1 and child2 both null) -- see isLeafNode().


class biDAG
{
public biDAG parent;
public biDAG child1, child2;	//! internal tree nodes have two children; recomb nodes have one
public int data;		//! leaf label; 0 means "not a leaf label"
public biDAG secondParent;	//! non-null exactly for recombination nodes
public int auxDat;		//! scratch numbering used by dumpDAG/numVisit
public boolean visited;

public boolean fixVisit;	//! traversal flag for dagFixLeaves/resetFixLeaves

public boolean isDummy;

KSet snset;

public int nodeNum;		//! assigned externally (dagExplore), topological order

public boolean newickVisit;
public int newickRecNum;	//! #H-number for recombination nodes in Newick output
public boolean newickRecVisit;


//! These are things we need for the undirected
//! DFS which we use for testing biconnectivity

private biDAG adj[];
private int degree;
private biDAG pred;
private int colour;
private int discovery;
private int low;

private final static int WHITE = 0;
private final static int GREY = 1;

public biDAG()
	{
	parent = child1 = child2 = null;
	data = 0;
	secondParent = null;
	auxDat = 0;
	visited = false;
	snset = null;
	isDummy = false;

	newickVisit = false;
	newickRecNum = -1;
	newickRecVisit = false;

	pred = null;
	colour = WHITE;	
	}

//! ----------------------------------------------------------------------
//! This is used for the undirected DFS...
//! Collects the (undirected) neighbours of this node into adj[] and sets degree.

public void buildAdjacency()
	{
	//! BUGFIX: was new biDAG[3]. A node with two parents AND two children
	//! would throw ArrayIndexOutOfBoundsException on the fourth write,
	//! before the intended at == 4 diagnostic below could ever fire.
	adj = new biDAG[4];

	int at = 0;

	if( parent != null ) adj[at++] = parent;
	if( secondParent != null ) adj[at++] = secondParent;
	if( child1 != null ) adj[at++] = child1;
	if( child2 != null ) adj[at++] = child2;

	if( at == 4 )
		{
		System.out.println("Node with degree 4?!?!?");
		System.exit(0);
		}
	degree = at;
	}

//! ----------------------------------------------------------------------
//! Returns true iff the network rooted at 'this' is biconnected when
//! viewed as an UNDIRECTED graph (no articulation nodes). Assumes 'this'
//! is the root; mutates the private DFS fields (pred/colour/discovery/low).

public boolean isBiconnected()
	{
	//! Nou dit wordt leuk...
	//! remember: we are dealing here with UNDIRECTED graph

	int time[] = new int[1];
	time[0] = -1;

	//! I assume that 'this' is the root
	boolean result = this.biconVisit( time );

	return result;
	}

//! Standard articulation-point DFS (discovery/low values); returns false as
//! soon as an articulation node is found, true if none exists in this subtree.
private boolean biconVisit( int time[] )
	{
	//! System.out.println("Inside node "+this);
	//! if( secondParent != null ) System.out.println("I am recombination node.");	

	this.buildAdjacency();

	time[0]++;
	colour = GREY;

	discovery = time[0];
	low = time[0];

	//! System.out.println("I am discovered at time "+discovery);

	int dfsdegree = 0;

	for( int x=0; x<degree; x++ )
		{
		biDAG next = adj[x];

		if( next == pred )
			{
			//! System.out.println("Not entering that neighbour: is our DFS parent.");
			continue;	//! to ensure we don't retrace our steps
			}

		if( next.colour == WHITE )
			{
			//! System.out.println("Neighbour is white.");

			if( next.isLeafNode() )
				{
				//! System.out.println("Not entering that neighbour: is a leaf.");
				continue;	//! don't bother with leaves...
				}

			next.pred = this;

			dfsdegree++;
			if( (dfsdegree > 1) && (this.parent == null) )
				{
				//! System.out.println("Root is articulation node.");
				return false;
				}
											//! Root is only allowed
											//! to have degree 1 in DFS tree

			boolean result = next.biconVisit( time );
			if( result == false ) return false;				//! exit the recursion zsm

			if( next.low < this.low )
				{
				this.low = next.low;
				//! System.out.println("Updating low of "+this+" to "+this.low);
				}

			if( (next.low >= this.discovery) && (this.parent != null) )
				{
				//! System.out.println("A subtree rooted at "+this+" can't escape.");
				return false;
				}
			}
		else
			{
			//! next.colour == GREY, so already visited...
			//! System.out.println("Neighbour is grey.");
			//! System.out.println("(Neighbour was "+next+")");

			if( next.discovery < this.low ) this.low = next.discovery;

			//! System.out.println("Back edge: updating low of "+this+" to "+this.low);
			}

		}
	return true;
	}

//! ----------------------------------------------------------------------
//! Parses a .DOT-style file ("a -> b" per line) and rebuilds the DAG.
//! Node numbers below 'threshhold' (1000) are treated as leaf labels;
//! numbers >= 1000 are internal nodes (stored only in auxDat).
//! Returns the root (the unique node without parents), or null if none.

public static biDAG buildDAGfromDOT( String fileName )
	{
	int recCount = 0;

	Vector left = new Vector();
	Vector right = new Vector();

	try 	{ 
	    	FileReader fr = new FileReader(fileName);
	        BufferedReader br = new BufferedReader(fr);

	        String record = new String();
	        while ((record = br.readLine()) != null)
			{
			String[] tripData = record.split(" ");

			//! Only lines of the exact shape "tail -> head" are edges;
			//! everything else (headers, braces, styling) is skipped.
			if( tripData.length != 3 ) continue;
			if( !tripData[1].equals("->") ) continue;

			recCount++;

			int a = Integer.parseInt(tripData[0]);

			left.addElement( new Integer(a) );

			int c = Integer.parseInt(tripData[2]);

			right.addElement( new Integer(c) );

			//! System.out.println(recCount+" ::: " + a + " * " +c);
       			} 
		}
		catch (IOException e)
			{ 
	        	// catch possible io errors from readLine()
		        System.out.println("ERROR: Problem reading DOT file "+fileName);
			System.exit(0);
			}

		int numEdges = left.size();

		int edges[][] = new int[numEdges][2];
		
		for( int fill = 0; fill < numEdges; fill++ )
			{
			edges[fill][0] = ((Integer)left.elementAt(fill)).intValue();
			edges[fill][1] = ((Integer)right.elementAt(fill)).intValue();
			//! System.out.println("Got edge : "+edges[fill][0]+" --> "+edges[fill][1]);
			}

	//! So, let's build it!!

	Vector dagvec = new Vector();		

	//! node numbers < threshhold are leaves; >= threshhold internal nodes
	int threshhold = 1000;

	for( int e = 0; e < numEdges; e++ )
		{
		int lnum = edges[e][0];
		int rnum = edges[e][1];

		biDAG lf = findNode( dagvec, lnum );
		biDAG rf = findNode( dagvec, rnum );

		//! System.out.println("Adding edge "+lnum+" ---> "+rnum);

		if( lf == null )
			{
			biDAG nl = new biDAG();
			//! System.out.println("Creating "+lnum);
			if( lnum < threshhold )
				{
				nl.data = lnum;
				nl.auxDat = lnum;
				}
			else
				{
				nl.auxDat = lnum;
				}

			lf = nl;
			dagvec.addElement(nl);
			}

		if( rf == null )
			{
			biDAG nr = new biDAG();
			//! System.out.println("Creating "+rnum);
			if( rnum < threshhold )
				{
				nr.data = rnum;
				nr.auxDat = rnum;
				}
			else
				{
				nr.auxDat = rnum;
				}
			rf = nr;
			dagvec.addElement(nr);
			}

		//! Ok, so now we're OK

		if( lf.child1 == null )
			{
			lf.child1 = rf;
			}
		else
		if( lf.child2 == null )
			{
			lf.child2 = rf;
			}
		else
			{
			System.out.println("We messed up.");
			System.exit(0);
			}

		if( rf.parent == null )
			{
			rf.parent = lf;
			}
		else
		if( rf.secondParent == null )
			{
			rf.secondParent = lf;
			}
		else
			{
			//! NOTE(review): unlike the child-overflow case above, this error
			//! path does not System.exit — confirm whether continuing with a
			//! dropped third in-edge is intended.
			System.out.println("We messed up AGAIN.");
			}

		}

	biDAG r = findRoot( dagvec );

	return r;
	}

//! Returns the first node in 'dag' with no parents, or null if there is none.
public static biDAG findRoot( Vector dag )
	{
	Enumeration e = dag.elements();
	while( e.hasMoreElements() )
		{
		biDAG b = (biDAG) e.nextElement();
		if( (b.parent==null) && (b.secondParent==null) )
			{
			return b;
			}
		}
	return null;
	}


//! Looks up a node by number: leaves are matched on data, internal
//! nodes on auxDat. Returns null when absent; num 0 is rejected because
//! 0 is the "no label" sentinel.
public static biDAG findNode( Vector dag, int num )
	{
	if( num == 0 )
		{
		System.out.println("Not interested in nodes that have value 0.");
		System.exit(0);
		}

	Enumeration e = dag.elements();
	while( e.hasMoreElements() )
		{
		biDAG b = (biDAG) e.nextElement();
		if( b.data != 0 )
			{
			if( b.data == num ) return b;
			}
		else
			{
			if( b.auxDat == num ) return b;
			}
		}
	return null;
	}

//! Clears the fixVisit flags set by dagFixLeaves, with memoised early exit
//! (same pattern as resetVisited below).
public void resetFixLeaves()
	{
	//! BUGFIX: was 'if( fixVisit = false )' — an assignment, so the guard was
	//! always false and every call re-traversed the whole DAG (exponential on
	//! networks with recombination nodes). '==' restores the early exit.
	if( fixVisit == false ) return;

	fixVisit = false;
	if( child1 != null ) child1.resetFixLeaves();
	if( child2 != null ) child2.resetFixLeaves();
	}

//! Renumbers every leaf l to map[l]; uses fixVisit to avoid revisiting
//! shared subgraphs, so call resetFixLeaves afterwards.
public void dagFixLeaves(int map[])
	{
	if( fixVisit ) return;
	fixVisit = true;

	if( this.isLeafNode() )
		{
		data = map[data];
		return;
		}
	else
		{
		if( child1 != null ) child1.dagFixLeaves(map);
		if( child2 != null ) child2.dagFixLeaves(map);
		}
	}

//! We assume that dagMap has indexing space for 1...l where l
//! is the number of leaves. ALSO ASSUMES THAT 'VISITED' is UNUSED/RESET
//! Fills dagMap[leafNumber] = leaf node for every labelled node reachable
//! from this one.

public void getDAGLeafMap( biDAG[] dagMap )
	{
	if( visited == true ) return;

	if( data != 0 )
		{
		dagMap[data] = this;
		}
	visited = true;

	if( child1 != null ) child1.getDAGLeafMap( dagMap );
	if( child2 != null ) child2.getDAGLeafMap( dagMap );
	}


//! Tree-only variant of dagFixLeaves: no visited flags, assumes every
//! internal node has both children (NPE otherwise).
public void treeFixLeaves(int map[])
	{
	//! Changes the leaf numberings from l to map[l];
	//! Note that we assume that l>=1; 
	
	if( data != 0 ) 
		{
		if( (child1 != null) || (child2 != null) )
			{
			System.out.println("Error 4");
			}
		data = map[data];
		return;
		}
	else
		{
		child1.treeFixLeaves(map);
		child2.treeFixLeaves(map);
		return;
		}

	}

public static biDAG cherry12()
	{
	//! Returns a cherry, using 3 nodes,
	//! with leaf numbers 1,2.

	biDAG p = new biDAG();
	biDAG c1 = new biDAG();
	biDAG c2 = new biDAG();
	
	p.child1 = c1;
	p.child2 = c2;
	
	c1.parent = p;
	c1.data = 1;

	c2.parent = p;
	c2.data = 2;

	return p;
	}

//! this will leave child2 empty for other fun...
//! Subdivides the edge (edgetail -> edgehead) with a fresh node and returns it;
//! the new node becomes child of edgetail and parent of edgehead.

public static biDAG subdivide( biDAG edgetail, biDAG edgehead )
	{
	biDAG subd = new biDAG();

	subd.parent = edgetail;
	subd.secondParent = null;

	subd.child1 = edgehead;
	subd.child2 = null;

	if( edgetail.child1 == edgetail.child2 )
		{
		System.out.println("Uh oh, multi-edge...");
		System.exit(0);
		}

	if( edgetail.child1 == edgehead )
		{
		edgetail.child1 = subd;
		}
	else
	if( edgetail.child2 == edgehead )
		{
		edgetail.child2 = subd;
		}
	else
		{
		System.out.println("Mega mess up.");
		System.exit(0);
		}

	if( edgehead.parent == edgehead.secondParent )
		{
		System.out.println("Another multi-edge problem...");
		System.exit(0);
		}

	if( edgehead.parent == edgetail )
		{
		edgehead.parent = subd;
		}
	else
	if( edgehead.secondParent == edgetail )
		{
		edgehead.secondParent = subd;
		}
	else
		{	
		//! NOTE(review): this error path prints diagnostics but does not
		//! System.exit, unlike "Mega mess up." above — confirm intended.
		System.out.println("Mega mess up II.");
		System.out.println("Edgetail: "+edgetail);
		System.out.println("Edgehead: "+edgehead);
		System.out.println(edgehead.parent);
		System.out.println(edgehead.secondParent);
		}	

	return subd;
	}


//! Prints the subtree in a simple bracket notation to stdout.
//! Assumes a binary TREE below this node (exits if an internal node
//! is missing a child).
public void dump()
	{
	if( data != 0 ) System.out.print(data);
	else
		{
		if( (child1 == null) || (child2==null) )
			{
			System.out.println("Error 5");
			if(child1 == null ) System.out.println("child1 is null");
			if(child2 == null ) System.out.println("child2 is null");
			System.exit(0);
			}
		System.out.print("[");
		child1.dump();
		System.out.print(",");
		child2.dump();
		System.out.print("]");
		}
	}
	
//! Clears the 'visited' flags below this node, with memoised early exit.
public void resetVisited()
	{
	Simplistic.doublereport("In resetVisited()");

	if( visited == false ) return;

	visited = false;

	if( child1 != null )
		{
		child1.resetVisited();
		}
	if( child2 != null )
		{
		child2.resetVisited();
		}
	}

//! Prints this network in extended-Newick style (recombination nodes as #Hk).
//! Uses newickRecVisit/newickVisit flags; assumes they are unused/reset.
public void newickDump()
	{
	int counter[] = new int[1];
	counter[0] = 1;

	numberRecombNodes(counter);	

	System.out.println(this.doNewick() +";");
	}

//! ---------------------------------------------------
//! Assigns consecutive #H numbers (starting at counter[0]) to every
//! recombination node below this one, in post order.

public void numberRecombNodes( int counter[] )
	{
	if( newickRecVisit == true ) return;

	newickRecVisit = true;

	if( child1 != null )
		{
		child1.numberRecombNodes(counter);
		}

	if( child2 != null )
		{
		child2.numberRecombNodes(counter);
		}

	if( (parent != null) && (secondParent != null) )
		{
		newickRecNum = counter[0];
		counter[0]++;
		}
	return;
	}

//! --------------------------------------------------------
//! Recursive worker for newickDump: the second visit to a recombination
//! node emits only its "#Hk" reference instead of recursing again.

public String doNewick()
	{
	this.newickVisit = true;
	
	if( this.isLeafNode() )
		{
		return( this.data + "");
		}

	//! Ok, so it's either a leaf node or a recomb node...

	String lstring = null;
	String rstring = null;

	if( child1 != null )
		{
		if( child1.newickVisit == true )
			{
			lstring = "#H" + child1.newickRecNum;
			}
		else
		lstring = child1.doNewick();
		}

	if( child2 != null )
		{
		if( child2.newickVisit == true )
			{
			rstring = "#H" + child2.newickRecNum;
			}
		else
		rstring = child2.doNewick();
		}

	boolean benRecomb = ((parent != null) && (secondParent != null));

	if( (child1 != null) && (child2 != null ) )
		{
		return("(" + lstring + "," + rstring + ")");
		}
	else
	if( benRecomb )
		{
		return("(" + lstring + ")#H"+newickRecNum);
		}
	else
	System.out.println("FOUT!");

	return("BOOM!");

	}


//! This prints the dag (rooted at this node) in the funky .DOT format 
//! Still needs to be refined...NOTE THAT THIS HAS SIDE EFFECTS ON AUXDAT
//! AND VISITED SO ONLY CALL IF UNUSED OR "RESET"!!!!

public void dumpDAG(int index)
	{
	//! Hmmm, a little bit tricky...

	int num[] = new int[1];

	//! This numbering is arbitrary, just to draw a distinction from leaves;
	num[0] = 1000;

	System.out.println("strict digraph G"+index+" {");

	//! First, we'll number ALL the nodes....
	this.numVisit(num);

	//! Ok, so there is a numbering

	this.printOutgoingArcs();
	System.out.println("}");
	}

//! Emits "tail -> head" lines for every arc below this node (uses 'visited').
private void printOutgoingArcs()
	{
	if( child1 != null ) System.out.println( this.auxDat + " -> " + child1.auxDat );
	if( child2 != null ) System.out.println( this.auxDat + " -> " + child2.auxDat );
	this.visited = true;

	if( child1 != null )
		{
		if( child1.visited == false ) child1.printOutgoingArcs();
		}

	if( child2 != null )	
		{
		if( child2.visited == false ) child2.printOutgoingArcs();
		}

	}

//! Numbers the nodes for dumpDAG: leaves keep their data label, internal
//! nodes get fresh numbers from num[0] (starting at 1000). auxDat != 0
//! doubles as the "already visited" marker.
private void numVisit(int num[])
	{
	//! If already been visited, finished
	if( this.auxDat != 0 ) return;

	this.auxDat = num[0];

	if( this.data != 0 )
		{
		this.auxDat = this.data;
		System.out.println(this.auxDat + " [shape=circle, width=0.3];");
		}
	else
		{
		System.out.println(this.auxDat + " [shape=point];");
		}

	num[0]++;

	if( this.child1 != null ) child1.numVisit(num);
	if( this.child2 != null ) child2.numVisit(num);
	}


//! This is hacked, might not work so well...
//! A node counts as a leaf iff it has no children at all.

public boolean isLeafNode()
	{
	if( (this.child1 == null) && (this.child2 == null) )
		{
		return true;
		}
	return false;
	}


}

// ---------------------------------------------------------------------

//! gejat van http://www.merriampark.com/perm.htm

//! Generates all permutations of {0,...,n-1} in lexicographic order.
class PermutationGenerator {

  private int[] a;
  private BigInteger numLeft;
  private BigInteger total;

  //-----------------------------------------------------------
  // Constructor. WARNING: Don't make n too large.
  // Recall that the number of permutations is n!
  // which can be very large, even when n is as small as 20 --
  // 20! = 2,432,902,008,176,640,000 and
  // 21! is too big to fit into a Java long, which is
  // why we use BigInteger instead.
  //----------------------------------------------------------

  public PermutationGenerator (int n) {
    if (n < 1) {
      throw new IllegalArgumentException ("Min 1");
    }
    a = new int[n];
    total = getFactorial (n);
    reset ();
  }

  //--------------------------------------------------
  // Reset to the identity permutation [0, 1, ..., n-1]
  //--------------------------------------------------

  public void reset () {
    for (int i = 0; i < a.length; i++) {
      a[i] = i;
    }
    // BigInteger is immutable, so sharing 'total' is safe -- no need for
    // the defensive new BigInteger(total.toString()) copy.
    numLeft = total;
  }

  //------------------------------------------------
  // Return number of permutations not yet generated
  //------------------------------------------------

  public BigInteger getNumLeft () {
    return numLeft;
  }

  //------------------------------------
  // Return total number of permutations
  //------------------------------------

  public BigInteger getTotal () {
    return total;
  }

  //-----------------------------
  // Are there more permutations?
  //-----------------------------

  public boolean hasMore () {
    return numLeft.signum () > 0;
  }

  //------------------
  // Compute factorial
  //------------------

  private static BigInteger getFactorial (int n) {
    BigInteger fact = BigInteger.ONE;
    for (int i = n; i > 1; i--) {
      fact = fact.multiply (BigInteger.valueOf (i));
    }
    return fact;
  }

  //--------------------------------------------------------
  // Generate next permutation (algorithm from Rosen p. 284).
  // Returns the INTERNAL array; callers must not mutate it and
  // should copy it if they keep it across calls.
  // Throws NoSuchElementException when the generator is exhausted
  // (previously this crashed with ArrayIndexOutOfBoundsException).
  //--------------------------------------------------------

  public int[] getNext () {

    if (numLeft.signum () <= 0) {
      throw new NoSuchElementException ("All permutations have been generated");
    }

    if (numLeft.equals (total)) {
      numLeft = numLeft.subtract (BigInteger.ONE);
      return a;
    }

    int temp;

    // Find largest index j with a[j] < a[j+1]

    int j = a.length - 2;
    while (a[j] > a[j+1]) {
      j--;
    }

    // Find index k such that a[k] is smallest integer
    // greater than a[j] to the right of a[j]

    int k = a.length - 1;
    while (a[j] > a[k]) {
      k--;
    }

    // Interchange a[j] and a[k]

    temp = a[k];
    a[k] = a[j];
    a[j] = temp;

    // Put tail end of permutation after jth position in increasing order

    int r = a.length - 1;
    int s = j + 1;

    while (r > s) {
      temp = a[s];
      a[s] = a[r];
      a[r] = temp;
      r--;
      s++;
    }

    numLeft = numLeft.subtract (BigInteger.ONE);
    return a;

  }

}

//! Generates all r-element combinations of {0,...,n-1} in lexicographic order.
class CombinationGenerator {

  private int[] a;
  private int n;
  private int r;
  private BigInteger numLeft;
  private BigInteger total;

  //------------
  // Constructor: C(n, r) combinations. Requires n >= 1 and 0 <= r <= n.
  //------------

  public CombinationGenerator (int n, int r) {
    if (r > n) {
      throw new IllegalArgumentException ();
    }
    if (n < 1) {
      throw new IllegalArgumentException ();
    }
    // Previously a negative r fell through to new int[r] and surfaced as a
    // NegativeArraySizeException; reject it explicitly instead.
    if (r < 0) {
      throw new IllegalArgumentException ();
    }
    this.n = n;
    this.r = r;
    a = new int[r];
    BigInteger nFact = getFactorial (n);
    BigInteger rFact = getFactorial (r);
    BigInteger nminusrFact = getFactorial (n - r);
    total = nFact.divide (rFact.multiply (nminusrFact));
    reset ();
  }

  //------
  // Reset to the first combination [0, 1, ..., r-1]
  //------

  public void reset () {
    for (int i = 0; i < a.length; i++) {
      a[i] = i;
    }
    // BigInteger is immutable, so sharing 'total' is safe -- no need for
    // the defensive new BigInteger(total.toString()) copy.
    numLeft = total;
  }

  //------------------------------------------------
  // Return number of combinations not yet generated
  //------------------------------------------------

  public BigInteger getNumLeft () {
    return numLeft;
  }

  //-----------------------------
  // Are there more combinations?
  //-----------------------------

  public boolean hasMore () {
    return numLeft.signum () > 0;
  }

  //------------------------------------
  // Return total number of combinations
  //------------------------------------

  public BigInteger getTotal () {
    return total;
  }

  //------------------
  // Compute factorial
  //------------------

  private static BigInteger getFactorial (int n) {
    BigInteger fact = BigInteger.ONE;
    for (int i = n; i > 1; i--) {
      fact = fact.multiply (BigInteger.valueOf (i));
    }
    return fact;
  }

  //--------------------------------------------------------
  // Generate next combination (algorithm from Rosen p. 286).
  // Returns the INTERNAL array; callers must not mutate it and
  // should copy it if they keep it across calls.
  // Throws NoSuchElementException when the generator is exhausted
  // (previously this crashed with ArrayIndexOutOfBoundsException).
  //--------------------------------------------------------

  public int[] getNext () {

    if (numLeft.signum () <= 0) {
      throw new NoSuchElementException ("All combinations have been generated");
    }

    if (numLeft.equals (total)) {
      numLeft = numLeft.subtract (BigInteger.ONE);
      return a;
    }

    int i = r - 1;
    while (a[i] == n - r + i) {
      i--;
    }
    a[i] = a[i] + 1;
    for (int j = i + 1; j < r; j++) {
      a[j] = a[i] + j - i;
    }

    numLeft = numLeft.subtract (BigInteger.ONE);
    return a;

  }
}

