/**********************************************************

This software is part of J.-S. Caux's ABACUS library.
Copyright (c) J.-S. Caux.

-----------------------------------------------------------

File: src/BETHE/Offsets.cc

Purpose: defines functions in Offsets class.

IN DEVELOPMENT

***********************************************************/

#include "ABACUS.h"

using namespace std;

// NOTE(review): `Vect` appears everywhere in this file without template
// arguments (e.g. `Vect()`, `Vect (2 * base.Nrap.size() + 2)`); the original
// presumably reads `Vect<Young_Tableau>` / `Vect<long long int>` and the
// angle-bracketed parameters look stripped by text extraction — TODO confirm
// against the repository before compiling.

namespace ABACUS {

  // Function definitions: class Offsets

  // Default constructor: empty base, empty tableau vector, all ids zeroed.
  Offsets::Offsets () : base(), Tableau(Vect()), type_id(0LL), id(0LL), maxid(0LL) {};

  Offsets::Offsets (const Offsets& RefOffset)  // copy constructor
    // Copies base and all id fields via the init list, then copies the
    // 2 * base.Nrap.size() + 2 tableaux element by element.
    : base(RefOffset.base), Tableau(Vect (2 * base.Nrap.size() + 2)), type_id(RefOffset.type_id), id(RefOffset.id), maxid(RefOffset.maxid)
  {
    for (int i = 0; i < 2 * base.Nrap.size() + 2; ++i) Tableau[i] = RefOffset.Tableau[i];
  }

  Offsets::Offsets (const Heis_Base& RefBase, long long int req_type_id)
    // sets all tableaux to empty ones, with nparticles(req_type_id) at each level
    //
    // req_type_id encodes the per-sector particle counts as base-10 digits:
    // nparticles[i] is the i-th decimal digit of req_type_id (nparticles[0] is
    // the least significant).  This implies each sector can hold at most 9
    // particles in this encoding (consistent with Compute_type_id below,
    // which rebuilds the id as sum of Nrows * 10^i).
  {
    // Build nparticles vector from req_type_id
    Vect nparticles(0, 2* RefBase.Nrap.size() + 2);
    long long int factor = pow_ulli (10LL, nparticles.size() - 1);
    long long int id_eff = req_type_id;
    // Peel off decimal digits from the most significant end downwards.
    for (int i = 0; i < nparticles.size(); ++i) {
      nparticles[nparticles.size() - 1 - i] = id_eff/factor;
      id_eff -= factor * nparticles[nparticles.size() - 1 - i];
      factor /= 10LL;
    }
    // Check if we've got the right vector: re-encode and compare round-trip.
    long long int idcheck = Offsets_type_id (nparticles);
    if (idcheck != req_type_id) ABACUSerror("idcheck != req_type_id in Offsets constructor.");
    // Delegate the real construction to the (Heis_Base, Vect) constructor.
    (*this) = Offsets(RefBase, nparticles);
  }

  Offsets::Offsets (const Heis_Base& RefBase, Vect nparticles)
    // sets all tableaux to empty ones, with nparticles at each level
    //
    // nparticles layout (one entry per "sector"):
    //   [0], [1]: holes on the R and L side within the ground-state interval
    //   [2], [3]: particles of type 0 on the R and L side outside the GS interval
    //   [2k+2], [2k+3] (k >= 1): R/L particles of string type k
    // The constructor validates the counts against the base, then builds one
    // empty Young_Tableau per sector and computes maxid (the number of
    // distinct offset states minus one).
    : base(RefBase), Tableau(Vect (2 * base.Nrap.size() + 2)), type_id(Offsets_type_id (nparticles)), id(0LL), maxid(0LL)
  {
    // Checks on nparticles vector:
    if (nparticles.size() != 2 * base.Nrap.size() + 2) ABACUSerror("Wrong nparticles.size in Offsets constructor.");
    //if (base.Nrap[0] != (nparticles[3] + nparticles[2] + base.Mdown - nparticles[0] - nparticles[1])) ABACUSerror("Wrong Nrap[0] in Offsets constructor.");
    // Particle-hole balance at the fundamental level: each particle moved out
    // of the GS interval must leave a hole behind.
    if (nparticles[3] + nparticles[2] != nparticles[0] + nparticles[1]) {
      cout << nparticles[0] << "\t" << nparticles[1] << "\t" << nparticles[2] << "\t" << nparticles[3] << endl;
      ABACUSerror("Wrong Npar[0-3] in Offsets constructor.");
    }
    // For higher string types, the R + L counts must add up to the base's
    // rapidity count for that level.
    for (int base_level = 1; base_level < base.Nrap.size(); ++ base_level)
      if (base.Nrap[base_level] != nparticles[2*base_level + 2] + nparticles[2*base_level + 3]) {
        cout << base_level << "\t" << base.Nrap[base_level] << "\t" << nparticles[2*base_level + 2] << "\t" << nparticles[2*base_level + 3] << endl;
        ABACUSerror("Wrong Nrap[] in Offsets constructor.");
      }
    // nparticles[0,1]: number of holes on R and L side in GS interval
    if (nparticles[0] > (base.Nrap[0] + 1)/2) ABACUSerror("nparticles[0] too large in Offsets constructor.");
    if (nparticles[1] > base.Nrap[0]/2) ABACUSerror("nparticles[1] too large in Offsets constructor.");
    // nparticles[2,3]: number of particles of type 0 on R and L side out of GS interval
    if (nparticles[2] > (base.Ix2_max[0] - base.Nrap[0] + 1)/2) ABACUSerror("nparticles[2] too large in Offsets constructor.");
    if (nparticles[3] > (base.Ix2_max[0] - base.Nrap[0] + 1)/2) ABACUSerror("nparticles[3] too large in Offsets constructor.");
    // Capacity checks for the higher string types.  The R-side capacity is
    // (Ix2_max - ((Nrap + 1) % 2) + 2)/2; the L side gets the remaining
    // Ix2_max + 1 - (R capacity) slots.  (The commented-out lines preserve an
    // earlier, superseded formula.)
    for (int base_level = 1; base_level < base.Nrap.size(); ++ base_level)
      if ((nparticles[2*base_level + 2] > 0 && nparticles[2*base_level + 2] > (base.Ix2_max[base_level] - ((base.Nrap[base_level] + 1) % 2) + 2)/2)
          //|| (nparticles[2*base_level + 3] > 0 && nparticles[2*base_level + 3] > (base.Ix2_max[base_level] - (base.Nrap[base_level] % 2) - 1)/2)) {
          || (nparticles[2*base_level + 3] > 0 && nparticles[2*base_level + 3] > base.Ix2_max[base_level] + 1 - (base.Ix2_max[base_level] - ((base.Nrap[base_level] + 1) % 2) + 2)/2)) {
        cout << base_level << "\t" << nparticles[2*base_level + 2] << "\t" << (base.Ix2_max[base_level] - ((base.Nrap[base_level] + 1) % 2) + 2)/2
             << "\t" << nparticles[2*base_level + 3] << "\t" << (base.Ix2_max[base_level] - (base.Nrap[base_level] % 2) - 1)/2
             << "\t" << (nparticles[2*base_level + 2] > 0) << "\t" << (nparticles[2*base_level + 2] > (base.Ix2_max[base_level] - ((base.Nrap[base_level] + 1) % 2) + 2)/2)
             //<< "\t" << (nparticles[2*base_level + 3] > 0) << "\t" << (nparticles[2*base_level + 3] > (base.Ix2_max[base_level] - (base.Nrap[base_level] % 2) - 1)/2)
             << "\t" << (nparticles[2*base_level + 3] > 0) << "\t" << (nparticles[2*base_level + 3] > base.Ix2_max[base_level] + 1 - (base.Ix2_max[base_level] - ((base.Nrap[base_level] + 1) % 2) + 2)/2)
             << endl;
        ABACUSerror("nparticles too large in Offsets constructor.");
      }

    // Check sum of rapidities

    // Build one Young tableau per sector: Nrows = number of excitations in
    // the sector, Ncols = remaining room (capacity minus occupancy).  The
    // third argument presumably chains a tableau to a partner tableau —
    // TODO(review): confirm the Young_Tableau three-argument constructor's
    // semantics.

    // Holes in GS interval
    Tableau[0] = Young_Tableau(nparticles[0], (base.Nrap[0] + 1)/2 - nparticles[0]);
    Tableau[1] = Young_Tableau(nparticles[1], base.Nrap[0]/2 - nparticles[1], Tableau[0]);
    // Particles of type 0 out of GS interval
    Tableau[2] = Young_Tableau(nparticles[2], (base.Ix2_max[0] - base.Nrap[0] + 1)/2 - nparticles[2], Tableau[0]);
    Tableau[3] = Young_Tableau(nparticles[3], (base.Ix2_max[0] - base.Nrap[0] + 1)/2 - nparticles[3], Tableau[2]);
    // Tableaux of index i = 2,...: data about string type i/2-1.
    for (int base_level = 1; base_level < base.Nrap.size(); ++base_level) {
      Tableau[2*base_level + 2] = Young_Tableau(nparticles[2*base_level + 2],
                                                //(base.Ix2_max[base_level] - ((base.Nrap[base_level]) % 2) + 2)/2 - nparticles[2*base_level + 2], Tableau[2]);
                                                //(base.Ix2_max[base_level] - base.Nrap[base_level] % 2 + 2)/2 - nparticles[2*base_level + 2], Tableau[2]);
                                                (base.Ix2_max[base_level] - ((base.Nrap[base_level] + 1) % 2))/2 + 1 - nparticles[2*base_level + 2], Tableau[2]);
      Tableau[2*base_level + 3] = Young_Tableau(nparticles[2*base_level + 3],
                                                //(base.Ix2_max[base_level] - base.Nrap[base_level] % 2)/2 - nparticles[2*base_level + 3], Tableau[3]);
                                                (base.Ix2_max[base_level] - (base.Nrap[base_level] % 2) - 1)/2 + 1 - nparticles[2*base_level + 3], Tableau[3]);
    }
    // maxid = (product over sectors of the number of tableau states) - 1,
    // i.e. the largest valid mixed-radix id (see Compute_id / Set_to_id).
    maxid = 1LL;
    //id = Tableau[0].id;
    for (int i = 0; i < nparticles.size(); ++i) {
      maxid *= Tableau[i].maxid + 1LL;
      //id += maxid + Tableau[i].id;
    }
    maxid -= 1LL;
  }

  // Copy assignment: field-by-field copy with self-assignment guard.
  Offsets& Offsets::operator= (const Offsets& RefOffset)
  {
    if (this != &RefOffset) {
      base = RefOffset.base;
      Tableau = RefOffset.Tableau;
      type_id = RefOffset.type_id;
      id = RefOffset.id;
      maxid = RefOffset.maxid;
    }
    return(*this);
  }

  bool Offsets::operator<= (const Offsets& RefOffsets)
  {
    // Returns true iff every row length of *this (fundamental sectors 0-3
    // only) is <= the corresponding row length in RefOffsets, and any rows of
    // *this with no counterpart in RefOffsets are empty.
    // NOTE(review): the original comment stated the comparison the other way
    // around ("rows in RefOffsets are <= those in *this"); the code below
    // sets answer = false when a row of *this EXCEEDS the RefOffsets row.
    bool answer = true;
    for (int level = 0; level < 4; ++level) {  // check fundamental level only
    //for (int level = 0; level < 2 * base.Nrap.size() + 2; ++level) {
      // First check whether all rows which exist in both tableaux satisfy rule:
      for (int tableau_level = 0; tableau_level < ABACUS::min(Tableau[level].Nrows, RefOffsets.Tableau[level].Nrows); ++tableau_level)
        if (Tableau[level].Row_L[tableau_level] > RefOffsets.Tableau[level].Row_L[tableau_level]) answer = false;
      // Now check whether there exist extra rows violating rule:
      for (int tableau_level = ABACUS::min(Tableau[level].Nrows, RefOffsets.Tableau[level].Nrows); tableau_level < Tableau[level].Nrows; ++tableau_level)
        if (Tableau[level].Row_L[tableau_level] > 0) answer = false;
    }
    return(answer);
  }

  bool Offsets::operator>= (const Offsets& RefOffsets)
  {
    // Mirror of operator<=: true iff every row length of *this (fundamental
    // sectors 0-3 only) is >= the corresponding row in RefOffsets, and any
    // extra rows present only in RefOffsets are empty.
    bool answer = true;
    for (int level = 0; level < 4; ++level) {  // check fundamental level only
    //for (int level = 0; level < 2 * base.Nrap.size() + 2; ++level) {
      // First check whether all rows which exist in both tableaux satisfy rule:
      for (int tableau_level = 0; tableau_level < ABACUS::min(Tableau[level].Nrows, RefOffsets.Tableau[level].Nrows); ++tableau_level)
        if (Tableau[level].Row_L[tableau_level] < RefOffsets.Tableau[level].Row_L[tableau_level]) answer = false;
      // Now check whether there exist extra rows violating rule:
      for (int tableau_level = ABACUS::min(Tableau[level].Nrows, RefOffsets.Tableau[level].Nrows); tableau_level < RefOffsets.Tableau[level].Nrows; ++tableau_level)
        if (RefOffsets.Tableau[level].Row_L[tableau_level] > 0) answer = false;
    }
    return(answer);
  }

  void Offsets::Compute_type_id ()
  {
    // Rebuild type_id as base-10 digits: digit i is Tableau[i].Nrows
    // (so each sector must hold fewer than 10 excitations for the encoding
    // to be unambiguous).  Also refreshes each tableau's own id.
    type_id = 0LL;
    for (int i = 0; i < 2*base.Nrap.size() + 2; ++i) {
      Tableau[i].Compute_id();
      type_id += Tableau[i].Nrows * pow_ulli(10LL, i);
    }
  }

  void Offsets::Set_to_id (long long int idnr)
  {
    // Decodes a global offset id into per-tableau sub-ids (mixed-radix
    // representation, radix of sector i being Tableau[i].maxid + 1) and sets
    // each tableau to its sub-id.  Inverse of Compute_id below.
    //
    // The idnr of the Offset is given by
    // sub_id[0] + (total number of tableaux of type 0) * (sub_id[1] + (total number of tableaux of type 1) * (sub_id[2] + ...
    // + total number of tableaux of type (2*base.Nrap.size()) * sub_id[2*base.Nrap.size() + 1]
    if (idnr > maxid) {
      cout << idnr << "\t" << maxid << endl;
      ABACUSerror("idnr too large in offsets.Set_to_id.");
    }
    id = idnr;
    Vect sub_id(0LL, 2*base.Nrap.size() + 2);
    long long int idnr_eff = idnr;
    long long int temp_prod = 1LL;
    Vect result_choose(2*base.Nrap.size() + 2);
    // result_choose[i] is the number of distinct states of tableau i;
    // temp_prod accumulates the product over all but the last sector.
    for (int i = 0; i <= 2*base.Nrap.size(); ++i) {
      //result_choose[i] = choose_lli(Tableau[i].Nrows + Tableau[i].Ncols, Tableau[i].Nrows);
      result_choose[i] = Tableau[i].maxid + 1LL;
      temp_prod *= result_choose[i];
    }
    // Peel off sub-ids from the most significant sector downwards.
    for (int i = 2*base.Nrap.size() + 1; i > 0; --i) {
      sub_id[i] = idnr_eff/temp_prod;
      idnr_eff -= sub_id[i] * temp_prod;
      temp_prod /= result_choose[i-1];
    }
    sub_id[0] = idnr_eff;  // what's left goes to the bottom...
    for (int i = 0; i <= 2*base.Nrap.size() + 1; ++i) {
      //cout << "level = " << i << " Tableau.id = " << sub_id[i] << endl;
      // An empty tableau (zero rows or zero columns) only admits sub-id 0.
      if ((Tableau[i].Nrows * Tableau[i].Ncols == 0) && (sub_id[i] != 0)) ABACUSerror("index too large in offset.Set_to_id.");
      if (Tableau[i].id != sub_id[i]) Tableau[i].Set_to_id(sub_id[i]);
    }
    Compute_type_id ();
    return;
  }

  void Offsets::Compute_id ()
  {
    // Encodes the per-tableau ids into the global id using the same
    // mixed-radix scheme decoded by Set_to_id:
    // id = sum_i Tableau[i].id * prod_{j < i} (Tableau[j].maxid + 1).
    long long int prod_maxid = 1LL;
    id = 0LL;
    for (int i = 0; i < 2*base.Nrap.size() + 2; ++i) {
      Tableau[i].Compute_id();
      id += Tableau[i].id * prod_maxid;
      prod_maxid *= Tableau[i].maxid + 1LL;
    }
  }

  Vect Offsets::Descendents (bool fixed_iK)
  {
    // From a given vector of Young tableaux specifying a particular eigenstate,
    // this function provides the full set of descendents (either at the same momentum if
    // fixed_iK == true, or not) by returning a vector of all descendent id's (leaving the
    // base and type invariant), which can then be used for further calculations.
    // This set of descendents is meant to be used when calculating either partition functions
    // or zero-temperature correlation functions.

    // IMPORTANT ASSUMPTIONS:
    // - all even sectors consistently increase/decrease momentum for increasing tableau row length
    // - all odd sectors consistently decrease/increase momentum for increasing tableau row length

    // FOR FIXED MOMENTUM:
    // all tableau levels `above' the lowest occupied one are descended as for fixed_iK == false,
    // and the lowest sector's highest tableau level's row length is modified (increased or decreased by one
    // unit if possible) such that the iK of Tableau_desc == iK of Tableau_ref.
    // The logic behind this is that for a state with nexc excitations, we let run nexc - 1 of the
    // excitations, and the lowest one is fixed in place by the momentum constraint, if possible.

    // WARNING(review): this function is incomplete (file is marked IN
    // DEVELOPMENT): it declares a return type but ends without a return
    // statement, which is undefined behavior if it is ever called.  The
    // `fixed_iK` parameter and `Tableau_desc` are not yet used.
    Vect Tableau_ref = (*this).Tableau;
    Vect Tableau_desc = Tableau_ref;
  }

} // namespace ABACUS