
Commit 8b6700f

Thien Nguyen authored and committed

Work on ORNL-QCI#110

Add layer/gate count option

Signed-off-by: Thien Nguyen <[email protected]>

1 parent 4b879ee · commit 8b6700f

File tree: 3 files changed (+84 −39 lines)


tnqvm/visitors/exatn-gen/ExatnGenVisitor.cpp

+76 −34

@@ -15,14 +15,14 @@
  *
  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
  * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
- * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
- * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
- * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
- * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
- * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ *ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT,
+ *INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ *BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ *DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ *OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ *NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
+ *EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  *
  * Contributors:
  * Implementation - Thien Nguyen;
@@ -223,7 +223,8 @@ void ExatnGenVisitor<TNQVM_COMPLEX_TYPE>::initialize(
   // Note: If xacc::verbose is not set, we always set ExaTN logging level to
   // 0.
   exatn::resetClientLoggingLevel(xacc::verbose ? xacc::getLoggingLevel() : 0);
-  exatn::resetRuntimeLoggingLevel(xacc::verbose ? xacc::getLoggingLevel() : 0);
+  exatn::resetRuntimeLoggingLevel(xacc::verbose ? xacc::getLoggingLevel()
+                                                : 0);
 
   xacc::subscribeLoggingLevel([](int level) {
     exatn::resetClientLoggingLevel(xacc::verbose ? level : 0);
@@ -250,11 +251,21 @@ void ExatnGenVisitor<TNQVM_COMPLEX_TYPE>::initialize(
 
   // Default number of layers
   m_layersReconstruct = 4;
-  if (options.keyExists<int>("reconstruct-layers")) {
-    m_layersReconstruct = options.get<int>("reconstruct-layers");
+  m_countByGates = false;
+  m_layerTracker.clear();
+  if (options.keyExists<int>("reconstruct-gates")) {
+    m_layersReconstruct = options.get<int>("reconstruct-gates");
+    xacc::info("Reconstruct tensor network every " +
+               std::to_string(m_layersReconstruct) + " 2-body gates.");
+    m_countByGates = true;
+  } else {
+    if (options.keyExists<int>("reconstruct-layers")) {
+      m_layersReconstruct = options.get<int>("reconstruct-layers");
+      xacc::info("Reconstruct tensor network every " +
+                 std::to_string(m_layersReconstruct) + " layers.");
+    }
   }
-  xacc::info("Reconstruct tensor network every " +
-             std::to_string(m_layersReconstruct) + " layers.");
+
   m_reconstructTol = 1e-3;
   m_maxBondDim = 512;
   m_reconstructionFidelity = 1.0;
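
A note on the option handling above: when both keys are supplied, "reconstruct-gates" takes precedence and switches the visitor to counting raw two-body gates (m_countByGates = true), while "reconstruct-layers" is still honored in the else branch and counts non-overlapping gate layers. A minimal caller-side sketch, using only option names and XACC calls exercised by the updated tests in this commit (the thresholds 8 and 4 are arbitrary example values):

#include "xacc.hpp"

int main(int argc, char **argv) {
  xacc::Initialize(argc, argv);
  // Reconstruct the circuit tensor network after every 8 two-qubit gates.
  auto byGates = xacc::getAccelerator(
      "tnqvm", {{"tnqvm-visitor", "exatn-gen"}, {"reconstruct-gates", 8}});
  // Reconstruct after every 4 gate layers (gates acting on disjoint qubit
  // pairs are folded into the same layer).
  auto byLayers = xacc::getAccelerator(
      "tnqvm", {{"tnqvm-visitor", "exatn-gen"}, {"reconstruct-layers", 4}});
  xacc::Finalize();
  return 0;
}
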
@@ -276,7 +287,7 @@ void ExatnGenVisitor<TNQVM_COMPLEX_TYPE>::initialize(
   }
   if (options.stringExists("reconstruct-builder")) {
     m_reconstructBuilder = options.getString("reconstruct-builder");
-    xacc::info("Reconstruct with: " + m_reconstructBuilder + " builder.");
+    xacc::info("Reconstruct with: " + m_reconstructBuilder + " builder.");
   }
 }
 
@@ -589,7 +600,7 @@ void ExatnGenVisitor<TNQVM_COMPLEX_TYPE>::appendGateTensor(
     const xacc::Instruction &in_gateInstruction, GateParams &&... in_params) {
   // Count gate layer if this is a multi-qubit gate.
   if (in_gateInstruction.nRequiredBits() > 1) {
-    ++m_layerCounter;
+    updateLayerCounter(in_gateInstruction);
   }
   const auto gateName = GetGateName(GateType);
   const GateInstanceIdentifier gateInstanceId(gateName, in_params...);
@@ -674,7 +685,7 @@ void ExatnGenVisitor<TNQVM_COMPLEX_TYPE>::reconstructCircuitTensor() {
   if (m_layersReconstruct <= 0) {
     return;
   }
-  if (m_layerCounter > m_layersReconstruct) {
+  if (m_layerCounter >= m_layersReconstruct) {
     xacc::info("Reconstruct Tensor Expansion");
     auto target = std::make_shared<exatn::TensorExpansion>(m_tensorExpansion);
     // List of Approximate tensors to delete:
@@ -685,18 +696,20 @@ void ExatnGenVisitor<TNQVM_COMPLEX_TYPE>::reconstructCircuitTensor() {
     const std::vector<int> qubitTensorDim(m_buffer->size(), 2);
     auto rootTensor = std::make_shared<exatn::Tensor>("ROOT", qubitTensorDim);
     auto &networkBuildFactory = *(exatn::numerics::NetworkBuildFactory::get());
-    auto builder = networkBuildFactory.createNetworkBuilderShared(m_reconstructBuilder);
+    auto builder =
+        networkBuildFactory.createNetworkBuilderShared(m_reconstructBuilder);
     builder->setParameter("max_bond_dim", m_maxBondDim);
     auto approximant = [&]() {
       if (m_initReconstructionRandom || !m_previousOptExpansion) {
-        auto approximantTensorNetwork = exatn::makeSharedTensorNetwork("Approx", rootTensor, *builder);
+        auto approximantTensorNetwork =
+            exatn::makeSharedTensorNetwork("Approx", rootTensor, *builder);
         for (auto iter = approximantTensorNetwork->cbegin();
              iter != approximantTensorNetwork->cend(); ++iter) {
           const auto &tensorName = iter->second.getTensor()->getName();
           if (tensorName != "ROOT") {
             auto tensor = iter->second.getTensor();
-            const bool created = exatn::createTensorSync(
-                tensor, getExatnElementType());
+            const bool created =
+                exatn::createTensorSync(tensor, getExatnElementType());
             assert(created);
             const bool initialized = exatn::initTensorRnd(tensor->getName());
             assert(initialized);
@@ -706,8 +719,10 @@ void ExatnGenVisitor<TNQVM_COMPLEX_TYPE>::reconstructCircuitTensor() {
           }
         }
         approximantTensorNetwork->markOptimizableAllTensors();
-        auto approximant_expansion = std::make_shared<exatn::TensorExpansion>("Approx");
-        approximant_expansion->appendComponent(approximantTensorNetwork, TNQVM_COMPLEX_TYPE{1.0, 0.0});
+        auto approximant_expansion =
+            std::make_shared<exatn::TensorExpansion>("Approx");
+        approximant_expansion->appendComponent(approximantTensorNetwork,
+                                               TNQVM_COMPLEX_TYPE{1.0, 0.0});
         approximant_expansion->conjugate();
         return approximant_expansion;
       } else {
@@ -729,7 +744,7 @@ void ExatnGenVisitor<TNQVM_COMPLEX_TYPE>::reconstructCircuitTensor() {
     // Run the reconstructor:
     bool reconstructSuccess = exatn::sync();
     assert(reconstructSuccess);
-    //exatn::TensorNetworkReconstructor::resetDebugLevel(2); //debug
+    // exatn::TensorNetworkReconstructor::resetDebugLevel(2); //debug
     reconstructor.resetLearningRate(1.0);
     double residual_norm, fidelity;
     const auto startOpt = std::chrono::system_clock::now();
@@ -878,7 +893,7 @@ const double ExatnGenVisitor<TNQVM_COMPLEX_TYPE>::getExpectationValueZ(
   assert(success);
   // std::cout << "After renormalize:\n";
   // ketvector.printIt();
-
+
   exatn::TensorExpansion ketWithObs(ketvector, *m_obsTensorOperator);
   // std::cout << "Tensor Expansion:\n";
   // ketWithObs.printIt();
@@ -919,7 +934,8 @@ const double ExatnGenVisitor<TNQVM_COMPLEX_TYPE>::getExpectationValueZ(
     // std::cout << "Component coeff: " << component.coefficient << "\n";
     const std::complex<double> renormalizedComponentExpVal =
         tensor_body_val * component.coefficient;
-    // std::cout << "renormalizedComponentExpVal: " << renormalizedComponentExpVal << "\n";
+    // std::cout << "renormalizedComponentExpVal: " <<
+    // renormalizedComponentExpVal << "\n";
     return renormalizedComponentExpVal.real();
   }
   xacc::error("Unable to map execution data for sub-composite: " +
@@ -944,9 +960,9 @@ ExatnGenVisitor<TNQVM_COMPLEX_TYPE>::computeWaveFuncSlice(
     const auto bitVal = in_bitString[i];
     const std::string braQubitName = "QB" + std::to_string(i);
     if (bitVal == 0) {
-      const bool created = exatn::createTensor(
-          in_processGroup, braQubitName, getExatnElementType(),
-          exatn::TensorShape{2});
+      const bool created =
+          exatn::createTensor(in_processGroup, braQubitName,
+                              getExatnElementType(), exatn::TensorShape{2});
       assert(created);
       // Bit = 0
       const bool initialized = exatn::initTensorData(
@@ -955,9 +971,9 @@ ExatnGenVisitor<TNQVM_COMPLEX_TYPE>::computeWaveFuncSlice(
       assert(initialized);
       pairings.emplace_back(std::make_pair(i, i + nbOpenLegs));
     } else if (bitVal == 1) {
-      const bool created = exatn::createTensor(
-          in_processGroup, braQubitName, getExatnElementType(),
-          exatn::TensorShape{2});
+      const bool created =
+          exatn::createTensor(in_processGroup, braQubitName,
+                              getExatnElementType(), exatn::TensorShape{2});
       assert(created);
       // Bit = 1
       const bool initialized = exatn::initTensorData(
@@ -967,9 +983,9 @@ ExatnGenVisitor<TNQVM_COMPLEX_TYPE>::computeWaveFuncSlice(
       pairings.emplace_back(std::make_pair(i, i + nbOpenLegs));
     } else if (bitVal == -1) {
       // Add an Id tensor
-      const bool created = exatn::createTensor(
-          in_processGroup, braQubitName, getExatnElementType(),
-          exatn::TensorShape{2, 2});
+      const bool created = exatn::createTensor(in_processGroup, braQubitName,
+                                               getExatnElementType(),
+                                               exatn::TensorShape{2, 2});
       assert(created);
       const bool initialized = exatn::initTensorData(
           braQubitName, std::vector<TNQVM_COMPLEX_TYPE>{
@@ -1018,5 +1034,31 @@ ExatnGenVisitor<TNQVM_COMPLEX_TYPE>::computeWaveFuncSlice(
   }
   return waveFnSlice;
 }
+template <typename TNQVM_COMPLEX_TYPE>
+void ExatnGenVisitor<TNQVM_COMPLEX_TYPE>::updateLayerCounter(
+    const xacc::Instruction &in_gateInstruction) {
+  auto &gate = const_cast<xacc::Instruction &>(in_gateInstruction);
+  assert(gate.bits().size() == 2);
+  if (m_countByGates) {
+    ++m_layerCounter;
+  } else {
+    bool canCombine = true;
+    const auto q1 = gate.bits()[0];
+    const auto q2 = gate.bits()[1];
+
+    for (const auto& [bit1, bit2]: m_layerTracker) {
+      if ((q1 == bit1 || q1 == bit2) || (q2 == bit1 || q2 == bit2)) {
+        canCombine = false;
+        break;
+      }
+    }
+    if (canCombine) {
+      m_layerTracker.emplace(std::make_pair(q1, q2));
+    } else {
+      ++m_layerCounter;
+      m_layerTracker.clear();
+    }
+  }
+}
 } // end namespace tnqvm
 #endif // TNQVM_HAS_EXATN
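
For readers of the new updateLayerCounter above: in layer-counting mode, a two-qubit gate joins the current layer only if it touches neither qubit of any gate already tracked; the first collision closes the layer, bumps m_layerCounter, and clears the tracker. The standalone sketch below mirrors that rule with a hypothetical free function over plain qubit-index pairs; it is an illustration, not code from this commit:

#include <cassert>
#include <cstddef>
#include <set>
#include <utility>
#include <vector>

// Count closed layers for a sequence of 2-qubit gates, where gates acting on
// disjoint qubit pairs share a layer (mirrors the commit's else branch).
int countClosedLayers(
    const std::vector<std::pair<std::size_t, std::size_t>> &gates) {
  int layerCounter = 0;
  std::set<std::pair<std::size_t, std::size_t>> layerTracker;
  for (const auto &[q1, q2] : gates) {
    bool canCombine = true;
    for (const auto &[bit1, bit2] : layerTracker) {
      if (q1 == bit1 || q1 == bit2 || q2 == bit1 || q2 == bit2) {
        canCombine = false;
        break;
      }
    }
    if (canCombine) {
      layerTracker.emplace(q1, q2);
    } else {
      // Qubit collision: close the current layer and start a fresh one.
      // As in the commit, the colliding gate is not added to the new layer.
      ++layerCounter;
      layerTracker.clear();
    }
  }
  return layerCounter;
}

int main() {
  // CX(0,1) and CX(2,3) act on disjoint qubits and share one layer;
  // CX(1,2) collides with both and closes that layer.
  assert(countClosedLayers({{0, 1}, {2, 3}, {1, 2}}) == 1);
  return 0;
}

In gate-counting mode (m_countByGates == true) every two-qubit gate simply increments m_layerCounter, so no tracker is needed.
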

tnqvm/visitors/exatn-gen/ExatnGenVisitor.hpp

+4 −1

@@ -32,7 +32,7 @@
 // +-----------------------------+------------------------------------------------------------------------+-------------+--------------------------+
 // | Initialization Parameter | Parameter Description | type | default |
 // +=============================+========================================================================+=============+==========================+
-// | reconstruct-layers | Perform reconstruction after this number of consecutive 2-q gates | int | -1 (no reconstruct) |
+// | reconstruct-gates | Perform reconstruction after this number of consecutive 2-q gates | int | -1 (no reconstruct) |
 // +-----------------------------+------------------------------------------------------------------------+-------------+--------------------------+
 // | reconstruct-tolerance | Reconstruction convergence tolerance | double | 1e-4 |
 // +-----------------------------+------------------------------------------------------------------------+-------------+--------------------------+
@@ -141,10 +141,13 @@ class ExatnGenVisitor : public TNQVMVisitor {
                              const exatn::ProcessGroup &in_processGroup) const;
 
 private:
+  void updateLayerCounter(const xacc::Instruction &in_gateInstruction);
+  std::set<std::pair<size_t, size_t>> m_layerTracker;
   std::shared_ptr<exatn::TensorNetwork> m_qubitNetwork;
   exatn::TensorExpansion m_tensorExpansion;
   std::shared_ptr<exatn::TensorExpansion> m_previousOptExpansion;
   int m_layersReconstruct;
+  bool m_countByGates;
   double m_reconstructTol;
   int m_layerCounter;
   int m_maxBondDim;
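
One small observation on the new m_layerTracker member declared above: std::pair already provides lexicographic operator<, so std::set<std::pair<size_t, size_t>> needs no custom comparator, and inserting a qubit pair that is already tracked is a harmless no-op. A tiny standalone illustration (not part of the commit):

#include <cassert>
#include <cstddef>
#include <set>
#include <utility>

int main() {
  std::set<std::pair<std::size_t, std::size_t>> layerTracker;
  layerTracker.emplace(0, 1);
  layerTracker.emplace(2, 3);
  layerTracker.emplace(0, 1); // duplicate pair: the set keeps a single copy
  assert(layerTracker.size() == 2);
  return 0;
}
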

tnqvm/visitors/exatn-gen/tests/ExaTnGenTester.cpp

+4 −4

@@ -120,7 +120,7 @@ TEST(ExaTnGenTester, checkVqeH2) {
 
 TEST(ExaTnGenTester, checkVqeH3) {
   auto accelerator = xacc::getAccelerator(
-      "tnqvm", {{"tnqvm-visitor", "exatn-gen"}, {"reconstruct-layers", -1}});
+      "tnqvm", {{"tnqvm-visitor", "exatn-gen"}, {"reconstruct-gates", -1}});
   // Create the N=3 deuteron Hamiltonian
   auto H_N_3 = xacc::quantum::getObservable(
       "pauli",
@@ -172,7 +172,7 @@ TEST(ExaTnGenTester, checkBitstringAmpl) {
   auto program = ir->getComposite("test1");
   auto accelerator =
       xacc::getAccelerator("tnqvm", {{"tnqvm-visitor", "exatn-gen:float"},
-                                     {"reconstruct-layers", 2},
+                                     {"reconstruct-gates", 2},
                                      {"reconstruct-tolerance", 0.01},
                                      {"bitstring", bitstring}});
   auto qreg = xacc::qalloc(8);
@@ -196,7 +196,7 @@ TEST(ExaTnGenTester, checkWavefunctionSlice) {
   auto program = ir->getComposite("test1");
   auto accelerator =
       xacc::getAccelerator("tnqvm", {{"tnqvm-visitor", "exatn-gen:float"},
-                                     {"reconstruct-layers", 2},
+                                     {"reconstruct-gates", 2},
                                      {"reconstruct-tolerance", 0.01},
                                      {"bitstring", bitstring}});
   auto qreg = xacc::qalloc(8);
@@ -220,7 +220,7 @@ TEST(ExaTnGenTester, checkVqeH3Approx) {
   // Use very high tolerance to save test time
   auto accelerator =
       xacc::getAccelerator("tnqvm", {{"tnqvm-visitor", "exatn-gen"},
-                                     {"reconstruct-layers", 4},
+                                     {"reconstruct-gates", 4},
                                      {"reconstruct-tolerance", 0.01}});
   xacc::set_verbose(true);
   xacc::qasm(R"(
