#define ll long long
ll prims(int n)
{
ll ans;
vector<bool> used (n);
#define INF 1000000000000LL
vector<ll> min_e (n, INF), sel_e (n, -1);
min_e[0]=-1*INF;
ll dis=1;
for(int i=0;i<n;i++)
{
int v=-1;
for(int j=0;j<n;j++)
{
if (!used[j] && (v == -1 || min_e[j] < min_e[v]))
v = j;
}
used[v] = true;
if(sel_e[v]!=-1)
cout << v << " " << sel_e[v] << endl;
for (int to=0; to<n; ++to)
if (g[v][to] < min_e[to]) {
min_e[to] = g[v][to];
sel_e[to] = v;
}
}
for(int i=0;i<n;i++) cout<<i<<" "<<sel_e[i]<<" "<<g[i][sel_e[i]]<<endl;
return dis;
}
I am trying to apply Prim's algorithm for a dense undirected graph for negative edge weights but I am unable to understand why it is producing wrong outputs for nearly all cases. I am using an adjacency matrix g[N][N] for storing the edges.
Actually the output for my current code is a minimum spanning tree with cycles. Why is the cycle checking mechanism not working?