error : terminate called after throwing an instance of 'std::bad_alloc' what(): std::bad_alloc;

why my code gives this bad_alloc error ,even though have allocated enough memory?
i’m not able to figure it out.
what mistake am i doing in the following code.

void dfs(ll root , vector<vector<ll>> graph , vector<ll>a , ll wt , ll &ans)
    for(auto it : graph[root])
        ans = max(ans , (wt - a[it]) );
        wt = max( wt , a[it]);
        dfs(it , graph , a , wt , ans);

int main() {
    ll n;
    vector<ll>a(n+1 , 0);
    for(ll i=0 ;i<n;i++)
            ll wt;cin>>wt;a[i]=wt;
    ll root = -1;
    for(ll i=0 ;i<n;i++)
        ll p;cin>>p;
        if(p == -1)root = i;
    ll ans = INT_MIN;
    ll wt = a[root];
    dfs(root , v , a , wt , ans);
    return 0;

>Solution :

dfs takes a copy of the vectors graph and a every time it is called recursively, and this probably exhausts your memory.

Pass references instead:

  void dfs(ll root , vector<vector<ll>> const &graph , vector<ll> const &a , ll wt , ll &ans)

As a side note: is there a reason you return the result in a reference parameter instead of a return value? To my mind it would be more natural to use

  ll dfs(ll root , vector<vector<ll>> const &graph , vector<ll> const &a , ll wt)

and return ans from dfs.

Leave a Reply