思路:dp[i] 表示把前 i 个任务全部划分成批次后的最小总费用。dp[i] 一定由某个更早的划分点 j 转移而来:前一部分 1..j 任意划分,费用即 dp[j];剩下 j+1..i 作为新的一批。开一批要先加上机器启动时间 s,这段启动时间会推迟其后所有剩余任务的完成时刻,因此把它对后面全部任务的影响直接计入费用,即 s*(sumc[n]-sumc[j]);这一批任务本身的费用为 sumt[i]*(sumc[i]-sumc[j])。由此得到状态转移方程(代码中启动时间用 m 表示):dp[i] = min(dp[i], dp[j] + sumt[i]*(sumc[i]-sumc[j]) + m*(sumc[n]-sumc[j]))
#pragma GCC optimize(2)
#include<cstdio>
#include<cstring>
#include<algorithm>
#include<set>
#include<iostream>
#include<vector>
#include<queue>
#include<map>
#include<stack>
#include<iomanip>
#include<cstring>
#include<time.h>
using namespace std;
typedef long long ll;
#define SIS std::ios::sync_with_stdio(false)
#define space putchar(' ')
#define enter putchar('\n')
#define lson root<<1
#define rson root<<1|1
typedef pair<int,int> PII;
const int mod = 1e9 + 7;
const int N = 2e6 + 10;
const int M = 1e5 + 10;
const int inf = 0x3f3f3f3f;
const int maxx = 2e5 + 7;
const double eps = 1e-6;

// Greatest common divisor of a and b (Euclidean algorithm, iterative form).
int gcd(int a, int b) {
    while (b != 0) {
        int r = a % b;
        a = b;
        b = r;
    }
    return a;
}
// Least common multiple of a and b.
// Fix: the original forwarded its long long arguments through the int-only
// gcd() helper, silently truncating values outside int range. The gcd is now
// computed locally in long long. Dividing b by the gcd before multiplying
// keeps the intermediate product small; lcm(0, 0) is defined as 0 instead of
// dividing by zero. Intended for non-negative inputs (as in the original).
long long lcm(long long a, long long b) {
    long long g = a, r = b;
    while (r) {
        long long t = g % r;
        g = r;
        r = t;
    }
    if (g == 0)
        return 0;  // both inputs were 0 — avoid division by zero
    return a * (b / g);
}

// Fast integer read: skips leading non-digits, honors a '-' seen before the
// first digit, then accumulates consecutive digits into x.
// Fix: the original stored getchar() into a char, so EOF (-1) could never be
// recognized and the scanner spun forever at end of input; the character is
// now kept as int and EOF terminates both loops (x is left 0 if no digit
// was read).
template <class T>
void read(T &x) {
    int c;              // int, not char, so EOF is distinguishable
    bool neg = false;
    while ((c = getchar()) != EOF && (c < '0' || c > '9'))
        if (c == '-')
            neg = true;
    x = 0;
    while (c != EOF && c >= '0' && c <= '9') {
        x = x * 10 + (c - '0');
        c = getchar();
    }
    if (neg)
        x = -x;
}

// Fast integer write: prints x in decimal via recursion, most significant
// digit first; a leading '-' is emitted for negative values.
template <class T>
void write(T x) {
    if (x < 0) {
        x = -x;
        putchar('-');
    }
    if (x >= 10)
        write(x / 10);
    putchar('0' + x % 10);
}
// Fast modular exponentiation: returns a^b mod p (b >= 0, p > 0).
long long qsm(int a, int b, int p) {
    long long res = 1 % p;          // 1 % p handles p == 1 correctly
    while (b) {
        if (b & 1)
            res = res * a % p;
        a = 1ll * a * a % p;        // square in 64-bit to avoid overflow
        b >>= 1;
    }
    return res;
}

int n, m, k;

// Fix: dp and the prefix sums are widened from int to long long.
// sumt[i] * (sumc[i] - sumc[j]) multiplies two prefix sums and can easily
// overflow 32-bit int, as can the final answer dp[n].
long long dp[5005];    // dp[i] = min total cost after batching tasks 1..i
long long sumt[5005];  // prefix sums of task processing times
long long sumc[5005];  // prefix sums of task cost coefficients

// Batch scheduling DP with "future cost" accounting: opening a new batch at
// task j+1 adds the machine startup time m, which delays every task still
// unfinished — so its cost m * (sumc[n] - sumc[j]) is charged immediately,
// and the batch j+1..i itself costs sumt[i] * (sumc[i] - sumc[j]).
int main() {
    scanf("%d%d", &n, &m);
    for (int i = 1; i <= n; i++) {
        long long t, c;
        scanf("%lld%lld", &t, &c);  // %lld matches the widened accumulators
        sumt[i] = sumt[i - 1] + t;
        sumc[i] = sumc[i - 1] + c;
    }
    // Byte-filling with 0x3f yields 0x3f3f3f3f3f3f3f3f per long long:
    // a huge, addition-safe "infinity".
    memset(dp, 0x3f, sizeof dp);
    dp[0] = 0;
    for (int i = 1; i <= n; i++)
        for (int j = 0; j < i; j++)
            dp[i] = std::min(dp[i],
                             dp[j] + sumt[i] * (sumc[i] - sumc[j])
                                   + m * (sumc[n] - sumc[j]));
    printf("%lld", dp[n]);
    return 0;
}