算法1时间复杂度:O(n³)
/* Algorithm 1: brute force over every (start, end) pair, re-summing each
 * candidate subsequence from scratch.  Time complexity: O(n^3).
 * Returns the maximum subsequence sum; an all-negative input yields 0
 * (the empty subsequence is allowed). */
int MaxSubseqSum1(int A[], int N) {
    int best = 0;
    for (int start = 0; start < N; start++) {
        for (int end = start; end < N; end++) {
            /* Sum A[start..end] element by element. */
            int running = 0;
            for (int idx = start; idx <= end; idx++) {
                running += A[idx];
                if (running > best) {
                    best = running;
                }
            }
        }
    }
    return best;
}
算法2时间复杂度:O(n²)
/* Algorithm 2: for each start index, extend the subsequence one element at
 * a time while maintaining a running sum.  Time complexity: O(n^2).
 * Returns the maximum subsequence sum; 0 when every element is negative
 * (the empty subsequence is allowed). */
int MaxSubseqSum2(int A[], int N) {
    int best = 0;
    for (int start = 0; start < N; start++) {
        int running = 0; /* sum of A[start..end] */
        for (int end = start; end < N; end++) {
            running += A[end];
            if (running > best) {
                best = running;
            }
        }
    }
    return best;
}
算法三时间复杂度:O(nlog(n))
T(n) = 2T(n/2) + cn
     = 2(2T(n/4) + cn/2) + cn = 4T(n/4) + 2cn
     = … = 2^k · T(n/2^k) + k·cn
令 n = 2^k(即 k = log₂n),得 T(n) = n·T(1) + cn·log₂n;
两项复杂度相加时取较大者,故该算法时间复杂度为 O(n·log n)。
/* Return the largest of the three arguments. */
int Max3(int A, int B, int C) {
    int m = A;
    if (B > m) m = B;
    if (C > m) m = C;
    return m;
}
// 分治法
/* Divide-and-conquer solver for the maximum subsequence sum on
 * A[left..right] (inclusive).  T(n) = 2T(n/2) + O(n)  =>  O(n log n).
 * The empty subsequence counts, so the result is never negative.
 * Fix vs. original: midpoint computed as left + (right - left) / 2, which
 * cannot overflow int, instead of (left + right) / 2. */
int DevideAndConquer(int A[], int left, int right) {
    int MaxLeftSum, MaxRightSum;             /* best sums entirely in each half */
    int MaxLeftBorderSum, MaxRightBorderSum; /* best sums touching the split point */
    int LeftBorderSum, RightBorderSum;
    int MaxSum;
    int i, center;

    /* Base case: one element; take it only if positive (empty subseq = 0). */
    if (left == right) {
        if (A[left] > 0) return A[left];
        else return 0;
    }

    /* Overflow-safe midpoint. */
    center = left + (right - left) / 2;

    /* Recursively solve each half. */
    MaxLeftSum = DevideAndConquer(A, left, center);
    MaxRightSum = DevideAndConquer(A, center + 1, right);

    /* Best sum ending exactly at 'center', scanning leftwards. */
    MaxLeftBorderSum = 0;
    LeftBorderSum = 0;
    for (i = center; i >= left; i--) {
        LeftBorderSum += A[i];
        if (LeftBorderSum > MaxLeftBorderSum) {
            MaxLeftBorderSum = LeftBorderSum;
        }
    }

    /* Best sum starting exactly at 'center' + 1, scanning rightwards. */
    MaxRightBorderSum = 0;
    RightBorderSum = 0;
    for (i = center + 1; i <= right; i++) {
        RightBorderSum += A[i];
        if (RightBorderSum > MaxRightBorderSum) {
            MaxRightBorderSum = RightBorderSum;
        }
    }

    /* Answer: best of left half, right half, or a span crossing the midpoint
     * (max-of-three inlined to keep this function self-contained). */
    MaxSum = MaxLeftSum;
    if (MaxRightSum > MaxSum) MaxSum = MaxRightSum;
    if (MaxLeftBorderSum + MaxRightBorderSum > MaxSum) {
        MaxSum = MaxLeftBorderSum + MaxRightBorderSum;
    }
    return MaxSum;
}
/* Algorithm 3 wrapper: same interface as algorithms 1, 2 and 4.
 * Fix vs. original: guards N <= 0 — the original passed right = N - 1 = -1
 * straight to DevideAndConquer, causing out-of-range recursion on empty input. */
int MaxSubseqSum3(int A[], int N) {
    if (N <= 0) {
        return 0; /* empty input: the empty subsequence has sum 0 */
    }
    return DevideAndConquer(A, 0, N - 1);
}
算法四时间复杂度:
O(n)
//在线处理
/* Algorithm 4: online processing (single left-to-right scan keeping a
 * running sum).  Time complexity: O(n).  Returns the maximum subsequence
 * sum; 0 for an all-negative input (empty subsequence allowed). */
int MaxSubseqSum4(int A[], int N) {
    int running = 0;
    int best = 0;
    for (int i = 0; i < N; i++) {
        running += A[i];
        if (running > best) {
            best = running;
        } else if (running < 0) {
            /* A negative prefix can only lower any later sum:
             * discard it and restart from the next element. */
            running = 0;
        }
    }
    return best;
}
// 最大子列和问题的四种算法分析及时间复杂度的统计
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#define S 1000      /* number of elements in the test array */
clock_t start,stop; /* clock() tick marks taken around each timed run */
double duration;    /* elapsed time of the most recent run, in seconds */
int A[S];           /* test data: S pseudo-random ints in [-50, 50] */
// 算法1:
// Algorithm 1: O(n^3) brute force — try every (i, j) pair and re-sum A[i..j]
// from scratch.  Returns the maximum subsequence sum; an all-negative input
// yields 0 (the empty subsequence is allowed).
int MaxSubseqSum1(int A[],int N){
int ThisSum,MaxSum = 0;
int i,j,k;
for (i=0;i<N;i++){
for(j=i;j<N;j++){
// ThisSum accumulates A[i..j]
ThisSum = 0;
for(k = i;k<=j;k++){
ThisSum += A[k];
if(ThisSum>MaxSum){
MaxSum = ThisSum;
}
}
}
}
return MaxSum;
}
// 算法二:
// Algorithm 2: O(n^2) — for each start index keep a running sum while the
// end index advances, avoiding the re-summation of algorithm 1.
// Returns the maximum subsequence sum (0 if all elements are negative).
int MaxSubseqSum2(int A[],int N){
int ThisSum,MaxSum = 0;
int i,j;
for (i=0;i<N;i++){
// subsequence starts at i
ThisSum = 0;
for(j = i;j<N;j++){
// subsequence ends at j
ThisSum += A[j];
if(ThisSum > MaxSum){
MaxSum = ThisSum;
}
}
}
return MaxSum;
}
/* Return the largest of the three arguments (nested conditional expressions). */
int Max3(int A,int B,int C){
return A>B?(A>C?A:C):(B>C?B:C);
}
// 算法三:分治法
/* Divide-and-conquer solver for the maximum subsequence sum on
 * A[left..right] (inclusive).  T(n) = 2T(n/2) + O(n)  =>  O(n log n).
 * The empty subsequence counts, so the result is never negative.
 * Fix vs. original: midpoint computed as left + (right - left) / 2, which
 * cannot overflow int, instead of (left + right) / 2. */
int DevideAndConquer(int A[], int left, int right) {
    int MaxLeftSum, MaxRightSum;             /* best sums entirely in each half */
    int MaxLeftBorderSum, MaxRightBorderSum; /* best sums touching the split point */
    int LeftBorderSum, RightBorderSum;
    int MaxSum;
    int i, center;

    /* Base case: one element; take it only if positive (empty subseq = 0). */
    if (left == right) {
        if (A[left] > 0) return A[left];
        else return 0;
    }

    /* Overflow-safe midpoint. */
    center = left + (right - left) / 2;

    /* Recursively solve each half. */
    MaxLeftSum = DevideAndConquer(A, left, center);
    MaxRightSum = DevideAndConquer(A, center + 1, right);

    /* Best sum ending exactly at 'center', scanning leftwards. */
    MaxLeftBorderSum = 0;
    LeftBorderSum = 0;
    for (i = center; i >= left; i--) {
        LeftBorderSum += A[i];
        if (LeftBorderSum > MaxLeftBorderSum) {
            MaxLeftBorderSum = LeftBorderSum;
        }
    }

    /* Best sum starting exactly at 'center' + 1, scanning rightwards. */
    MaxRightBorderSum = 0;
    RightBorderSum = 0;
    for (i = center + 1; i <= right; i++) {
        RightBorderSum += A[i];
        if (RightBorderSum > MaxRightBorderSum) {
            MaxRightBorderSum = RightBorderSum;
        }
    }

    /* Answer: best of left half, right half, or a span crossing the midpoint
     * (max-of-three inlined to keep this function self-contained). */
    MaxSum = MaxLeftSum;
    if (MaxRightSum > MaxSum) MaxSum = MaxRightSum;
    if (MaxLeftBorderSum + MaxRightBorderSum > MaxSum) {
        MaxSum = MaxLeftBorderSum + MaxRightBorderSum;
    }
    return MaxSum;
}
/* Algorithm 3 wrapper: same interface as algorithms 1, 2 and 4.
 * Fix vs. original: guards N <= 0 — the original passed right = N - 1 = -1
 * straight to DevideAndConquer, causing out-of-range recursion on empty input. */
int MaxSubseqSum3(int A[], int N) {
    if (N <= 0) {
        return 0; /* empty input: the empty subsequence has sum 0 */
    }
    return DevideAndConquer(A, 0, N - 1);
}
// 算法四
// Algorithm 4: O(n) online processing — single scan with a running sum that
// is reset whenever it drops below zero (a negative prefix can only lower
// any later sum).  Returns the maximum subsequence sum (0 if all negative).
int MaxSubseqSum4(int A[], int N){
int ThisSum,MaxSum;
int i ;
ThisSum = MaxSum =0;
for(i=0;i<N;i++){
ThisSum += A[i];
if(ThisSum>MaxSum)
MaxSum = ThisSum;
else if(ThisSum<0){
// discard the negative prefix and restart from the next element
ThisSum = 0;
}
}
return MaxSum;
}
// 获取随机数存在数组A[S]中
/* Fill the global array A[S] with S pseudo-random ints in [-50, 50] and
 * print each one.
 * Fix vs. original: the PRNG is seeded exactly once.  The original called
 * srand((unsigned)time(NULL)*i) inside the loop; time() has one-second
 * resolution, so the per-iteration seeds are strongly correlated and
 * repeated reseeding defeats rand()'s sequence. */
void get_rand_A(){
    int i;
    srand((unsigned int)time(NULL)); /* seed once, before generating */
    for (i = 0; i < S; i++) {
        A[i] = rand() % 101 - 50; /* uniform-ish over [-50, 50] */
        printf("%d\n", A[i]);
    }
}
/* Timing harness: fills A[] with S random values, then times each of the
 * four maximum-subsequence-sum algorithms with clock(), printing the raw
 * tick count and the elapsed seconds for each run.
 * Fix vs. original: removed the unused local variable 'i'.
 * NOTE(review): with S = 1000 the faster algorithms may report 0 ticks —
 * clock() resolution is coarse; presumably acceptable for this demo. */
int main(){
    get_rand_A();

    /* Algorithm 1: O(n^3) */
    start = clock();
    MaxSubseqSum1(A,S);
    stop = clock();
    duration = ((double)(stop - start))/CLOCKS_PER_SEC;
    printf("tick = %f\n",(double)(stop - start));
    printf("duration = %6.2e\n",duration);

    /* Algorithm 2: O(n^2) */
    start = clock();
    MaxSubseqSum2(A,S);
    stop = clock();
    duration = ((double)(stop - start))/CLOCKS_PER_SEC;
    printf("tick2 = %f\n",(double)(stop - start));
    printf("duration2 = %6.2e\n",duration);

    /* Algorithm 3: O(n log n) divide and conquer */
    start = clock();
    MaxSubseqSum3(A,S);
    stop = clock();
    duration = ((double)(stop-start))/CLOCKS_PER_SEC;
    printf("tick3 = %f\n",(double)(stop - start));
    printf("duration3 = %6.2e\n",duration);

    /* Algorithm 4: O(n) online processing */
    start = clock();
    MaxSubseqSum4(A,S);
    stop = clock();
    duration = ((double)(stop - start))/CLOCKS_PER_SEC;
    printf("tick4 = %f\n",(double)(stop - start));
    printf("duration4 = %6.2e\n",duration);

    return 0;
}
当数列元素个数 S 设为 1000 时,测试结果如下:tick、tick2、tick3、tick4 分别为四种算法消耗的时钟滴答数,duration、duration2、duration3、duration4 为换算成秒的对应耗时。