/* OJ problem: find the error (quadratic-equation formatter/solver). */
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#define X 0.000001   /* epsilon: any |value| < X is treated as zero */

int i = 1;           /* case counter, shared across loop iterations */

/*
 * Reads triples (a, b, c) of a*x^2 + b*x + c = 0 from stdin until a == 0,
 * echoes the equation (normalised so the x^2 coefficient is positive) and
 * prints its roots: two real, one repeated real, or a conjugate imaginary
 * pair, depending on the discriminant.
 *
 * BUG FIXED: the original code used `continue` in the b==0 && c==0 branch,
 * which skipped the `i++` at the bottom of the loop — every case after an
 * "only one real root : 0" case reused the same case number.
 */
int main(void)
{
    double a, b, c;            /* equation coefficients */
    double t, t1, x1, x2, x3;  /* discriminant and root components */

    while (1) {
        /* Stop on EOF/garbage too, not only on a == 0 (the original
         * looped forever on a stale `a` when scanf failed). */
        if (scanf("%lf", &a) != 1 || fabs(a) < X)
            break;
        if (scanf("%lf%lf", &b, &c) != 2)
            break;

        t  = b * b - 4 * a * c;   /* discriminant */
        t1 = sqrt(fabs(t));
        x1 = -t1 / (2 * a);       /* -sqrt(|t|)/(2a) */
        x2 =  t1 / (2 * a);       /* +sqrt(|t|)/(2a) */
        x3 = -b  / (2 * a);       /* shared real part of both roots */

        /* ---- echo the equation; coefficient 1/-1 omits the number ---- */
        if (a == 1 || a == -1)
            printf("Case %d :\nx^2", i);
        else
            printf("Case %d :\n%gx^2", i, fabs(a));

        if (a > X) {              /* a > 0: print b, c with their own signs */
            if (b > X)            printf(" + %gx", b);
            else if (fabs(b) < X) ; /* zero term: print nothing */
            else                  printf(" - %gx", -b);

            if (c > X)            printf(" + %g = 0\n", c);
            else if (fabs(c) < X) printf(" = 0\n");
            else                  printf(" - %g = 0\n", -c);
        } else {                  /* a < 0: whole equation shown negated */
            if (b > X)            printf(" - %gx", b);
            else if (fabs(b) < X) ;
            else                  printf(" + %gx", -b);

            if (c > X)            printf(" - %g = 0\n", c);
            else if (fabs(c) < X) printf(" = 0\n");
            else                  printf(" + %g = 0\n", -c);
        }

        /* ---- roots ---- */
        if (fabs(b) < X && fabs(c) < X) {
            /* a*x^2 = 0: double root at zero.  No `continue` here any
             * more, so i++ below always executes. */
            printf("only one real root : 0\n\n");
        } else if (t > X) {
            printf("two real roots : %g, %g\n\n", x3 + x2, x3 + x1);
        } else if (fabs(t) < X) {
            printf("only one real root : %g\n\n", x1 + x3);
        } else {
            /* t < 0: conjugate pair x3 +/- |x2| i; an imaginary unit of
             * exactly 1 is printed as just "i". */
            printf("two imaginary roots : ");

            /* first root (uses x2) */
            if (fabs(x3) < X) {             /* purely imaginary */
                if (x2 > X) {
                    if (x2 == 1) printf("i, ");
                    else         printf("%gi, ", x2);
                } else if (x2 < -X) {
                    if (x2 == -1) printf("-i, ");
                    else          printf("%gi, ", x2);
                }
            } else {
                if (x2 < -X) {
                    if (x2 == -1) printf("%g-i, ", x3);
                    else          printf("%g-%gi, ", x3, fabs(x2));
                } else if (x2 > X) {
                    if (x2 == 1) printf("%g+i, ", x3);
                    else         printf("%g+%gi, ", x3, x2);
                }
            }

            /* second root (uses x1, the conjugate) */
            if (fabs(x3) < X) {
                if (x1 > X) {
                    if (x1 == 1) printf("i\n\n");
                    else         printf("%gi\n\n", x1);
                } else if (x1 < -X) {
                    if (x1 == -1) printf("-i\n\n");
                    else          printf("%gi\n\n", x1);
                }
            } else {
                if (x1 < -X) {
                    if (x1 == -1) printf("%g-i\n\n", x3);
                    else          printf("%g-%gi\n\n", x3, fabs(x1));
                } else if (x1 > X) {
                    if (x1 == 1) printf("%g+i\n\n", x3);
                    else         printf("%g+%gi\n\n", x3, x1);
                }
            }
        }

        i++;   /* reached on EVERY case now, including the b==c==0 one */
    }
    return 0;
}