好吧,被大白书上的入门题给卡了。=_=||
已知LCM(A, B) = C,已知A和C,求最小的B
一开始我想当然地以为B = C / A,后来发现这时候的B不一定满足gcd(A, B) = 1
A要不断地除去gcd(A, B),直到满足gcd(A, B) = 1
B最后就应该乘上A除去的值
// Given LCM(A, B) = C with A and C known, find the smallest B.
//
// If C % A != 0 there is no solution. Otherwise start from B = C / A.
// Since lcm(A, C/A) = A*(C/A)/gcd(A, C/A) = C / gcd(A, C/A), this B is
// already the answer only when gcd(A, B) == 1. If not, repeatedly strip
// g = gcd(A, B) out of A and accumulate it into t, so the printed answer
// B * t carries the prime powers that A would otherwise swallow.
#include <cstdio>

typedef long long LL;

// Euclid's algorithm; gcd(a, 0) == a.
LL gcd(LL a, LL b)
{
    return b == 0 ? a : gcd(b, a % b);
}

int main()
{
    int T;
    scanf("%d", &T);   // T is int: %d, not %lld (mismatch is UB)
    LL a, c;
    while (T--)
    {
        scanf("%lld%lld", &a, &c);
        if (c % a == 0)
        {
            LL b = c / a;
            LL g = gcd(a, b);
            LL t = 1;  // product of the gcd factors stripped from a
            while (g != 1)
            {
                a /= g;
                t *= g;
                g = gcd(a, b);
            }
            printf("%lld\n", b * t);
        }
        else puts("NO SOLUTION");
    }
    return 0;
}
代码君