Essay: Fundamentals of the Algorithmic Language C++
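
The fragment below is taken from the middle of a demonstration program, so the declarations it relies on are not shown in this excerpt. Judging by the explicit casts in the second half of the listing, they most likely had the following form (the types are inferred from those casts; the initial values are assumptions chosen only so the listing can be compiled and run):

#include <iostream>
using namespace std;

int main()
{
// assumed declarations: types inferred from the casts later in
// the listing; the initial values are illustrative assumptions
short shortInt1 = 300, shortInt2 = 200;
unsigned short aByte;
int anInt;
long aLong;
unsigned char aChar;
float aReal;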

aByte = shortInt1 + shortInt2;
anInt = shortInt1 - shortInt2;
aLong = shortInt1 * shortInt2;
aChar = aLong + 5; // implicit conversion to the character type
aReal = shortInt1 * shortInt2 + 0.5;

cout << "shortInt1 = " << shortInt1 << endl

<< "shortInt2 = " << shortInt2 << endl

<< "aByte = " << aByte << endl

<< "anInt = " << anInt << endl

<< "aLong = " << aLong << endl

<< "aChar is " << aChar << endl

<< "aReal = " << aReal << endl << endl << endl;

// the same operations, now performed with explicit type casts

aByte = (unsigned short) (shortInt1 + shortInt2);
anInt = (int) (shortInt1 - shortInt2);
aLong = (long) (shortInt1 * shortInt2);
aChar = (unsigned char) (aLong + 5);
aReal = (float) (shortInt1 * shortInt2 + 0.5);

cout << "shortInt1 = " << shortInt1 << endl

<< "shortInt2 = " << shortInt2 << endl

<< "aByte = " << aByte << endl

<< "anInt = " << anInt << endl

<< "aLong = " << aLong << endl

<< "aChar is " << aChar << endl

<< "aReal = " << aReal << endl << endl << endl;

return 0;
}

/* Results:
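
The listing uses C-style casts, as introductory texts usually do. In modern C++ the same explicit conversions are more often written with static_cast, which is easier to find in a search and makes the programmer's intent explicit. Below is a minimal sketch of the same idea (the variable names repeat those of the listing, and the initial values are the same assumptions as above; this fragment is not part of the original program):

#include <iostream>
using namespace std;

int main()
{
short shortInt1 = 300, shortInt2 = 200; // assumed values, as above
// the same conversions, written with static_cast
unsigned char aChar = static_cast<unsigned char>(shortInt1 * shortInt2 + 5);
float aReal = static_cast<float>(shortInt1 * shortInt2 + 0.5);
cout << "aChar is " << aChar << endl
     << "aReal = " << aReal << endl;
return 0;
}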
