a =   1010...x...1101
OR    0000...1...0000   <------------- mask with only the target bit set
      ---------------
      1010...1...1101   (the target bit is now 1; all other bits unchanged)
|
Bit patterns are best written in binary (base 2), octal (base 8) or hexadecimal (base 16) notation!
/*
 * Setting bits with OR: a | mask turns ON every bit that is 1 in mask
 * and leaves all other bits of a unchanged.
 *
 * Note: 0b... binary literals are a GCC/Clang extension; they become
 * standard only in C23. Portable code would use hex (0x04, 0x10).
 */
int main( int argc, char* argv[] )
{
    char a = 1;         /* a = 00000001 (= 1) */

    a = a | 0b00000100; /* mask 00000100 - set 2nd bit: a = 00000101 (= 5)  */
    a = a | 0b00010000; /* mask 00010000 - set 4th bit: a = 00010101 (= 21) */

    printf( "a = %d\n", a );
    return 0;           /* be explicit, even though C99 main() defaults to 0 */
}
|
|
Note:
|
/*
 * Clearing bits with AND + complement: a & ~mask turns OFF every bit
 * that is 1 in mask and leaves all other bits of a unchanged.
 *
 * Note: 0b... binary literals are a GCC/Clang extension (standard in C23).
 */
int main( int argc, char* argv[] )
{
    char a = 31;           /* a = 00011111 (= 31) */

    a = a & (~0b00010000); /* clear 4th bit: a = 00001111 (= 15) */
    printf( "a = %d\n", a );

    /* Mask padded to 8 digits (value unchanged, = 4) so every bit
       pattern in this example is written with the same width. */
    a = a & (~0b00000100); /* clear 2nd bit: a = 00001011 (= 11) */
    printf( "a = %d\n", a );

    return 0;
}
|
How to run the program:
|
|
|
/*
 * Toggling (flipping) bits with XOR: a ^ mask inverts every bit that is
 * 1 in mask. Applying the same mask twice restores the original value.
 */
int main( int argc, char* argv[] )
{
    char a = 31;      /* a = 00011111 (= 31) */

    a ^= 0b00010000;  /* 4th bit flipped off:     a = 00001111 (= 15) */
    printf( "a = %d\n", a );

    a ^= 0b00010000;  /* 4th bit flipped back on: a = 00011111 (= 31) */
    printf( "a = %d\n", a );
}
|
How to run the program:
|
|
/*
 * Testing a single bit: a & mask is nonzero exactly when the bit
 * selected by mask is 1 in a. The "....." below is a placeholder —
 * fill in any concrete bit pattern before compiling.
 */
int main( int argc, char* argv[] )
{
char a = .....; /* Any bit pattern (placeholder — replace with a real value) */
if ( (a & 0b00001000) == 0 ) /* Test if the 3rd bit (mask 00001000) is set */
printf( "bit is 0\n" );
else
printf( "bit is 1\n" );
}
|
How to run the program:
|