Question

I'm writing a CRC-CCITT encoder (polynomial 0x1021, initial value 0xFFFF). It produces correct results for 8-bit, 16-bit, 24-bit, 32-bit, ... inputs (1 byte, 2 bytes, 3 bytes, ...), but incorrect answers for 12-bit, 20-bit, 28-bit inputs (lengths that are not whole bytes).

I find this algorithm

function crc(bit array bitString[1..len], int polynomial) {
    shiftRegister := initial value //  00000000 OR 11111111
    for i from 1 to len {
        if (shiftRegister top bit) xor bitString[i] = 1
            shiftRegister := (shiftRegister left shift 1) xor polynomial
        else
            shiftRegister := shiftRegister left shift 1
    }
    return shiftRegister
}

and my source code is like that

    // crc_ccitt.h
    #define _CRT_SECURE_NO_WARNINGS
    #include <stdio.h>
    #include <string.h>

    /* CRC-CCITT polynomial 0x1021 as a 16-character bit string ('0'/'1'). */
    static const char CRC_CCITT[] = "0001000000100001";
    /* 16-bit shift register, initial value 0xFFFF, stored as a bit string.
       NOTE(review): file-scope mutable state — getCRC accumulates into this. */
    static char Shift_Register[] = "1111111111111111";

    char* getCRC(char *, int);      /* bitwise CRC over a '0'/'1' string */
    void leftShift(char *);         /* shift register one bit left, fill '0' */
    void xorCalc();                 /* XOR polynomial into the register */

    /*
     * Compute the CRC-CCITT remainder of a '0'/'1' bit string.
     *
     * dataCode:       string of '0'/'1' characters (MSB first)
     * dataCodeLength: number of bits to process
     *
     * Returns a pointer to the file-scope Shift_Register holding the
     * 16-bit result as a bit string. Prints the register after each bit
     * (debug trace, grouped in nibbles).
     */
    char* getCRC(char* dataCode, int dataCodeLength) {
      int i, j;

      /* Reset the register to the initial value 0xFFFF so the function
       * is correct when called more than once (it previously kept the
       * stale state of the previous call). */
      memset(Shift_Register, '1', 16);

      for (i = 0; i < dataCodeLength; i++) {
        /* Feedback when (top bit XOR input bit) == 1, i.e. the two
         * '0'/'1' characters differ. Sample the top bit BEFORE shifting. */
        int feedback = (Shift_Register[0] == '1' && dataCode[i] == '0')
                    || (Shift_Register[0] == '0' && dataCode[i] == '1');

        leftShift(Shift_Register);
        if (feedback) {
          xorCalc();
        }

        /* Debug trace: four space-separated nibbles, then a newline
         * (same output as the original explicit printf calls). */
        for (j = 0; j < 16; j++) {
          printf("%c", Shift_Register[j]);
          if (j % 4 == 3) {
            printf(" ");
          }
        }
        printf("\n");
      }

      return Shift_Register;
    }

    /*
     * Shift a NUL-terminated bit string one position left, filling the
     * vacated rightmost character with '0'. String length is unchanged.
     *
     * Fixes: strlen was evaluated twice per call, and an empty string
     * made strlen(s) - 1 underflow to SIZE_MAX (undefined behavior in
     * the subsequent memmove/memset).
     */
    void leftShift(char *Shift_Register) {
      size_t len = strlen(Shift_Register);

      if (len == 0) {
        return;               /* nothing to shift */
      }
      memmove(Shift_Register, Shift_Register + 1, len - 1);
      Shift_Register[len - 1] = '0';
    }


    /*
     * XOR the polynomial bit pattern (CRC_CCITT) into Shift_Register,
     * character by character: positions that differ become '1', equal
     * positions become '0'. Both strings hold only '0'/'1' characters.
     */
    void xorCalc() {
      int pos;

      for (pos = 0; pos < 16; ++pos) {
        Shift_Register[pos] =
            (Shift_Register[pos] != CRC_CCITT[pos]) ? '1' : '0';
      }
    }

    // crc_ccitt.c
    #include "crc_ccitt.h"

    /*
     * Read a '0'/'1' bit string from stdin and print the CRC trace via
     * getCRC. Uses fgets instead of gets: gets() performs no bounds
     * check and was removed from the language in C11.
     */
    int main() {
      char dataCode[256];
      char *CRC;

      printf("Input Data Code: ");
      if (fgets(dataCode, sizeof dataCode, stdin) == NULL) {
        return 1;             /* EOF or read error — nothing to encode */
      }
      /* Strip the trailing newline fgets retains (gets did not). */
      dataCode[strcspn(dataCode, "\r\n")] = '\0';

      CRC = getCRC(dataCode, (int)strlen(dataCode));
      printf("%s\n", CRC);
      return 0;
    }

I confirm CRC Encode value with this page http://www.lammertbies.nl/comm/info/crc-calculation.html

For example

Input 0001111111110010(1FF2)

page : C11F

my program : 1100 0001 0001 1111 (C11F)

But

Input 000111111111 (1FF)

page : FFAD

my program : 1101 0011 0000 1111 (D30F)

What is my program missing?

Or is the algorithm itself wrong?

Was it helpful?

Solution

Program OK
Algorithm OK
User input is likely the error.

Your CRC "0001000000100001" is for a 16-bit operation.
Your failed input is "000111111111", 12 bits. Instead try using "0000000111111111" or "0001111111110000" (pad on the left or right)

Note: I would have tried it myself but you did not provide crc_ccitt.h

Licensed under: CC-BY-SA with attribution
Not affiliated with StackOverflow
scroll top