# Multiplication of Matrix in C

## C program to Multiply two matrices in C

• In this problem, we are required to multiply two matrices. Matrix multiplication is defined only when the number of columns of the first matrix equals the number of rows of the second matrix.
• First, the order (rows and columns) of each matrix is taken as input, and then the elements of each matrix are read in accordingly.
• Each element of the product is computed as the dot product of the corresponding row of the first matrix and column of the second matrix.
`#include<stdio.h>#include<stdlib.h>int main (){    int a[10][10], b[10][10], mul[10][10], r, c, i, j, k;    printf ("enter the number of row=");    scanf ("%d", &r);    printf ("enter the number of column=");    scanf ("%d", &c);    printf ("enter the first matrix element=\n");      for (i = 0; i < r; i++)    {        for (j = 0; j < c; j++)	    {	        scanf ("%d", &a[i][j]);	    }    }        printf ("enter the second matrix element=\n");    for(i = 0; i < r; i++)    {        for(j = 0; j < c; j++)	    {	        scanf ("%d", &b[i][j]);	    }    }    printf ("multiply of the matrix=\n");    for (i = 0; i < r; i++)    {        for (j = 0; j < c; j++)	    {	        mul[i][j] = 0;	        for (k = 0; k < c; k++)	        {	            mul[i][j] += a[i][k] * b[k][j];	        }	    }    }      for (i = 0; i < r; i++)    {        for (j = 0; j < c; j++)	    {	        printf ("%d\t", mul[i][j]);	    }        printf ("\n");    }    return 0;}`