author     priyanka    2015-06-24 15:03:17 +0530
committer  priyanka    2015-06-24 15:03:17 +0530
commit     b1f5c3f8d6671b4331cef1dcebdf63b7a43a3a2b (patch)
tree       ab291cffc65280e58ac82470ba63fbcca7805165 /851/CH2
initial commit / add all books
Diffstat (limited to '851/CH2')
-rwxr-xr-x  851/CH2/EX2.1/Example2_1.sce  18
-rwxr-xr-x  851/CH2/EX2.2/Example2_2.sce  27
-rwxr-xr-x  851/CH2/EX2.3/Example2_3.sce  24
-rwxr-xr-x  851/CH2/EX2.4/Example2_4.sce  22
-rwxr-xr-x  851/CH2/EX2.5/Example2_5.sce  11
-rwxr-xr-x  851/CH2/EX2.6/Example2_6.sce  20
-rwxr-xr-x  851/CH2/EX2.7/Example2_7.sce  26
7 files changed, 148 insertions(+), 0 deletions(-)
diff --git a/851/CH2/EX2.1/Example2_1.sce b/851/CH2/EX2.1/Example2_1.sce
new file mode 100755
index 000000000..0771e8c39
--- /dev/null
+++ b/851/CH2/EX2.1/Example2_1.sce
@@ -0,0 +1,18 @@
+//clear//
+//Caption: Entropy of a Binary Memoryless Source
+//Example 2.1: Entropy of Binary Memoryless Source
+//page 18
+clear;
+close;
+clc;
+Po = 0:0.01:1;
+H_Po = zeros(1,length(Po));
+for i = 2:length(Po)-1 //skip Po = 0 and Po = 1, where log2(0) is undefined; H_Po is already 0 there
+ H_Po(i) = -Po(i)*log2(Po(i))-(1-Po(i))*log2(1-Po(i));
+end
+//plot
+plot2d(Po,H_Po)
+xlabel('Symbol Probability, Po')
+ylabel('H(Po)')
+title('Entropy function H(Po)')
+plot2d3('gnn',0.5,1) //vertical stem marking the peak H(0.5) = 1 bit
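For reference, the script above traces the binary entropy function H(Po) = -Po*log2(Po) - (1-Po)*log2(1-Po), which is 0 at Po = 0 and Po = 1 and peaks at 1 bit at Po = 0.5. A minimal Scilab spot check (illustrative only, not part of the commit):

Po = [0.1 0.25 0.5 0.75 0.9];
H  = -Po.*log2(Po) - (1-Po).*log2(1-Po);
disp([Po' H'])  //H is symmetric about Po = 0.5 and reaches its maximum of 1 bit there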
diff --git a/851/CH2/EX2.2/Example2_2.sce b/851/CH2/EX2.2/Example2_2.sce
new file mode 100755
index 000000000..48edfe12b
--- /dev/null
+++ b/851/CH2/EX2.2/Example2_2.sce
@@ -0,0 +1,27 @@
+//clear//
+//Caption: Second-order Extension of a Discrete Memoryless Source
+//Example 2.2: Entropy of a Discrete Memoryless Source
+//page 19
+clear;
+clc;
+P0 = 1/4; //probability of source alphabet S0
+P1 = 1/4; //probability of source alphabet S1
+P2 = 1/2; //probability of source alphabet S2
+H_Ruo = P0*log2(1/P0)+P1*log2(1/P1)+P2*log2(1/P2);
+disp('Entropy of Discrete Memoryless Source')
+disp('bits',H_Ruo)
+//Second order Extension of discrete Memoryless source
+P_sigma = [P0*P0,P0*P1,P0*P2,P1*P0,P1*P1,P1*P2,P2*P0,P2*P1,P2*P2];
+disp('Table 2.1 Alphabet Particulars of Second-order Extension of a Discrete Memoryless Source')
+disp('_________________________________________________________________________________')
+disp('Sequence of Symbols of ruo2:')
+disp(' S0*S0 S0*S1 S0*S2 S1*S0 S1*S1 S1*S2 S2*S0 S2*S1 S2*S2')
+disp(P_sigma,'Probability p(sigma_i), i = 0,1,...,8')
+disp('_________________________________________________________________________________')
+disp(' ')
+H_Ruo_Square =0;
+for i = 1:length(P_sigma)
+ H_Ruo_Square = H_Ruo_Square+P_sigma(i)*log2(1/P_sigma(i));
+end
+disp('bits', H_Ruo_Square,'H(Ruo_Square)=')
+disp('H(Ruo_Square) = 2*H(Ruo)')
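The values printed by this script can be checked by hand: H(Ruo) = (1/4)*log2(4) + (1/4)*log2(4) + (1/2)*log2(2) = 0.5 + 0.5 + 0.5 = 1.5 bits, and because the nine probabilities in P_sigma are products of independent symbol pairs, the extension entropy works out to H(Ruo^2) = 2*H(Ruo) = 3 bits, which is what the loop should report.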
diff --git a/851/CH2/EX2.3/Example2_3.sce b/851/CH2/EX2.3/Example2_3.sce
new file mode 100755
index 000000000..7a55a63d9
--- /dev/null
+++ b/851/CH2/EX2.3/Example2_3.sce
@@ -0,0 +1,24 @@
+//clear//
+//Caption: Entropy, Average Length, and Variance of Huffman Encoding
+//Example 2.3: Huffman Encoding: Calculation of
+// (a) Average code-word length 'L'
+// (b) Entropy 'H'
+clear;
+clc;
+P0 = 0.4; //probability of codeword '00'
+L0 = 2; //length of codeword S0
+P1 = 0.2; //probability of codeword '10'
+L1 = 2; //length of codeword S1
+P2 = 0.2; //probability of codeword '11'
+L2 = 2; //length of codeword S2
+P3 = 0.1; //probability of codeword '010'
+L3 = 3; //length of codeword S3
+P4 = 0.1; //probability of codeword '011'
+L4 = 3; //length of codeword S4
+L = P0*L0+P1*L1+P2*L2+P3*L3+P4*L4;
+H_Ruo = P0*log2(1/P0)+P1*log2(1/P1)+P2*log2(1/P2)+P3*log2(1/P3)+P4*log2(1/P4);
+disp('bits',L,'Average code-word Length L')
+disp('bits',H_Ruo,'Entropy of the source H(Ruo)')
+disp('percent',((L-H_Ruo)/H_Ruo)*100,'Average code-word length L exceeds the entropy H(Ruo) by only')
+sigma_1 = P0*(L0-L)^2+P1*(L1-L)^2+P2*(L2-L)^2+P3*(L3-L)^2+P4*(L4-L)^2;
+disp(sigma_1,'Variance of Huffman code')
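Hand check of the expected output: L = 0.4*2 + 0.2*2 + 0.2*2 + 0.1*3 + 0.1*3 = 2.2 bits, H(Ruo) = 0.4*log2(2.5) + 2*0.2*log2(5) + 2*0.1*log2(10) ≈ 2.12193 bits, so L exceeds H(Ruo) by roughly 3.7 percent, and the variance comes to 0.4*(2-2.2)^2 + 0.2*(2-2.2)^2 + 0.2*(2-2.2)^2 + 0.1*(3-2.2)^2 + 0.1*(3-2.2)^2 = 0.16.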
diff --git a/851/CH2/EX2.4/Example2_4.sce b/851/CH2/EX2.4/Example2_4.sce
new file mode 100755
index 000000000..b28d1cbfa
--- /dev/null
+++ b/851/CH2/EX2.4/Example2_4.sce
@@ -0,0 +1,22 @@
+//clear//
+//Caption: Entropy, Average Length, and Variance of Huffman Encoding
+//Example 2.4: Illustrating the nonuniqueness of Huffman Encoding
+// Calculation of (a) Average code-word length 'L' (b) Entropy 'H'
+clear;
+clc;
+P0 = 0.4; //probability of codeword '1'
+L0 = 1; //length of codeword S0
+P1 = 0.2; //probability of codeword '01'
+L1 = 2; //length of codeword S1
+P2 = 0.2; //probability of codeword '000'
+L2 = 3; //length of codeword S2
+P3 = 0.1; //probability of codeword '0010'
+L3 = 4; //length of codeword S3
+P4 = 0.1; //probability of codeword '0011'
+L4 = 4; //length of codeword S4
+L = P0*L0+P1*L1+P2*L2+P3*L3+P4*L4;
+H_Ruo = P0*log2(1/P0)+P1*log2(1/P1)+P2*log2(1/P2)+P3*log2(1/P3)+P4*log2(1/P4);
+disp('bits',L,'Average code-word Length L')
+disp('bits',H_Ruo,'Entropy of the source H(Ruo)')
+sigma_2 = P0*(L0-L)^2+P1*(L1-L)^2+P2*(L2-L)^2+P3*(L3-L)^2+P4*(L4-L)^2;
+disp(sigma_2,'Variance of Huffman code')
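Hand check: L = 0.4*1 + 0.2*2 + 0.2*3 + 0.1*4 + 0.1*4 = 2.2 bits, the same average length as the code in Example 2.3, while the variance grows to 0.4*(1-2.2)^2 + 0.2*(2-2.2)^2 + 0.2*(3-2.2)^2 + 0.1*(4-2.2)^2 + 0.1*(4-2.2)^2 = 1.36. That is the point of the non-uniqueness illustration: both Huffman codes are optimal in average length but differ sharply in length variance.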
diff --git a/851/CH2/EX2.5/Example2_5.sce b/851/CH2/EX2.5/Example2_5.sce
new file mode 100755
index 000000000..84e451759
--- /dev/null
+++ b/851/CH2/EX2.5/Example2_5.sce
@@ -0,0 +1,11 @@
+//clear//
+//Caption: Binary Symmetric Channel
+//Example 2.5: Binary Symmetric Channel
+clear;
+clc;
+close;
+p = 0.4; //probability of correct reception
+pe = 1-p; //probability of erroneous reception, i.e. the transition probability
+disp(p,'probability of receiving 0 when a 0 is sent = probability of receiving 1 when a 1 is sent =')
+disp('Transition probability')
+disp(pe,'probability of receiving 0 when a 1 is sent = probability of receiving 1 when a 0 is sent =')
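The two values printed above are the entries of the BSC transition matrix. A minimal sketch (illustrative only, not part of the commit) that assembles the matrix and confirms each row sums to 1:

p  = 0.4;          //probability of correct reception, as in the script above
pe = 1 - p;        //transition (error) probability
P  = [p pe; pe p]; //rows index the transmitted bit (0,1); columns the received bit (0,1)
disp(sum(P,'c'))   //each row sums to 1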
diff --git a/851/CH2/EX2.6/Example2_6.sce b/851/CH2/EX2.6/Example2_6.sce
new file mode 100755
index 000000000..70a69f628
--- /dev/null
+++ b/851/CH2/EX2.6/Example2_6.sce
@@ -0,0 +1,20 @@
+//clear//
+//Caption: Channel Capacity of a Binary Symmetric Channel
+//Example 2.6: Channel Capacity of a Binary Symmetric Channel
+clear;
+close;
+clc;
+p = 0:0.01:0.5;
+//channel capacity of a BSC: C = 1 - H(p), with H(p) the binary entropy function
+for i = 1:length(p)
+ if(i==1)
+  //at p = 0 the term 0*log2(0) is taken as 0, so C = 1 bit
+  C(i) = 1;
+ else
+  C(i) = 1+p(i)*log2(p(i))+(1-p(i))*log2(1-p(i));
+ end
+end
+plot2d(p,C,5)
+xlabel('Transition Probability, p')
+ylabel('Channel Capacity, C')
+title('Figure 2.10 Variation of channel capacity of a binary symmetric channel with transition probability p')
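The curve being plotted is C(p) = 1 - H(p), where H(p) is the binary entropy function. A spot check at p = 0.1 (illustrative only, not part of the commit):

p = 0.1;
H = -p*log2(p) - (1-p)*log2(1-p);
disp(1 - H) //about 0.531 bits per channel use, the value the plotted curve should pass through at p = 0.1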
diff --git a/851/CH2/EX2.7/Example2_7.sce b/851/CH2/EX2.7/Example2_7.sce
new file mode 100755
index 000000000..44fbd1d4b
--- /dev/null
+++ b/851/CH2/EX2.7/Example2_7.sce
@@ -0,0 +1,26 @@
+//clear//
+//Caption:Significance of the Channel Coding theorem
+//Example 2.7: Significance of the channel coding theorem
+//Average Probability of Error of a Repetition Code
+clear;
+clc;
+close;
+p =10^-2;
+pe_1 = p; //average probability of error for code rate r = 1
+pe_3 = 3*p^2*(1-p)+p^3; //probability of error for code rate r = 1/3
+pe_5 = 10*p^3*(1-p)^2+5*p^4*(1-p)+p^5; //probability of error for code rate r = 1/5
+pe_7 = ((7*6*5)/(1*2*3))*p^4*(1-p)^3+(42/2)*p^5*(1-p)^2+7*p^6*(1-p)+p^7; //probability of error for code rate r = 1/7
+r = [1,1/3,1/5,1/7];
+pe = [pe_1,pe_3,pe_5,pe_7];
+a=gca();
+a.data_bounds=[0,0;1,0.01];
+plot2d(r,pe,5)
+xlabel('Code rate, r')
+ylabel('Average Probability of Error, Pe')
+title('Figure 2.12 Illustrating significance of the channel coding theorem')
+legend('Repetition codes')
+xgrid(1)
+disp('Table 2.3 Average Probability of Error for the Repetition Code')
+disp('_______________________________________________________________')
+disp(r,'Code Rate, r = 1/n',pe,'Average Probability of Error, Pe')
+disp('_______________________________________________________________')
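The four hard-coded expressions above are instances of the majority-vote error probability of a rate 1/n repetition code, Pe = sum over k from (n+1)/2 to n of C(n,k)*p^k*(1-p)^(n-k). A general cross-check sketch (the helper name rep_error is hypothetical and not part of the commit):

function Pe = rep_error(p, n)
    //probability that a majority of the n repeated bits are flipped
    Pe = 0;
    for k = ceil(n/2):n
        Pe = Pe + factorial(n)/(factorial(k)*factorial(n-k))*p^k*(1-p)^(n-k);
    end
endfunction
disp([rep_error(0.01,1) rep_error(0.01,3) rep_error(0.01,5) rep_error(0.01,7)]) //should reproduce pe above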