Below are the solutions to the exercises on plotting data.

####################
#                  #
#    Exercise 1    #
#                  #
####################

# --- Tutorial setup ---------------------------------------------------------
# Load each required package, installing it first if it is missing.
# install.packages() requires a quoted package name (the original passed a
# bare symbol, which errors whenever the package is actually absent), and
# after installing we must still attach the package with library(), since
# require() already returned FALSE.
if (!require(ggplot2)) {
  install.packages("ggplot2", dependencies = TRUE)
  library(ggplot2)
}
if (!require(reshape2)) {
  install.packages("reshape2", dependencies = TRUE)
  library(reshape2)
}

# Import the data file.
# CHANGE this path to the directory where the data is stored,
# e.g. "C:/yourDir.../PAICOL.csv".
river_data <- read.csv("D:/TRABAJO/BLOG/post1/PAICOL.csv")

# Convert the DATE strings into Date objects.
# NOTE(review): using the first DATE value as the origin assumes the column
# holds numeric day offsets; if DATE is "YYYY-MM-DD" text, plain
# as.Date(river_data$DATE) (optionally with format=) is what you want —
# confirm against the actual file.
river_data$DATE <- as.Date(river_data$DATE, origin = river_data$DATE[1])

# Create a scatter plot of RAIN against river LEVEL.
ggplot(river_data, aes(x = LEVEL, y = RAIN)) +
  geom_point()

####################
#                  #
#    Exercise 2    #
#                  #
####################

# Overlay a 2-d histogram on top of the Exercise 1 scatter plot: the points
# show the raw observations, the filled bins show where they concentrate.
ggplot(river_data, aes(x = LEVEL, y = RAIN)) +
  geom_point() +
  geom_bin2d()

####################
#                  #
#    Exercise 3    #
#                  #
####################
# Build a 2-d histogram with 10 bins along each axis (not displayed;
# we only need the binned counts it computes).
p <- ggplot(river_data, aes(x = LEVEL, y = RAIN)) +
  geom_bin2d(bins = 10)

####################
#                  #
#    Exercise 4    #
#                  #
####################
# Grab the data of the first (and only) layer: one row per non-empty bin,
# with the bin count — an unnormalised estimate of the joint distribution.
pxy_m <- layer_data(p, 1)

####################
#                  #
#    Exercise 5    #
#                  #
####################
# Reshape the long-format bin counts into a matrix: rows indexed by the
# LEVEL bin centre (x), columns by the RAIN bin centre (y).
# fill = 0 is essential: layer_data() only reports non-empty bins, and
# acast()'s default fill of NA would make sum(pxy) — and everything
# downstream — NA.
pxy <- acast(pxy_m, x ~ y, value.var = "count", fill = 0)

####################
#                  #
#    Exercise 6    #
#                  #
####################
# Normalise so the joint probability distribution sums to 1.
pxy <- pxy / sum(pxy)

####################
#                  #
#    Exercise 7    #
#                  #
####################
# Marginal probabilities: sum the joint over the other variable.
py <- colSums(pxy)
px <- rowSums(pxy)

####################
#                  #
#    Exercise 8    #
#                  #
####################
# Marginal entropies (bits). Restrict the sum to positive probabilities:
# 0 * log2(0) evaluates to NaN in floating point although its entropy
# contribution is 0 by convention — the original summed over all entries
# and returned NaN whenever any marginal bin was empty (the joint entropy
# below already guarded against this; the marginals now do too).
Hx <- -sum(px[px > 0] * log2(px[px > 0]))
Hy <- -sum(py[py > 0] * log2(py[py > 0]))

####################
#                  #
#    Exercise 9    #
#                  #
####################
# Joint entropy, with the same positive-probability guard.
posOK <- pxy > 0
Hxy <- -sum(pxy[posOK] * log2(pxy[posOK]))

####################
#                  #
#    Exercise 10   #
#                  #
####################
# Mutual information: I(X;Y) = H(X) + H(Y) - H(X,Y).
MI <- Hx + Hy - Hxy

## Leave a Reply