# HG changeset patch
# User davidvanzessen
# Date 1394463977 14400
# Node ID 99834201251f34babe713d2afcf5c03a2cbbee20
# Parent  8803239b17707c73c3c30d04cd90780a9b507513
Uploaded
diff -r 8803239b1770 -r 99834201251f RScript.r
--- a/RScript.r	Tue Jan 28 07:07:22 2014 -0500
+++ b/RScript.r	Mon Mar 10 11:06:17 2014 -0400
@@ -243,14 +243,14 @@
 	}
 
     clonalityFrameSplit = split(clonalityFrame, f=clonalityFrame[,c("Sample", "Replicate")])
-    lapply(clonalityFrameSplit, FUN=ClonalitySampleReplicatePrint)
+    #lapply(clonalityFrameSplit, FUN=ClonalitySampleReplicatePrint)
 
     ClonalitySamplePrint <- function(dat){
 	    write.table(dat, paste("clonality_", unique(dat$Sample) , ".tsv", sep=""), sep="\t",quote=F,row.names=F,col.names=T)
 	}
 
     clonalityFrameSplit = split(clonalityFrame, f=clonalityFrame[,"Sample"])
-    lapply(clonalityFrameSplit, FUN=ClonalitySamplePrint)
+    #lapply(clonalityFrameSplit, FUN=ClonalitySamplePrint)
 
 	clonalFreq = data.frame(data.table(clonalityFrame)[, list(Type=.N), by=c("Sample", "VDJCDR3")])
 	clonalFreqCount = data.frame(data.table(clonalFreq)[, list(Count=.N), by=c("Sample", "Type")])
@@ -319,28 +319,28 @@
 if("Functionality" %in% colnames(test))
 {
 	newData = data.frame(data.table(PROD)[,list(unique=.N, 
-											VH.DEL=mean(X3V.REGION.trimmed.nt.nb),
-											P1=mean(P3V.nt.nb),
-											N1=mean(N1.REGION.nt.nb),
-											P2=mean(P5D.nt.nb),
-											DEL.DH=mean(X5D.REGION.trimmed.nt.nb),
-											DH.DEL=mean(X3D.REGION.trimmed.nt.nb),
-											P3=mean(P3D.nt.nb),
-											N2=mean(N2.REGION.nt.nb),
-											P4=mean(P5J.nt.nb),
-											DEL.JH=mean(X5J.REGION.trimmed.nt.nb),
-											Total.Del=(	mean(X3V.REGION.trimmed.nt.nb) + 
-														mean(X5D.REGION.trimmed.nt.nb) + 
-														mean(X3D.REGION.trimmed.nt.nb) +
-														mean(X5J.REGION.trimmed.nt.nb)),
-														
-											Total.N=(	mean(N1.REGION.nt.nb) +
-														mean(N2.REGION.nt.nb)),
-														
-											Total.P=(	mean(P3V.nt.nb) +
-														mean(P5D.nt.nb) +
-														mean(P3D.nt.nb) +
-														mean(P5J.nt.nb))),
-											by=c("Sample")])
+				VH.DEL=mean(X3V.REGION.trimmed.nt.nb),
+				P1=mean(P3V.nt.nb),
+				N1=mean(N1.REGION.nt.nb),
+				P2=mean(P5D.nt.nb),
+				DEL.DH=mean(X5D.REGION.trimmed.nt.nb),
+				DH.DEL=mean(X3D.REGION.trimmed.nt.nb),
+				P3=mean(P3D.nt.nb),
+				N2=mean(N2.REGION.nt.nb),
+				P4=mean(P5J.nt.nb),
+				DEL.JH=mean(X5J.REGION.trimmed.nt.nb),
+				Total.Del=(	mean(X3V.REGION.trimmed.nt.nb) + 
+							mean(X5D.REGION.trimmed.nt.nb) + 
+							mean(X3D.REGION.trimmed.nt.nb) +
+							mean(X5J.REGION.trimmed.nt.nb)),
+							
+				Total.N=(	mean(N1.REGION.nt.nb) +
+							mean(N2.REGION.nt.nb)),
+							
+				Total.P=(	mean(P3V.nt.nb) +
+							mean(P5D.nt.nb) +
+							mean(P3D.nt.nb) +
+							mean(P5J.nt.nb))),
+				by=c("Sample")])
 	write.table(newData, "junctionAnalysis.csv" , sep=",",quote=F,na="-",row.names=F,col.names=F)
-}
\ No newline at end of file
+}
diff -r 8803239b1770 -r 99834201251f r_wrapper.sh
--- a/r_wrapper.sh	Tue Jan 28 07:07:22 2014 -0500
+++ b/r_wrapper.sh	Mon Mar 10 11:06:17 2014 -0400
@@ -1,50 +1,54 @@
 #!/bin/bash
-echo $1
-echo $2
-echo $3
 
 inputFile=$1
-outputFile=$2
 outputDir=$3
+outputFile=$3/index.html #$2
 clonalType=$4
 dir="$(cd "$(dirname "$0")" && pwd)"
 mkdir $3
 Rscript --verbose $dir/RScript.r $inputFile $outputDir $outputDir $clonalType 2>&1
-echo "" >> $2
-echo "
Sample unique VH.DEL P1 N1 P2 DEL.DH DH.DEL P3 N2 P4 DEL.JH Total.Del Total.N Total.P $Sample $unique $VHDEL $P1 $N1 $P2 $DELDH $DHDEL $P3 $N2 $P4 $DELJH $TotalDel $TotalN $TotalP 
" >> $outputFile
-fi
+cp $dir/tabber.js $outputDir
+cp $dir/style.css $outputDir
+cp $dir/script.js $outputDir
+echo "Tip: Open it in a new tab (middle mouse button or right mouse button -> 'open in new tab' on the link above) " > $2
+echo "" >> $outputFile
+echo "
" >> $outputFile
+echo "
" >> $outputFile
+echo "
" >> $outputFile
 
 samples=`cat $outputDir/samples.txt`
 count=1
-echo "
$clonalType " >> $outputFile
 for sample in $samples; do
-	clonalityScore="$(cat $outputDir/ClonalityScore_$sample.csv)"
-	echo "
" >> $outputFile
-	echo "
$sample " >> $outputFile
+	echo "
" >> $outputFile
 	
-	echo "$hasReplicateColumn"
-	#if its a 'new' merged file with replicate info
-	if [[ "$hasReplicateColumn" == "Yes" ]] ; then
-		echo " Clonality Score: $clonalityScore 
" >> $outputFile
+	count=$((count+1))
+done
+echo "
" >> $outputFile
+	for sample in $samples; do
+		clonalityScore="$(cat $outputDir/ClonalityScore_$sample.csv)"
+		echo "
" >> $outputFile
+		echo "Clonality Score: $clonalityScore Replicate ID Number of Reads Reads Squared Replicate ID Number of Reads Reads Squared $replicate $reads $squared $replicate $reads $squared Sum $readsSum $squaredSum 
Coincidence Type Raw Coincidence Freq Coincidence Weight Coincidences, Weighted Coincidence Type Raw Coincidence Freq Coincidence Weight Coincidences, Weighted $type $count $weight $weightedCount 
V-D Heatmap: V-J Heatmap: D-J Heatmap: " >> $outputFile
-	mv "$outputDir/HeatmapVD_$sample.png" "$outputDir/VD_$sample.png"
-	echo " " >> $outputFile
-	count=$((count+1))
+		echo "
" >> $outputFile
+	done
+	echo "
Sample unique VH.DEL P1 N1 P2 DEL.DH DH.DEL P3 N2 P4 DEL.JH Total.Del Total.N Total.P $Sample $unique $VHDEL $P1 $N1 $P2 $DELDH $DHDEL $P3 $N2 $P4 $DELJH $TotalDel $TotalN $TotalP 
" >> $outputFile
+echo "
" >> $outputFile
+echo "
" >> $outputFile
+echo "
" >> $outputFile
+echo "
" >> $outputFile
+echo "
" >> $outputFile
+echo "Description Link The dataset used to generate the frequency graphs and the heatmaps (Unique based on clonaltype, $clonalType) Download The dataset used to calculate clonality score (Unique based on clonaltype, $clonalType) Download 
" >> $outputFile
+echo "
nav';for(arg in argsObj){this[arg]=argsObj[arg];}
+this.REclassMain=new RegExp('\\b'+this.classMain+'\\b','gi');this.REclassMainLive=new RegExp('\\b'+this.classMainLive+'\\b','gi');this.REclassTab=new RegExp('\\b'+this.classTab+'\\b','gi');this.REclassTabDefault=new RegExp('\\b'+this.classTabDefault+'\\b','gi');this.REclassTabHide=new RegExp('\\b'+this.classTabHide+'\\b','gi');this.tabs=new Array();if(this.div){this.init(this.div);this.div=null;}}
+tabberObj.prototype.init=function(e)
+{var
+childNodes,i,i2,t,defaultTab=0,DOM_ul,DOM_li,DOM_a,aId,headingElement;if(!document.getElementsByTagName){return false;}
+if(e.id){this.id=e.id;}
+this.tabs.length=0;childNodes=e.childNodes;for(i=0;i/gi," ");t.headingText=t.headingText.replace(/<[^>]+>/g,"");}
+break;}}}
+if(!t.headingText){t.headingText=i+1;}
+DOM_li=document.createElement("li");t.li=DOM_li;DOM_a=document.createElement("a");DOM_a.appendChild(document.createTextNode(t.headingText));DOM_a.href="javascript:void(null);";DOM_a.title=t.headingText;DOM_a.onclick=this.navClick;DOM_a.tabber=this;DOM_a.tabberIndex=i;if(this.addLinkId&&this.linkIdFormat){aId=this.linkIdFormat;aId=aId.replace(//gi,this.id);aId=aId.replace(//gi,i);aId=aId.replace(//gi,i+1);aId=aId.replace(//gi,t.headingText.replace(/[^a-zA-Z0-9\-]/gi,''));DOM_a.id=aId;}
+DOM_li.appendChild(DOM_a);DOM_ul.appendChild(DOM_li);}
+e.insertBefore(DOM_ul,e.firstChild);e.className=e.className.replace(this.REclassMain,this.classMainLive);this.tabShow(defaultTab);if(typeof this.onLoad=='function'){this.onLoad({tabber:this});}
+return this;};tabberObj.prototype.navClick=function(event)
+{var
+rVal,a,self,tabberIndex,onClickArgs;a=this;if(!a.tabber){return false;}
+self=a.tabber;tabberIndex=a.tabberIndex;a.blur();if(typeof self.onClick=='function'){onClickArgs={'tabber':self,'index':tabberIndex,'event':event};if(!event){onClickArgs.event=window.event;}
+rVal=self.onClick(onClickArgs);if(rVal===false){return false;}}
+self.tabShow(tabberIndex);return false;};tabberObj.prototype.tabHideAll=function()
+{var i;for(i=0;i